Column           Type              Lengths / values
query            stringlengths     9 to 3.4k
document         stringlengths     9 to 87.4k
metadata         dict
negatives        sequencelengths   4 to 101
negative_scores  sequencelengths   4 to 101
document_score   stringlengths     3 to 10
document_rank    stringclasses     102 values
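Each row pairs a natural-language query (a docstring-style instruction) with a positive code document, a list of mined negative documents, their similarity scores, and the score/rank assigned to the positive document. The sketch below shows one way to iterate such rows as (query, positive, negative) triplets; the file name and JSONL layout are assumptions for illustration, not something stated by the dump itself.

import json

def iter_triplets(path="retrieval_rows.jsonl"):
    """Yield (query, positive document, negative document) triplets from one dump file."""
    with open(path, encoding="utf-8") as fh:
        for raw in fh:
            row = json.loads(raw)  # keys match the columns listed above
            for negative in row["negatives"]:
                yield row["query"], row["document"], negative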
Initialize this game state and set the current player based on is_p1_turn.
def __init__(self, is_p1_turn: bool, side_length: int) -> None:
    super().__init__(is_p1_turn)
    self.side_length = side_length
    # ISSUE: what if node is more than 26 --> no need to handle side more than 5
    # construct a list of uppercase and lower case letters
    alph_lst_upper = list(string.ascii_uppercase)
    alph_lst_lower = list(string.ascii_lowercase)
    # alph_lst has a length of 52
    alph_lst = alph_lst_upper + alph_lst_lower
    # assign original value for each ley-line
    hori_result = []
    for i in range(side_length + 1):
        hori_result.append("@")
    left_result = []
    for i in range(side_length + 1):
        left_result.append("@")
    right_result = []
    for i in range(side_length + 1):
        right_result.append("@")
    self.hori_result = hori_result
    self.left_result = left_result
    self.right_result = right_result
    self.hori_lst = []
    self.left_lst = []
    self.right_lst = []
    # construct horizontal ley-lines
    n = 2
    start_index = 0
    end_index = 0
    while n <= side_length + 1:
        end_index = start_index + n
        self.hori_lst.append(alph_lst[start_index:end_index])
        start_index = end_index
        n += 1
    end_index = start_index + side_length
    self.hori_lst.append(alph_lst[start_index:end_index])
    # copy hori_lst
    hori_copy = []
    for item in self.hori_lst:
        hori_copy.append(item)
    # construct left ley-lines
    for i in range(side_length + 1):
        temp = []
        for lst in hori_copy[:len(hori_copy) - 1]:
            if len(lst) > i:
                temp.append(lst[i])
        self.left_lst.append(temp)
    for i in range(1, side_length + 1):
        self.left_lst[i].append(hori_copy[-1][i - 1])
    # construct right ley-lines
    for i in range(-1, side_length * (-1) - 2, -1):
        temp = []
        for lst in hori_copy[:len(hori_copy) - 1]:
            if len(lst) >= i * (-1):
                temp.append(lst[i])
        self.right_lst.append(temp)
    self.right_lst = self.right_lst[::-1]
    for i in range(side_length):
        self.right_lst[i].append(hori_copy[-1][i])
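For concreteness, tracing this constructor by hand for side_length=2 gives the usual seven-cell Stonehenge layout: horizontal ley-lines A-B, C-D-E, F-G, left diagonals A-C, B-D-F, E-G, and right diagonals C-F, A-D-G, B-E, each guarded by an '@' marker. The standalone sketch below mirrors the loops above outside the class so it can be run directly; it is an illustration of the layout, not part of the dataset row.

import string

side_length = 2
alph = list(string.ascii_uppercase) + list(string.ascii_lowercase)

# horizontal ley-lines: rows of length 2 .. side_length+1, then a final short row
hori, start, n = [], 0, 2
while n <= side_length + 1:
    hori.append(alph[start:start + n])
    start += n
    n += 1
hori.append(alph[start:start + side_length])

# left diagonals: column i of every full row, plus one cell taken from the last row
left = [[lst[i] for lst in hori[:-1] if len(lst) > i] for i in range(side_length + 1)]
for i in range(1, side_length + 1):
    left[i].append(hori[-1][i - 1])

# right diagonals: built from the row ends, reversed, then completed from the last row
right = [[lst[i] for lst in hori[:-1] if len(lst) >= -i] for i in range(-1, -side_length - 2, -1)]
right = right[::-1]
for i in range(side_length):
    right[i].append(hori[-1][i])

print(hori)   # [['A', 'B'], ['C', 'D', 'E'], ['F', 'G']]
print(left)   # [['A', 'C'], ['B', 'D', 'F'], ['E', 'G']]
print(right)  # [['C', 'F'], ['A', 'D', 'G'], ['B', 'E']]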
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
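This metadata block is identical for every row and declares that the intended training objective is a (query, document, negatives) triplet. One way to consume that objective, assuming the sentence-transformers TripletLoss API and a toy stand-in list in place of the real dump, is sketched below; the base model name, batch size, and epoch count are illustrative choices, not part of the dataset.

from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

rows = [  # toy stand-ins shaped like the rows in this dump
    {"query": "Return all possible moves that can be applied to this state.",
     "document": "def get_possible_moves(self) -> list: ...",
     "negatives": ["def possible_moves(self): ...", "def get_all_moves(self, board): ..."]},
]

examples = [InputExample(texts=[r["query"], r["document"], neg])
            for r in rows for neg in r["negatives"]]
loader = DataLoader(examples, shuffle=True, batch_size=16)

model = SentenceTransformer("all-MiniLM-L6-v2")
model.fit(train_objectives=[(loader, losses.TripletLoss(model))], epochs=1)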
[ "def set_first_player(self):\n if self.player2.won_previous:\n self.current_player = self.player2\n else: self.current_player = self.player1", "def __init__(self, is_p1_turn, current_total):\n self.p1_turn = is_p1_turn\n self.current_total = current_total", "def init_turn(self):\n self.before_turn_switches()\n if self.battlefield.win is not None:\n return\n self.before_turn()", "def __init__(self):\n self.__grid = create_grid(\n Settings.SIZE_X, Settings.SIZE_Y, MarkerType.NONE)\n\n self.__turn = 0\n self.__state = GameState.PLAYING\n self.__winner = MarkerType.NONE\n self.__loser = MarkerType.NONE\n\n # Separate counter for turns, because __turn depends on starting player\n self.__turns_played = 0", "def initGameState(self):\n print(\"Setting game state: \")\n self.playGUI = GUI()\n self.playGUI.drawBoard(self.player)", "def __init__(self):\n\n super().__init__()\n self.setup_janggi_game()\n self._game_state = 'UNFINISHED'\n self._player_turn = 'BLUE'", "def __init__(self, player_1):\n self.die = Die()\n self.player_1 = player_1\n self.current_player = self.player_1\n self.turns = 1", "def set_plays(self) -> None:\n player1 = self._get_input('What is the name of player 1?')\n player2 = self._get_input('What is the name of player 2?')\n self.state = State(player1, player2)", "def __init__(self, player1, player2, state_machine, restore = False):\r\n super().__init__()\r\n self.__players[0] = player1\r\n self.__players[1] = player2\r\n self.__player_names[player1] = 'Human'\r\n self.__player_names[player2] = 'Bot'\r\n self.__state_machine = state_machine", "def bcp_player_turn_start(self, player, **kwargs):\n\n if ((self.player and self.player.number != player) or\n not self.player):\n\n self.player = self.player_list[int(player)-1]", "def __init__(self):\r\n self.turn = 0", "def start(self):\n self.player = Player()\n self.dealer = Dealer()\n self.pot = 0\n self.side_bet = 0\n start_game()", "def bcp_game_start(self, **kargs):\n self.bcp_player_add(number=1)\n self.bcp_player_turn_start(player=1)\n self.events.post('game_started', **kargs)", "def start(self):\n self.__init__()\n self.set_n_players()\n self.init_players()\n self.init_territory_selection_phase()\n self.init_troop_deployment_phase()\n # self.game_phase()", "def set_turn(self):\n if self.status == self.PLAYER_TURN:\n return\n self.status = self.PLAYER_TURN\n self.client.send_player_turn(10)", "def __init__(self, player1AI = False, player2AI = False):\n\t\tself.tick = 0\n\t\tself.player1AI = player1AI\n\t\tself.player2AI = player2AI\n\t\tself.selectionIndex = [0, 0]\n\t\tself.colours = [\"#ff6363\", \"#ffc163\", \"#88de68\", \"#63c6ff\", \"#ffffff\", \"#000000\"]\n\t\tself.playerReady = [False, False]\n\t\tself.playerKeys = {0: [\"W\", \"S\", \"SPACE\"], 1: [\"⭡\", \"⭣\", \"ENTER\"]}\n\t\tself.timeSinceReady = 0\n\t\tself.headings = [\n\t\t\t\"Player 1:\" if not self.player1AI else \"Computer:\",\n\t\t\t\"Player 2:\" if not self.player2AI else \"Computer:\"\n\t\t]\n\t\tself.itemSpacing = 0.15", "def initGame(self):\n self.map = {}\n self.blocks = Group()\n self.Coins =Group()\n self.players = Group()\n self.player1 = Player(1525,75,2)\n self.players.add(self.player1)\n if self.playernum == 2:\n self.player2 = Player(75,825,1)\n self.players.add(self.player2)\n else:\n self.player2 = False", "def __init__(self,state,player=WHITE):\n if(state==None):\n self.gameState = dict()\n for x in range(0,WIDTH):\n for y in range(0,HEIGHT):\n self.gameState[x,y] = EMPTY\n for x in range(0,WIDTH):\n self.gameState[x,BSTARTROW] = 
BLACK#Blacks starting row\n self.gameState[x,WSTARTROW] = WHITE#Whites starting row\n #whites.append(Board.pawn(Board.pos(x,WSTARTROW),WHITE))\n #blacks.append(Board.pawn(Board.pos(x,BSTARTROW),BLACK))\n else:\n self.gameState = state\n \n self.whoseTurn = player\n self.cachedWin = False # set to True in winFor() if\n self.cachedWinner = None", "def setup_new_game(self):\r\n self._player = Player()\r\n self._stats = GameStats(self._bb_settings)\r\n self._scoreboard = Scoreboard(self._bb_settings, self._screen)", "def __init__(self, ui, player1, player2, istournament=False, tournamentgame=None):\n self.player1 = player1\n self.player2 = player2\n self.istournament = istournament\n self.tournamentgame = tournamentgame\n\n self.gameState = [0, 0, 0,\n 0, 0, 0,\n 0, 0, 0]\n\n self.ui = ui\n self.currentplayer = 1\n if random.uniform(0, 1) > 0.5:\n self.currentplayer = -1\n\n self.displayBoard(ui)", "def __init__(self, is_p1_turn: bool, side_length: int, current_board: str,\n current_ley_lines: list):\n super().__init__(is_p1_turn)\n self.current_board = current_board\n self.current_ley_lines = current_ley_lines\n self.side_length = side_length", "def turn_on(self) -> None:\n self._state = self._player.turn_on()", "def __init__(self):\n self._game_state = \"UNFINISHED\"\n self._current_player = \"BLACK\"\n self._game_board = Board()", "def next_player(self):\n if self.player1.turn_status == 1:\n self.player1.turn_status = 0\n self.turn(self.player2)\n else:\n self.player2.turn_status = 0\n self.turn(self.player1)", "def start(self):\n\n # Call the protected _turn method to start the game\n self._turn()", "def set_next_first_player(self):\n if self.current_player == self.player1:\n self.player1.won_previous = True\n self.player2.won_previous = False\n else:\n self.player2.won_previous = True\n self.player1.won_previous = False", "def _init_game(self):\n state, player_id = self.game.init_game()\n if self.record_action:\n self.action_recorder = []\n return self._extract_state(state), player_id", "def initial_move(self):\n\n # Make the first move based on the game we\n # are currently playing, otherwise return\n if isinstance(self.get_game_space(), Gomoku):\n\n # play one stone in the bottom left-hand corner\n self.get_game_space().set_tile(0,6,self.get_affinity())\n\n # the agents are now in play \n self.set_play_status(True)\n self.get_opponent().set_play_status(True)\n\n else:\n print('Unknown game. 
Returning')\n return None", "def __init__(self):\n\n self.__turn_info = { 'turn': ChessGame.WHITE }\n self.init_board()", "def __init__(self):\n self.deck = Deck()\n self.player1 = Player(INITIAL_CHIPS)\n self.player2 = Player(INITIAL_CHIPS)\n self.flop = []\n self.turn = None\n self.river = None\n\n self.this_player = self.player2\n self.other_player = self.player1 # 一局开始前会对换一次玩家\n\n self.last_action = None", "def start_21game(self):\n self.is_game_start = True\n self.already_has_a_winner = False\n self.player_point = {}\n self.generate_21game_number()\n self.boardcast(self.game_msg)", "def reset(self, *args):\n self.state = GameStates.playing\n self.human = evilrps.Player('Human', self.get_player_choice)\n self.ai = evilrps.Player('AI', evilrps.create_ai())\n self.game = evilrps.Game(self.human, self.ai)", "def __init__(self, master=None):\n super().__init__(master)\n self.masterframe = Frame(self.master)\n self.masterframe.pack()\n self.grid = Grid(3)\n self.canvasSize = 100\n self.mX = 0\n self.mY = 0\n self.working = True\n # True is Player 1 and False is Player 2\n self.turn = True\n self.createCanvas()", "def __init__(self,player1: Player = ManualPlayer(\"P1\"),\\\r\n player2: Player = ManualPlayer(\"P2\")):\r\n\r\n self.board = np.zeros((BOARD_SIZE,BOARD_SIZE)\\\r\n ,dtype=np.int8)\r\n self.board[3,3] = '2'\r\n self.board[4,4] = '2'\r\n self.board[3,4] = '1'\r\n self.board[4,3] = '1' \r\n\r\n self.players = []\r\n self.players.append(player1)\r\n self.players.append(player2)\r\n self.turn = 1\r\n self.count = 0", "def __init__(self, player1, player2):\n self.players = [player1, player2]\n self.tokens = {\n ' ': ' ',\n player1: 'X',\n player2: 'O',\n }\n self.score = {\n player1: 0,\n player2: 0,\n }\n self.moves = None\n self.winner = None\n self.turn = ''\n self.reset()", "def change_player_state(self):\n if self.active_player.get() is True:\n # Get game phase and unlock respective buttons?\n # or should game do that\n pass\n else:\n pass\n #self.disable_all_buttons()", "def start(self):\n # Call the protected _turn method to start the game\n self._end_time = time.time() + 60\n self._turn()", "def start_game(self, **kwargs):\n\n success, info = self.gms.start_game(\n player=kwargs.get('player', 'x'),\n first_turn=raw_input('Would you like to go first? 
y/n\\n') == 'y'\n )\n if success:\n if info['status_code'] == core_constants.GAME_STATUS_HUMAN_MOVE_REQUIRED:\n print(self.gms.game.get_board_state_pretty())\n self.play_human_move()\n else:\n print(info['messages'][0])", "def reset(self, starting_player):\n self.__turn = starting_player.value\n self.__turns_played = 0\n self.__winner = MarkerType.NONE\n self.__loser = MarkerType.NONE\n\n for y in range(Settings.SIZE_Y):\n for x in range(Settings.SIZE_X):\n self.__grid[y][x] = MarkerType.NONE\n\n self.__state = GameState.PLAYING", "def init_game_setting(self):\n self.state.state_counter_while_testing += 1", "def start_game(self):\n self.code = code.get_random_num()\n self.Player1 = self.get_player(1)\n self.Player2 = self.get_player(2)\n attempt = self.Player1.make_guess()\n guess.guess_lists(attempt, self.code)\n right_answer_list = guess.return_answer()\n num_guessed_list = guess.return_player_guess()\n check.check(num_guessed_list, right_answer_list)\n attempt = self.Player2.make_guess()\n guess.guess_lists(attempt, self.code)\n right_answer_list = guess.return_answer()\n num_guessed_list = guess.return_player_guess()\n output = check.check(num_guessed_list, right_answer_list)\n play = end_game.end_game(output)\n if play == True:\n self.keep_playing()", "def __init__(self, players, gamespace, start_state, transitions, setup, finish, get, post):\n\n self.players = players\n self.gamespace = gamespace\n self.start_state = start_state\n self.transitions = transitions\n self.setup = setup\n self.finish = finish\n self.get = get\n self.post = post\n\n self.turn = 1", "def prep_game1(self):\n game_str1 = str(self.stats.player1_game)\n self.game_image1 = self.font.render(game_str1, True, self.text_colour, self.settings.bg_colour)\n\n # Position the level to the bottom of the screen\n self.game_rect1 = self.game_image1.get_rect()\n width, height = self.game_rect1.size\n self.game_rect1.x = self.settings.screen_width/3\n self.game_rect1.y = self.settings.screen_height - height", "def __init__(self, p1_starts: bool) -> None:\n side_length = int(input(\"Enter the side length of the board: \"))\n self.current_state = StonehengeState(p1_starts, side_length)", "def __init__(self):\n # Current player\n self.player = X\n\n # Board\n self.board = [\n [None, None, None],\n [None, None, None],\n [None, None, None]\n ]\n\n # Winner\n self.winner = None\n\n # Game over\n self._gameover = False", "def begin_turn(self):\n pass", "def setCurrentPlayer(self):\n self.current_player = next(self.get_player_order_fn())", "def __init__(self, player, gamestate):\n self.player = player\n self.gamestate = gamestate", "def start_turn(self, turn_number: int):\n self.turn = turn_number", "def switch_player(self):\n self.player = Nim.other_player(self.player)", "def __init__( self, prevState = None ): ###PLEASE NOTE THIS THAT THE __init__ method is here and this is where GameState() starts\n if prevState != None: # Initial state\n self.data = GameStateData(prevState.data) ##This statement imports the GameStateData object from the GameStateData class in game.py. 
This object contains a data packet documenting the state of the game.\n\n else:\n self.data = GameStateData()\n \"\"\"\n self._foodEaten = None\n self._capsuleEaten = None\n self._agentMoved = None\n self._lose = False\n self._win = False\n self.scoreChange = 0\n \"\"\"", "def start_of_game(self):\n pass", "def play(self):\n\n player1_turn = True\n\n while True:\n if player1_turn:\n self.player_turn(self.player1, self.player2)\n if self.lost(self.player2):\n print(\"Game Over!! You sank {}'s ships!\".format(\n self.player2.name))\n break\n player1_turn = False\n else:\n self.player_turn(self.player2, self.player1)\n if self.lost(self.player1):\n print(\"Game Over!! You sank {}'s ships!\".format(\n self.player1.name))\n break\n player1_turn = True", "def initializeGame(self):\n # Fill deck with cards and shuffle it\n self.deck.fill(104)\n self.deck.shuffle()\n #print \"Deck initialized\"\n\n # Initialize the field\n self.field.initialize(self.deck.draw(4))\n self.field.sortField()\n #self.field.printField()\n\n # Set players to initial state again\n # Distribute cards and set bulls to 0\n for p in self.players:\n p.bulls = 0\n p.setHand(self.deck.draw(10))", "def init_game(self):\n self.view.carregar_jogadores_possiveis(self._possible_players_list())\n self.view.put_view_in_main_loop()", "def switchPlayer(self):\n self.player = Nim.otherPlayer(self.player)", "def start_game(self):\n p1_move = True\n is_all_moves_over = False\n while not is_all_moves_over:\n\n while p1_move and not is_all_moves_over:\n p1 = int(input(\"Player 1 pos:\"))\n is_all_moves_over, p1_move = self.play('p1', p1, p1_move)\n\n while not p1_move and not is_all_moves_over:\n p2 = int(input(\"Player 2 pos:\"))\n is_all_moves_over, p1_move = self.play('p2', p2, p1_move)\n\n print(\"Game Ended in Draw\")", "def feed(self):\n self.phase.set(2)\n # Start with first player\n #self.first_player\n pass", "def evolve(self):\n # Start with first player\n self.phase.set(1)\n\n #self.first_player\n \n # Autopass turn if no cards left for player\n \n \n pass", "def before_turn(self, playerInfo):\n self.PlayerInfo = playerInfo", "def __init__(self):\n self.numShipsPerPlayer = 0\n self.playerType = 1 # Whether P2 is a human (1) or AI (2-4 for difficulty)\n \n self.grid = Grid()\n self.shipDir = 0 # Direction of the ship currently being placed (index of c.DIRS)\n self.lenShip = 1 # Length of the ship to place next\n \n self.p1Ships = []\n self.p2Ships = []\n \n # Number of special shots each player has (gain one every 10 rounds)\n self.round = 0\n self.p1_special_shots = 0\n self.p2_special_shots = 0\n \n self.is_P1_turn = False\n self.is_placing = False\n self.is_shooting = False\n self.in_transition = False\n \n self.msg = \"\" # Message to display below game board", "def start_game(self):\n while self.can_deal:\n self.take_turn()", "def __init__(self):\n self.board = Board()\n #self.player1 = player1\n #self.player2 = player2\n self.winner = None", "def __init__(self, players):\n\n # Define the players\n self.players = players\n\n # Define who starts the game\n self.nplayer = 1 \n\n # Define the board\n self.board = [0] * 9", "def init_game_setting(self):\r\n pass", "def init_new_game(self):\n self.game = get_new_game(self.game_config)", "def setupNewGame(self):\r\n self.level = 1\r\n self.num_cows = 2\r\n self.num_farmers = 1\r\n self.levelHeading = Text(self.gameDisplay, 120, 425, 175, self.light_orange, \"Farm 1\")\r\n self.shield_indicator.image = self.greenShield\r\n updatedHeading = self.levelHeading\r\n self.startUX[0] = 
updatedHeading", "def start_game(self):\n\n\t\tpass", "def _set_init_pose(self):\n #raw_input(\"INIT SPEED PRESS\")\n self.move_base(self.init_linear_speed_vector,\n self.init_angular_turn_speed,\n epsilon=0.05,\n update_rate=10)\n # We Issue the landing command to be sure it starts landing\n #raw_input(\"LAND PRESS\")\n # self.land()\n\n return True", "def set_ready(self):\n if self.game.has_started() or self.status == self.PLAYER_READY:\n return\n self.status = self.PLAYER_READY\n self.game.player_is_ready()", "def __init__(self, board_value, *args, **kwargs):\n\n # the value which will represent the player behind the scenes\n self.board_value = board_value\n self.turn_count = 0", "def play_game(self, is_first_player):\n state = [0] * (self.SIZE * self.SIZE)\n turn_number = 0\n player_number = -1 if is_first_player else 1\n\n print('Your bot is playing {} ({})'.format(\n 'first' if is_first_player else 'second',\n 'black' if self.index == 0 else 'white'\n ))\n try:\n for turn_number in range(self.SIZE * self.SIZE):\n is_my_turn = bool(turn_number % 2) != is_first_player\n\n if is_my_turn:\n turn_space = self.play_turn(state, player_number)\n self.send({'type': 'Move', 'move': turn_space})\n\n update = self.recv_type('PlayerMove')\n state[update['move']] = (\n player_number if is_my_turn else -player_number\n )\n if 'winner' in update:\n won = self.index == update.get('winner')\n if update.get('winner') == -2:\n print('The game was a draw.')\n else:\n print('Your bot has {} the game'.format(\n 'won' if won else 'lost'\n ))\n return\n finally:\n is_first_player_black = (\n is_first_player and self.index == 0 or\n not is_first_player and self.index == 1\n )\n print(self.render_state(state, is_first_player_black))", "def _prepare_first_step(self):\n if self.townhalls:\n self._game_info.player_start_location = self.townhalls.first.position\n self._game_info.map_ramps, self._game_info.vision_blockers = self._game_info._find_ramps_and_vision_blockers()", "def initialize_game_params(self):\r\n\r\n self.is_new_game = True\r\n self.is_game_over = False\r\n self.is_game_lost = False\r\n self.is_left_mouse_down = False\r\n self.is_right_mouse_down = False\r\n self.num_of_hidden_non_mines_tiles = self.rows * self.cols - self.num_of_mines", "def __init__(self, board: Board, player: Player, ai: Ai, banner=\"tttBanner.txt\"):\n\n # setting instance variables\n self.board = board\n self.player = player\n self.ai = ai\n self.banner = banner\n\n # which player has the current turn\n self.whoseTurn = self.player", "def __init__(self, player1, player2):\n # # players of the game {player1name: {color: , red_marbles:}}\n # self._players = {player1[0]: {\"name\": player1[0], \"color\": player1[1]},\n # player2[0]: {\"name\": player2[0], \"color\": player2[1]}}\n # # empty board, no marbles yet\n # self._board = self.create_board()\n # # current player's turn\n # self._turn = None\n # # winner state\n # self._winner = None\n # # red marbles captured for each player, needs addition of black and white marbles\n # self._captured = {player1[0]: 0, player2[0]: 0}\n pass", "def __init__(self):\n self.played_pos = []\n self.grid = [['-', '-', '-'],\n ['-', '-', '-'],\n ['-', '-', '-']]\n self.player_played_pos = {'p1': set(), 'p2': set()}", "def player_reset(self):\n\n logging.info(\n f\"Player.player_reset(): Player {self.id} score and time reset\\n\")\n self.id = 0\n self.name = \"\"\n self.phone = \"\"\n self.score = 0 # Running sum of player's score\n self.state = \"innactive\"\n self.ball_id = \"\"\n self.start_x 
= 0 # start pos of object thrown in game\n self.angle = 0 # angle of ball movement\n self.velocity = 0 # velocity of ball\n # leaderboard\n self.game_over = False\n self.date = str(datetime.date.today()) # required for leaderboard", "def player(self):\n legal = self.board.legal_move(self.black)\n if(len(legal) == 0):\n self.p_no_move = 1\n print(\"No legal move for player!\")\n self.computer_turn = True\n self.player_turn = False", "def __init__(self, player1, player2):\n self.player1 = Player(player1)\n self.player2 = Player(player2)\n self.die = Die()\n self.turn(self.player1)", "def start_gameloop(self):\n print(\"Game Loop starting...\")\n while True:\n current_turn = self.who_goes_first()\n print('The ' + current_turn + ' will go first.')\n while self.is_active:\n if current_turn == \"player\":\n self.board.draw()\n move = get_player_move(\n self.board.positions, self.board.is_position_availible)\n self.board.make_move(move, self.player_letter)\n current_turn = \"computer\"\n else:\n move = self.npc.get_move(self.board)\n self.board.make_move(move, self.npc.letter)\n current_turn = \"player\"\n if self.board.is_winner(self.player_letter):\n self.board.draw()\n print(\"You won!\")\n self.is_active = False\n if self.board.is_winner(self.npc.letter):\n self.board.draw()\n print(\"You lost!\")\n self.is_active = False\n if self.board.is_board_full():\n self.board.draw()\n print(\"Tie\")\n self.is_active = False\n if request_play_again() is False:\n break\n self.is_active = True\n self.board = Board(request_board_size())", "def start_game(self) -> None:\n self.init_game()\n self.play()", "def initialize(self):\n self.currState = self.startState", "def init_player():\n global active_track_idx\n global track_last_slided_pos\n global track_last_paused_pos\n global track_total_play_time \n\n # INITIALIZE Player\n active_track_idx = -1\n cancel_update_play_time_loop()\n cancel_track_end_event_loop()\n track_status.set(\"---\")\n track_title.set(\"--- : \")\n play_pause_btn.configure(image=play_img)\n track_last_slided_pos = 0\n track_last_paused_pos = 0\n track_total_play_time = 0\n track_pos_label.configure(text=\"00:00\")\n track_length_label.configure(text=\"00:00\")\n track_pos_slider.configure(state=\"disabled\")\n track_pos.set(0)", "def restart(self):\r\n\r\n self.pot = 0\r\n self.actions = 0\r\n self.previous_bet = self.small_blind\r\n self.initiate_blind(self.small_blind + self.big_blind)\r\n\r\n for player in self.players:\r\n player.credits = self.starting_credits\r\n\r\n # Let the first player begin\r\n self.active_player = (self.active_player + 1) % len(self.players)\r\n self.players[self.active_player].active = True\r\n\r\n self.players[self.active_player - 1].flip_cards()\r\n self.community_cards.flip_cards()\r\n\r\n self.deck_model = DeckModel()\r\n\r\n for player in self.players:\r\n player.new_cards(self.deck_model)\r\n\r\n output_text = \"Starting game...\\n{} post the big blind [${}]\\n{} post the small blind [${}]\".format(\r\n self.players[(self.active_player + 1) % len(self.players)].name, self.big_blind,\r\n self.players[self.active_player].name, self.small_blind)\r\n\r\n message = \"Player {} won!\".format(self.players[1].name)\r\n self.game_message.emit(message)\r\n\r\n self.new_pot.emit()\r\n self.new_credits.emit()\r\n self.new_output.emit(output_text)", "def __init__(self):\n self.TurnOnRoom = False", "def __init__(self):\n super(CamTacToe, self).__init__()\n self.state = [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']\n self.history = [self.state[:]]\n 
self.player_char = 'x'\n self.opponent_char = 'o'\n self.difficulty = 0\n self.player_start = 0", "def startGame(self):\n\n\t\tfor name in self.players.keys():\n\t\t\tself.startPlayerGame((name, 0))\n\t\tself.setupGuiSignals()", "def set_game_params(self, board):\n self.board = board.copy()\n self.n_rows = len(self.board[0]) # cols number\n self.n_cols = len(self.board) # rows number\n self.fruits_ttl = min(self.n_rows,self.n_cols)+1\n player_pos = np.where(board == 1)\n rival_pos = np.where(board == 2)\n self.locations[PLAYER] = tuple(ax[0] for ax in player_pos)\n self.locations[RIVAL] = tuple(ax[0] for ax in rival_pos)\n self.turns = 0\n self.max_turns = reachables(self.board,self.locations[PLAYER])\n self.player_turns = self.max_turns // 2", "def start(self):\n while self.turns <= 7:\n # print()\n # print(\"This is turn {}.\".format(self.turns))\n turn = Turn(self.current_player, self.die)\n turn.run()\n self.current_player.score += turn.score\n # print(\"{}'s score is now {}\".format(self.current_player, self.current_player.score))\n self.turns += 1\n # print()\n # print(\"You have reached 7 turns. Game over.\")\n # print(\"Your total score is {}.\".format(self.current_player.score))", "def start(self):\n self.save_checkpoint(\"setup\")\n\n logging.info(\"Starting game...\")\n body = render_message(\n \"welcome.html\",\n game_name=self.name,\n night_end=self.night_end.strftime(\"%I:%M %p\"),\n day_end=self.day_end.strftime(\"%I:%M %p\"),\n players=self.game.players,\n )\n self.send_message(mafia.events.PUBLIC, \"%s: Start\" % self.name, body)\n self.game.begin()\n self.started = True\n\n self.save_checkpoint(\"start\")", "def switch_player(self):\n if self.playerOne:\n # sets the chip color to blue\n self.red = 0\n self.blue = 255\n # switch the player to player 2 and change the caption\n self.playerOne = False\n pygame.display.set_caption('Connect4 - Player 2')\n else:\n # sets the chip color to red\n self.red = 250\n self.blue = 0\n # switch the player to player 1 and change the caption\n self.playerOne = True\n pygame.display.set_caption('Connect4 - Player 1')", "def setup(player, state):\n global ME\n global THEM\n global STATE\n\n player = player.upper()\n assert player == 'X' or player == 'O'\n ME = player\n THEM = 'O' if player is 'X' else 'X'\n assert state\n STATE = state", "def switchTurn(self):\n\n # Widget for player 1\n if self.frame1.state() == 'normal':\n self.frame2.deiconify()\n self.frame1.withdraw()\n self.frame1.update()\n self.frame2.update()\n if self.message[0]:\n showDialogBox(self.message[0]) # announce\n self.message[0] = None\n game2.canvas.tag_bind('square', '<Button-1>', game2.fire)\n\n # Widget for player 2\n else:\n self.frame1.deiconify()\n self.frame2.withdraw()\n self.frame1.update()\n self.frame2.update()\n if game2.isComputer == 1:\n self.frame1.after(500)\n game1.computer_fire()\n else:\n if self.message[1]:\n showDialogBox(self.message[1]) # announce\n self.message[1] = None\n game1.canvas.tag_bind('square', '<Button-1>', game1.fire)", "def setupStage(self):\n print(\"(\" + str(self.HOST) + \", \" + str(self.PORT) +\"):: Initiating setup stage\", file=self.logs)\n\n # Holds player map and name information here. 
Will be used to create objects later.\n mapVotes = []\n playerNames = {}\n colors = [\"red\", \"blue\"]\n\n gameState = {\n \"ready\": False,\n \"game\": None \n }\n while True:\n # Continuously saves logging information to a text file:\n self.logs.close()\n self.logs = open(str(self.filepath)+\"/_logs/\"+ str(self.PORT) + \".txt\", \"a+\")\n\n # Gets all the events from the game window. A.k.a., do stuff here.\n inboundData = self.socket.recvfrom(1024) # Gets bundle of data from clients\n data = inboundData[0] # Separates data from address\n address = inboundData[1]\n\n # Keeps track of how often the server recieves information from each client.\n updatedTime = time.time() \n self.clientUpdateTimes[str(address)] = updatedTime\n\n ########\n self.bitsIn += sys.getsizeof(data)\n\n address = inboundData[1] # Separates address from data\n data = pickle.loads(data) # Unpickles data back into a python dict\n\n command = data['command']\n if command != None: \n # Takes in information from both players\n if command == \"SUBMIT\":\n pName = data['playerName']\n mVote = data['mapVote']\n\n mapVotes.append(mVote)\n playerNames[str(address)] = pName\n \n # Both votes are in. Chooses a map, builds the Board object.\n if len(mapVotes) == 2:\n # Only chooses one map for both players\n if self.map == None:\n mapTuple = random.choice(mapVotes)\n size = mapTuple[0]\n m = mapTuple[1]\n\n if size == \"SMALL\":\n randomMap = MapGenerator((5,7), m)\n self.map = randomMap.getMap()\n mapString = self.map\n width = randomMap.getDimensions()[0]\n height = randomMap.getDimensions()[1]\n tokens = randomMap.getTokens()\n if size == \"MEDIUM\":\n randomMap = MapGenerator((7,9), m)\n self.map = randomMap.getMap()\n mapString = self.map\n width = randomMap.getDimensions()[0]\n height = randomMap.getDimensions()[1]\n tokens = randomMap.getTokens()\n if size == \"BIG\":\n randomMap = MapGenerator((10,12), m)\n self.map = randomMap.getMap()\n mapString = self.map\n width = randomMap.getDimensions()[0]\n height = randomMap.getDimensions()[1]\n tokens = randomMap.getTokens()\n if size == \"HUGE\":\n randomMap = MapGenerator((12,15), m)\n self.map = randomMap.getMap()\n mapString = self.map\n width = randomMap.getDimensions()[0]\n height = randomMap.getDimensions()[1]\n tokens = randomMap.getTokens()\n if size == \"RANDOM\":\n randWidth = random.randint(5, 13)\n randHeight = random.randint(5, 13)\n\n randomMap = MapGenerator((randWidth,randHeight), m)\n self.map = randomMap.getMap()\n mapString = self.map\n width = randomMap.getDimensions()[0]\n height = randomMap.getDimensions()[1]\n tokens = randomMap.getTokens()\n\n # Builds the game board\n self.board = Board(width, height, mapString)\n\n # Both players' names have been entered, creates Player objects.\\\n # Appends player objects to state variable. 
\n if len(playerNames) == 2 and len(colors) > 0:\n p = Player(playerNames[str(address)], colors.pop(), None, tokens, address)\n self.players.append(p)\n \n # Player objects and Board object have both been created.\n # Builds the Game object, stores it, then tells the PlayerViews its ready.\n if len(self.players) == 2 and self.board != None:\n self.game = Game(self.board, self.players[0], self.players[1])\n gameState['game'] = self.game\n gameState['ready'] = True\n\n # Sends data to both players simultaneously\n for client in self.clients:\n outboundData = pickle.dumps(gameState)\n self.socket.sendto(outboundData, client)\n break\n\n # Packages up data and sends it back to the client\n outboundData = pickle.dumps(gameState)\n\n ######\n self.bitsOut += sys.getsizeof(outboundData)\n\n self.socket.sendto(outboundData, address)\n \n # Check client connections here\n self.checkClientConnections(time.time())", "def __init__(self, board, turn):\n self.player = turn\n self.roll = self.roll_dice()\n #array of applied board states\n self.moves = []\n self.board = board\n self.generate_valid_moves()", "def __init__(self, turn, game):\n\t\tself.turn = turn\n\t\tself.game = game\n\t\tself.gameBoard = game.returnBoard()", "def __init__(self):\n self.start()\n while self.player.money > 0:\n self.game_loop()\n if self.player.money > 0:\n if not play_again():\n break\n elif self.player.money == 0:\n no_more_money()\n self.reset_table()", "def start_game(self):\n self.board = Board(num_tableaus=self.tableau_qty, num_decks=self.decks, deal_3=self.deal_3)\n self.board.init_move_dict()\n self.board.deal(self.deck)\n\n if self.api_use:\n self.init_game_api()\n elif self.commandline:\n self.init_cl_game()\n else:\n self.init_pygame()", "def start(self, board, player1, player2):\n # Check to see who goes first\n if random.randint(0, 1) == 0:\n self.printGoFirst(player1)\n\n while True:\n # Make a move and check if that move wins/draws the game\n board.makeMove(player1.getNextMove(board, self.letter1), self.letter1)\n board.printBoard()\n if board.isWinner(self.letter1):\n self.printWinner(player1)\n break\n if board.isFull():\n print \"The game is drawn\"\n break\n\n # Make a move and check if that move wins/draws the game\n board.makeMove(player2.getNextMove(board, self.letter2), self.letter2)\n board.printBoard()\n if board.isWinner(self.letter2):\n self.printWinner(player2)\n break\n if board.isFull():\n print \"The game is drawn\"\n break\n else:\n self.printGoFirst(player2)\n\n while True:\n # Make a move and check if that move wins/draws the game\n board.makeMove(player2.getNextMove(board, self.letter1), self.letter1)\n board.printBoard()\n if board.isWinner(self.letter1):\n self.printWinner(player2)\n break\n if board.isFull():\n print \"The game is drawn\"\n break\n\n # Make a move and check if that move wins/draws the game\n board.makeMove(player1.getNextMove(board, self.letter2), self.letter2)\n board.printBoard()\n if board.isWinner(self.letter2):\n self.printWinner(player1)\n break\n if board.isFull():\n print \"The game is drawn\"\n break", "def update_player_turn(self):\n\n if self.get_player_turn() != 'BLUE':\n\n self._player_turn = 'BLUE'\n\n else:\n\n self._player_turn = 'RED'" ]
[ "0.69196415", "0.69193554", "0.65942466", "0.65815157", "0.65624744", "0.6533628", "0.6411285", "0.6408902", "0.63886917", "0.63440305", "0.62498075", "0.6223506", "0.6214494", "0.6212468", "0.6181791", "0.61318034", "0.61269957", "0.6113165", "0.6105516", "0.60997057", "0.60623753", "0.6047337", "0.60449654", "0.6034377", "0.5988005", "0.5980125", "0.5971257", "0.5960737", "0.5958279", "0.59457356", "0.59314924", "0.58974963", "0.5878718", "0.58707875", "0.58665985", "0.58622926", "0.5833316", "0.5832381", "0.5828017", "0.5812129", "0.5804887", "0.5801867", "0.5786732", "0.5774835", "0.57742465", "0.57646567", "0.5764289", "0.57603294", "0.5747525", "0.57456076", "0.57028955", "0.56945336", "0.5694108", "0.56922066", "0.5689086", "0.5688307", "0.56729454", "0.56670666", "0.56532955", "0.5650965", "0.5647997", "0.5646616", "0.5644747", "0.56419116", "0.563529", "0.5623836", "0.5611248", "0.5603879", "0.56025237", "0.5597379", "0.5593418", "0.5590058", "0.55881244", "0.5572522", "0.55654275", "0.5520996", "0.55203056", "0.5512435", "0.55108374", "0.5509519", "0.5506407", "0.55049825", "0.5503459", "0.5503315", "0.55028516", "0.5501116", "0.549766", "0.54945135", "0.5488938", "0.54857427", "0.5484425", "0.548267", "0.54758793", "0.54756963", "0.54725254", "0.5471699", "0.5470793", "0.547007", "0.5463785", "0.5460697", "0.54533726" ]
0.0
-1
Return a string representation of the current state of the game.
def __str__(self) -> str:
    side = self.side_length
    hori_lst = self.hori_lst
    hori_result = self.hori_result
    left_lst = self.left_lst
    left_result = self.left_result
    right_lst = self.right_lst
    right_result = self.right_result
    total_line = ''
    for i in range(2 * side + 5):
        # empty line string
        line = ''
        if i % 2 == 0:
            lineindex = int(i / 2)
            if lineindex <= side:
                # get the first 2 left result
                if lineindex == 0:
                    # print('first line')
                    for ia in range(3*(side+1)):
                        line += ' '
                    line += left_result[0]
                    line += ' '
                    line += left_result[1]
                # general case of combining the results and list together
                else:
                    if lineindex == side:
                        line += ' '
                    for ib in range(side - lineindex):
                        line += ' '
                    line += hori_result[lineindex - 1]
                    for ic in range(len(hori_lst[lineindex - 1])):
                        line += ' - '
                        line += hori_lst[lineindex - 1][ic]
                    if lineindex != side:
                        line += ' '
                        line += left_result[lineindex + 1]
            else:
                if lineindex == side + 1:
                    # for id in range():
                    line += ' '
                    line += hori_result[side]
                    for ie in range(side):
                        line += ' - '
                        line += hori_lst[side][ie]
                    line += ' '
                    line += right_result[side]
                else:
                    # print the last row for all other right results
                    # print('right results')
                    for ig in range(9):
                        line += ' '
                    for ih in range(side):
                        line += right_result[ih]
                        line += ' '
            total_line += line + '\n'
        else:
            # print stuff for the '/'
            lineindex2 = int(i / 2)
            if lineindex2 == 0:
                for iA in range(3*side+1):
                    line += ' '
                line += ' / '
                line += ' '
                line += " / "
            elif lineindex2 < side:
                for iA in range(3 * (1 + side - lineindex2)):
                    line += ' '
                # print('lineindex2: '+str(lineindex2)+' '+str(3*(1+side-lineindex2)))
                for iB in range(lineindex2 + 1):
                    line += '/ \\ '
                line += '/'
            elif lineindex2 == side:
                #for iC in range(side+1):
                line += ' '
                for iD in range(side):
                    line += '\\ / '
                line += '\\'
            elif lineindex2 == side + 1:
                for iE in range(8):
                    line += ' '
                for iG in range(side):
                    line += '\\ '
            total_line += line + '\n'
    return total_line
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stateString(self):\n return self._mdp.stateString(self._cur_state);", "def __str__(self):\n s=\"\"\n for y in range(0,HEIGHT):\n for x in range(0,WIDTH):\n s+=str(self.gameState[x,y])\n return s", "def __str__(self):\n s = \"\"\n for r in range(1,self.size+1):\n for c in range(1,self.size+1):\n s += str(self.gameState[r,c])\n return s", "def __repr__(self):\n s = \"\"\n for y in range(0,HEIGHT):\n temp=\"\"\n for x in range(0,WIDTH):\n temp = temp+ str(self.gameState[x,y])\n s += temp+\"\\n\"\n return s", "def state(self):\n\n\t\treturn str(self)", "def state(self):\n return str(self)", "def state(self):\r\n return str(self)", "def __repr__(self):\n string = \"Current state: \\n\"\n if self.state[0] == 0: # We're on the left side\n string += \"M: \"\n string += str(self.state[1]).ljust(10)\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"\\n\"\n\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: left\\n\"\n else: # We're on the right side\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"M: \"\n string += str(self.state[1])\n string += \"\\n\"\n\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: right\\n\"\n string += \"\\n\"\n return string", "def __str__(self):\n return ''.join(str(e) + ' ' for e in self.state)", "def state(self) -> str:", "def print_state(self):\n print('\\nthe current state is: ' + str(self.state) + '\\n')", "def __repr__(self):\r\n r = str(self.current_instance_state())\r\n return r", "def current_state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"current_state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")", "def display_state(self):\r\n\r\n print('\\n')\r\n print('>>CURRENT STATE')\r\n ct = 0\r\n for i in self.state:\r\n for j in i:\r\n if j == -1:\r\n val = 'X'\r\n else:\r\n val = str(ct)\r\n if len(val) == 1:\r\n print(' ' + val + ' ', end='')\r\n else:\r\n print(val + ' ', end='')\r\n ct += 1\r\n print('\\n')", "def __str__(self):\n return \"\".join(list(map(lambda row: ''.join(row), self.state)))", "def to_string(self):\r\n return '\\n'.join([' '.join([trans.start_state, trans.end_state, trans.symbol])\r\n for trans in self.transitions]) + '\\n' + self.start_state + ' ' + ' '.join(self.final_states)", "def current_state(self) -> str:\n return self._state_storage.state", "def 
state_message(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state_message\")", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def __str__(self):\n state = ''\n state += ' '.join([str(x) for x in self.pos]) + ' '\n state += ''.join([str(x) + ' ' + str(y) + ' ' for x,\n y in zip(self.BU, self.BD)])\n for e in self.BF:\n state += ' '.join([str(x) for x in e])\n state += ' '\n state += ' '.join([str(x) for x in self.LU]) + ' '\n state += ' '.join([str(x) for x in self.LD]) + ' '\n\n return state", "def __str__(self):\n state_1 = \"Time: \" + str(self._time)\n state_2 = \"Current Cookies: \" + str(self._current_cookies)\n state_3 = \"CPS: \" + str(self._cps)\n state_4 = \"Total Cookies: \" + str(self._total_cookies)\n return state_1 + \" \" + state_2 + \" \" + state_3 + \" \" + state_4", "def __str__(self):\n # Build the string line by line. Join at the end.\n lines = []\n lines.append(\"Initial State: {{{}}}\".format(self.initial_state))\n lines.append(\n \"Final States: {{{}}}\".format(\n \",\".join(map(str, self.final_states))))\n\n # column headers\n lines.append(\n \"State\\t{}\".format(\"\\t\".join(self.alphabet)))\n\n # For each state, print transitions\n for state_name in range(1, len(self.transitions) + 1):\n line = \"{}\".format(state_name)\n for symbol in self.alphabet:\n line += \"\\t{{{}}}\".format(\n \",\".join(map(str, self.transitions.get(\n state_name, dict()).get(symbol, []))))\n lines.append(line)\n\n return \"\\n\".join(lines)", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def state(self) -> str:\n return pulumi.get(self, \"state\")", "def report_state(self):\n text = \"Status: %d\"%self.state.num;\n if self.state.msg !=\"\":\n text += \", Msg: %s\"%self.state.msg;\n return text;", "def __repr__( self ):\n\n return self.__class__.__name__ + \"( \" + repr(self.state) + \")\";", "def print_state(self):\n\t\tprint self.time, len(self.state['s']), len(self.state['p']), len(self.state['c'])", "def state_message(self) -> str:\n return pulumi.get(self, \"state_message\")", "def __str__(self):\n board = ''\n board_2 = ''\n\n for row in self.from_grid:\n for space in row:\n board += ' ' + space\n board += '\\n'\n\n for row in 
self.to_grid:\n for space in row:\n board_2 += ' ' + space\n board_2 += '\\n'\n\n return 'Current State:\\n' + board + 'Target State:\\n' + board_2", "def get_current_state(self):\n return self.game.get_current_state()", "def state_to_string(board_state):\n return str(board_state)", "def __repr__(self):\n representantion = ''\n\n for i in range(3):\n for j in range(3):\n representantion += str(self.state[3 * i + j])\n\n if j == 2 and i != 2:\n representantion += '\\n'\n else:\n representantion += ' '\n\n return representantion", "def get_board_state_pretty(self):\n\n board_state = ''\n for i in range(0, 3):\n board_state += ' | '.join([self.board['{}{}'.format(i, j)] for j in range(0, 3)])\n board_state += '\\n'\n return board_state", "def find_state(self, opponent: Player) -> str:\n action_str = actions_to_str(opponent.history[-self.memory_length :])\n return action_str", "def str_state(self) -> str:\n value = \"Not connected\"\n if self.STARTED:\n value = \"Connected\"\n value = f\"{value} to {self.url!r}, CLIENT v{self.PKG_VERSION}, PYTHON v{self.PY_VERSION}\"\n banner = get_env_ax().get(\"AX_BANNER\")\n if banner:\n value = f\"{value} [{banner}]\"\n return value", "def __str__(self):\n line = ''\n line += self.board_state.__str__()\n line += self.move.__str__()\n line += '\\n'\n return line", "def __str__(self):\n return f\"WINS: {self.wins}, LOSSES: {self.losses}, DRAWS: {self.draws}\"", "def silly(self) -> str:\n print(f\"Getting {self._name}'s State\")\n return self._state", "def __str__(self):\n s=''\n for r in range(self.n):\n for c in range(self.n):\n s += str(self.state[r][c]) + ' '\n s += '\\n'\n s += str('hcost') + ' : ' + str(self.hcost)\n s += '\\n'\n return s", "def get_game_state(self):\n return self._current_state", "def _repr_(self):\n if len(self._states_)==0:\n return \"Empty finite state machine\"\n if len(self._states_)==1:\n return \"Finite state machine with 1 state\"\n else:\n return \"Finite state machine with %s states\" % len(self._states_)", "def __str__(self) -> str:\n return \"Grid2d.State[{}]\".format(self.agent_position)", "def get_board_state(self):\n\n board_state = ''\n for i in range(0, 3):\n board_state += ''.join([self.board['{}{}'.format(i, j)] for j in range(0, 3)])\n return board_state", "def state_string(self):\n return AddressStates._to_string(self.state)", "def print_state(self):\n raise AIError(\"Must be implemented in child class!\")", "def game_state(self):\n return self._game_state", "def state(self):\n\t\tif self._state in JOB_PS:\n\t\t\treturn JOB_PS[self._state]\n\t\telse:\n\t\t\treturn str(self._state)", "def state(self) -> str | None:\n return self._state", "def get_game_state(self):\n return self.game_state", "def __str__(self) -> str:\r\n\t\treturn \"{state}\".format(state=self.NextState.__func__.__qualname__)", "def to_str(self, state=None):\n action = (state.action_to_string(state.current_player(), self.action)\n if state else str(self.action))\n return \"{:>3}: sign: {}, value: {:6.1f} / {:4d} = {:6.3f}, {:3d} children\".format(\n action, self.player_sign, self.total_reward, self.explore_count,\n self.explore_count and self.total_reward / self.explore_count,\n len(self.children))", "def state(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"state\")", "def __str__(self):\n s = 'hit '+str(self.hit)+'\\n'\n s+= 'states '+str(self.states)+'\\n'\n s+= 'chi2 '+str(self.chi2)\n return s", "def state(self):\n # type: () -> string_types\n return self._state", "def get_current_state(self):\n return 
self.world.get_state()", "def _repr_(self):\n if len(self._states_)==0:\n return \"Empty automaton\"\n if len(self._states_)==1:\n return \"Automaton with 1 state\"\n else:\n return \"Automaton with %s states\" % len(self._states_)", "def get_game_state(self):\r\n return self._game_state", "def print_state(self):\n print(self.identifier, \n self.gender, \n self.age,\n self.sexual_activity,\n self.disease_status,\n self.time_since_infection,\n self.number_of_partners,\n self.current_partners)", "def describe_game_state(hand, battlefield, graveyard, library):\n\tlog(\"\")\n\tlog(\"Hand is now:\")\n\tlog(hand)\n\tlog(\"Battlefield is now:\")\n\tlog(battlefield)\n\tlog(\"Graveyard is now:\")\n\tlog(graveyard)\n\tlog(\"Library is now:\")\n\tlog(Counter(library))\n\tlog(\"\")", "def get_state_display(self, obj):\n return obj.get_state_display()", "def __str__(self):\n return \"Current floor: {}\".format(self.current)", "def get_game_state(self):\n return self._game_state", "def get_game_state(self):\n return self._game_state", "def get_game_state(self):\n return self._game_state", "def _repr_(self):\n return \"Transition from %s to %s: %s\" % (repr(self.from_state),\n repr(self.to_state),\n self._in_out_label_())", "def exposed_get_state(self):\n return json.dumps(dict(state=random.choice(self.states)), indent=2)", "def __str__(self):\n sb = ''\n sb += '\\nInterfaceStatus [ ' + self.interface_name + ' ]\\n'\n sb += '\\tLinkState : ' + str(self.InterfaceState.enumval(self.link)) + '\\n'\n sb += '\\tLineProtoState : ' + str(self.InterfaceState.enumval(self.lineproto)) + '\\n'\n return sb" ]
[ "0.83233804", "0.81567377", "0.8026721", "0.7843948", "0.78436744", "0.7827769", "0.775942", "0.77437556", "0.76473355", "0.7591848", "0.7476206", "0.7409211", "0.7405296", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.7345735", "0.72871995", "0.72761095", "0.7228724", "0.7224151", "0.7212231", "0.71666217", "0.71666217", "0.71666217", "0.71666217", "0.71666217", "0.7155525", "0.71168745", "0.71087444", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.710531", "0.7087548", "0.7081863", "0.7076351", "0.70575035", "0.70160186", "0.69822973", "0.6931227", "0.6922838", "0.6921104", "0.6897032", "0.6892696", "0.68654597", "0.6862287", "0.68327665", "0.683153", "0.68305165", "0.6820487", "0.6805661", "0.6766837", "0.67603815", "0.6756178", "0.6751509", "0.675081", "0.67401797", "0.67191976", "0.671434", "0.66977406", "0.6692629", "0.6633945", "0.6626531", "0.66257864", "0.6586764", "0.65841234", "0.6569179", "0.6554272", "0.6544162", "0.65403306", "0.6498713", "0.6498713", "0.6498713", "0.64883095", "0.6485715", "0.64736557" ]
0.0
-1
Return all possible moves that can be applied to this state.
def get_possible_moves(self) -> list:
    result = []
    for lst in self.hori_lst:
        for item in lst:
            if item.isalpha():
                result.append(item)
    # add nodes to result if it's not taken and its line is not taken
    # for i in range(len(self.hori_lst)):
    #     if not self.hori_result[i].isdigit():
    #         for item in self.hori_lst[i]:
    #             if not item.isdigit():
    #                 result.append(item)
    # # remove the node from result if its line has been taken
    # for i in range(len(self.left_lst)):
    #     if self.left_result[i].isdigit():
    #         for item in self.left_lst[i]:
    #             if item in result:
    #                 result.remove(item)
    # # remove the node from result if its line has been taken
    # for i in range(len(self.right_lst)):
    #     if self.right_result[i].isdigit():
    #         for item in self.right_lst[i]:
    #             if item in result:
    #                 result.remove(item)
    return result
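The live code simply keeps every cell that is still a letter; the commented-out alternative, which checks isdigit(), implies that a claimed cell is overwritten with the claiming player's number. A standalone illustration of that filtering rule, using a hand-written board rather than a real state object, is:

# Hypothetical side_length=2 board in which player 1 has already claimed 'A':
hori_lst = [['1', 'B'], ['C', 'D', 'E'], ['F', 'G']]
moves = [item for lst in hori_lst for item in lst if item.isalpha()]
print(moves)  # ['B', 'C', 'D', 'E', 'F', 'G']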
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def possible_moves(self):\n states = []\n possible_floors = self.possible_floors()\n possible_items = self.possible_items()\n\n for fl in possible_floors:\n for items in possible_items:\n new_floors = deepcopy(self.floors)\n for item in items:\n new_floors[self.lift_floor].remove(item)\n new_floors[fl].append(item)\n\n if self.validate_floors(new_floors):\n states.append(\n GameState(new_floors, lift_floor=fl, moves=self.moves+1)\n )\n\n return states", "def get_all_possible_moves(self, state):\n move_list = []\n done_finding_moves = False\n any_non_pass_moves = False\n while not done_finding_moves:\n try:\n m = next(self.move_generator) # Gets a (move, state) pair.\n # print(\"next returns: \",m[0]) # Prints out the move. For debugging.\n if m[0] != 'p':\n any_non_pass_moves = True\n move_list.append(m) # Add the move to the list.\n except StopIteration as e:\n done_finding_moves = True\n if not any_non_pass_moves:\n move_list.append(('p',state))\n return move_list", "def moves(self) -> List[List[PushState]]:\n # seen_moves = set()\n possible_moves = [[], []]\n\n if state := self.prune_states(self.state): # Could return None\n size = len(self.prune_states(state))\n else:\n return possible_moves\n\n for count, each_slot in enumerate(state):\n # for count, each_slot in reversed(list(enumerate(state))):\n if each_slot == \"L\" and not possible_moves[0]:\n next_state = deepcopy(state)\n next_state = tuple(self.push(list(next_state), count))\n next_state = self.prune_states(next_state)\n\n # if next_state not in seen_moves:\n # seen_moves.add(next_state)\n possible_moves[0].append(next_state)\n\n elif each_slot == \"R\" and not possible_moves[1]:\n next_state = deepcopy(state)\n next_state = tuple(self.push(list(next_state), count))\n next_state = self.prune_states(next_state)\n\n # if next_state not in seen_moves:\n # seen_moves.add(next_state)\n possible_moves[1].append(next_state)\n\n if possible_moves[0] and possible_moves[1]:\n break\n\n return possible_moves", "def get_possible_states(self) -> List[State]:\n next_states = []\n for action in self._legal_moves():\n next_states.append(self.move(action))\n return next_states", "def generate_possible_moves(self):\r\n\t\t# Moves:\r\n\t\t# 0 - North\r\n\t\t# 1 - East\r\n\t\t# 2 - South\r\n\t\t# 3 - West\r\n\r\n\t\tmoves = []\r\n\r\n\t\tif self.x != 0:\r\n\t\t\tmoves.append(0)\r\n\t\tif self.y != self.n-1:\r\n\t\t\tmoves.append(1)\r\n\t\tif self.x != self.n-1:\r\n\t\t\tmoves.append(2)\r\n\t\tif self.y != 0:\r\n\t\t\tmoves.append(3)\r\n\r\n\t\treturn moves", "def get_all_possible_moves(self):\r\n moves = []\r\n for i in range(8):\r\n for j in range(8):\r\n color = self.board[i][j][0]\r\n if (color == 'b' and not self.turn_white) or (color == 'w' and self.turn_white):\r\n p_type = self.board[i][j][1]\r\n if p_type == 'r':\r\n self.get_rook_moves(i, j, moves)\r\n elif p_type == 'k':\r\n self.get_king_moves(i, j, moves)\r\n elif p_type == 'q':\r\n self.get_queen_moves(i, j, moves)\r\n elif p_type == 'p':\r\n self.get_pawn_moves(i, j, moves)\r\n elif p_type == 'b':\r\n self.get_bishop_moves(i, j, moves)\r\n elif p_type == 'n':\r\n self.get_knight_moves(i, j, moves)\r\n return moves", "def get_all_moves(self):\n # 2d matrix of true/false, true if something can be placed\n legal_move_board = []\n possible_move_list = []\n for row in range(self.size):\n move_row = []\n for col in range(self.size):\n empty = self.board[row][col].state == PegState.EMPTY\n move_row.append(empty)\n if empty:\n possible_move_list.append((row, col))\n 
legal_move_board.append(move_row)\n \n # every position where something can be placed (list of tuples) (Combined with above)\n \"\"\" possible_move_list = []\n for row in range(self.size):\n for col in range(self.size):\n if legal_move_board[row][col] == True:\n possible_move_list.append((row, col))\n \"\"\"\n return legal_move_board, possible_move_list", "def get_legal_moves(self):\n # for each square in the castle figure out if an moves can occur from it.\n moves = []\n allowed = [self.turn]\n if self.turn == DEFENDER:\n allowed.extend((KING, CASTLE_OCCUPIED))\n it = np.nditer(self.board_state, flags=['multi_index'])\n while not it.finished:\n index = it.multi_index\n curr_loc = it[0]\n if curr_loc in allowed:\n moves.extend(self.get_legal_move_piece(curr_loc, index))\n it.iternext()\n return moves", "def get_possible_moves(self):\n moves = []\n for i in range(1, self.current_total + 1):\n if i ** 2 <= self.current_total:\n moves.append(i ** 2)\n\n return moves", "def get_goat_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_goat():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def moves(self):\n\n # define a full range, which we can compare against columns,\n # rows, or blocks. they're all the same when stored as sets.\n line = set(range(1, 10))\n moves = []\n\n # iterate every cell on the board\n for row in range(0, 9):\n for col in range(0, 9):\n\n # ignore this cell if it's already filled\n i = self._index(col, row)\n if self.data[i] is not None:\n continue\n\n # fetch the adjacent cells\n row_values = set(self._row(row))\n col_values = set(self._column(col))\n bck_values = set(self._block(col, row))\n\n # subtract the values present in the adjacent cells\n # (since this cell *can't* be of any of those values),\n # to leave the list of possibilities for this cell\n missing = line.difference(row_values, col_values, bck_values)\n\n # if there's only *one* possibility, we've found the\n # solution to this cell\n if len(missing) == 1:\n moves.append((col, row, missing.pop()))\n\n return moves", "def available_moves(self):\n moves = []\n for x, y in self.available_boards:\n moves.extend([self.to_position(x, y, i, j) for (i, j)\n in self.boards[x][y].empty_squares])\n return moves", "def get_possible_moves(self, board):\n possible_moves = []\n\n # search in each direction for possible squares to move to\n for direction in [(0, 1), (0, -1), (1, 0), (-1, 0)]:\n possible_moves.extend(\n self._get_possible_moves_in_dir(board, rank_incr=direction[0], file_incr=direction[1])\n )\n\n return possible_moves", "def get_all_moves(self, castling_allowed=True):\n\n can_move = str.isupper if self.white_to_move else str.islower\n\n valid_moves = set()\n\n for row_num, row in enumerate(self.board):\n for col_num, piece in enumerate(row):\n if piece != EMPTY_SPACE and can_move(piece):\n\n location = (row_num, col_num)\n\n # Everything except the pawn movement\n if piece.lower() in NAME_TO_PIECE:\n valid_moves = valid_moves.union(self._get_standard_moves_for_piece(location, piece))\n\n # Pawn moves\n if piece.lower() == PAWN:\n valid_moves = valid_moves.union(self._get_pawn_moves(location, piece))\n\n # Castling\n if castling_allowed and piece.lower() == KING:\n valid_moves = valid_moves.union(self._get_possible_castles(piece))\n\n return valid_moves", "def find_moves(self):\n\n from itertools import product\n free_position = self.find_free()\n return [list(free_position+i) for i 
in [[0,1],[1,0],[-1,0],[0,-1]] if tuple(i+free_position) in product(range(self.size),repeat=2)]", "def get_possible_moves(self) -> list:\n p1_count = 0\n p2_count = 0\n ley_line_total = (self.side_length + 1) * 3\n for itype in self.current_ley_lines:\n for line in itype:\n if line[0] == '1':\n p1_count += 1\n if line[0] == '2':\n p2_count += 1\n if p1_count >= ley_line_total / 2 or p2_count >= ley_line_total / 2:\n return []\n moves = []\n for letter in self.current_board:\n if letter.isalpha():\n moves.append(letter)\n return moves", "def possible_moves(self): \n return [a + 1 for a, b in enumerate(self.board) if b == 0]", "def moves(self):\n move_list = []\n for direction in Maze.possible_directions:\n move = Maze.dirs_to_moves[direction]\n if (0 <= (self.location[0]+move[0]) < len(self.grid) and\n 0 <= (self.location[1]+move[1]) < len(self.grid[0]) and\n self.grid[self.location[0]+move[0]][self.location[1]+move[1]] != 'X'):\n move_list.append(move)\n\n return move_list", "def get_valid_moves(self) -> list[int]:\n return self._valid_moves", "def get_all_moves(self, board, player):\n result = []\n for startx in range(8):\n for starty in range(8):\n for destx in range(8):\n for desty in range(8):\n if self.is_legal_move(board, [startx, starty], [destx, desty], player):\n result.append([[startx, starty], [destx, desty]])\n return result", "def get_legal_moves(self):\n\n return self._legal_moves", "def get_moves(self):\n moves = []\n i, j = self._get_coordinates(0) # blank space\n\n if i > 0:\n moves.append(Puzzle(self._swap(i, j, i - 1, j))) # move up\n\n if j < self.PUZZLE_NUM_COLUMNS - 1:\n moves.append(Puzzle(self._swap(i, j, i, j + 1))) # move right\n\n if j > 0:\n moves.append(Puzzle(self._swap(i, j, i, j - 1))) # move left\n\n if i < self.PUZZLE_NUM_ROWS - 1:\n moves.append(Puzzle(self._swap(i, j, i + 1, j))) # move down\n\n return moves", "def legal_moves_in_uci(self):\n\n # get all legal moves. 
'legal_moves' is inherited attribute from super class that returns all possible moves\n return [m.uci() for m in list(self.legal_moves)]", "def get_available_moves(self):\n available = []\n row, col = tuple(self.current_pos)\n if row - 1 >= 0 and self.maze[row - 1][col] != 'x':\n available.append('n')\n if row + 1 < len(self.maze) and self.maze[row + 1][col] != 'x':\n available.append('s')\n if col - 1 >= 0 and self.maze[row][col - 1] != 'x':\n available.append('w')\n if col + 1 < len(self.maze[row]) and self.maze[row][col + 1] != 'x':\n available.append('e')\n return available", "def get_possible_moves(board):\n\n possible_moves = []\n\n ret_tuple_left = move_left(board)\n ret_tuple_right = move_right(board)\n ret_tuple_up = move_up(board)\n ret_tuple_down = move_down(board)\n\n if ret_tuple_left[0]:\n possible_moves.append(ret_tuple_left[1])\n if ret_tuple_right[0]:\n possible_moves.append(ret_tuple_right[1])\n if ret_tuple_up[0]:\n possible_moves.append(ret_tuple_up[1])\n if ret_tuple_down[0]:\n possible_moves.append(ret_tuple_down[1])\n\n return possible_moves", "def legalMoves(self):\n moves = []\n indexOfZero = self.tiles.index(0)\n \n if indexOfZero == 0:\n moves.append('Down')\n moves.append('Right')\n elif indexOfZero == 1:\n moves.append('Down')\n moves.append('Left')\n moves.append('Right')\n elif indexOfZero == 2:\n moves.append('Down')\n moves.append('Left')\n elif indexOfZero == 3:\n moves.append('Up')\n moves.append('Down')\n moves.append('Right')\n elif indexOfZero == 4:\n moves.append('Up')\n moves.append('Down')\n moves.append('Left')\n moves.append('Right')\n elif indexOfZero == 5:\n moves.append('Up')\n moves.append('Down')\n moves.append('Left')\n elif indexOfZero == 6:\n moves.append('Up')\n moves.append('Right')\n elif indexOfZero == 7:\n moves.append('Up')\n moves.append('Left')\n moves.append('Right')\n elif indexOfZero == 8:\n moves.append('Up')\n moves.append('Left')\n else:\n print('something wrong with board')\n return moves", "def get_valid_moves(self):\n if self.king:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1],\n [self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n else:\n if self.player == 1:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1]]\n else:\n valid_moves = [[self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n return valid_moves", "def get_moves(self, board):\n self.available_moves = [move for move in board.legal_moves]", "def possible_moves(self):\n lst_of_direcs = []\n for a_car in self.__cars:\n good_moves = a_car.possible_moves()\n new = [(a_car.get_name(),dire, des) for dire, des\\\n in good_moves.items()]\n lst_of_direcs.append(new[0])\n lst_of_direcs.append(new[1])\n return lst_of_direcs", "def get_legal_moves(self):\n moves = []\n if self.player_locations[self.whose_turn] is None:\n return self.get_blank_locations()\n matrix = [(1,0), (-1,0), (0,1), (0,-1), (1,1), (1,-1), (-1, 1), (-1,-1)]\n\n for dx, dy in matrix:\n x,y = self.player_locations[self.whose_turn]\n while x+dx <= xdim and x+dx >= 0 and y+dy <= ydim and y+dy >= 0:\n x = x+dx\n y = y+dx\n if self.board[x][y] : break\n moves.append((x,y))\n return moves", "def __find_all_moves(self, tower) -> list:\r\n choice = []\r\n for height in range(1,len(tower.tower)-2):\r\n for index in range(1,4):\r\n if self.stat_brain.is_valid(height, index, tower):\r\n choice.append((height, index))\r\n \r\n r.shuffle(choice)\r\n return choice", "def get_available_moves(self, board):\n pass", "def moves(self, 
board_state):\n # pos_moves = generate_moves(board_state) # Naive moves function here\n blacks = board_state.search_board('B')\n # Generate the possible moves required to kill the first black piece\n # on the board\n pos_moves = sorted_generate_moves_piece(board_state, blacks[0])\n return pos_moves", "def mcmc_moves(self):\n return copy.deepcopy(self._mcmc_moves)", "def get_moves(self):\n return self.piece_behavior.get_moves(self.board, self.position)", "def get_potential_moves(self):\n\n return self._potential_moves", "def get_possible_moves(self):\n possible_capture_moves = self.calc_capture_moves()\n if possible_capture_moves:\n # There is at least one capture move. Let's DFS them!\n self_curr_player = self.curr_player\n next_moves = []\n for capture_move in possible_capture_moves:\n if self.board[capture_move[0]][:1] == SOLDIER_COLOR[self_curr_player]:\n next_moves += self.find_following_moves(capture_move, SOLDIER_CAPTURE_MOVES_FROM[self_curr_player])\n else:\n next_moves += self.find_following_moves(capture_move, OFFICER_CAPTURE_MOVES_FROM)\n\n return next_moves\n\n # There were no capture moves. We return the single moves.\n return self.calc_single_moves()", "def openMoves(self):\n arr = []\n for y in range(0,HEIGHT):\n for x in range(0,WIDTH):\n t = self.getPawn(x,y)\n if(t!=None):\n for z in range(-1,2):\n if(self.legalMove(t,z)):\n #move , #newState\n arr.append((t,z))\n return arr", "def checkPossibleMoves(self):\n possibleMovesArray = []\n\n for j in range(self.nrOfCars):\n minMaxChange = self.gridSize - self.length[j] + 1\n possibleMoves = []\n\n for i in range(1,minMaxChange):\n if self.checkMove(j, i) == 0:\n possibleMoves.append(i)\n else:\n break\n for i in range(1,minMaxChange):\n if self.checkMove(j, -i) == 0:\n possibleMoves.append(-i)\n else:\n break\n\n possibleMovesArray.append(possibleMoves)\n\n return possibleMovesArray", "def possible_moves(self, piece):\n def _index(orig, off):\n \"\"\"Helper function to find the new index.\"\"\"\n orig_x, orig_y = orig\n off_x, off_y = off\n return (orig_y - off_y) * self.ncols + (orig_x - off_x)\n\n p_x, p_y = piece\n p_i = _index(piece, (0, 0))\n\n # pass a list of the four corners first for basic possibles\n move_land = [((p_x + i, p_y + j), self.squares[_index(piece, (i, j))])\\\n for i in [-1, 1] for j in [-1, 1]]\n possibles = self.squares[p_i].can_move(piece, move_land)\n\n # next append the new list from jumps\n jump_land = [((p_x + i, p_y + j), self.squares[_index(piece, (i, j))])\\\n for j in [-2, 2] for i in [-2, 2]]\n possibles += self.squares[p_i].can_jump(piece, move_land, jump_land)\n\n # clean out the list of duplicates, although there should be none\n return [m for i, m in enumerate(possibles) if m not in possibles[:i]]", "def get_possibles_moves(board: numpy.ndarray) -> List[Move]:\n return [tuple(k) for k in numpy.argwhere(board == -1) if 0 != k[0] != board.shape[0] - 1 != k[1] != 0]", "def get_tiger_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_tiger():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def GetMoves(self):\n return [(source, target) for source in self.scores.keys() for target in self.fullGraph.neighbors_iter(source) if target not in self.pathes[source].nodes]", "def moves(self):\n\n moves = list()\n\n for row in range(HEIGHT):\n for col in range(WIDTH):\n\n move = (row, col)\n\n if self.board[row][col] == 9:\n moves.append(move)\n\n if self.board[row][col] == 1 or 
self.board[row][col] == 2:\n\n move = (row - 1, col)\n\n if self.board[row - 1][col] == 1 or self.board[row - 1][col] == 2:\n\n pass\n\n else:\n\n moves.append(move)\n\n return moves", "def solution(self):\n return [node.move for node in self.path()[1:]]", "def available_moves(board_state):\n for x, y in itertools.product(range(len(board_state)), range(len(board_state[0]))):\n if board_state[x][y] == 0:\n yield (x, y)", "def available_moves(self):\n available_moves = []\n for i in range(self.quadrants_count):\n quadrant_positions = self.play_area[i].available_positions()\n for p in quadrant_positions:\n position = p + i * 9\n for j in range(self.quadrants_count):\n move1 = [str(position), str(j + 1), \"l\"]\n move2 = [str(position), str(j + 1), \"r\"]\n available_moves.append(\" \".join(move1))\n available_moves.append(\" \".join(move2))\n return available_moves", "def legal_moves():\n\tlegal_moves = (\"r\", \"p\", \"s\")\n\treturn legal_moves", "def getMoves(self):\n return self.moves", "def possibleMoves(self,i,j):\n piece = self.board[i][j].piece\n if(piece.pieceCode == \"None\"):\n return []\n \n if(piece.name == \"pawn\"):\n return self.pawnMoves(piece,self.board)\n elif(piece.name == \"king\"):\n return self.kingSteps(self.board,piece.color)\n else:\n return self.pieceMoves(piece,self.board)", "def get_move_list(self):\n return [\n tuple(x) for x in np.argwhere(self.board == HexBoard.EMPTY).tolist()\n ]", "def actions(self):\r\n def create_move(at, to):\r\n return lambda: self._move(at, to)\r\n\r\n moves = []\r\n for i, j in itertools.product(range(self.width),\r\n range(self.width)):\r\n direcs = {'R':(i, j-1),\r\n 'L':(i, j+1),\r\n 'D':(i-1, j),\r\n 'U':(i+1, j)}\r\n\r\n for action, (r, c) in direcs.items():\r\n if r >= 0 and c >= 0 and r < self.width and c < self.width and \\\r\n self.board[r][c] == 0:\r\n move = create_move((i,j), (r,c)), action\r\n moves.append(move)\r\n return moves", "def get_moves(self):", "def get_legal_moves(self, pos: Position, game_board: GameBoard) -> PossibleMoveSet:\n pass", "def legal_moves(self):\n moves = \"\"\n swappable = self.swappable_positions\n empty_position = self.get_position(0)\n\n for s in swappable:\n pos_diff = empty_position[0] - s[0], empty_position[1] - s[1]\n if pos_diff[0] > 0:\n moves += \"U\"\n elif pos_diff[0] < 0:\n moves += \"D\"\n elif pos_diff[1] > 0:\n moves += \"L\"\n elif pos_diff[1] < 0:\n moves += \"R\"\n\n return moves", "def get_valid_moves(self):\r\n validMoves = []\r\n\r\n for x in range(BOARD_SIZE):\r\n for y in range(BOARD_SIZE):\r\n pos = np.array([x,y])\r\n if self.board[pos[0],pos[1]] == 0:\r\n if(self.update_board(pos,_testing=True)):\r\n validMoves.append(pos)\r\n\r\n return validMoves", "def enumerate_moves(self):\n add_ew = lambda x: [x+'e', x+'w']\n allowed_catches = add_ew(self._directions[0])\n moves = []\n # First add the one/two step forward moves\n new_slot = self._board.get_dir(self._current_space, self._directions[0])\n if new_slot and new_slot.is_free():\n moves.append(ChessMove(self._current_space, new_slot))\n if (self._side == BLACK and new_slot.row == self._board.size - 1) or \\\n (self._side == WHITE and new_slot.row == 0):\n moves[-1].add_promotion()\n if (self._side == BLACK and self._current_space.row == 1) or \\\n (self._side == WHITE and self._current_space.row == self._board.size -2):\n new_slot = self._board.get_dir(new_slot, self._directions[0])\n if new_slot and new_slot.is_free():\n moves.append(ChessMove(self._current_space, new_slot))\n\n # Now add all the captures.\n for direction 
in allowed_catches:\n new_slot = self._board.get_dir(self._current_space, direction)\n if new_slot and new_slot.has_opponent(self._side):\n moves.append(ChessMove(self._current_space, new_slot, [new_slot]))\n if (self._side == BLACK and new_slot.row == self._board.size - 1) or \\\n (self._side == WHITE and new_slot.row == 0):\n moves[-1].add_promotion()\n return moves", "def check_for_moves(self) -> list:\r\n avail_moves = []\r\n for x in range(self.size):\r\n for y in range(self.size):\r\n if self.tags[x][y] is None:\r\n avail_moves.append((x, y))\r\n return avail_moves", "def legal_moves_generator(self, custom=False):\r\n possible_moves = self.null_positions\r\n possible_moves.add('PASS')\r\n temp_state = np.array(self.state)\r\n illegal_moves = set()\r\n for pos in possible_moves:\r\n illegal = True\r\n if pos != 'PASS':\r\n ortho = ORTHOGONAL_POSITIONS[(pos[0], pos[1])]\r\n for p in ortho:\r\n if self.state[p[0]][p[1]] == 0:\r\n illegal = False\r\n break\r\n elif self.to_move != self.board[p[0]][p[1]].color:\r\n if self.board[p[0]][p[1]].liberty == 1:\r\n illegal = False\r\n break\r\n\r\n elif self.state[p[0]][p[1]] == self.to_move:\r\n if self.board[p[0]][p[1]].liberty > 1:\r\n illegal = False\r\n break\r\n if illegal:\r\n illegal_moves.add(pos)\r\n temp_state = np.array(self.state)\r\n continue\r\n\r\n for p in ortho:\r\n if self.to_move != self.board[p[0]][p[1]].color:\r\n if self.board[p[0]][p[1]].liberty == 1:\r\n temp_state[p[0]][p[1]] = 0\r\n\r\n temp_state[pos[0]][pos[1]] = self.to_move\r\n if (temp_state == self.previous_state).all(): # KO RULE CHECK\r\n illegal_moves.add(pos)\r\n temp_state = np.array(self.state)\r\n continue\r\n temp_state = np.array(self.state)\r\n\r\n possible_move_pos = possible_moves - illegal_moves\r\n if custom:\r\n return possible_move_pos\r\n\r\n legal_moves_queue = PriorityQueue()\r\n\r\n for possible_move in possible_move_pos:\r\n move_obj = Move(possible_move, self.to_move, self)\r\n legal_moves_queue.put((-move_obj.priority, move_obj))\r\n return legal_moves_queue", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_untried_moves(self, legal_moves):\n\n\t\t# Find all moves for which this node *does* have children\n\t\ttried_moves = [child.move for child in self.child_nodes]\n\n\t\t# Return all moves that are legal but have not been tried yet\n\t\treturn [move for move in legal_moves if move not in tried_moves]", "def get_all_valid_moves(self, player):\n moves = [] # Stores the possible moves\n capture_move_exists = False # Indicates if a capturing move is possible\n\n for piece in self.get_all_pieces(player):\n valid_moves = self._get_valid_moves(piece)\n\n for move, skip in valid_moves.items():\n moves.append([(piece.row, piece.col), move, skip])\n\n if len(skip) > 0:\n # Checks if there is a move that can capture a piece\n capture_move_exists = True\n\n if capture_move_exists:\n # Only gets the capturing moves if there is one\n eating_moves = []\n for 
move in moves:\n if len(move[2]) != 0:\n eating_moves.append(move)\n\n moves = eating_moves\n\n return moves", "def legalMoves(self):\n return [c for c in range(self.getWidth()) if len([r for r in range(self.getHeight()) if self.cell[c][r]==EMPTY])>0 ]", "def get_possible_actions(self, state):\n return [LEFT, DOWN, RIGHT, UP]", "def possible_moves(self, pos: Point) -> List[Point]:\n # logger.debug(f\"inside possible_moves {pos}\")\n available_squares = []\n for direction in Direction.cardinal():\n # logger.debug(f\"direction = {direction}\")\n neighbor = pos + direction\n # logger.debug(f\"neighbor = {neighbor}\")\n if neighbor.x < 1 or self.width - 2 < neighbor.x or neighbor.y < 1 or self.height - 2 < neighbor.y:\n # logger.debug(f\"{neighbor} not in bounds\")\n continue\n if self.can_carve(pos, direction):\n # logger.debug(f\"can_carve returned True pos={pos}, direction={direction}\")\n available_squares.append(neighbor)\n # logger.debug(f\"available squares:\")\n # for square in available_squares:\n # logger.debug(f\"square={square}\")\n # logger.add(\"debug.log\")\n return available_squares", "def available_moves(self):\n\n heaps = range(len(self.heaps))\n return [(h, take) for h in range(len(self.heaps))\n for take in range(1, self.heaps[h] + 1)]", "def get_valid_moves(self):\r\n # castling and en-passant rights are stored, because move affects these values\r\n temp_enpassant_possible = self.enpas_pos\r\n temp_castle = CastleRights(self.cr_castle_r.wks, self.cr_castle_r.bks,\r\n self.cr_castle_r.wqs, self.cr_castle_r.bqs)\r\n\r\n # for validating a possible move\r\n #1 all possibile moves are generated\r\n #2 each pos moves are made\r\n #3 generate opponent move\r\n #4 check if any of those moves let the king attacked\r\n #5 moves which let the king in chess are eliminated\r\n #6 the moves are undone\r\n moves = self.get_all_possible_moves() # 1\r\n\r\n # castle moves are directly introduced in valid moves\r\n if not self.turn_white:\r\n self.get_castle_moves(self.bKingPos[0], self.bKingPos[1], moves)\r\n else:\r\n self.get_castle_moves(self.wKingPos[0], self.wKingPos[1], moves)\r\n\r\n for i in range(len(moves) - 1, -1, -1): # 2\r\n self.make_move(moves[i])\r\n # 3 #4\r\n self.turn_white = not self.turn_white\r\n if self.in_check():\r\n moves.remove(moves[i]) # 5\r\n self.turn_white = not self.turn_white\r\n self.undo_move()\r\n\r\n # game ending possibilities\r\n if len(moves) == 0:\r\n if self.in_check():\r\n self.checkMate = True\r\n print(\"Checkmate !\")\r\n else:\r\n self.staleMate = True\r\n print(\"Stalemate !\")\r\n else:\r\n self.checkMate = False\r\n self.staleMate = False\r\n\r\n # the rigths are restored, and the values are not affected\r\n self.enpas_pos = temp_enpassant_possible\r\n self.cr_castle_r = temp_castle\r\n\r\n return moves", "def next_states(self):\n possible_col = []\n for i in range(self.size):\n if self.is_valid_move(self.num_queens_placed, i):\n possible_col.append(i)\n next_states = []\n for column in possible_col:\n next_state = copy.deepcopy(self)\n next_state.add_queen(self.num_queens_placed, column)\n next_states.append(next_state)\n return next_states", "def get_moves(self, board, position):\n current_piece = board.grid[position[1]][position[0]]\n\n moves = []\n\n # For each direction in which the piece can move...\n for direction in self.directions:\n # for each vector in that direction...\n # (once a piece is encountered in a direction,\n # further positions in that direction are unaccessible,\n # therefore break out of inner FOR loop)\n for 
vector in direction:\n new_position = (position[0] + vector[0], position[1] + vector[1])\n\n # Check if the proposed destination is inbounds\n if board._inbounds(new_position) is False:\n break\n\n other_piece = board.grid[new_position[1]][new_position[0]]\n\n # Check if the proposed destination is occupied by a friendly piece\n if other_piece != \"empty\" and other_piece.player == current_piece.player:\n break\n\n # Check other validity conditions, mainly for pawn\n if self._is_valid_move(vector, current_piece, other_piece) is False:\n break\n\n # The destination is viable, add the move\n moves.append(Move(position, current_piece, new_position, other_piece))\n\n # If there was an enemy piece on the square\n if other_piece != \"empty\":\n break\n\n return moves", "def possible_moves(self, side: models.Side) -> typing.Iterator[\n typing.Tuple[models.Piece, int, int]]:\n raise NotImplementedError", "def moves(self, teleport=False):\n recv = [(self._size[y][x] - self._used[y][x], x, y)\n for x in range(self.xsize) for y in range(self.ysize)]\n recv.sort(reverse=True)\n send = [(self._used[y][x], x, y)\n for x in range(self.xsize) for y in range(self.ysize)\n if self._used[y][x] > 0]\n send.sort()\n # print(\"recv: {}...\".format(str(recv[:5])))\n # print(\"send: {}...\".format(str(send[:5])))\n moves = []\n for avail, x1, y1 in recv:\n for used, x0, y0 in send:\n if avail < used:\n break\n if teleport or (x0 == x1 and abs(y0 - y1) == 1) or (\n y0 == y1 and abs(x0 - x1) == 1):\n self.apply((x0, y0), (x1, y1))\n moves.append((self.score(), self.key(), self.save(), list(self.history)))\n self.undo()\n return moves", "def _get_possible_moves(board, lightcycle):\n result = []\n for diff in ((0, 1, PlayerActions.MOVE_DOWN), (1, 0, PlayerActions.MOVE_RIGHT), (0, -1, PlayerActions.MOVE_UP), (-1, 0, PlayerActions.MOVE_LEFT)):\n next_x = lightcycle['position'][0] + diff[0]\n next_y = lightcycle['position'][1] + diff[1]\n if 0 <= next_x < len(board) and 0 <= next_y < len(board[0]):\n if board[next_x][next_y] in (EMPTY, POWERUP):\n result += [diff]\n return result", "def moves(self):\n move_list = list()\n for i in range(self.n):\n row = self.queens[i][0]\n col = self.queens[i][1]\n for rd in [-1,0,1]:\n for cd in [-1,0,1]:\n if (rd == 0) and (cd == 0):\n continue\n new_pos = [row+rd, col+cd]\n if (new_pos[0] >= 0) and (new_pos[0] < self.n) and (new_pos[1] >= 0) and (new_pos[1] < self.n):\n if not new_pos in self.queens: \n move_list.append([i, new_pos])\n\n return move_list", "def get_available_moves(self, board):\n available_moves = []\n for fieldx in range(len(board)):\n column = []\n for fieldy in range(len(board)):\n legit_move = board[self.posy][self.posx].is_valid_move(board, fieldx, fieldy)\n column.append(legit_move)\n available_moves.append(column)\n return available_moves", "def get_next_moves1(self):\n moves = []\n for i in range(len(self.board)):\n for j in range(len(self.board[i])):\n if self.board[i][j] == \"\":\n next_board = copy.deepcopy(self.board)\n next_board[i][j] = colors[self.turn] + self.turn + \"\\u001b[0m\"\n next_turn = get_opponent(self.turn)\n moves.append(DotsAndBoxesState(next_board, next_turn))\n return moves", "def _get_rules_possibles_moves(cell, board_shape):\n return [(cell[0] + a[0], cell[1] + a[1])\n for a in [(-1, 0), (1, 0), (0, -1), (0, 1)]\n if ((0 <= cell[0] + a[0] < board_shape[0]) and (0 <= cell[1] + a[1] < board_shape[1]))]", "def get_possible_moves(board):\n\tpossible_moves = []\n\n\tfor count, player in enumerate(board):\n\t\tif player is not server_player 
and player is not user_player:\n\t\t\tpossible_moves.append(count)\n\n\treturn possible_moves", "def api_get_moves(self):\n return self.board.moves", "def get_legal_moves(self, player):\r\n move_list = []\r\n if self._phase == GamePhase.SETUP:\r\n return self._setup_legal_moves(player)\r\n elif self._phase == GamePhase.MOVE:\r\n return self._move_legal_moves(player)\r\n elif self._phase == GamePhase.BUILD:\r\n return self._build_legal_moves(player)\r\n return move_list", "def get_moves(self):\n grid = self.model.grid\n # List of agents we can't overlap with\n no_overlap = [\"wall\", \"human\", \"zombie\"]\n\n if self.agent_type == \"zombie\" or \\\n (\"AvoidingZombie\" not in self.states and os.environ[\"mode\"] == \"5\"):\n no_overlap.append(\"road\")\n\n # Always give the option to stay on your current location(stand still)\n all_cells = self.neighbors()\n free_cells = [self.pos]\n\n # Get rid of cells that we may not move to by iterating through all\n # cells next to the agent, and only adding non-occupied cells\n for cell in all_cells:\n cell_occupied = False\n x, y = cell.pos\n # If there are agents in the current cell, and we are not allowed\n # to overlap with any of those agents, the cell is occupied.\n # Only add cells which are not occupied.\n if not grid.is_cell_empty((x, y)):\n for agent in grid[x][y]:\n if agent.agent_type in no_overlap:\n cell_occupied = True\n break\n if not cell_occupied:\n free_cells.append((x, y))\n return free_cells", "def checkMoves(self,board):\n possibleMoves = []\n\n for c in xrange(0,8):\n for r in xrange(0,8):\n if board.isValidMove(self.tile,c,r):\n possibleMoves.append(c+r*8)\n\n return possibleMoves", "def exec_all_moves(self,level=0):\n\n capts = [self.op_capture_north, self.op_capture_nwest, self.op_capture_neast, self.op_capture_east, self.op_capture_west]\n jmps = [self.op_jump_north, self.op_jump_nwest, self.op_jump_neast]\n moves = [self.op_move_north,self.op_move_nwest, self.op_move_neast]\n result = []\n ops = []\n\n # Pre-select all operations that may be executed\n if self.next_move == self.FREE:\n capturingStarts, otherStarts = self.possible_capture()\n\n # Check for pieces that may capture\n if len(capturingStarts) > 0:\n self.next_move = self.CAPT\n self.next_pieces = capturingStarts\n else:\n self.next_move = self.FREE\n jmps.extend(moves)\n ops = jmps\n self.next_pieces = otherStarts\n\n elif self.next_move == self.CAPT:\n ops = capts\n elif self.next_move == self.JUMP:\n ops = jmps\n elif self.next_move == self.FREE:\n jmps.extend(moves)\n ops = jmps\n elif self.next_move == self.ADDPIECE_2:\n return self.op_add_piece_bot(self.next_pieces)\n\n # Execute possible operations for all viable pieces\n for pos in self.next_pieces:\n for op in ops:\n newState = op(pos)\n\n # Check if op succeeded\n if newState:\n\n # If the next player is the current player than the function is called recursevely, this is done so that outcomes account for successive plays by the same\n # player ( successive jumps and captures or piece addition)\n if newState.curr_player != self.curr_player:\n result.append(newState)\n else:\n result.extend(newState.exec_all_moves(level+1))\n\n\n return result", "def possible_moves(self, board):\n\n coordinate_list = []\n algebraic_from = JanggiGame.translate_to_algebraic(self._location)\n for i, col in enumerate(board):\n for j, row in enumerate(col):\n algebraic_to = JanggiGame.translate_to_algebraic([i,j])\n if self.validate_move(algebraic_from,algebraic_to,board) is True:\n coordinate_list.append([i,j])\n\n return 
coordinate_list", "def getValidMoves(self, board):\n inputList = []\n for i in range(0, 6):\n if c.canInput(i):\n inputList[i] = 1\n else:\n inputList[i] = 0", "def neighboring_states(self):\n index = self.state.index(0)\n\n if index == 0:\n return [self.move(movement) for movement in ['down', 'right']]\n elif index == 1:\n return [self.move(movement) for movement in ['down', 'left', 'right']]\n elif index == 2:\n return [self.move(movement) for movement in ['down', 'left']]\n elif index == 3:\n return [self.move(movement) for movement in ['up', 'down', 'right']]\n elif index == 4:\n return [self.move(movement) for movement in ['up', 'down', 'left', 'right']]\n elif index == 5:\n return [self.move(movement) for movement in ['up', 'down', 'left']]\n elif index == 6:\n return [self.move(movement) for movement in ['up', 'right']]\n elif index == 7:\n return [self.move(movement) for movement in ['up', 'left', 'right']]\n else:\n # index == 8\n return [self.move(movement) for movement in ['up', 'left']]", "def get_all_moves(board, player):\n moves = []\n if not (player_has_won(board, player) or\n player_has_won(board, utils.get_opponent(player)) or\n (not is_valid_board(board))):\n for index in range(9):\n if board[index] == config.NO_PLAYER:\n moves += [index]\n return moves", "def get_moves(self, x, y):\n\n if not self.piece_at(x, y):\n return set()\n\n moves = self._get_piece_moves(x, y)\n legal = set(moves)\n at = x, y\n for to in moves:\n res, captured = self._make_move(at, to)\n if not res:\n legal.remove(to)\n else:\n self._unmake_move(to, at, captured)\n\n self._check_integrity()\n return legal", "def get_all_available_moves(self, board):\n available_attacks = self.get_available_attacks(board)\n if any(True in sublist for sublist in available_attacks):\n attack = True\n return available_attacks, attack\n else:\n available_moves = self.get_available_moves(board)\n attack = False\n return available_moves, attack", "def possible_moves(state):\n sliders = state['sliders']\n pins = state['pins']\n result = []\n\n # this is a bit repetitive -- could theoretically generalize?\n for i, pin in enumerate(pins):\n x, y = pin\n for dy, direction in [(1, 'down'), (-1, 'up')]:\n new_pin = (x, y+dy)\n move = 'move pin {0} {1}'.format(i, direction)\n if all_open([new_pin], sliders):\n new_state = deepcopy(state)\n new_state['pins'][i] = new_pin\n result.append((move, new_state))\n\n for i, slider in enumerate(sliders):\n coords, offset = slider\n for dx, direction in [(1, 'left'), (-1, 'right')]:\n new_slider = (coords, offset+dx)\n move = 'move slider {0} {1}'.format(i, direction)\n if all_open(pins, [new_slider]):\n new_state = deepcopy(state)\n new_state['sliders'][i] = new_slider\n result.append((move, new_state))\n\n return result", "def get_legal_moves(self, color):\n moves = [] # stores the legal moves.\n # Get all the squares with pieces of the given color.\n for x in range(self.n):\n for y in range(self.n):\n if self[x][y]==0:\n moves.append((x,y))\n return moves", "def get_legal_moves(self, board):\n moves = set()\n capture_moves = set()\n if not (self.field_row*self.color_value == 1 or self.field_row*self.color_value == -6):\n self.pot_moves = {(1*self.color_value, 0)}\n\n for move in self.pot_moves:\n target_row = self.field_row + move[0]\n target_col = self.field_col + move[1]\n if self.path_clear(board, move, target_row, target_col):\n if board.status[target_row, target_col] * self.color_value == 0:\n moves.add(move)\n\n for move in self.pot_capture_moves:\n target_row = self.field_row + 
move[0]\n target_col = self.field_col + move[1]\n if self.path_clear(board, move, target_row, target_col):\n if board.status[target_row, target_col] * self.color_value < 0:\n capture_moves.add(move)\n self.legal_moves = moves\n self.legal_capture_moves = capture_moves", "def get_legal_nearby_moves(self, nearby_length=1):\n moves = []\n for row, col in self.get_legal_moves():\n if not self._is_nearby_empty(nearby_length, row, col):\n moves.append((row, col))\n\n return moves or None", "def get_possible_moves(self) -> list:\n if self.p1_turn:\n name = '2'\n else:\n name = '1'\n\n count = 0\n for i in self.claim:\n if i == name:\n count += 1\n over = count >= 0.5 * len(self.claim)\n\n moves = []\n if not over:\n for i in self.letters:\n if i.isalpha():\n moves.append(i)\n return moves", "def get_all_game_pieces_potential_moves(self):\n\n board = self.get_board()\n\n for row in board:\n\n for column in row:\n\n if column is not None:\n\n print(column.get_label(), ': ' , column.get_potential_moves())" ]
[ "0.7898512", "0.7898361", "0.7858664", "0.7847656", "0.77333015", "0.7536234", "0.7532586", "0.7498494", "0.7485024", "0.73975164", "0.7325457", "0.7318799", "0.7273196", "0.717939", "0.71593887", "0.7139039", "0.71370745", "0.7127056", "0.71068263", "0.70893526", "0.70703286", "0.70375633", "0.7037482", "0.6990782", "0.69892234", "0.6985179", "0.69833565", "0.696458", "0.69530785", "0.69414324", "0.69323903", "0.6912544", "0.6902707", "0.68960744", "0.6870823", "0.6854117", "0.68539906", "0.68372655", "0.6824167", "0.6819361", "0.6808345", "0.67939645", "0.67903703", "0.6788995", "0.67779225", "0.6776653", "0.6772586", "0.67693275", "0.67573106", "0.67495155", "0.67326236", "0.67104876", "0.67088467", "0.6691167", "0.66743666", "0.66724116", "0.666236", "0.6637017", "0.6627173", "0.66266876", "0.66266876", "0.66266876", "0.66266876", "0.66266876", "0.66266876", "0.66266876", "0.66251546", "0.6596139", "0.65790343", "0.6567534", "0.656679", "0.65602136", "0.65584916", "0.6549397", "0.6520841", "0.65096277", "0.6504938", "0.6486358", "0.64834833", "0.6479586", "0.6459023", "0.6452902", "0.6451493", "0.64011395", "0.6398786", "0.63924736", "0.63883203", "0.6385626", "0.6380522", "0.6372223", "0.6362899", "0.63382816", "0.6338015", "0.6333343", "0.6325536", "0.6323194", "0.6320103", "0.6308194", "0.63055354", "0.6288494" ]
0.6583351
68
Return 'p1' if the current player is Player 1, and 'p2' if the current player is Player 2.
def get_current_player_name(self) -> str: if self.p1_turn: return 'p1' return 'p2'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def other_player(cls, player):\n return 0 if player == 1 else 1", "def checkval(self, P1, P2, winningval):\n if P1 == winningval:\n return \"Player 1\"\n elif P2 == winningval:\n return \"Player 2\"", "def get_current_player(player_one_turn):\n \n # Get appropriate player whether the parameter is True or False\n if player_one_turn == True:\n return 'Player One'\n return 'Player Two'", "def otherPlayer(cls, player):\n return 0 if player == 1 else 1", "def other_player(self, player):\n if player == self.__opponent:\n return self.__pid\n else:\n return self.__opponent", "def get_player(self, number):\n num = int(number)\n assert (num in [1, 2])\n return self.player_1 if num == 1 else self.player_2", "def switch_player(current, player1, player2):\r\n if current == player1:\r\n return player2\r\n else:\r\n return player1", "def player_css_class(p1, p2, cp=None):\n return (\"self_player\" if p1 is p2 else \"other_player\") + (\n \" current_player\" if p1 is cp else \"\")", "def game(p1,p2): \n if (p1 =='piedra' and p2=='tijera') or (p1 == 'tijera' and p2 == 'papel')or(p1 == 'papel' and p2 == 'piedra'):\n return 'gana p1'\n elif(p1==p2):\n return'empate'\n else:\n return'gana p2'", "def __get_other_player(self):\n return engine.Engine.game_manager.players[(self.current_player_index + 1) % 2]", "def next_player(current_player=\"None\"):\n if current_player == \"None\":\n return random.choice([\"Player 1\", \"Player 2\"])\n elif current_player == \"Player 1\":\n return \"Player 2\"\n else:\n return \"Player 1\"", "def __negated_player(self, player):\n\t\treturn self.PLAYER2 if self.current_player == self.PLAYER1 else self.PLAYER1", "def other_player(self):\n return self.get_others_in_group()[0]", "def goesFirst(player1, player2):\r\n first = input('who goes first ? 
'+ Player.get_name (player1) +' or '+Player.get_name (player2)+' ?')\r\n if first == Player.get_name(player2) :\r\n return player2\r\n elif first == Player.get_name(player1) :\r\n return player1\r\n else:\r\n return goesFirst(player1, player2)", "def get_next_player(current_player: Optional[str]) -> str:\n if current_player == c.X:\n return c.O\n else:\n return c.X", "def swap_player(self):\n\n # if player 1 then switch to player 2\n if self.current_player == 1:\n self.current_player += 1\n else:\n self.current_player -= 1\n self.playing_player = self.players[self.current_player]\n return self.playing_player", "def other(player):\n return 1 - player", "def other(player):\n return 1 - player", "def other(player):\n return 1 - player", "def win(player1, player2):\n if(player1 == 1 and player2 == 3) or (player1 == 2 and player2 == 1) \\\n or (player1 == 3 and player2 == 2):\n return True", "def player(self, state, current_player):\r\n\r\n new_piece, player = self.new_or_old_piece(state)\r\n\r\n if new_piece:\r\n return player\r\n else:\r\n return current_player", "def switch_player(player):\n if player == PLAYERX:\n return PLAYERO\n else:\n return PLAYERX", "def switch_player(player):\n if player == PLAYERX:\n return PLAYERO\n else:\n return PLAYERX", "def get_player2_mark(p1_mark):\r\n if p1_mark == 2:\r\n return markers[0]\r\n else:\r\n return markers[1]", "def opponent(self, player):\r\n # player = core.BLACK (can do this for any static var)\r\n if player == core.BLACK:\r\n return core.WHITE\r\n else:\r\n return core.BLACK", "def get_player(self):\n return 2 - int((np.sum(self.state) % 2))", "def determine_winner(score1, score2):\n if score1 == score2:\n return 'tie'\n elif score1 == 21:\n return 'player1'\n elif score2 == 21:\n return 'player2'\n elif score1 > 21 or score2 > 21:\n if score1 > 21 and score2 > 21:\n if score1 - 21 < score2 - 21:\n return 'player1'\n else:\n return 'player2'\n elif score2 < 21 < score1:\n return 'player2'\n elif score1 < 21 < score2:\n return 'player1'\n elif score1 < 21 and score2 < 21:\n if score1 - score2 > 0:\n return 'player1'\n else:\n return 'player2'\n else:\n return None", "def SelectPlayer(self):\n\n player = input(data['player'])\n if player == \"1\":\n return 0\n elif player == \"2\":\n return 1\n else:\n return 'invalid'", "def other_player_status(p: models.Player):\n return {'id': p.id,\n 'name': p.name,\n 'tricks': p.tricks,\n 'cards': p.card_count,\n 'bid': p.bid}", "def prepare(p1, p2):\n n1 = recv_msg(p1)\n n2 = recv_msg(p2)\n\n out('The name of player 1 is ' + n1)\n out('The name of player 2 is ' + n2)\n\n send_msg(p1, n2)\n send_msg(p2, n1)\n\n return False", "def highCard(p1name, p2name, p1hand, p2hand):\n\tplayer1 = list(p1hand)\n\tplayer2 = list(p2hand)\n\n\tif player1[0] == \"A\" or player1[1] == \"A\":\n\t\tprint(\"%s wins!\" % p1name)", "def get_participating_players(raw_input=raw_input):\n no_players = 0\n while no_players != 1 and no_players != 2:\n inp = raw_input(\"Single player or multiplayer? 
(1/2): \")\n try:\n no_players = int(inp)\n except ValueError:\n print \"Invalid input - please try again\"\n pass\n\n if no_players is 1:\n return (HumanPlayer('X'), ComputerPlayer('O'))\n else:\n return (HumanPlayer('X'), HumanPlayer('O'))", "def get_player(self,p):\n self._validate(p)\n return p.player()", "def opponent(player):\n return BLACK if player is WHITE else WHITE", "def PlayerCMP(player_x, player_y):\n\n if player_x.GetElo() > player_y.GetElo():\n return -1\n elif player_y.GetElo() > player_x.GetElo():\n return 1\n else:\n return 0", "def set_first_player(self):\n if self.player2.won_previous:\n self.current_player = self.player2\n else: self.current_player = self.player1", "def play(pl1, ch1, pl2, ch2):\n \n if ch1 == ch2:\n print(\"It's a tie.\")\n return None\n if ch1 == 'Rock':\n if ch2 == 'Scissors':\n print(\"Congratulations,\", pl1, \". You WON! Rock beats Scissors!\")\n return pl1\n else:\n print(\"Congratulations,\", pl2, \". You WON! Paper beats Rock!\")\n return pl2\n elif ch1 == 'Scissors':\n if ch2 == 'Rock':\n print(\"Congratulations,\", pl2, \". You WON! Rock beats Scissors!\")\n return pl2\n else:\n print(\"Congratulations,\", pl1, \". You WON! Scissors beat Paper!\")\n return pl1 \n else:\n if ch2 == 'Rock':\n print(\"Congratulations,\", pl1, \". You WON! Paper beats Rock!\")\n return pl1\n else:\n print(\"Congratulations,\", pl2, \". You WON! Scissors beat Paper!\")\n return pl2", "def playOneGame(self, p1, p2, show):\n currentPlayer, otherPlayer = p1, p2\n winner = None\n gameFinished = False\n #\n while not(gameFinished): \n if show:\n self.display() # show the board\n # \n move = currentPlayer.getMove(self.deepCopy())\n if show:\n print currentPlayer.name + ' is playing in column ' , move\n \n if (move == []) or (not move in self.legalMoves()): # for dysfunctional player\n gameFinished = True\n winner = otherPlayer\n else: \n self.makeMove(currentPlayer.colour, move)\n winningColour, gameFinished = self.checkwin()\n if gameFinished:\n winner = currentPlayer\n else:\n currentPlayer, otherPlayer = otherPlayer, currentPlayer\n # if in verbose mode display the outcome of the game\n if show:\n self.display()\n if winner:\n print 'The winner is ', winner.name ,' ' ,\n if winner.colour == WHITE:\n print 'White -'\n else:\n print 'Black +' \n else:\n print 'Game ended in a draw'\n #\n return winner", "def name(who):\r\n if who == 0:\r\n return 'Player 0'\r\n elif who == 1:\r\n return 'Player 1'\r\n else:\r\n return 'An unknown player'", "def eval_winner(p1, p2):\n\n switch = {\n 1: \"Rock\",\n 2: \"Paper\",\n 3: \"Scissors\"}\n if((p1 == 1 and p2 == 3) or (p1 == 2 and p2 == 1) or (p1 == 3 and p2 == 2)):\n print(\"P1 threw \" + switch.get(p1))\n print(\"P2 threw \" + switch.get(p2))\n return 1\n elif((p2 == 1 and p1 == 3) or (p2 == 2 and p1 == 1) or (p2 == 3 and p1 == 2)):\n print(\"P1 threw \" + switch.get(p1))\n print(\"P2 threw \" + switch.get(p2))\n return 2\n else:\n print(\"P1 threw \" + switch.get(p1))\n print(\"P2 threw \" + switch.get(p2))\n return 0", "def get_players():\n return {\"X\": play_human, \"O\": play_ai}", "def get_current_player(self):\r\n\r\n return self.players[(self.turn_number) % len(self.players)].get_id()", "def isPlayerInGame(self, playerName):\n for team, players in self.players.items():\n for player in players:\n if playerName == player.name:\n return True, team\n return False, None", "def who_goes_first(self):\n if random.randint(0, 1) == 0:\n return 'computer'\n return 'player'", "def winner(self):\n # Credit to Dariusz Walczak for 
inspiration.\n # http://stackoverflow.com/questions/1720421/merge-two-lists-in-python\n moves = [p.possible_moves(p.pieces, self) for p in self.players]\n if False in [mv == [] for mv in moves]:\n return (\"None\")\n else:\n cand = [(p.score, p.name) for p in self.players]\n return (sorted(cand, reverse=True)[0][1])", "def getPlayer(self):\n \n youtube = self.isYouTube()\n player = self.isMediaPlayer()\n \n if youtube and player:\n if self.selection:\n return self.selection\n else:\n return 'external'\n elif youtube or player:\n if not self.isYouTube():\n return 'internal'\n if not self.isMediaPlayer():\n return 'external'\n else:\n return None", "def get_position(self):\n return self.proposition.team_a if self.position else self.proposition.team_b", "def opponent(self):\n \n if self.side == 'X':\n return 'O'\n else:\n return 'X'", "def getMatchup(self, name):\n if self.atHome:\n return (name, self.opponent)\n else:\n return (self.opponent, name)", "def next_player(self):\n if self.player1.turn_status == 1:\n self.player1.turn_status = 0\n self.turn(self.player2)\n else:\n self.player2.turn_status = 0\n self.turn(self.player1)", "def get_winner(player_one_score, player_two_score):\n \n # Get winner depending on which player has highest score or if their \\\n # scores are equal\n if player_one_score > player_two_score:\n return 'Player One wins!'\n elif player_one_score < player_two_score:\n return 'Player Two wins!'\n else:\n return 'Tie game!'", "def player(self):\n return self.players[self.tictactoe.turn]", "def who_plays_first():\n random = randrange(0, 2)\n if random == 0:\n return globals()['computer']\n else:\n return globals()['player']", "def winner(self):\n if (self.player):\n return (0 == reduce(lambda x, y: x+y, self.board.p1vec))\n else:\n return (0 == reduce(lambda x, y: x+y, self.board.p2vec))", "def create_players():\n\n char_pairings = {\"X\":\"O\",\"O\":\"X\"}\n\n # Create player1\n name_1 = input(\"Player 1, what is your name? > \")\n char_1 = \"\"\n \n # Force player to choose valid input\n while char_1 not in char_pairings:\n char_1 = input(\"Would you like to be X or O? > \").upper()\n player_1 = Player(name_1, char_1)\n\n # Create player2\n name_2 = input(\"Player 2, what is your name? 
> \")\n\n print(\"{}, you are {}.\".format(name_2, char_pairings[char_1]))\n char_2 = char_pairings[char_1]\n\n player_2 = Player(name_2, char_2)\n\n return (player_1, player_2)", "def find_player():\r\n global current_position, previous_position #say that we want to refer to the one above\r\n for row in range(0, len(map1)):\r\n for column in range(0, len(map1[0])):\r\n if map1[row][column] == 'p':\r\n current_position = (row, column)\r\n previous_position = (row, column)\r\n return 0 #exit from the function\r", "def get_opponent(self):\n for cell in self.__state.board.find_workers():\n player = self.__state.board.get_player_id(cell[0], cell[1])\n if not player == self.__pid:\n return player", "def swap_player():\n global current_player\n if current_player == \"X\":\n current_player = \"O\"\n elif current_player == \"O\":\n current_player = \"X\"", "def next_player(self):\n # Counter is a useful class that counts objects.\n count = Counter(self.board)\n if count.get('X', 0) > count.get('O', 0):\n return 'O'\n return 'X'", "def get_opposing_player(self, player_name):\n pass", "def get_current_server_of_player(target_username):\n servers = get_servers()\n\n target_username = target_username.lower()\n real_username = target_username\n found_server = None\n\n for server in servers:\n if not server.players.list:\n continue\n\n players_list = [player.lower() for player in server.players.list]\n\n for player in players_list:\n if target_username in player:\n real_username = player\n found_server = server\n\n return real_username.upper(), found_server", "def play_game(self, p1, p2):\n self.state_manager.init_game()\n state = self.state_manager.get_state()\n players = [p1, p2]\n player = random.choice([1, 2])\n actions = []\n p1_wins = 0\n p2_wins = 0\n while not self.state_manager.is_game_over(state):\n current_agent = players[player - 1]\n actor_chosen_action = current_agent.target_policy(state, player, is_top_policy=True) # is_top_policy = True to ensure that the agents uses the ANET and not the random exploration\n actions.append(actor_chosen_action)\n self.state_manager.perform_action(actor_chosen_action)\n\n state = self.state_manager.get_state()\n player = change_player(player)\n if player == 1:\n p2_wins += 1\n else:\n p1_wins += 1\n winning_agent = players[change_player(player)-1] # Since player is changed in end of while, the winning player at winning state is the previous player\n # print(p1.name + \" vs. 
\" + p2.name + \", winner: \" + winning_agent.name + \", actions: \" + str(actions))\n self.results[winning_agent.name] += 1\n return p1_wins, p2_wins, actions", "def get_next_player(self, player):\r\n return player * -1", "def get_next_player(self, player):\r\n return player * -1", "def get_entities(self):\n return super().get_entities() + (self.player1,)", "def testPlayer():\n\n print(\"\\n ---------- Test Player ---------\")\n\n failure = False\n hand = {'c':1, 'a':1, 'b':1 ,'d':1, 'o':1, 'e':1}\n\n p1 = Player(1, Hand(6, hand))\n p2 = Player(2, Hand(6, hand))\n\n if not p1.getHand() == hand and p2.getHand() == hand:\n failure = True\n print(\"FAILURE: Se esperaría la mano que se ingresó:\", hand,\"y se está regresando:\", p1.getHand())\n\n if not p1.getIdNum() == '1' and p2.getIdNum() == '2':\n failure = True\n print(\"FAILURE: Se espera que p1 sea el jugador 1 y p2 sea el jugador 2, y se está obteniendo:\", p1.getIdNum(),\n p2.getIdNum())\n\n print(\"Jugador 1\")\n print(\"\\t\", p1.addPoints(5))\n print(\"\\t\", p1.addPoints(12))\n if not p1.getScore() == 17:\n failure = True\n print(\"FAILURE: Se esperan 17 puntos, y se están obteniendo:\", p1.getScore())\n print(p1)\n\n print(\"Jugador 2\")\n print(\"\\t\", p2.addPoints(3))\n print(\"\\t\", p2.addPoints(10))\n if not p2.getScore() == 13:\n failure = True\n print(\"FAILURE: Se esperan 13 puntos, y se están obteniendo:\", p1.getScore())\n print(p2)\n\n if not (p1 > p2) == 1:\n failure = True\n print(\"FAILURE: Se esperaba un 1, indicando que el puntaje del P1 es mayor al del P2. Se está regresando:\",\n p1 > p2)\n if not (p1 < p2) == -1:\n failure = True\n print(\"FAILURE: Se esperaba un -1, indicando que el puntaje del P2 es menor al del P1. Se está regresando:\",\n p2 < p1)\n if (p1 == p2):\n failure = True\n print(\"FAILURE: Se esperaba un valor falso y se está obteniendo:\",\n p2 < p1)\n\n if not failure:\n print(\"SUCCESS: testPlayer()\")\n else:\n print(\"FAILURE: testPlayer()\")", "def switchPlayer():\n\n #escrever o condicional do modo de jogo.\n if (modoDeJogo == \"1\" or modoDeJogo == 1):\n quemJoga = player[1]\n\n if (player[0] == quemJoga):\n quemJoga = \"pc\"\n else: \n quemJoga = player[0]\n\n return quemJoga #quemComeca\n else:\n quemJoga = player[2]\n\n if (player[0] == quemJoga):\n quemJoga = player[1]\n else: \n quemJoga = player[0]\n \n return quemJoga #quemComeca", "def compare(player, computer):\n if player > 21 and computer > 21:\n return \"You went over. You lose\"\n\n if player == computer:\n return \"Draw\"\n elif computer == 0:\n return \"Lose, opponent has Blackjack\"\n elif player == 0:\n return \"Win with a Blackjack\"\n elif player > 21:\n return \"You went over. You lose\"\n elif computer > 21:\n return \"Opponent went over. 
You win\"\n elif player > computer:\n return \"You win\"\n else:\n return \"You lose\"", "def get_winner(team1,team2,prob):\n\trand_num = float(randint(0,1000)/1000)\n\tprint('prob: %s rand %s' % (str(prob), str(rand_num)) )\n\tif rand_num <= prob:\n\t\twinner = team1\n\telse:\n\t\twinner = team2\n\n\treturn winner", "def check_if_two_players_on_team(self, member_one, member_two):\n\n try:\n self._logger.debug(\"Checking if players are already on team together\")\n self.check_if_db_connected()\n cursor = self._db_conn.cursor()\n\n cursor.execute(\"SELECT player_id FROM player WHERE \\\nfirst_name = '{0}' AND last_name = '{1}' AND nickname = '{2}'\".format(\n member_one[0], member_one[1], member_one[2]))\n player_one_id = cursor.fetchone()[0]\n\n cursor.execute(\"SELECT player_id FROM player WHERE \\\nfirst_name = '{0}' AND last_name = '{1}' AND nickname = '{2}'\".format(\n member_two[0], member_two[1], member_two[2]))\n player_two_id = cursor.fetchone()[0]\n\n cursor.execute(\"SELECT team FROM player_team_xref WHERE \\\nplayer = {0}\".format(player_one_id))\n teams = cursor.fetchall()\n\n #TODO will give false positive if members are on a team of three\n for team in teams:\n cursor.execute(\"SELECT player FROM player_team_xref WHERE \\\nteam = {0}\".format(team[0]))\n players = cursor.fetchall()\n for player in players:\n if player[0] == player_two_id:\n return team[0]\n\n except MySQLdb.OperationalError:\n self._logger.error(\"MySQL operational error occured\")\n traceback.print_exc()\n raise exceptions.DBConnectionError(\"Cannot connect to MySQL server\")\n\n except MySQLdb.ProgrammingError:\n self._logger.error(\"MySQL programming error\")\n traceback.print_exc()\n raise exceptions.DBSyntaxError(\"MySQL syntax error\")\n\n else:\n return False", "def current_player(self) -> Player:\n return self.players[self.player]", "def get_owning_player(self):\n i = 0\n while True:\n try:\n players = self.tiles[i].players\n except IndexError:\n return None\n if len(players) == 1:\n return players[0]\n i += 1", "def same_player(self, other):\n return self.name == other.name \\\n and self.color == other.color", "def get_current_player(self) :\n return self.current_player", "def determine_winner1(self): \r\n sorted_player_rank = self._rank()\r\n print(f\"sorted player rank: {sorted_player_rank}\")\r\n print(f\"winner is player {sorted_player_rank[0]}: with points {sorted_player_rank[0][1]}\")", "def _player_list(self):\n game = self.ctrl.game\n return game.players[self.i_to_player_id(0)], game.players[self.i_to_player_id(1)]", "def get_next_match_pick_first_available(population):\n p1 = None\n for player in population:\n if player.available:\n if p1 is not None:\n return p1, player\n else:\n p1 = player", "def set_next_first_player(self):\n if self.current_player == self.player1:\n self.player1.won_previous = True\n self.player2.won_previous = False\n else:\n self.player2.won_previous = True\n self.player1.won_previous = False", "def winner(self):\n if self.__current_player == 1:\n if self.__fields[0].winner():\n print(self.__players[0]._Player__name + \"is winner!\")\n Game.play = False\n elif self.__current_player == 2:\n if self.__fields[1].winner():\n print(self.__players[1]._Player__name + \"is winner!\")\n Game.play = False", "def get_player(self):\n return MarkerType(self.__turn % 2)", "def check_faced_players(self, pair: tuple[Player]) -> tuple or None:\n player_1 = pair[0]\n player_2 = pair[1]\n\n if player_1.family_name in player_2.faced_players:\n return None\n else:\n return pair", "def 
get_current_player(self):\n return self.in_game_players[self.curr_player_index]", "def get_player_mode(mode=None):\n if mode == \"1\":\n print(\"You've chosen Solo Mode! Can you beat a computer?\")\n return mode\n elif mode == \"2\":\n print(\"You've chosen Multiplayer Mode! Can you beat a human?\")\n return mode\n else:\n if mode is not None:\n print(\"Unrecognized input. Please enter 1 or 2\\n\")\n mode = input(\"1 or 2 Players? \")\n return get_player_mode(mode)", "def played(p1, p2):\n conn, cur = connect()\n if p1 > p2:\n p1, p2 = p2, p1\n cur.execute(\"SELECT * FROM MATCHES WHERE P1 = %s and P2 = %s;\", (p1, p2,))\n row = cur.fetchone()\n conn.close()\n return row is not None", "def next_player(self,board, prev_player):\r\n opp = self.opponent(prev_player)\r\n isOpp = self.any_legal_move(opp, board)\r\n isPrev = self.any_legal_move(prev_player, board)\r\n if(isOpp==False and isPrev==False):\r\n return None\r\n elif(isOpp == False and isPrev == True):\r\n return prev_player\r\n else:\r\n return opp", "def get_current_player(self):\n return self.current_player", "def i_to_player_id(self, i):\n game = self.ctrl.game\n if self.hot_seat:\n return game.current_player if i == 0 else (1 - game.current_player)\n else:\n return self.main_player_id if i == 0 else (1 - self.main_player_id)", "def get_current_player(self) -> chr:\n return self._players[self._current_player]", "def next_player(board, prev_player):\n opp = Othello.opponent(prev_player)\n if Othello.any_legal_move(opp, board):\n return opp\n elif Othello.any_legal_move(prev_player, board):\n return prev_player\n return None", "def __repr__(self):\n if self.type == Player.HUMAN:\n return(\"Human\")\n elif self.type == Player.RANDOM:\n return (\"Random\")\n elif self.type == Player.MINIMAX:\n return (\"Minimax\")\n elif self.type == Player.ABPRUNE:\n return (\"ab Pruning\")\n elif self.type == Player.CUSTOM:\n return \"Q-Learner\"\n elif self.type == Player.MIX:\n return \"MIX\"", "def choose_starter(p1, p2):\n names = [p1, p2]\n random.shuffle(names)\n return (names[0], names[1])", "def get_result(self):\n\n x = self.rps_data[0][1].upper()\n y = self.rps_data[1][1].upper()\n if x[0] == '|':\n x = x[2:3]\n if y[0] == '|':\n y = y[2:3]\n if x == y:\n self.write_scores(\"Draw\")\n return \"Draw\"\n elif (x == 'R' and y == 'S') or (x == 'S' and y == 'P') or (x == 'P' and y == 'R'):\n self.write_scores(\"First\")\n return \"First\"\n else:\n self.write_scores(\"Second\")\n return \"Second\"", "def switch_player(self):\n if self.playerOne:\n # sets the chip color to blue\n self.red = 0\n self.blue = 255\n # switch the player to player 2 and change the caption\n self.playerOne = False\n pygame.display.set_caption('Connect4 - Player 2')\n else:\n # sets the chip color to red\n self.red = 250\n self.blue = 0\n # switch the player to player 1 and change the caption\n self.playerOne = True\n pygame.display.set_caption('Connect4 - Player 1')", "def find_player(self):\n for y, line in enumerate(self.maze):\n for x, character in enumerate(line):\n if character == \"m\":\n return y, x\n return None", "def getCurrentPlayer(self):\r\n return self.currentPlayer", "def min_players(self):\n return 2", "def play_against(self, p2, print_result=False):\n\n if self == p2:\n print('Invalid match. 
A player can not compete against themselves.')\n return None\n c1 = c2 = None\n\n while c1 == c2:\n # This loop takes care of ties.\n c1 = self.next_choice(opponent=p2)\n c2 = p2.next_choice(opponent=self)\n Player.record_play(self, c1, p2, c2, winner=None)\n\n if (c1 == 'rock' and c2 == 'paper') or \\\n (c1 == 'paper' and c2 == 'scissors') or \\\n (c1 == 'scissors' and c2 == 'rock'):\n winner = p2\n win_choice = c2\n loser = self\n loss_choice = c1\n else:\n winner = self\n win_choice = c1\n loser = p2\n loss_choice = c2\n\n # Update stats:\n Player.record_play(self, c1, p2, c2, winner)\n # winner.record_win(weapon=win_choice, opponent=loser)\n # loser.record_loss(weapon=loss_choice, opponent=winner)\n if print_result:\n print('{:20s} {} {} {}'.format(winner.name + ' beat ' + loser.name + '.',\n win_choice,\n Player.win_verb[win_choice],\n loss_choice))\n return winner", "def choose_first():\n rand = random.randint(1, 2)\n print(f\"The first is Player-{rand}\")\n return rand", "def check_winner(self):\n for row in self.board.values():\n if all([mark == \"x\" for mark in row]):\n return self.player_1\n elif all([mark == \"o\" for mark in row]):\n return self.player_2\n\n # checks every column\n for i in range(3):\n first_row, second_row, third_row = self.board.values()\n if first_row[i] == \"x\" and second_row[i] == \"x\" and third_row[i] == \"x\":\n return self.player_1\n elif first_row[i] == \"o\" and second_row[i] == \"o\" and third_row[i] == \"o\":\n return self.player_2\n\n # checks the diagonals\n if self.board[\"a\"][0] == \"x\" and self.board[\"b\"][1] == \"x\" and self.board[\"c\"][2] == \"x\":\n return self.player_1\n if self.board[\"a\"][2] == \"o\" and self.board[\"b\"][1] == \"o\" and self.board[\"c\"][0] == \"o\":\n return self.player_2\n\n return None", "def next_player(current_player, players):\n if len(players) == 1:\n return players[0]\n if current_player != players[-1]:\n return players[players.index(current_player) + 1]\n return players[0]" ]
[ "0.7322427", "0.719487", "0.71915984", "0.71570975", "0.7128614", "0.7037134", "0.7009901", "0.69278514", "0.6833396", "0.6795355", "0.6771366", "0.6737287", "0.64921343", "0.6477016", "0.63291126", "0.63171285", "0.62973", "0.62973", "0.62973", "0.62903047", "0.6243002", "0.6198836", "0.6198836", "0.6189466", "0.61864525", "0.6171854", "0.616602", "0.6156774", "0.6140084", "0.6124493", "0.61202204", "0.6098272", "0.6031376", "0.6026504", "0.59964377", "0.5981144", "0.59707814", "0.5953687", "0.59363234", "0.59052956", "0.58987975", "0.5893323", "0.5891219", "0.5874926", "0.58639175", "0.5857463", "0.5853447", "0.58487016", "0.583752", "0.5835545", "0.5811762", "0.58116937", "0.5808192", "0.5801627", "0.5800573", "0.5786872", "0.578335", "0.57832885", "0.57629466", "0.5762412", "0.57622", "0.5729328", "0.57207453", "0.57207453", "0.57094145", "0.570043", "0.56959474", "0.5692853", "0.56889385", "0.5674855", "0.5653701", "0.5653112", "0.5651337", "0.56287384", "0.5616984", "0.5616401", "0.56064427", "0.5601258", "0.55870205", "0.5585471", "0.5582211", "0.5577029", "0.5559085", "0.5554654", "0.55208045", "0.55180645", "0.55160713", "0.5515473", "0.5509239", "0.5496702", "0.5495445", "0.5494228", "0.5485371", "0.54839593", "0.54814255", "0.546821", "0.54600835", "0.54528505", "0.54467523", "0.5442623" ]
0.73625857
0
Return the GameState that results from applying move to this GameState.
def make_move(self, move: Any) -> 'StonehengeState': if type(move) == str: new_state = StonehengeState(not self.p1_turn, self.side_length) # copy the board information from current state # make copy of current state information hori_lst_copy = [] for lst in self.hori_lst: temp = [] for item in lst: temp.append(item) hori_lst_copy.append(temp) left_lst_copy = [] for lst in self.left_lst: temp = [] for item in lst: temp.append(item) left_lst_copy.append(temp) right_lst_copy = [] for lst in self.right_lst: temp = [] for item in lst: temp.append(item) right_lst_copy.append(temp) hori_result_copy = [] for item in self.hori_result: hori_result_copy.append(item) left_result_copy = [] for item in self.left_result: left_result_copy.append(item) right_result_copy = [] for item in self.right_result: right_result_copy.append(item) new_state.hori_lst = hori_lst_copy new_state.hori_result = hori_result_copy new_state.left_lst = left_lst_copy new_state.left_result = left_result_copy new_state.right_lst = right_lst_copy new_state.right_result = right_result_copy # update the new state with str move # parallel nested list data structure lst = [new_state.hori_lst, new_state.left_lst, new_state.right_lst] result = [new_state.hori_result, new_state.left_result, new_state.right_result] # update the cell for i in range(len(lst)): for j in range(len(lst[i])): for k in range(len(lst[i][j])): if lst[i][j][k] == move: # should use the player name of last state, so opposite names if new_state.p1_turn: lst[i][j][k] = "2" else: lst[i][j][k] = "1" # update ley-line marks # the ley-line may belong to a player after this move p1_taken = 0 p2_taken = 0 if result[i][j] != "@": continue for item in lst[i][j]: if item == "1": p1_taken += 1 if item == "2": p2_taken += 1 if float(p1_taken) >= len(lst[i][j]) / 2: result[i][j] = "1" if float(p2_taken) >= len(lst[i][j]) / 2: result[i][j] = "2" ###### CHECK FOR SHALLOW COPY PROBLEM, IF ATTRIBUTE IS UPDATE IN NEW STATE return new_state
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_move(self, game_state: BotGameState) -> BotMove:\n return", "def result(self, board_state, move):\n # Create a copy of the current board state\n output_state = BoardState(other_state=board_state)\n # Swap pieces\n output_state.move_piece(move)\n # Eliminate pieces\n output_state.eliminate_piece()\n return output_state", "def move_state(self):\n return self._move_state", "def apply_move(self, move):\r\n next_board = copy.deepcopy(self.board)\r\n next_board.place(self.next_player, move)\r\n return GameState(next_board, self.next_player.other, move)", "def _state(self):\n return self.State(self.copy._array, self._turn, copy(self._score))", "def get_move(self, game):\n return", "def get_game_state(self):\n return self._current_state", "def get_move(self):\n if self._difficulty == 0:\n return self._get_easy_move()\n else:\n # Different stategies/difficulties can be attached here\n return", "def move(self, state):\n result = None\n self.currentDepthLimit = 0\t\n\tself.transposition = {}\n\tself.counter = 0\n\n\twhile True:\n u = float(\"inf\")\n\t v = float(\"-inf\")\n\t self.counter = 0\n\t result = None\n\t self.transposition = {}\n\t for a in state.actions():\n new = self.min_value(state.result(a), float(\"-inf\"), float(\"inf\"),self.currentDepthLimit)\n\t if new > v:\n\t v = new\n\t result = a\n\n\t elif new == v:\n\t if a.index < result.index:\n\t result = a\n\t if self.is_time_up():\n\t return result\n\t \n\t self.currentDepthLimit += 1\n\t \"\"\"If we never use evaluate function, it means all state are terminated, so return whatever the result is\"\"\"\n\t if self.counter == 0:\n\t break\n\t if self.is_time_up():\n \t return result\n\treturn result", "def result(self, state, move):\n new_state = state[:]\n new_state[move[0]], new_state[move[1]] = new_state[move[1]], new_state[move[0]]\n return new_state", "def game_move(self):\n\t\t# make a note of the player who isn't playing\n\t\tfor x in self.players.keys():\n\t\t\tif x != self.nextPlayer:\n\t\t\t\totherPlayer = x\n\t\t\t\tbreak\n\t\t\n\t\t\n\t\t# If there are no remaining moves for this player, either the other\n\t\t# player has won or it's a draw\n\t\t# self.expansions = 1\n\t\tself.expansionCounter.count = 1\n\t\tif len(self.state.successors()) == 0:\n\t\t\tif self.state.is_win(otherPlayer):\n\t\t\t\treturn (None, otherPlayer)\n\t\t\telse:\n\t\t\t\t# None, None for a draw\n\t\t\t\treturn (None, None)\n\t\t\t\n\t\t# allow the player max_expansions for this turn\n\t\t# self.expansions = self.max_expansions\n\t\tself.expansionCounter.count = self.max_expansions\n\t\t\n\t\tnextPlayer = self.players[self.nextPlayer]\n\t\tlastPlayer = None\n\t\t\n\t\t# player may throw an exception\n\t\ttry:\n\t\t\t# get player's move, make sure we don't modify the current state\n\t\t\tmove = nextPlayer.move(self.state.get_player_state(self.nextPlayer), \n\t\t\t\t\t self.visitedStates)\n\t\t\t# player may give up\n\t\t\tif move.is_forfeit():\n\t\t\t\tprint \"Player\", self.nextPlayer, \"forfeits.\"\n\t\t\t\treturn (None, otherPlayer)\n\t\t\t# player may return illegal move\n\t\t\tif not self.state.is_valid_move(move):\n\t\t\t\tprint \"Illegal move returned by player\", self.nextPlayer, \\\n\t\t\t\t\t\t\"(\", self.players[self.nextPlayer].get_name(), \")\"\n\t\t\t\treturn (move, otherPlayer)\n\t\t\t# this player is now last player\n\t\t\tlastPlayer = self.nextPlayer\n\t\t\t# get the new next player and make the indicated move\n\t\t\tself.nextPlayer, clear = self.state.move(move, True)\n\t\t\tif 
clear:\n\t\t\t\tself.clear_repeat()\n\t\texcept:\n\t\t\tprint \"Exception thrown by player\", self.nextPlayer, \\\n\t\t\t\t\t\t\"(\", self.players[self.nextPlayer].get_name(), \")\"\n\t\t\tprint\n\t\t\ttraceback.print_exc()\n\t\t\tprint\n\t\t\treturn (None, otherPlayer)\n\t\t\n\t\tos.chdir(self.wd)\n\t\t\n\t\t# may be a repeated state IF the game cycles\n\t\tif self.is_repeat(self.state):\n\t\t\tself.state.handle_cycle()\n\t\t# otherwise, if the game cycles, note that we've been here\n\t\telif self.state.repeats():\n\t\t\tself.visitedStates.add(self.state.repeated_rep())\n\t\t\t\n\t\t# player may have sacrificed the game\n\t\tif self.state.is_win(otherPlayer):\n\t\t\treturn (move, otherPlayer)\n\t\t\n\t\t# player may have won\n\t\tif self.state.is_win(lastPlayer):\n\t\t\treturn (move, lastPlayer)\n\t\t\n\t\t# nobody's won or lost yet\n\t\treturn (move, None)", "def current_state(self):\n\n square_state = np.zeros((4, self.width, self.height))\n if self.states:\n moves, players = np.array(list(zip(*self.states.items())))\n move_curr = moves[players == self.current_player]\n move_oppo = moves[players != self.current_player]\n square_state[0][move_curr // self.width, move_curr % self.height] = 1.0\n square_state[1][move_oppo // self.width, move_oppo % self.height] = 1.0\n\n # last move indication\n square_state[2][self.last_move // self.width, self.last_move % self.height] = 1.0\n\n if len(self.states) % 2 == 0:\n square_state[3][:, :] = 1.0\n\n return square_state[:, ::-1, :]", "def current_state(self):\n\n square_state = np.zeros((4, self.width, self.height))\n if self.states:\n moves, players = np.array(list(zip(*self.states.items())))\n move_curr = moves[players == self.current_player]\n move_oppo = moves[players != self.current_player]\n square_state[0][move_curr // self.width,\n move_curr % self.height] = 1.0\n square_state[1][move_oppo // self.width,\n move_oppo % self.height] = 1.0\n # indicate the last move location\n square_state[2][self.last_move // self.width,\n self.last_move % self.height] = 1.0\n if len(self.states) % 2 == 0:\n square_state[3][:, :] = 1.0 # indicate the colour to play\n return square_state[:, ::-1, :]", "def current_state(self):\n\n square_state = np.zeros((4, self.width, self.height))\n if self.states:\n moves, players = np.array(list(zip(*self.states.items())))\n move_curr = moves[players == self.current_player]\n move_oppo = moves[players != self.current_player]\n square_state[0][move_curr // self.width,\n move_curr % self.height] = 1.0\n square_state[1][move_oppo // self.width,\n move_oppo % self.height] = 1.0\n # indicate the last move location\n square_state[2][self.last_move // self.width,\n self.last_move % self.height] = 1.0\n if len(self.states) % 2 == 0:\n square_state[3][:, :] = 1.0 # indicate the colour to play\n return square_state[:, ::-1, :]", "def get_game_state(self):\n return self.game_state", "def get_new_gamestate(self):", "def movee(self):\n\n #return the initial state if he cant move and he's in the initial state\n if not self.move and self.index == 0:\n return self.path[self.index]\n\n #return the goal state if he's at the goal state\n if self.index == len(self.path):\n return self.path[-1]\n\n #return the next move and increments the index attribute\n nextMove = self.path[self.index]\n self.index += 1\n\n return nextMove", "def game_state(self):\n return self._game_state", "def get_next_move(self, game_state):\n next_move = None\n encoded_game_state = self.__encode_state(game_state)\n\n self.__init_q_values(game_state)\n\n if 
random.random() < self.epsilon:\n next_move = self.__get_next_random_move(game_state)\n self.__update_epsilon()\n else:\n next_move = self.__get_next_greedy_move(game_state)\n\n self.game_moves_history.append((encoded_game_state, next_move))\n\n return next_move", "def get_game_state(self):\n return self._game_state", "def get_game_state(self):\n return self._game_state", "def get_game_state(self):\n return self._game_state", "def get_game_state(self):\r\n return self._game_state", "def current_state(self):\n\n square_state = np.zeros((4, self.width, self.height))\n if self.state:\n moves, players = np.array(list(zip(*self.state.items())))\n move_curr = moves[players == self.current_player]\n move_oppo = moves[players != self.current_player]\n square_state[0][move_curr // self.width,\n move_curr % self.height] = 1.0\n square_state[1][move_oppo // self.width,\n move_oppo % self.height] = 1.0\n # indicate the last move location\n square_state[2][self.last_move // self.width,\n self.last_move % self.height] = 1.0\n if len(self.state) % 2 == 0:\n square_state[3][:, :] = 1.0 # indicate the colour to play\n return square_state", "def move(self):\r\n if self.last_op_move is None:\r\n return rockyman.move(self)\r\n else:\r\n return self.last_op_move", "def move(self):\n if self.learn is None:\n return random.choice(moves)\n else:\n return self.storedmove", "def find_best_move(state: GameState) -> None:", "def make_move(self, current_state):\n\n\t\tbatch_size = 192\n\n\t\ttest_board = np.zeros((batch_size, 3, 3, 3))\n\t\ttest_cows = np.zeros((batch_size, 2))\n\t\ttest_labels = np.zeros((batch_size, 1)) \n\n\t\tnew_states = current_state.expand_states()\n\n\t\tif len(new_states) == 0:\n\t\t\treturn None\n\n\t\tfor i, state in enumerate(new_states):\n\n\t\t\tdesc = self.state_descriptor(state, self.player_index)\n\t\t\ttest_board[i] = np.asarray(desc[0])\n\t\t\ttest_cows[i] = np.asarray([desc[1], desc[2]])\n\n\t\treturn new_states[self.train_value_function_approximation(\n\t\t\tTrue, (test_board, test_cows, test_labels), len(new_states))]", "def play_move(self,state):\n self.__engine.set_state(state)\n result = self.__engine.getNextState()\n time_elapsed = self.__engine.get_time_elapsed()\n num_nodes = self.__engine.get_num_explored()\n if self.moves == 0:\n self.average_time = time_elapsed\n self.average_nodes = num_nodes\n else:\n self.average_time = ( (self.average_time * self.moves) + time_elapsed ) / (self.moves+1)\n self.average_nodes = ( (self.average_nodes * self.moves) + num_nodes ) / (self.moves+1)\n self.moves += 1\n return result", "def best_move(self):\n if self._move is not None:\n return self._move\n else:\n return self.pass_move", "def get_state(self):\n return {\n \"board\": self.board,\n \"player\": self.player,\n \"winner\": self.winner\n }", "def get_game_state(self):\n\n return self._game_state", "def get_game_state(self):\n\n return self._game_state", "def forecast_move(self, move):\n if move not in get_legal_moves(self):\n raise RuntimeError(\"Attempted forecast of illegal move\")\n newGameState = deepcopy(self)\n newGameState[move[0]][move[1]] = 1\n newGameState.player_locations[whose_turn] = move\n newGameState.whose_turn ^= 1\n return newGameState", "def result(self, state, action):\r\n\r\n sc = copy.deepcopy(state)\r\n new_piece, player = self.new_or_old_piece(state)\r\n current_player, to_action, from_action = action\r\n\r\n # Make the move\r\n sc[to_action[0]][to_action[1]] = current_player\r\n\r\n # There can't be more than 6 pieces in any state.\r\n if not 
new_piece:\r\n # Now making from place as null again\r\n sc[from_action[0]][from_action[1]] = '-'\r\n\r\n return sc", "def update_game_state(self):\n # if board is not filled out, returns a valid move message\n for row in self.board:\n if 0 in row:\n return \"Valid input\"\n\n # if board is filled out, verifies if solution is valid and updates game state\n self.game_state = alg.check_solution(self.board)\n return self.game_state", "def get_current_state(self):\n return self.game.get_current_state()", "def next_move(self):\n return self.decoded_population[self.current_index]", "def get_action(self, game_state):\n self.observation_history.append(game_state)\n\n my_state = game_state.get_agent_state(self.index)\n my_pos = my_state.get_position()\n if my_pos != nearest_point(my_pos):\n # We're halfway from one position to the next\n return game_state.get_legal_actions(self.index)[0]\n else:\n return self.choose_action(game_state)", "def move(self, state):\n \n self.depth_limit=1\n self.best_utility=-2\n action=None\n while not self.is_time_up():\n self.terminal=True\n self.cache={}\n action=self.alpha_beta_search(state,0)\n if self.terminal==True:\n break\n self.depth_limit=self.depth_limit+1\n \n return action", "def move(state=None, actual_move=None):\n copy = state.copy()\n copy.push(chess.Move.from_uci(uci=actual_move))\n return copy", "def get_valid_move(game_state: BotGameState, strategy: Strategy) -> BotMove:\n return strategy.choose_move(game_state, game_state.get_all_valid_moves())", "def get_greedy_ai_move(self, gamestate):\n multiplier = 1 if gamestate.ai_colour else -1\n best_moves = []\n best_score = self.BLACK_CHECKMATE\n for move in gamestate.get_valid_moves():\n # Execute the move\n (current_row, current_column), (new_row, new_column) = move\n current_piece = gamestate.board.board[current_row][current_column]\n piece_at_new_square = gamestate.board.board[new_row][new_column]\n\n gamestate.board.board[current_row][current_column] = None\n gamestate.board.board[new_row][new_column] = current_piece\n\n if piece_at_new_square:\n if piece_at_new_square.colour:\n gamestate.board.white_pieces.remove(piece_at_new_square)\n else:\n gamestate.board.black_pieces.remove(piece_at_new_square)\n\n # Evaluate the new board state\n score = (\n self.evaluate_board(\n gamestate.board,\n gamestate.white_checkmate,\n gamestate.black_checkmate,\n gamestate.stalemate,\n )\n * multiplier\n )\n\n # Undo the move\n gamestate.board.board[current_row][current_column] = current_piece\n gamestate.board.board[new_row][new_column] = piece_at_new_square\n\n if piece_at_new_square:\n if piece_at_new_square.colour:\n gamestate.board.white_pieces.append(piece_at_new_square)\n else:\n gamestate.board.black_pieces.append(piece_at_new_square)\n\n # Check if best move\n if score > best_score:\n best_score = score\n best_moves = [move]\n elif score == best_score:\n best_moves.append(move)\n\n # If there are no best moves, return a random move.\n if best_moves:\n return self.get_random_move(best_moves)\n return self.get_random_move(gamestate.get_valid_moves())", "def next_move(\r\n self,\r\n state: TwoPlayerGameState,\r\n gui: bool = False,\r\n ) -> TwoPlayerGameState:\r\n\r\n successors = self.generate_successors(state)\r\n\r\n alpha = -np.inf\r\n beta = np.inf\r\n\r\n for successor in successors:\r\n if self.verbose > 1:\r\n print('{}: {}'.format(state.board, alpha))\r\n\r\n successor_alpha_beta_value = self._min_value(\r\n successor,\r\n alpha,\r\n beta,\r\n self.max_depth_minimax,\r\n )\r\n\r\n if 
(successor_alpha_beta_value > alpha):\r\n alpha = successor_alpha_beta_value\r\n next_state = successor\r\n\r\n if self.verbose > 0:\r\n if self.verbose > 1:\r\n print('\\nGame state before move:\\n')\r\n print(state.board)\r\n print()\r\n print('Alpha value = {:.2g}'.format(alpha))\r\n\r\n return next_state", "def return_state(self):\n\t\treturn self.state", "def move(self, board):\n\n # We record all game positions to feed them into the NN for training with the corresponding updated Q\n # values.\n self.board_position_log.append(board.getState().copy())\n\n nn_input = self.board_state_to_nn_input(board.getState())\n probs, _ = self.get_valid_probs([nn_input], self.q_net, [board])\n probs = probs[0]\n # print(probs)\n # print(type(probs))\n # print(probs.shape)\n # input()\n # print(probs)\n # Most of the time our next move is the one with the highest probability after removing all illegal ones.\n # Occasionally, however we randomly chose a random move to encourage exploration\n if (self.training is True) and \\\n ((self.game_counter < self.pre_training_games) or (np.random.rand(1) < self.random_move_prob)):\n available = []\n for index in range(6):\n if probs[index] != -1.0:\n available.append(index)\n randomOne = random.randint(0,len(available)-1)\n move = available[randomOne]\n else:\n move = np.argmax(probs)\n # We record the action we selected as well as the Q values of the current state for later use when\n # adjusting NN weights.\n self.action_log.append(move)\n\n # We execute the move and return the result\n board.makeMove(move)\n return board.getState(), board.isOver()", "def getReward(self):\n return self._mdp.R[self._prev_state,self._cur_state]", "def move(self):\r\n move = None\r\n if self.last_move is None:\r\n move = rockyman.move(self)\r\n else:\r\n index = the_moves.index(self.last_move) + 1\r\n if index >= len(the_moves):\r\n index = 0\r\n move = the_moves[index]\r\n self.last_move = move\r\n return move", "def makeState(self):\n state = None\n\n if self.ai == True:\n state = AIState(self.game, self.blockSize)\n else:\n state = PlayerState(self.game, self.blockSize)\n\n return state", "def genmove(self, color, game) -> Move:\n # print(color)\n # print(game.play_history)\n # print(self.mc.states)\n if not len(game.play_history) == (len(self.mc.states) - 1):\n # Last play not yet in our states:\n last_player, last_move = game.play_history[-1]\n # pprint(game.play_history)\n # print(last_player, last_move)\n missing_state = self.mc.board.next_state(\n self.mc.states[-1], last_move)\n self.mc.update(missing_state)\n\n # print('Current board in our mc:')\n # _b = self.mc.states[-1][0]\n # _b = self.mc.board.from_tuple(_b)\n # print(_b)\n\n move = self.mc.get_play()\n\n # Update our saved states\n resulting_state = self.mc.board.next_state(\n self.mc.states[-1], move)\n self.mc.update(resulting_state)\n\n return move", "def _get_move(self) -> Tile:\n if not self.game_state:\n raise RuntimeError(\"Cannot call get_move when the game has not started!\")\n if isinstance(self.current_turn, Player):\n return self._get_player_move()\n elif isinstance(self.current_turn, Enemy):\n return self._get_enemy_move()\n else:\n raise TypeError(\"You're trying to move something that isn't a character or an adversary.\")", "def apply(self, gameState):\n pass", "def new_game(self) -> \"State\":\n return State(self, self.__sim.new_game())", "def next_step(self, state, x, y):\n my_board = state\n if not is_new_move(my_board, x, y):\n return my_board, 0, False, {}\n while True:\n state, game_over = 
self.get_next_state(my_board, x, y)\n if not game_over:\n if is_win(state):\n return state, 1000, True, {}\n else:\n return state, 0, False, {}\n else:\n return state, -100, True, {}", "def chooseMove(self, game):\n return self.randomMove(game)", "def next_move(\r\n self,\r\n state: TwoPlayerGameState,\r\n gui: bool = False,\r\n ) -> TwoPlayerGameState:\r\n\r\n successors = self.generate_successors(state)\r\n\r\n minimax_value = -np.inf\r\n\r\n for successor in successors:\r\n if self.verbose > 1:\r\n print('{}: {}'.format(state.board, minimax_value))\r\n\r\n successor_minimax_value = self._min_value(\r\n successor,\r\n self.max_depth_minimax,\r\n )\r\n\r\n if (successor_minimax_value > minimax_value):\r\n minimax_value = successor_minimax_value\r\n next_state = successor\r\n\r\n if self.verbose > 0:\r\n if self.verbose > 1:\r\n print('\\nGame state before move:\\n')\r\n print(state.board)\r\n print()\r\n print('Minimax value = {:.2g}'.format(minimax_value))\r\n\r\n return next_state", "def get_current_state(self):\n return self.world.get_state()", "def _run_one_game(self):\n sum_reward = 0\n done = False\n state = torch.tensor(self.env.reset(), device=device).view(1, -1)\n losses = list()\n\n while not done:\n\n # Choose action in function of observation and play it\n action = self._select_action(state)\n next_state, reward, done, _ = self.env.step(action.item())\n\n sum_reward += reward\n next_state = torch.tensor(next_state, device=device).view(1, -1)\n reward = torch.tensor([reward], device=device)\n done = torch.tensor([done], device=device)\n \n # Add transition to memory\n self._add_to_memory(state, action, next_state, reward, done)\n\n # Compute loss\n loss = self._optimize_model()\n losses += [loss]\n \n # Prepare next state\n state = next_state\n\n # Wait time_to_sleep second so the user can view the state\n sleep(self.time_to_sleep)\n \n\n return sum_reward, mean(losses)", "def play(self):\r\n state = copy.deepcopy(self.initial_state)\r\n # calculating the best move value and action for the given state\r\n best_action = self.minimax_decision(state)\r\n\r\n # To handle the case where there are no possible moves from the initial state\r\n if best_action[1] not in [(9, 9, 9), (6, 6), (5, 5)]:\r\n # Making the best move corresponding to the initial state\r\n state = copy.deepcopy(self.initial_state)\r\n expected_state = self.result(state, best_action[1])\r\n\r\n # Printing the board state resulting from the best move.\r\n print '{}'.format(self.convert_matrix_rastor(expected_state))", "def move(self, movement):\n index = self.state.index(0)\n\n new_state = self.state.copy()\n\n if movement == 'up':\n new_state[index], new_state[index - 3] = new_state[index - 3], new_state[index]\n elif movement == 'down':\n new_state[index], new_state[index + 3] = new_state[index + 3], new_state[index]\n elif movement == 'left':\n new_state[index], new_state[index - 1] = new_state[index - 1], new_state[index]\n else:\n # movement == 'right'\n new_state[index], new_state[index + 1] = new_state[index + 1], new_state[index]\n \n return new_state", "def currentState(self):\n return self.currentState", "def get_game_state(self):\n return self._game_status", "def get_move(self, last_result):\n return self.moves.pop()", "def next_step(self, state, x, y):\n my_board = state\n if not is_new_move(my_board, x, y):\n return my_board, -1, False, {}\n while True:\n state, game_over = self.get_next_state(my_board, x, y)\n if not game_over:\n if is_win(state):\n return state, 1000, True, {}\n else:\n return state, 
5, False, {}\n else:\n return state, -100, True, {}", "def extract_goal_state(self):\n time = rospy.get_time()\n ref_time = time - self.last_time\n future_time = ref_time + self.update_rate\n\n # get state of future time in global trajectory\n return df.compute_output3D(self.global_solution, self.order, self.time[self.future_index], future_time)", "def get_action(self, state):\n # In case of firt call, we initialise the self variables.\n if self.first_call :\n self.initVariable(state)\n\n # Start with Pacman as agent\n agent = PACMAN #0\n\n # Getting legal moves\n legal_moves = getLegalMovingActions(state, agent)\n\n # parents_positions correspond to a \"explored states\" kind of variable\n parents_positions = [(state.getPacmanPosition(),\n state.getGhostPositions(),\n state.getFood().asList())]\n moves_dict = {}\n # We associate with each legal move a score and place the pair in the\n # moves_dict dictionary\n for move in legal_moves:\n # Getting the score\n score = self.minimax(state.generateSuccessor(agent, move),\n (agent+1)%self.nb_agent,\n parents_positions)\n # Placing the pair in the dict\n moves_dict[score] = move\n # We take the move that has the maximum score\n return moves_dict[max(moves_dict)]", "def apply_move(self, start_move, move):\n\t\t# check that the start move and the move are Move objects\n\t\tif not isinstance(move, Move):\n\t\t\tmove = Move(move)\n\t\tif not isinstance(start_move, Move):\n\t\t\tstart_move = Move(start_move)\n\t\t# copy the board\n\t\tnext_board = copy.deepcopy(self.board)\n\t\t# place the move on the next board\n\t\tnext_board.place(self.next_player, start_move.point, move.point)\n\t\treturn GameState(next_board, self.next_player.other, move)", "def nextState(self, piece, pos):\n # Copy current pieceList to new state obj\n nextState = copy.deepcopy(self)\n\n nextState.movePiece(nextState.player.getCurrentPieceList(), piece, pos)\n\n return nextState", "def reset(self):\n if not self.single_agent_mode:\n return self._init_game()\n\n while True:\n state, player_id = self.game.init_game()\n while not player_id == self.active_player:\n self.timestep += 1\n action, _ = self.model.agents[player_id].eval_step(\n self._extract_state(state)\n )\n if not self.model.agents[player_id].use_raw:\n action = self._decode_action(action)\n state, player_id = self.game.step(action)\n\n if not self.game.is_over():\n break\n\n return self._extract_state(state)", "def initial_state(self):\n state = GameState(self.size)\n return state", "def state(self):\n result = self.getResult()\n return result.state", "def get_reward(self):\n accuracy = self.state\n # Use accuracy as reward for now.\n reward = accuracy\n return reward", "def rollout(self, current_state):\n while not self.state_manager.game_over():\n performed_action = self.make_move(current_state, is_rollout=True)\n current_state, _ = self.get_child(current_state, performed_action)\n reward = self.get_reward()\n return reward", "def move(self, gstate: gamestate.Gamestate) -> util.Move:\n moves = gstate.legal_moves_vector(gstate.agents[self.id])\n scores = {move: self.evaluate(gstate.copy, move) for move in moves}\n max_score = max(scores.values())\n max_moves = [move for move in moves if scores[move] == max_score]\n return random.choice(max_moves)", "def successor_state(self):\n return self._successor_state", "def rewardAtState(self, position, speed, action):\n\n # If terminal state, return 0 for subsequent moves and\n # do not update the position nor speed\n if(self.stuck):\n return 0, position, speed\n\n new_pos, 
new_speed = self.nextState(position, speed, action)\n\n if (new_pos < -1 or abs(new_speed) > 3):\n return -1, new_pos, new_speed\n elif (new_pos > 1 and abs(new_speed) <= 3):\n return 1, new_pos, new_speed\n else:\n return 0, new_pos, new_speed", "def current_state(self):\n square_state = np.zeros((4, self.width, self.height))\n if self.states:\n for i in range(8):\n for j in range(8):\n if self.board_value[i][j]==self.current_player:\n square_state[0][i][j]=1\n elif self.board_value[i][j]!=self.current_player and self.board_value[i][j]!= 0:\n square_state[1][i][j]=1\n # indicate the last move location\n square_state[2][self.last_move // self.width, self.last_move % self.height] = 1.0\n if len(self.states) % 2 == 0:\n square_state[3][:, :] = 1.0 # indicate the colour to play\n return square_state[:, ::-1, :]", "def move(self, action: Action) -> State:\n new_state = State(self.size_h, self.size_v, self.wall_squares, self.boxes, self.storage_locations,\n self.current_location, action)\n\n if action == Action.DOWN:\n down_loc = (new_state.current_location[0] + 1, new_state.current_location[1])\n two_away = (down_loc[0] + 1, down_loc[1])\n new_state.current_location = down_loc\n if down_loc in new_state.boxes:\n new_state.boxes.remove(down_loc)\n new_state.boxes.append(two_away)\n\n elif action == Action.UP:\n up_loc = (new_state.current_location[0] - 1, new_state.current_location[1])\n two_away = (up_loc[0] - 1, up_loc[1])\n new_state.current_location = up_loc\n if up_loc in new_state.boxes:\n new_state.boxes.remove(up_loc)\n new_state.boxes.append(two_away)\n\n elif action == Action.RIGHT:\n right_loc = (new_state.current_location[0], new_state.current_location[1] + 1)\n two_away = (right_loc[0], right_loc[1] + 1)\n new_state.current_location = right_loc\n if right_loc in new_state.boxes:\n new_state.boxes.remove(right_loc)\n new_state.boxes.append(two_away)\n\n elif action == Action.LEFT:\n left_loc = (new_state.current_location[0], new_state.current_location[1] - 1)\n two_away = (left_loc[0], left_loc[1] - 1)\n new_state.current_location = left_loc\n if left_loc in new_state.boxes:\n new_state.boxes.remove(left_loc)\n new_state.boxes.append(two_away)\n\n new_state._validate() # TODO: Remove me for the final product.\n return new_state", "def get_action(self, state):\n self.visited = {}\n utility = -inf\n move = 'STOP'\n\n # We choose the successor with the maximum utility\n for successor in state.generatePacmanSuccessors():\n maxPlayer = True\n score = self.alphabeta(successor[0], -inf, +inf, maxPlayer)\n if utility < score:\n move = successor[1]\n utility = score\n\n # If there's no winning state, we try to to move farther from the ghost\n if utility == -inf:\n dist = -inf\n for successor in state.generatePacmanSuccessors():\n newDist = self.distanceFromGhost(successor[0])\n if not successor[0].isLose() and newDist > dist:\n move = successor[1]\n dist = newDist\n print(utility)\n return move", "def myclone(self):\n new_state = GameState(self.size)\n for rc in self.gameState:\n new_state.gameState[rc] = self.gameState[rc] \n new_state.numRebels = self.numRebels\n new_state.numSith = self.numSith\n new_state.numJedi = self.numJedi\n new_state.numTurns = self.numTurns\n new_state.maxs_turn = self.maxs_turn\n new_state.cachedWin = self.cachedWin\n new_state.cachedWinner = self.cachedWinner\n new_state.stringified = self.__str__()\n \n return new_state", "def executeNextMove(self, gameState, currIndex):\n succScores = util.Counter()\n actions = getLegalActionsNoStop(gameState, currIndex)\n 
successors = [gameState.generateSuccessor(currIndex, a) for a in actions]\n for s in successors:\n succScores[s] = self.evaluateState(s, currIndex)\n chosenMoveState = max(succScores)\n return chosenMoveState", "def next_move(self, cur_state):\n\n alpha, final_state, min_level, action_took = self.alpha_beta(cur_state, 2, 0, -math.inf, math.inf, math.inf)\n #print(\"-----------------------------------------\")\n #print(\"value = \"+str(alpha)+\", min_level = \"+str(min_level))\n #print(\"previous: top=\"+str(cur_state.top)+\", bottom=\"+str(cur_state.bottom)+\", left=\"+str(cur_state.left)+\", right=\"+str(cur_state.right))\n #print(final_state.pre_state)\n return action_took", "def result(self, state, action):\n # clone the state\n new_state = state.myclone()\n\n\n\n if action==\"Pass\":\n new_state.maxs_turn = not state.maxs_turn\n new_state.numTurns = state.numTurns + 1\n new_state.stringified = new_state.__str__()\n return new_state\n\n # parse the details of the action\n action = action.rstrip().rsplit(\": \")\n type = action[0]\n details = action[1].rsplit(\" --> \")\n start = details[0].rsplit(\" @ \")\n who = start[0]\n source = start[1]\n source = source[1:len(source)-1]\n source = source.rsplit(\",\")\n source = (int(source[0]), int(source[1]))\n if type==\"Attack\":\n end = details[1].rsplit(\" @ \")\n victim = end[0]\n target = end[1]\n target = target[1:len(target)-1]\n target = target.rsplit(\",\")\n target = (int(target[0]), int(target[1]))\n else:\n target = details[1]\n target = target[1:len(target)-1]\n target = target.rsplit(\",\")\n target = (int(target[0]), int(target[1])) \n \n \n if type==\"Attack\":\n if victim==\"Sith\" or victim==\"Rebel\":\n if who==\"Rebel\" and target[0]==1:\n new_state.gameState[source] = ' '\n new_state.gameState[target] = 'J'\n new_state.numJedi += 1\n new_state.numRebels -= 1\n else:\n new_state.gameState[source] = ' '\n new_state.gameState[target] = who[0]\n if victim==\"Rebel\": new_state.numRebels -= 1\n if victim==\"Sith\": new_state.numSith -= 1\n else:\n new_state.gameState[target] = 'S'\n new_state.numSith += 1\n new_state.numJedi -= 1\n else:\n if who==\"Rebel\" and target[0]==1:\n new_state.gameState[source] = ' '\n new_state.gameState[target] = 'J'\n new_state.numJedi += 1\n new_state.numRebels -= 1\n else:\n new_state.gameState[source] = ' '\n new_state.gameState[target] = who[0]\n \n \n new_state.maxs_turn = not state.maxs_turn\n new_state.numTurns = state.numTurns + 1\n self._cache_winner(new_state)\n new_state.stringified = new_state.__str__()\n \n return new_state", "def make_move(self, state, actions):\n\n # Here we are doing an exploration so don't update scores\n # for selected state\n if random.random() < self._exploration:\n action = random.choice(actions)\n self._last_state = self._get_state_for_action(state, action)\n return action\n\n # So we are not doing exploration, here we want to update it\n max_selection = None\n for a, s in self._states_from_actions(state, actions).items():\n state_value = self._state_values[s]\n if max_selection is None or state_value > self._state_values[max_selection[1]]:\n max_selection = (a, s)\n\n if self._last_state is not None:\n self._update_state_value(self._last_state, max_selection[1])\n\n self._last_state = max_selection[1]\n return max_selection[0]", "def _next_state(self, state, action):\n\n # Transition table to define movement for each action\n if self.action_type == 'VonNeumann':\n transitions = {0: [-1, 0], 1: [+1, 0], 2: [0, -1], 3: [0, +1]}\n elif self.action_type == 
'Moore':\n transitions = {0: [-1, 0], 1: [+1, 0], 2: [0, -1], 3: [0, +1],\n 4: [-1, +1], 5: [+1, +1], 6: [-1, -1], 7: [+1, -1]}\n\n new_state = [state[0] + transitions[action][0], state[1] + transitions[action][1]]\n if self.maze[new_state[0]][new_state[1]] == 1: # Hit wall, stay there\n return state\n else: # Valid move for 0, 2, 3, 4\n return new_state", "def handle(self) -> State:\n # Check if both players are destroyed\n if self.player2.is_destroyed():\n # Mark the game as over\n self.over = True\n # adds 1 to the number of games played\n self.update_trackers()\n # Return the gameover state\n return self.get_gameover_state()\n\n # Otherwise return the current state\n return super().handle()", "def getGameState(self):\n return None", "def get_action(self, state):\n from graphics_utils import keys_waiting\n from graphics_utils import keys_pressed\n keys = keys_waiting() + keys_pressed()\n if keys != []:\n self.keys = keys\n\n legal = state.get_legal_actions(self.index)\n move = self.get_move(legal)\n\n if move == Directions.STOP:\n # Try to move in the same direction as before\n if self.last_move in legal:\n move = self.last_move\n\n if (self.STOP_KEY in self.keys) and Directions.STOP in legal:\n move = Directions.STOP\n\n if move not in legal:\n move = random.choice(legal)\n\n self.last_move = move\n return move", "def solveOneStep(self):\n ### Student code goes here\n # Mark this move as explored\n self.visited[self.currentState] = True\n\n # Get move to make\n movables = self.gm.getMovables()\n # print(\"EXPLORING GAME STATE \" + str(self.gm.getGameState()) + \"---------------------------------------------------------\")\n to_move = self.currentState.nextChildToVisit # movables index\n # print(\"depth \", self.currentState.depth)\n\n # Return if done\n if self.currentState.state == self.victoryCondition:\n # print(\"DONE\")\n return True\n\n while to_move < len(movables):\n # Make the move\n movable_statement = movables[to_move]\n # print(\"implementing move \", movable_statement)\n self.gm.makeMove(movable_statement)\n\n # Create a new state with this move made\n new_state = self.gm.getGameState()\n\n # Find out if this state has already been explored\n visited = False\n for visited_state in self.visited.keys():\n if visited_state.state == new_state:\n visited = True\n\n # If the new state hasn't been visited then add it as a child then move down to this child\n if not visited:\n new_gs = GameState(new_state, self.currentState.depth + 1, movable_statement)\n new_gs.parent = self.currentState\n self.currentState.children.append(new_gs)\n self.currentState.nextChildToVisit = to_move + 1\n self.currentState = new_gs\n break\n\n # Else skip this state and try going to the next movable statement\n else:\n # print(\"SKIP THIS STATE\")\n self.gm.reverseMove(movable_statement)\n to_move += 1\n\n # Went all the way down to a leaf, backtrack\n if (to_move >= len(movables)):\n self.gm.reverseMove(self.currentState.requiredMovable)\n self.currentState = self.currentState.parent\n\n return False", "def CurrentState(self):\n return self.currentState", "def getSuccessor(self, gameState, action):\n successor = gameState.generateSuccessor(self.index, action)\n pos = successor.getAgentState(self.index).getPosition()\n if pos != util.nearestPoint(pos):\n return successor.generateSuccessor(self.index, action)\n else:\n return successor", "def make_move(self, move):\n if type(move) == str:\n move = int(move)\n\n new_state = SubtractSquareState(not self.p1_turn,\n self.current_total - move)\n return 
new_state", "def getNextState(self, board, player, action):\n b = self._base_board.with_np_pieces(np_pieces=np.copy(board))\n b.add_stone(action, player)\n return b.np_pieces, -player", "def solveOneStep(self):\n ### Student code goes here\n if self.first_step == False:\n self.first_step = True\n if self.solveOneStep():\n return True\n if self.queue:\n self.gm_init()\n ele = self.queue.get()\n #print (len(ele))\n state = ele[0]\n premoves = ele[1]\n\n for m in premoves:\n self.gm.makeMove(m)\n if state.state == self.victoryCondition:\n return True\n self.visited[state] = True\n print(\"CURRENTSTATE:\")\n print(self.gm.getGameState())\n print(\"*******\")\n moves = self.gm.getMovables()\n for m in moves:\n self.gm.makeMove(m)\n if (((state.parent is not None) and (self.gm.getGameState() == state.parent.state))) or GameState(self.gm.getGameState(), 0, None) in self.visited:\n self.gm.reverseMove(m)\n continue\n self.visited[GameState(self.gm.getGameState(), 0, None)] = True\n new_pmv = [i for i in premoves]\n new_pmv.append(m)\n next_state = GameState(self.gm.getGameState(), state.depth+1, m)\n next_state.parent = state\n state.children.append(next_state)\n self.queue.put([next_state, new_pmv])\n self.gm.reverseMove(m)\n self.currentState = state\n\n #for i in range(len(premoves)-1, -1, -1):\n # mv = premoves[i]\n # self.gm.reverseMove(mv)\n return False", "def getMove(self, board):\r\n moves = self._getAvailableActions(board)\r\n return moves[-1]", "def move(self):\r\n his_move = random.randint(0, 2)\r\n return the_moves[his_move]", "def best_move(self, state, curr_player):\n\t\t# determine opponent's color\n\t\tif curr_player == self.colors[0]:\n\t\t\topp_player = self.colors[1]\n\t\telse:\n\t\t\topp_player = self.colors[0]\n\n\t\treturn self.value(state, curr_player)", "def make_move(self, state):\r\n # intially set drop phase to true\r\n drop_phase = True\r\n move = [] # list to make moves with to return\r\n succ = self.succ(state) # get the successor of this state\r\n # intial postion of board to set up most advantagous spot if its empty\r\n if sum(x.count(self.my_piece) for x in self.board) == 0 and self.board[2][2] == ' ':\r\n move.insert(0, (2, 2))\r\n return move\r\n \r\n # check the number of 'r' and 'b' on board if theres 4 of each drop phase is false\r\n if sum(x.count('r') for x in self.board) == 4 and sum(x.count('b') for x in self.board) == 4:\r\n drop_phase = False\r\n\r\n # if not during drop phase use minimax to make next move from one postion to next\r\n if not drop_phase:\r\n move = []\r\n d = self.Max_value(state, 0)\r\n val = d['val']\r\n m = d['move']\r\n p = d['pos']\r\n f = d['from']\r\n s = sorted(succ, key=lambda e: e['f'])\r\n moveto = s[-1]\r\n move.insert(1, (moveto['from'][0], moveto['from'][1]))\r\n move.insert(0, (moveto['pos'][0], moveto['pos'][1]))\r\n return move # return the from, to move\r\n\r\n else: #else use minimax and to make move during drop phase selecting spot to place AI piece\r\n d = self.Max_value(state, 0)\r\n val = d['val']\r\n m = d['move']\r\n p = d['pos']\r\n hold = []\r\n move = []\r\n n = None\r\n hold = []\r\n for s in succ:\r\n p = s['pos'][0]\r\n p1 = s['pos'][1]\r\n if s['f'] == val and state[p][p1] == ' ':\r\n hold.append(s)\r\n if len(hold) == 1:\r\n row = hold[0]['pos'][0]\r\n col = hold[0]['pos'][1]\r\n else:\r\n f = sorted(hold, key=lambda e: e['pos'])\r\n row = f[0]['pos'][0]\r\n col = f[0]['pos'][1]\r\n\r\n move.insert(0, (row, col)) # return the move \r\n return move", "def get_move(state):\n entry = 
game_states[get_values(state)]\n options = list()\n\n for move in entry:\n move_result = entry[move]\n if move_result == 'Y':\n return move\n elif move_result == 'N':\n continue\n options.extend([move]*move_result)\n return choice(options)", "def get_state(self):\n return self.get_pose()", "def select_move(self, game_state):\n raise NotImplementedError()" ]
[ "0.7312155", "0.72799444", "0.7227082", "0.71650064", "0.7120976", "0.6810444", "0.67236984", "0.66773134", "0.66662157", "0.66595423", "0.6642064", "0.6626734", "0.65865046", "0.65865046", "0.65685004", "0.65537447", "0.6546836", "0.65373135", "0.653719", "0.6526493", "0.6526493", "0.6526493", "0.65084136", "0.6499627", "0.6495153", "0.64894927", "0.6486272", "0.6478393", "0.6473932", "0.6430725", "0.64262223", "0.6425947", "0.6425947", "0.6409724", "0.6405276", "0.6328083", "0.63209176", "0.62699145", "0.62674046", "0.62580824", "0.6252293", "0.6249869", "0.62445897", "0.6243454", "0.62388736", "0.6232772", "0.6224245", "0.6223757", "0.621695", "0.6209807", "0.6202501", "0.6194529", "0.61922973", "0.61880594", "0.61837614", "0.61831063", "0.6178967", "0.61653817", "0.6163719", "0.61453205", "0.61383736", "0.6129996", "0.61154974", "0.6107663", "0.6104191", "0.60904694", "0.6085625", "0.6077006", "0.6056727", "0.6056135", "0.60484785", "0.60436296", "0.60434663", "0.6041772", "0.60377437", "0.6034026", "0.60330933", "0.6029057", "0.60222256", "0.60209274", "0.60171133", "0.6006812", "0.6001732", "0.60008496", "0.60006034", "0.59995353", "0.5993931", "0.59898967", "0.59879464", "0.598659", "0.59837925", "0.5967105", "0.59618336", "0.5948279", "0.5938917", "0.59349996", "0.59284735", "0.5928356", "0.5927691", "0.5927622", "0.5926241" ]
0.0
-1
Return whether move is a valid move for this GameState.
def is_valid_move(self, move: Any) -> bool: return move in self.get_possible_moves()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_valid_move(self, move):\n if self.game_state[move[0]][move[1]] is not None:\n return False\n return True", "def valid_bool(self):\n return bool(self.piece.validate_move(self.board, self))", "def is_valid_move(self, move):\n if type(move) == str:\n move = int(move)\n\n return move in self.get_possible_moves()", "def valid_move(self, player, move):\n return (True)", "def check_move(self, move):\n\n if str(move) in self.moves_made:\n return False\n return True", "def is_valid_move(state, move):\n row, col = move\n if row not in [1, 2, 3] or col not in [1, 2, 3]:\n print(\"Invalid move! Specify correct game square!\")\n return False\n if state[row-1][col-1] != '_':\n print('Invalid move! Place your marker on a free square!')\n return False\n return True", "def isValidMove(self, move: Move) -> bool:\n # TODO: How do we determine the move type?\n # Some form of duck typing?\n minigame_move_classes = {\n \"BuyPrivateCompany\": \"BuyPrivateCompanyMove\",\n \"BiddingForPrivateCompany\": \"BuyPrivateCompanyMove\",\n }\n return minigame_move_classes.get(self.minigame_class) == move.__class__.__name__", "def is_move_valid(self, direction, reference_board=None):\n # Verify a left move does not take you off the board.\n if (direction == \"l\"):\n if (self._current_loc.get_column() == 0):\n return False\n # Verify an up move does not take you off the board.\n elif (direction == \"u\"):\n # Verify the move does not take you off the board.\n if (self._current_loc.get_row() == 0):\n return False\n # Verify a right move does not take you off the board.\n elif (direction == \"r\"):\n current_row = self._current_loc.get_row()\n max_column_number = len(self._untraversed_board[current_row])\n if self._current_loc.get_column() + 1 == max_column_number:\n return False\n # Verify a down move does not take you off the board.\n elif (direction == \"d\"):\n if self._current_loc.get_row() + 1 == len(self._untraversed_board):\n return False\n else:\n assert False, \"Invalid move direction.\"\n\n # Get the new location for a move in the specified direction.\n new_location = self._calculate_move_location(direction)\n new_row = new_location.get_row()\n new_col = new_location.get_column()\n # Verify the space is available\n if(reference_board is None):\n return BoardPath._untraversed_board[new_row][new_col] != \"#\"\n else:\n return reference_board[new_row][new_col] != \"#\"", "def legal_move(self, move, state = None):\n if state is None:\n state = copy(self.state)\n else:\n state = copy(state)\n return state[move // state.shape[0], move % state.shape[0]] == 0", "def valid_move(self, player, move):\n if self.rounds < len(self.players):\n if ((False in [(self.board).in_bounds(pt) for pt in move])\n or (self.board).overlap(move)\n or not (True in [(pt in player.corners) for pt in move])):\n return (False)\n else:\n return (True)\n\n elif ((False in [(self.board).in_bounds(pt) for pt in move])\n or (self.board).overlap(move)\n or (self.board).adj(player, move)\n or not (self.board).corner(player, move)):\n return (False)\n\n else:\n return (True)", "def move_valid(move):\n return True", "def move_check(self):\r\n \r\n if not self.run:\r\n return False\r\n \r\n if self.get_num_legal_moves() == 0:\r\n SlTrace.lg(\"NO more legal moves!\", \"nolegalmoves\")\r\n ###return False \r\n \r\n if self.new_move:\r\n self.announce_player(\"start_move\")\r\n if SlTrace.trace(\"selected\"):\r\n self.list_selected(\"After start_move\")\r\n self.new_move = False\r\n player = self.get_player()\r\n if player is None:\r\n return 
False\r\n \r\n return True", "def is_valid_move(self, position, dest_square):\n if self.symbol.isupper() and position.turn != 'w':\n return False\n elif self.symbol.islower() and position.turn != 'b':\n return False\n elif dest_square not in self.calculate_scope(position):\n return False\n else:\n return True", "def _isvalidmove(self, from_, to_):\n if self.board[from_].occupant is None:\n print(\"Moving from empty square\")\n return False\n piece = self.board[from_].occupant\n\n if piece.color != self.to_move:\n print(\"Wrong color\")\n return False\n\n if self.is_checked:\n if piece.notation != 'K':\n print(\"King is checked!\")\n return False\n\n diff = (\n to_cartesian(to_)[0] - to_cartesian(from_)[0],\n to_cartesian(to_)[1] - to_cartesian(from_)[1]\n )\n if not piece.hopping:\n if self.board.isblocked(from_, to_):\n print(\"Move blocked by other pieces\")\n return False\n\n if self.board[to_].occupant is not None:\n if piece.color == self.board[to_].occupant.color:\n print(\"Cannot capture friendly\")\n return False\n\n if diff not in piece.get_captures():\n print(\"Invalid piece capture\")\n return False\n\n if diff not in piece.get_moves():\n print(\"Invalid piece move\")\n return False\n\n return True", "def check_move(board, move):\n\n player, spike_index, fields_to_move = Judge._validate_move(move)\n\n # 1. moving out of the bar\n # 2. check if the source is of the valid player\n # 3. check if the destination is valid\n\n board.set_player_perspective(player)\n\n # 1.\n if spike_index == OUT_OF_BAR_SPECIAL_MOVE:\n if board.bar[player] < 1:\n return False\n\n if not board.valid_dest(fields_to_move - 1):\n return False\n\n return True\n\n # 2.\n if not board.valid_source(spike_index):\n return False\n # 3.\n dest_spike_index = spike_index + fields_to_move\n\n if dest_spike_index >= len(INITIAL_SPIKES_STATE):\n return board.all_at_home()\n \n return board.valid_dest(dest_spike_index)", "def is_valid_move(self, position: Point) -> bool:\n\t\tif self.tiles[position.x][position.y] == 0:\n\t\t\treturn True\n\t\treturn False", "def is_valid(self, move):\r\n return move > 10 and move < 89", "def move_is_valid(self, pos):\n\n if (not isinstance(pos, tuple) or len(pos) != 2 or \n not isinstance(pos[0], int) or not isinstance(pos[1], int)):\n return False\n y, x = pos\n if (y >= 0 and y < self.size and x >= 0 and x < self.size and \n self.board[pos] == HexBoard.EMPTY):\n return True\n else:\n return False", "def is_valid(move):\n return isinstance(move, int) and move in Othello.squares()", "def has_valid_move(self, cur_square, board):\n coords = cur_square.coords\n neighbor_list = [tuple(map(sum, zip(coords, offset))) for offset in self._offsets]\n return self.has_valid_move_in_list(coords, neighbor_list, board)", "def checkValidMove(self, move):\n boardCopy = copy.deepcopy(self)\n tilesChange = False\n if move == Move.UP:\n boardCopy.moveUp()\n elif move == Move.DOWN:\n boardCopy.moveDown()\n elif move == Move.LEFT:\n boardCopy.moveLeft()\n elif move == Move.RIGHT:\n boardCopy.moveRight()\n else:\n raise ValueError('Invalid Move was input')\n \n for i in range(4):\n for j in range(4):\n if boardCopy.getTile(i,j) != self.getTile(i,j):\n tilesChange = True\n del(boardCopy)\n return tilesChange", "def is_move_valid(self, from_row, from_col, to_row, to_col):\n # check is taking own piece?\n if self._is_taking_own_piece(from_row, from_col, to_row, to_col):\n return False\n\n piece = self.board.squares[from_row][from_col]\n if piece == ChessPiece.W_ROOK or piece == ChessPiece.B_ROOK:\n return 
self.is_rook_move_valid(from_row, from_col,\n to_row, to_col)\n if piece == ChessPiece.W_KNIGHT or piece == ChessPiece.B_KNIGHT:\n return self.is_knight_move_valid(from_row, from_col,\n to_row, to_col)\n if piece == ChessPiece.W_BISHOP or piece == ChessPiece.B_BISHOP:\n return self.is_bishop_move_valid(from_row, from_col,\n to_row, to_col)\n if piece == ChessPiece.W_QUEEN or piece == ChessPiece.B_QUEEN:\n return self.is_queen_move_valid(from_row, from_col,\n to_row, to_col)\n if piece == ChessPiece.W_KING or piece == ChessPiece.B_KING:\n return self.is_king_move_valid(from_row, from_col,\n to_row, to_col)\n if piece == ChessPiece.W_PAWN or piece == ChessPiece.B_PAWN:\n return self.is_pawn_move_valid(from_row, from_col,\n to_row, to_col)", "def validate_move(move):\n if move[0] in cc.VALID_RANKS and move[1] in cc.VALID_RANKS:\n valid = True\n else:\n valid = False\n return valid", "def can_move(self):\n return self.movement", "def is_valid_move(self, board, fieldy, fieldx):\n if isinstance(board[fieldy][fieldx], Piece):\n return False\n if self.posy - fieldy == self.direction and abs(self.posx - fieldx) == 1:\n return True\n else:\n return False", "def is_valid_move(self, somerow, somecol):\n bool_1 = self.board[somerow][somecol] != 1\n bool_2 = self.num_queens_placed < self.size \n bool_3 = self.attack(somerow, somecol)\n return bool_1 and bool_2 and bool_3", "def check_valid_move(grid: np.ndarray, current_position: tuple, move: tuple) -> bool:\n # getting coordinates for moved position\n moved_position = tuple(np.add(current_position, move))\n\n def compare_coordinates(a: tuple, b: tuple) -> bool:\n \"\"\"\n Helper function to compare coordinates\n Checks if a is smaller than b\n \"\"\"\n return all(np.array(a) < np.array(b))\n\n # checking if coordinates are inside the array (between (0,0) and (N,N))\n if compare_coordinates((0, 0), moved_position) and compare_coordinates(moved_position, grid.shape):\n # checking if the coordinates are not on the obstacle\n if grid[moved_position] == 'x':\n return False\n else:\n return True\n else:\n return False", "def is_moving(self) -> bool:\n return self.orders and self.orders[0].ability.id is AbilityId.MOVE", "def validate_move(move, player_board):\n select_row = move.select_row\n select_col = move.select_col\n \n player_board_rows = player_board.shape[0]\n player_board_cols = player_board.shape[1]\n \n if select_row >= player_board_rows or select_row < 0 or \\\n select_col >= player_board_cols or select_col < 0 or \\\n player_board[select_row][select_col] != -1:\n return False\n \n return True", "def is_move_valid(move: Move, board: Board, whites_turn: bool) -> bool:\n if out_of_bounds(move[0]) == True or out_of_bounds(move[1]) == True:\n return False\n \n if move[0] == move[1]:\n return False\n\n if is_current_players_piece(piece_at_position(move[0], board), False) and whites_turn == True:\n return False\n elif is_current_players_piece(piece_at_position(move[0], board), True) and whites_turn == False:\n return False\n\n\n if piece_at_position(move[1], board) in WHITE_PIECES and whites_turn == True:\n return False\n elif piece_at_position(move[1], board) in BLACK_PIECES and whites_turn == False:\n return False\n\n\n if move[1] not in get_possible_moves(move[0], board):\n return False\n\n\n test_board = board\n test_board = update_board(test_board, move)\n if is_in_check(test_board, True) and whites_turn == True:\n return False\n elif is_in_check(test_board, False) and whites_turn == False:\n return False\n\n return True", "def is_legal_move(self, 
current_player, move):\n\t\tstarting_pos = move[0]\n\t\tending_pos = move[1]\n\t\tif ending_pos[0] not in range(self.board_size) or ending_pos[1] not in range(self.board_size):\t# Discard any generated moves that fall off of the board\n\t\t\treturn False \n\t\tif self.board.repr[starting_pos[0]][starting_pos[1]]!=self.player_symbol[current_player]:\n\t\t\tprint \"this should never trigger and is redundant\"\n\t\t\treturn False\n\t\tif self.board.repr[ending_pos[0]][ending_pos[1]]!= '.':\t# Check that landing spot is empty\n\t\t\treturn False\n\t\tmiddle_pos = (starting_pos[0]-(starting_pos[0]-ending_pos[0])/2,starting_pos[1]-(starting_pos[1]-ending_pos[1])/2)\t# Check the middle spot is the other piece - this should in theory not matter because the pieces alternate\n\t\tother_player = 1 - current_player \n\t\tif self.board.repr[middle_pos[0]][middle_pos[1]] != self.player_symbol[other_player]:\n\t\t\treturn False \n\t\treturn True", "def isValidPlayer(self, move: Move) -> bool:\n errors = err(\n move.player_id == self.current_player.id,\n \"Wrong player; {} is not {}\",\n move.player_id, self.current_player.id\n )\n if errors == None:\n return True\n self.errors_list = [errors]\n return False", "def LegalMove(self, pos):\n\n return (0 <= pos <= BOARD_SIZE) and (self.state[pos] == EMPTY)", "def _is_valid_move(self, vector, current_piece, other_piece):\n # If direction is forward and the space is non-empty, break\n if vector[0] == 0 and other_piece != \"empty\":\n return False\n # If direction is diagonal and space is empty, break\n if vector[0] != 0 and other_piece == \"empty\":\n return False\n # If moving by 2 spaces, check if in starting row\n if vector[1] == 2 and current_piece.position[1] != 1:\n return False\n if vector[1] == -2 and current_piece.position[1] != 6:\n return False\n\n return True", "def is_knight_move_valid(self, from_row, from_col, to_row, to_col):\n # check for valid move\n if ((abs(from_row - to_row) == 1 and abs(from_col - to_col) == 2) or\n (abs(from_row - to_row) == 2 and abs(from_col - to_col) == 1)):\n return True\n return False", "def valid_move(x, y):\r\n if [x, y] in empty_cells(board):\r\n return True\r\n else:\r\n return False", "def CheckMove(self,move):\n\t\tif(move=='w'):\n\t\t\tif(self.x==0):\n\t\t\t\treturn 0\n\t\t\treturn 1\n\t\telif(move=='s'):\n\t\t\tif(self.x==15):\n\t\t\t\treturn 0\n\t\t\treturn 1\n\t\telif(move=='d'):\n\t\t\tif(self.y==35):\n\t\t\t\treturn 0\n\t\t\treturn 1\n\t\telif(move=='a'):\n\t\t\tif(self.y==0):\n\t\t\t\treturn 0\n\t\t\treturn 1", "def is_valid_move(x:int, y:int,board_length) -> bool:\n if x < 0 or y < 0 or x == board_length or y == board_length:\n return False\n return True", "def validmove(self, boxId):\n return ((self.gameState[boxId] == 0) and (0 <= boxId <= 8))", "def isValid(self, game):\n if self.unitid == None or self.team == None or self.direction == None:\n return False\n\n unit = game.getUnit(self.team, self.unitid)\n\n # Validate it can act\n if not unit.canAct():\n return False\n \n # Check map bounds of destination spot\n newPos = unit.pos.translate(self.direction, 1)\n if newPos.y < 0 or newPos.y >= game.map.height:\n return False\n if newPos.x < 0 or newPos.x >= game.map.height:\n return False\n \n # Note: Collisions are handled in the turn loop as both players move\n return True", "def _valid_move_exists(self):\n lst = []\n for i_row in range(self._num_rows):\n for i_col in range(self._num_cols):\n if self._valid_placement(i_row, i_col)[0]:\n lst.append((i_row, i_col))\n\n return lst != [] #If lst != [], 
then the list has elements -> valid move(s) exist", "def is_moving(self):\n return self.steps < self.max_steps", "def _is_valid_move(self, vector, current_piece, other_piece):\n return True", "def available_moves(self) -> bool:\n has_move = False\n for i in range(self.col):\n if self.valid_column(i):\n has_move = True\n return has_move", "def validate_move(board: list, character: dict, direction: str) -> bool:\n if direction.strip().upper() == \"N\":\n return (character[\"Position\"][0] - 1, character[\"Position\"][1]) in board\n elif direction.strip().upper() == \"S\":\n return (character[\"Position\"][0] + 1, character[\"Position\"][1]) in board\n elif direction.strip().upper() == \"W\":\n return (character[\"Position\"][0], character[\"Position\"][1] - 1) in board\n elif direction.strip().upper() == \"E\":\n return (character[\"Position\"][0], character[\"Position\"][1] + 1) in board\n else:\n print(\"Please enter only directions shown above\")\n return False", "def canMove(self):\n\n if self.index == len(self.path):\n self.move = False\n return self.move", "def validBoard():\r\n\r\n\tglobal move1, move2\r\n\r\n\tif move1==move2 or move1-move2==1:\r\n\t\treturn True\r\n\telse:\r\n\t\treturn False", "def check_move_states(self, player, depth):\n\n if depth >= self.look_ahead:\n return True\n\n for move in gen_moves(player, self.__state.board, self.checker):\n self.__state.push(move)\n winner = self.checker.check_game_over(self.__pid, self.__opponent)\n if winner == self.__opponent:\n return False\n worker = move['xy2']\n if not self.check_build_states(player, worker, depth):\n return False\n self.__state.pop()\n return True", "def valid_move(self, row, col):\n if not self._game_over:\n i_row, i_col = row-1, col-1\n #i_row and i_col wil be used to index the board (hence the i)\n (valid, flip_lst) = self._valid_placement(i_row, i_col)\n #print(\"FOR TESTING. Tiles Flipped: \", flip_lst)\n \n if valid:\n #Big Change: You decided to make determining validity\n # and flipping separate operations\n self._flip(i_row, i_col, flip_lst)\n else:\n print(\"\\nPlease enter a valid move!\")\n return False\n\n if self._board_is_full():\n self._game_over = True\n self._set_winner() \n \n self._switch_turn(self._turn)\n if not self._valid_move_exists(): #Check if the other player has any valid moves\n print(\"\\nNo valid moves exist for {0}. {0}'s turn has been skipped\".format(self._turn))\n self._switch_turn(self._turn) #Switch turn back to player before skip was determined\n if not self._valid_move_exists(): #Check if the other player has any valid moves\n print(\"No valid moves exist for {0}. {0}'s turn has been skipped\".format(self._turn))\n print(\"No moves exist for either player. GAME OVER\")\n self._game_over = True\n self._set_winner()\n return False\n\n return True\n elif self._game_over:\n print(\"The game is over. 
No more moves can be made!\")\n #TODO: Replace this^ with an exception later?\n return False", "def is_legal(self, move, player, board):\r\n if(self.is_valid(move)==False):\r\n return False\r\n if(board[move]!=core.EMPTY):\r\n return False\r\n return True", "def validate_move(self, move_from, move_to, board):\n\n pass", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # if destination within the board\n if (to_col in range(9) and to_row in range(10) and\n # and the move is 1 up/down/left/right (with no other piece here) and then 1 farther out diagonally\n ((to_row - from_row == -2 and abs(to_col - from_col) == 1 and board[from_col][from_row - 1] == '') or\n (to_row - from_row == 2 and abs(to_col - from_col) == 1 and board[from_col][from_row + 1] == '') or\n (to_col - from_col == -2 and abs(to_row - from_row) == 1 and board[from_col - 1][from_row] == '') or\n (to_col - from_col == 2 and abs(to_row - from_row) == 1 and board[from_col + 1][from_row] == '')\n )\n ):\n return True\n else:\n return False", "def isLegalMove(self, column, state):\n \n for i in range(6):\n if state[i][column] == ' ':\n # once we find the first empty, we know it's a legal move\n return True\n \n # if we get here, the column is full\n return False", "def is_moving(self):\n response = self.__send_and_receive(protocol.GET_IS_MOVE)\n value = self.__gen_response_value(response)\n if value:\n # printf(\"\".join(value[1:]))\n if \"\".join(value)[1:] == \"1\":\n return True\n else:\n return False\n else:\n return False", "def has_moved(self):\n return self.move_count > 0", "def checkMove(move: Card, game) -> bool:\n lastMove = game.lastMove\n\n if move.number == lastMove.number:\n return True\n\n elif move.color == lastMove.color: \n return True\n\n elif move.wild: \n return True\n\n return False", "def valid_move(self, stone_color, index):\n if self.get(index) is not None:\n print(\"Invalid move - Space it occupied\")\n return False\n elif self.is_suicide(stone_color, index):\n print(\"Invalid move - Suicide\")\n return False\n else:\n return True", "def can_move(self, relative_location: RelativeCoord) -> bool:\n\n return self.moves.can_move(relative_location)", "def can_move(self):\r\n for wall in self.app.walls:\r\n if vec(self.grid_pos+self.direction) == wall:\r\n return False\r\n return True", "def is_moving(self):\n is_moving = self.get_raw_status() & self.STATUS_MOVING\n return bool(is_moving)", "def test_check_move_with_invalid(self):\n board = [\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\"\\u25cb\"] * 6,\n [\" \"] * 6,\n [\" \"] * 6\n ]\n valid = self.game.check_move(board, 4)\n self.assertFalse(valid)", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # for red soldiers (who can only move downward or to the side)\n if self.get_color() == 'red':\n # if destination within the board and the move is strictly one downward or to the side\n if (to_col in range(9) and to_row in range(10) and\n ((abs(to_col - from_col) == 1 and to_row == from_row) or (to_col == from_col and to_row - from_row == 1))):\n 
return True\n # if moving diagonally within the blue palace\n if from_coordinates in [[3,7],[5,7]] and to_coordinates == [4,8]:\n return True\n if from_coordinates == [4,8] and to_coordinates in [[3,9],[5,9]]:\n return True\n\n return False\n\n # for blue soldiers (who can only move upward or to the side)\n if self.get_color() == 'blue':\n # if destination within the board and the move is strictly one upward or to the side\n if (to_col in range(9) and to_row in range(10) and\n ((abs(to_col - from_col) == 1 and to_row == from_row) or (to_col == from_col and to_row - from_row == -1))):\n return True\n # if moving diagonally within the red palace\n if from_coordinates in [[3, 2], [5, 2]] and to_coordinates == [4, 1]:\n return True\n if from_coordinates == [4, 1] and to_coordinates in [[3, 0], [5, 0]]:\n return True\n\n return False\n\n return False", "def legalMove(self,p,intMove):\n mPos = self.movePos(p,intMove)#board position of move\n if(self.inBounds(mPos)!=True):#Can't make move out of board bounds\n return False\n #if(p.color != self.whoseTurn):#Can't make move if it's not players pawn\n # return False\n if(intMove==0):#to move forward the node must be empty\n return (self.gameState[mPos.get()] == EMPTY)\n else:#to attack the node must have an enemy\n return (self.gameState[mPos.get()] == self.togglePlayer(p.color))", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # a cannon cannot capture another cannon\n if type(board[to_col][to_row]) == Cannon:\n return False\n\n # if destination within the board and the move is strictly horizontal or vertical\n if to_col in range(9) and to_row in range(10) and (to_col == from_col or to_row == from_row):\n # if move is to the left\n if to_col < from_col:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for col in range(to_col + 1, from_col):\n if type(board[col][to_row]) == Cannon:\n return False\n if issubclass(type(board[col][to_row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is to the right\n if to_col > from_col:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for col in range(from_col + 1, to_col):\n if type(board[col][to_row]) == Cannon:\n return False\n if issubclass(type(board[col][to_row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is upward\n if to_row < from_row:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for row in range(to_row + 1, from_row):\n if type(board[to_col][row]) == Cannon:\n return False\n if issubclass(type(board[to_col][row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is downward\n if to_row > from_row:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for row in range(from_row + 1, to_row):\n if type(board[to_col][row]) == Cannon:\n return False\n if issubclass(type(board[to_col][row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n return False\n\n # for moving diagonally in the red palace\n if (from_coordinates in [[3,0],[3,2],[5,0],[5,2]] and to_coordinates in [[3,0],[3,2],[5,0],[5,2]] and\n type(board[4][1]) != Cannon and issubclass(type(board[4][1]), Piece)):\n return 
True\n\n # for moving diagonally in the blue palace\n if (from_coordinates in [[3,7],[3,9],[5,7],[5,9]] and to_coordinates in [[3,7],[3,9],[5,7],[5,9]] and\n type(board[4][8]) != Cannon and issubclass(type(board[4][8]), Piece)):\n return True\n\n return False", "def valid_move(board, row, col):\n return board[row][col] == '-'", "def check_move(self, x, y):\n try:\n return self.map[self.y+y][self.x+x] == \" \" or [self.x+x, self.y+y] == self.end_pos\n except IndexError:\n return False", "def can_move(self,direction):\r\n if direction in self.current_room.return_directions():\r\n print('move into the next room')\r\n # makes next room \r\n self.next_room(direction)\r\n return True\r\n else:\r\n print(\"Can't move that way\")\r\n return False", "def _ismoving(self):\n return self.dp.state()==PyTango.DevState.MOVING", "def verify_legal_move(self, direction):\n for b_x, b_y in self.get_block_positions(self.active_piece.FIGURE):\n\n if direction == \"LEFT\":\n b_x -= 1\n elif direction == \"RIGHT\":\n b_x += 1\n elif direction == \"DOWN\":\n b_y += 1\n else:\n raise ValueError\n\n if b_x < 0 or b_x >= self.WIDTH:\n return False\n\n if b_y < 0 or b_y >= self.HEIGHT:\n return False\n\n if self.board[b_y][b_x] != 0:\n return False\n return True", "def is_game_over(self):\n if (self.check_win(HexBoard.RED) or self.check_win(HexBoard.BLUE) or \n len(self.get_move_list())==0):\n self.game_over = True\n return self.game_over", "def validate_move(board: list, character: list, direction: str) -> bool:\n max_x_y_coordinates = board[-1]\n valid_options = []\n if character[1] < max_x_y_coordinates[0]:\n valid_options.append(\"d\")\n if character[1] > 0:\n valid_options.append(\"a\")\n if character[0] < max_x_y_coordinates[1]:\n valid_options.append(\"s\")\n if character[0] > 0:\n valid_options.append(\"w\")\n if direction in valid_options:\n return True\n else:\n return False", "def is_win(state: StonehengeState) -> bool:\n moves = []\n for move in state.get_possible_moves():\n new_state = deepcopy(state.make_move(move))\n moves.append(new_state.finished())\n return any(moves)", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n if self._color == 'red':\n # if destination within the palace:\n if (to_col in range(3,6) and to_row in range(3) and\n # and the move is 1 horizontal or 1 vertical:\n (((abs(to_col-from_col) == 1 and to_row-from_row == 0) or\n (to_col-from_col == 0 and abs(to_row-from_row) == 1)) or\n # or the move is one diagonal:\n ((from_coordinates == [4,1] and to_coordinates in [[3,0],[3,2],[5,0],[5,2]]) or\n (from_coordinates in [[3,0],[3,2],[5,0],[5,2]] and to_coordinates == [4,1]))\n )\n ):\n return True\n else:\n return False\n\n if self._color == 'blue':\n # if destination within the palace:\n if (to_col in range(3,6) and to_row in range(7,10) and\n # and the move is 1 horizontal or 1 vertical:\n (((abs(to_col-from_col) == 1 and to_row-from_row == 0) or\n (to_col-from_col == 0 and abs(to_row-from_row) == 1)) or\n # or the move is one diagonal:\n ((from_coordinates == [4,8] and to_coordinates in [[3,7],[3,9],[5,7],[5,9]]) or\n (from_coordinates in [[3,7],[3,9],[5,7],[5,9]] and to_coordinates == [4,8]))\n )\n ):\n return True\n else:\n return False", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = 
JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # if destination within the board and the move is strictly horizontal or vertical\n if to_col in range(9) and to_row in range(10) and (to_col == from_col or to_row == from_row):\n # if move is to the left\n if to_col < from_col:\n # make sure no other piece lies between to and from\n for col in range(to_col + 1, from_col):\n if board[col][to_row] != '':\n return False\n return True\n # if move is to the right\n if to_col > from_col:\n # make sure no other piece lies between to and from\n for col in range(from_col + 1, to_col):\n if board[col][to_row] != '':\n return False\n return True\n # if move is upward\n if to_row < from_row:\n # make sure no other piece lies between to and from\n for row in range(to_row + 1, from_row):\n if board[to_col][row] != '':\n return False\n return True\n # if move is downward\n if to_row > from_row:\n # make sure no other piece lies between to and from\n for row in range(from_row + 1, to_row):\n if board[to_col][row] != '':\n return False\n return True\n\n return False\n\n # if moving along the diagonals in the red palace\n if from_coordinates in [[3,0],[3,2],[5,0],[5,2]] and to_coordinates in [[3,0],[3,2],[5,0],[5,2]] and board[4][1] == '':\n return True\n if from_coordinates in [[3,0],[3,2],[5,0],[5,2]] and to_coordinates == [4,1]:\n return True\n if from_coordinates == [4,1] and to_coordinates in [[3,0],[3,2],[5,0],[5,2]]:\n return True\n\n # if moving along the diagonals in the blue palace\n if from_coordinates in [[3,7],[3,9],[5,7],[5,9]] and to_coordinates in [[3,7],[3,9],[5,7],[5,9]] and board[4][8] == '':\n return True\n if from_coordinates in [[3,7],[3,9],[5,7],[5,9]] and to_coordinates == [4,8]:\n return True\n if from_coordinates == [4,8] and to_coordinates in [[3,7],[3,9],[5,7],[5,9]]:\n return True\n\n return False", "def check_move(self, y, x):\n return 0 <= y < len(self.maze) \\\n and 0 <= x < len(self.maze[y]) \\\n and self.maze[y][x] != \"#\"", "def time_to_move(self):\r\n if int(self.pix_pos.x+TOP_BOTTOM_BUFFER//2) % self.app.cell_width == 0:\r\n if self.direction == vec(1, 0) or self.direction == vec(-1, 0) or self.direction == vec(0, 0):\r\n return True\r\n # for the x-direction\r\n\r\n if int(self.pix_pos.y+TOP_BOTTOM_BUFFER//2) % self.app.cell_height == 0:\r\n if self.direction == vec(0, 1) or self.direction == vec(0, -1) or self.direction == vec(0, 0):\r\n return True\r\n # for the y-direction\r\n\r\n # checks to see if the player is still within the bounds\r", "def validMove(move):\r\n\r\n\tglobal tile1, tile2, tile3, tile4, tile5, tile6, tile7, tile8, tile9\r\n\r\n\ta=eval(\"tile\"+str(move)+\"==0\")\r\n\treturn a", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n if self._color == 'red':\n # if destination within the palace:\n if (to_col in range(3, 6) and to_row in range(3) and\n # and the move is 1 horizontal or 1 vertical:\n (((abs(to_col - from_col) == 1 and to_row - from_row == 0) or\n (to_col - from_col == 0 and abs(to_row - from_row) == 1)) or\n # or the move is one diagonal:\n ((from_coordinates == [4, 1] and to_coordinates in [[3, 0], [3, 2], [5, 0], [5, 
2]]) or\n (from_coordinates in [[3, 0], [3, 2], [5, 0], [5, 2]] and to_coordinates == [4, 1]))\n )\n ):\n return True\n else:\n return False\n\n if self._color == 'blue':\n # if destination within the palace:\n if (to_col in range(3, 6) and to_row in range(7, 10) and\n # and the move is 1 horizontal or 1 vertical:\n (((abs(to_col - from_col) == 1 and to_row - from_row == 0) or\n (to_col - from_col == 0 and abs(to_row - from_row) == 1)) or\n # or the move is one diagonal:\n ((from_coordinates == [4, 8] and to_coordinates in [[3, 7], [3, 9], [5, 7], [5, 9]]) or\n (from_coordinates in [[3, 7], [3, 9], [5, 7], [5, 9]] and to_coordinates == [4, 8]))\n )\n ):\n return True\n else:\n return False", "def test_check_move_with_valid(self):\n board = [\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\"\\u25cb\"] + [\" \"] * 5,\n [\" \"] * 6,\n [\" \"] * 6\n ]\n valid = self.game.check_move(board, 3)\n self.assertTrue(valid)", "def is_legal_move(state, action, player, rewarding_move=False): # TODO: Update this function to an more\n # optimized one.\n action = action.get_action_as_dict()\n if rewarding_move:\n if player == state.get_next_player() == state.get_latest_player():\n if action['action_type'] == YoteActionType.STEAL_FROM_HAND and state.in_hand[player * -1] > 0:\n return True\n elif action['action_type'] == YoteActionType.STEAL_FROM_BOARD:\n opponent_piece = state.get_board().get_player_pieces_on_board(Color(player * -1))\n if opponent_piece and action['action']['at'] in opponent_piece:\n return True\n return False\n else:\n if state.get_next_player() == player:\n if action['action_type'] == YoteActionType.ADD and state.in_hand[player] > 0:\n empty_cells = state.get_board().get_all_empty_cells()\n if empty_cells and action['action']['to'] in empty_cells:\n return True\n elif action['action_type'] == YoteActionType.MOVE:\n if state.get_board().get_cell_color(action['action']['at']) == Color(player):\n effective_moves = YoteRules.get_effective_cell_moves(state, action['action']['at'], player)\n if effective_moves and action['action']['to'] in effective_moves:\n return True\n return False\n return False", "def is_legal_move(self, start_pos, end_pos, start_piece, end_piece_player_id, board):\r\n parsed_positions = self.parse_positions(start_pos, end_pos)\r\n start_row = parsed_positions[0]\r\n start_col = parsed_positions[1]\r\n end_row = parsed_positions[2]\r\n end_col = parsed_positions[3]\r\n\r\n # Case for Red's side\r\n if start_piece.get_player_id() == 'r':\r\n if not (3 <= end_col <= 5 and 0 <= end_row <= 2): # Returns False when is to move outside the palace\r\n return False\r\n else:\r\n if abs(start_col - end_col) == 1 and abs(start_row - end_row) == 1: # Checks if end_pos forces a move diagonally\r\n return True\r\n else:\r\n return False\r\n\r\n # Case for Black's side\r\n else:\r\n if not (3 <= end_col <= 5 and 7 <= end_row <= 9): # Returns False when is to move outside the palace\r\n return False\r\n else:\r\n if abs(start_col - end_col) == 1 and abs(start_row - end_row) == 1: # Checks if end_pos forces a move diagonally\r\n return True\r\n else:\r\n return False", "def getMoveStatus(self):\n return self.hasMoved", "def any_legal_move(self, player, board):\r\n moves = self.legal_moves(player, board)\r\n #print(moves)\r\n return len(moves)!=0", "def can_move(self, direction):\n assert direction\n return self._walls & direction == 0", "def validate_move(coordinates: dict, character_dict: dict, user_input: str) -> bool:\n new_coordinate = 
get_new_coordinate(x_y_coordinate=character_dict, move_direction=user_input)\n return new_coordinate in coordinates", "def get_is_moving(self):\r\n return self._arm.get_is_moving()", "def is_legal_move(self, house_num):\n return True", "def check_move(self, row, column):\n\n return self._board[row][column] == ' '", "def validate_movement(self, piece, from_col, from_row, to_col, to_row):\n col_diff = abs(ord(from_col) - ord(to_col))\n row_diff = abs(from_row - to_row)\n\n # For any piece, it must actually move...\n if col_diff == 0 and row_diff == 0:\n return False\n # ...and there must be empty spaces in between the from/to squares (when on a column, row, or diagonal)\n if not self.validate_empty_between(from_col, from_row, to_col, to_row):\n return False\n\n # White pawn\n if piece == 'P':\n if col_diff == 1 and (to_row - from_row == 1):\n # Can move diagonally up one square, if taking another piece in that square or by en-passant\n return self.piece_colour(to_col, to_row) == 'B' \\\n or self.is_en_passant(from_col, from_row, to_col, to_row)\n elif col_diff != 0:\n # Otherwise, it can't change columns\n return False\n elif from_row == 2:\n # From initial position, can go up one or two rows (but can't take a piece)\n return (to_row == 3 or to_row == 4) and self.get_square(to_col, to_row) == ' '\n else:\n # Otherwise, can only move up one row (but can't take a piece)\n return to_row - from_row == 1 and self.get_square(to_col, to_row) == ' '\n # Black pawn\n elif piece == 'p':\n if col_diff == 1 and (from_row - to_row == 1):\n # Can move diagonally down one square, if taking another piece in that square or by en-passant\n return self.piece_colour(to_col, to_row) == 'W' \\\n or self.is_en_passant(from_col, from_row, to_col, to_row)\n elif col_diff != 0:\n # Otherwise, it can't change columns\n return False\n elif from_row == 7:\n # From initial position, can go down one or two rows (but can't take a piece)\n return (to_row == 6 or to_row == 5) and self.get_square(to_col, to_row) == ' '\n else:\n # Otherwise, can only move down one row (but can't take a piece)\n return from_row - to_row == 1 and self.get_square(to_col, to_row) == ' '\n # Rook\n elif piece.lower() == 'r':\n # Must remain in same column or same row\n return col_diff == 0 or row_diff == 0\n # Knight\n elif piece.lower() == 'n':\n # Jumps in a 2+1 pattern\n return (col_diff == 2 and row_diff == 1) or (col_diff == 1 and row_diff == 2)\n # Bishop\n elif piece.lower() == 'b':\n # Moves along diagonals\n return col_diff == row_diff\n # Queen\n elif piece.lower() == 'q':\n # Can move along columns, rows, or diagonals\n return col_diff == 0 or row_diff == 0 or col_diff == row_diff\n # King\n elif piece.lower() == 'k':\n # Can move a single square in any direction\n if not(0 <= col_diff <= 1) or not(0 <= row_diff <= 1):\n return False\n\n # But not next to the other king\n other_king = 'k' if piece.isupper() else 'K'\n # Get valid border squares\n border_squares = list(filter(\n lambda b_square: 'a' <= b_square[0] <= 'f' and 1 <= b_square[1] <= 8,\n [\n (chr(ord(to_col) - 1), to_row - 1), (to_col, to_row - 1), (chr(ord(to_col) + 1), to_row - 1),\n (chr(ord(to_col) - 1), to_row), (to_col, to_row), (chr(ord(to_col) + 1), to_row),\n (chr(ord(to_col) - 1), to_row + 1), (to_col, to_row + 1), (chr(ord(to_col) + 1), to_row + 1)\n ]\n ))\n # Check for the other king\n for square in border_squares:\n if self.get_square(square[0], square[1]) == other_king:\n return False\n\n return True", "def __valid_token_moves(self, state, next_state, 
token_id):\r\n if next_state == False:\r\n return [False, False, False, False]\r\n\r\n current_pos_token = state.state[0][token_id]\r\n next_pos_token = next_state.state[0][token_id]\r\n\r\n current_opponent_states = state.state[1:]\r\n next_opponent_states = next_state.state[1:]\r\n\r\n moved_out = (current_pos_token == -1) and (next_pos_token != -1)\r\n into_goal = (current_pos_token != 99) and (next_pos_token == 99)\r\n send_opp_home = self.__will_send_opponent_home(np.array(current_opponent_states), np.array(next_opponent_states))\r\n send_self_home = (current_pos_token != -1) and (next_pos_token == -1)\r\n \r\n\r\n token_actions = [moved_out, into_goal, send_opp_home, send_self_home] # True if action is valid\r\n\r\n return token_actions", "def is_legal_move(self, start_pos, end_pos, start_piece, end_piece_player_id, board):\r\n parsed_positions = self.parse_positions(start_pos, end_pos)\r\n\r\n start_row = parsed_positions[0]\r\n start_col = parsed_positions[1]\r\n end_row = parsed_positions[2]\r\n end_col = parsed_positions[3]\r\n\r\n # For horizontal movements for the horse\r\n if abs(end_row - start_row) == 1 and abs(end_col - start_col) == 2:\r\n # For movement going left\r\n if end_col - start_col == -2:\r\n if board[start_row][start_col-1].get_piece() is None: # Checks if horse is blocked\r\n return True\r\n else:\r\n return False\r\n # For movement going right\r\n else:\r\n if board[start_row][start_col + 1].get_piece() is None: # Checks if horse is blocked\r\n return True\r\n else:\r\n return False\r\n\r\n # For vertical movement for the horse\r\n elif abs(end_row - start_row) == 2 and abs(end_col - start_col) == 1:\r\n # For movement going down\r\n if end_row - start_row == 2:\r\n if board[start_row + 1][start_col].get_piece() is None:\r\n return True\r\n else:\r\n return False\r\n # For movement going up\r\n if end_row - start_row == -2:\r\n if board[start_row - 1][start_col].get_piece() is None:\r\n return True\r\n else:\r\n return False\r\n\r\n # Returns False if invalid end_pos for the horse\r\n else:\r\n return False", "def test_check_move_with_barely_valid(self):\n board = [\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\"\\u25cb\"] * 5 + [\" \"],\n [\" \"] * 6,\n [\" \"] * 6\n ]\n valid = self.game.check_move(board, 4)\n self.assertTrue(valid)", "def no_more_move(self):\n if (self.p_no_move + self.c_no_move == 2):\n return True\n return False", "def can_turn_without_moving(self):\n return self.turn", "def is_legal_move(self, start_pos, end_pos, start_piece, end_piece_player_id, board):\r\n parsed_positions = self.parse_positions(start_pos, end_pos)\r\n start_row = parsed_positions[0]\r\n start_col = parsed_positions[1]\r\n end_row = parsed_positions[2]\r\n end_col = parsed_positions[3]\r\n\r\n if start_row != end_row and start_col != end_col: # Moving non-orthogonally\r\n return False\r\n\r\n if start_row == end_row: # Moving horizontally\r\n col_difference = end_col - start_col\r\n\r\n if col_difference > 0: # Moving to the right of the board\r\n for col in range(start_col + 1, end_col): # Checks if there is a piece between start_col and end_col\r\n if board[start_row][col].get_piece() is not None:\r\n return False\r\n # When there is no piece to impede path, check if position is empty or piece is enemy piece\r\n if end_piece_player_id is None or start_piece.get_player_id() != end_piece_player_id:\r\n return True\r\n\r\n if col_difference < 0: # Moving to the left of the board\r\n for col in range(start_col - 1, end_col, -1): # Checks to the left of 
the board\r\n # If there is a piece to block movement to the end_pos, return False\r\n if board[start_row][col].get_piece() is not None:\r\n return False\r\n if end_piece_player_id is None or start_piece.get_player_id() != end_piece_player_id:\r\n return True\r\n\r\n if start_col == end_col: # Moving verticially\r\n row_difference = end_row - start_row\r\n\r\n if row_difference > 0: # Moving down the board\r\n for row in range(start_row + 1, end_row):\r\n if board[row][start_col].get_piece() is not None: # If no piece is impeding path to end_pos\r\n return False\r\n # Checks if end_pos is empty or an enemy piece is on end_pos\r\n if end_piece_player_id is None or start_piece.get_player_id() != end_piece_player_id:\r\n return True\r\n\r\n if row_difference < 0:\r\n for row in range(start_row -1, end_row, -1):\r\n if board[row][start_col].get_piece() is not None: # If no piece is impeding path to end_pos\r\n return False\r\n # Checks if end_pos is empty or an enemy piece is on end_pos\r\n if end_piece_player_id is None or start_piece.get_player_id() != end_piece_player_id:\r\n return True", "def isValidMove(self, environment, currentCell, x2, y2, checkVisited = True):\n\n x1, y1 = currentCell.location.x, currentCell.location.y\n\n # Check if within bounds\n if x2 < 0 or x2 >= environment.length or y2 < 0 or y2 >= environment.breadth:\n return False\n\n # Check if cell is a wall\n nextCell = environment.grid[x2][y2]\n if nextCell.type == 'wall':\n return False\n\n # Check if cell is already visited\n if checkVisited and nextCell in self.visited:\n return False\n\n # Check for diagonal movement and corner cutting\n manhattanDistance = abs(x1-x2) + abs(y1-y2)\n if manhattanDistance == 2:\n if not environment.allowDiagonals:\n return False\n if not environment.cutCorners:\n if environment.grid[x1][y2].type == 'wall' or environment.grid[x2][y1].type == 'wall':\n return False\n else:\n if environment.grid[x1][y2].type == 'wall' and environment.grid[x2][y1].type == 'wall':\n return False\n\n return True", "def check_one_move(self):\n count_moves = 0\n one_move = None\n for direction in self.directions:\n if self.valid_move(self.loc, direction):\n count_moves += 1\n one_move = direction\n if count_moves != 1:\n return None\n return one_move", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # if destination within the board\n if to_col in range(9) and to_row in range(10):\n # if destination is 1 up and diagonally to the left\n if to_col - from_col == -2 and to_row - from_row == -3 and board[from_col][from_row - 1] == '' and board[from_col - 1][from_row - 2] == '':\n return True\n # if destination is 1 up and diagonally to the right\n if to_col - from_col == 2 and to_row - from_row == -3 and board[from_col][from_row - 1] == '' and board[from_col + 1][from_row - 2] == '':\n return True\n # if destination is 1 down and diagonally to the left\n if to_col - from_col == -2 and to_row - from_row == 3 and board[from_col][from_row + 1] == '' and board[from_col - 1][from_row + 2] == '':\n return True\n # if destination is 1 down and diagonally to the right\n if to_col - from_col == 2 and to_row - from_row == 3 and board[from_col][from_row + 1] == '' and board[from_col + 1][from_row + 2] == '':\n return True\n # if destination is 1 left and diagonally up\n if 
to_col - from_col == -3 and to_row - from_row == -2 and board[from_col - 1][from_row] == '' and board[from_col - 2][from_row - 1] == '':\n return True\n # if destination is 1 left and diagonally down\n if to_col - from_col == -3 and to_row - from_row == 2 and board[from_col - 1][from_row] == '' and board[from_col - 2][from_row + 1] == '':\n return True\n # if destination is 1 right and diagonally up\n if to_col - from_col == 3 and to_row - from_row == -2 and board[from_col + 1][from_row] == '' and board[from_col + 2][from_row - 1] == '':\n return True\n # if destination is 1 right and diagonally down\n if to_col - from_col == 3 and to_row - from_row == 2 and board[from_col + 1][from_row] == '' and board[from_col + 2][from_row + 1] == '':\n return True\n return False", "def checkValidOneMove(ndSoln):\n for i in range(len(ndSoln)-1):\n x1 = ndSoln[i][0]\n y1 = ndSoln[i][1]\n x2 = ndSoln[i+1][0]\n y2 = ndSoln[i+1][1]\n #take Euclidean distance between two consecutive moves\n #which should be approx. 1.0 if the move is valid\n if not(math.isclose(hypot(x2-x1,y2-y1),1.0)):\n return False\n return True", "def is_valid_pos(self, pos_step):\n return not (self.pos.x % pos_step or self.pos.y % pos_step)" ]
[ "0.8282215", "0.8074329", "0.80526775", "0.7981014", "0.78986144", "0.78754056", "0.7871537", "0.78681695", "0.7756859", "0.7713244", "0.7703341", "0.76398927", "0.7610206", "0.75776756", "0.75525403", "0.75510347", "0.7432144", "0.73722756", "0.7332168", "0.72985274", "0.72787625", "0.7255667", "0.72550565", "0.72518146", "0.72038835", "0.7189443", "0.716191", "0.716111", "0.7146476", "0.714388", "0.71411824", "0.7116413", "0.7096034", "0.70959437", "0.70801705", "0.7038388", "0.70311266", "0.70291173", "0.70279455", "0.70233554", "0.7004154", "0.69969755", "0.699156", "0.6990175", "0.6989235", "0.6982965", "0.69698405", "0.6953381", "0.6951546", "0.6937925", "0.69352394", "0.6933345", "0.69155645", "0.6906702", "0.68822944", "0.6848001", "0.68308914", "0.680409", "0.68000275", "0.67993474", "0.6785584", "0.67703104", "0.6755505", "0.6734752", "0.66805804", "0.6670781", "0.6666476", "0.6652975", "0.664842", "0.66194874", "0.66089416", "0.6595169", "0.6590357", "0.6584536", "0.6578412", "0.6570294", "0.6566986", "0.65666515", "0.6565979", "0.6556333", "0.6546549", "0.65259755", "0.6517793", "0.6516739", "0.65119773", "0.6505201", "0.6503104", "0.6502492", "0.64979035", "0.648635", "0.64848363", "0.6469626", "0.64583415", "0.6456065", "0.645013", "0.64487815", "0.64473677", "0.6416259", "0.6408056", "0.6407788" ]
document_score: 0.8471639
document_rank: 0
Return a representation of this state (which can be used for equality testing).
def __repr__(self) -> Any:
    game_board = self.__str__() + "\n"
    current_player_info = "Is p1 the current player? " + str(self.p1_turn)
    result = game_board + current_player_info
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def state(self):\n return str(self)", "def state(self):\r\n return str(self)", "def state(self):\n\n\t\treturn str(self)", "def __repr__( self ):\n\n return self.__class__.__name__ + \"( \" + repr(self.state) + \")\";", "def __repr__(self):\r\n r = str(self.current_instance_state())\r\n return r", "def __str__(self):\n return ''.join(str(e) + ' ' for e in self.state)", "def state_raw(self):\n return self._state_raw", "def __repr__(self):\n return \"{}(definition={!r}, state={!r}, id={:d})\".format(\n self.__class__.__name__, self.definition, self.state, self.id\n )", "def __repr__(self):\n return (\n repr_builder.ReprBuilder(self)\n .add_value('uid', self._uid)\n .add_value('state', self._state.name)\n .add_value('sandbox', str(self._sandbox))\n .add_value('sender_uid', self._sender.uid)\n .format()\n )", "def stateString(self):\n return self._mdp.stateString(self._cur_state);", "def to_state_json(self) -> Dict[str, Any]:\n return self.state_to_json()", "def __str__(self):\n return \"\".join(list(map(lambda row: ''.join(row), self.state)))", "def __repr__(self) -> str:\n return str(self.as_dict())", "def __repr__(self):\n return str(dict(self))", "def __repr__(self):\n return str(self.__dict__)", "def __repr__(self):\n return str(self.__dict__)", "def __repr__(self):\n return str(self.__dict__)", "def __repr__(self):\n return str(self.__dict__)", "def __repr__(self):\n return str(self.__dict__)", "def _repr_(self):\n return \"Transition from %s to %s: %s\" % (repr(self.from_state),\n repr(self.to_state),\n self._in_out_label_())", "def __repr__(self):\n #return f'{self.id};{self.state}'\n return f'{self.x};{self.y}'", "def state(self):\n return self._state.copy()", "def state(self):\n # type: () -> string_types\n return self._state", "def __repr__(self):\n return \"{}(value={})\".format(self.__class__.__name__, self.value)", "def __repr__(self):\n return \"{}(value={})\".format(self.__class__.__name__, self.value)", "def __str__(self):\n state = ''\n state += ' '.join([str(x) for x in self.pos]) + ' '\n state += ''.join([str(x) + ' ' + str(y) + ' ' for x,\n y in zip(self.BU, self.BD)])\n for e in self.BF:\n state += ' '.join([str(x) for x in e])\n state += ' '\n state += ' '.join([str(x) for x in self.LU]) + ' '\n state += ' '.join([str(x) for x in self.LD]) + ' '\n\n return state", "def __getstate__(self):\n state = self.__dict__.copy()\n self.__cleanState__(state)\n return state", "def get_map_state(self):\n return self.serialize()", "def __repr__(self):\n if self.status == STATUS_CLOSED:\n status = \"CLOSED\"\n elif self.status == STATUS_OPEN:\n status = \"OPEN\"\n else:\n status = \"UNKNOWN\"\n return f\"<{self.__class__.__name__} [{self.key}] status={status} failures={self.failures} checkin={self.checkin}, jitter={self._last_jitter}>\"", "def __repr__(self):\n\n return repr(self.__dict__)", "def __repr__(self):\n string = \"Current state: \\n\"\n if self.state[0] == 0: # We're on the left side\n string += \"M: \"\n string += str(self.state[1]).ljust(10)\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"\\n\"\n\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: left\\n\"\n else: # We're on the right side\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"M: \"\n string += str(self.state[1])\n string += \"\\n\"\n\n string += \"C: \"\n string += 
str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: right\\n\"\n string += \"\\n\"\n return string", "def to_json(self):\n return {\n \"state_type\": self.state_type,\n \"state_id\": self.state_id,\n \"public\": self.public,\n \"private\": self.private\n }", "def __repr__(self):\n return str(self.value)", "def __repr__(self):\n return str(self.value)", "def __getstate__(self):\n return self.__dict__", "def state(self) -> str:", "def __str__(self):\n # Build the string line by line. Join at the end.\n lines = []\n lines.append(\"Initial State: {{{}}}\".format(self.initial_state))\n lines.append(\n \"Final States: {{{}}}\".format(\n \",\".join(map(str, self.final_states))))\n\n # column headers\n lines.append(\n \"State\\t{}\".format(\"\\t\".join(self.alphabet)))\n\n # For each state, print transitions\n for state_name in range(1, len(self.transitions) + 1):\n line = \"{}\".format(state_name)\n for symbol in self.alphabet:\n line += \"\\t{{{}}}\".format(\n \",\".join(map(str, self.transitions.get(\n state_name, dict()).get(symbol, []))))\n lines.append(line)\n\n return \"\\n\".join(lines)", "def serialise(self) -> SerialisedState:\n raise NotImplementedError", "def __repr__(self) -> str:\r\n\t\treturn \"{state}\\n token: {token}\\n Marker: {marker}\\n NewToken: {newToken}\\n newBlock: {newBlock}\".format(\r\n\t\t\tstate=self.NextState.__func__.__qualname__,\r\n\t\t\ttoken=self.Token,\r\n\t\t\tmarker=self.TokenMarker,\r\n\t\t\tnewToken=self.NewToken,\r\n\t\t\tnewBlock=self.NewBlock,\r\n\t\t)", "def save_state_to_dict(self):\n return self.__dict__.copy()", "def __repr__(self) -> str:\n return \"<{}(active={}, is_transaction={})>\".format(\n self.__class__.__name__, self._active, self.is_transaction\n )", "def __repr__(self) -> str:\n return self.value", "def __repr__(self):\n return u\"(\" + str(self) + \", \" + str(dict(self)) + u')'", "def __repr__(self, state):\n print ' ',\n for w in range(len(state)+2):\n print \"___\",\n print '\\n'\n for x in state:\n print \"| \", x, \" |\"\n print ' ',\n for y in range(len(state)+2):\n print \"___\",\n print '\\n'\n return state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self) -> str:\n return self._state", "def state(self):\n return self.get_state()", "def __repr__(self):\n representantion = ''\n\n for i in range(3):\n for j in range(3):\n representantion += str(self.state[3 * i + j])\n\n if j == 2 and i != 2:\n representantion += '\\n'\n else:\n representantion += ' '\n\n return representantion", "def _repr_(self):\n if len(self._states_)==0:\n return \"Empty automaton\"\n if len(self._states_)==1:\n return \"Automaton with 1 state\"\n else:\n return \"Automaton with %s states\" % len(self._states_)", "def state(self):\n return self._state()", "def get_instance_state(self):\r\n\r\n state = {\r\n 'version': self.STATE_VERSION,\r\n 'child_history': self.child_history,\r\n 'child_state': self.child_state,\r\n 'max_score': self._max_score,\r\n 'child_attempts': self.child_attempts,\r\n 'child_created': self.child_created,\r\n 'stored_answer': self.stored_answer,\r\n }\r\n return json.dumps(state)", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def 
__repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()" ]
[ "0.81590044", "0.8067545", "0.7992787", "0.76251036", "0.75809264", "0.7424345", "0.7290553", "0.7284625", "0.7240699", "0.7135407", "0.7102532", "0.70908487", "0.7075042", "0.7063691", "0.7046428", "0.7046428", "0.7046428", "0.7046428", "0.7046428", "0.6999811", "0.6971935", "0.69708204", "0.69302493", "0.6927752", "0.6927752", "0.69233316", "0.69121283", "0.6900457", "0.6874701", "0.6859624", "0.6837503", "0.68211305", "0.681918", "0.681918", "0.68191105", "0.68128294", "0.6801264", "0.67984575", "0.67965907", "0.67890865", "0.67729104", "0.6772197", "0.67531735", "0.6748018", "0.6740931", "0.6740931", "0.6740931", "0.6740931", "0.6740931", "0.6716508", "0.6710727", "0.67091984", "0.6706314", "0.67007494", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606", "0.6676606" ]
document_score: 0.0
document_rank: -1
Return an estimate in interval [LOSE, WIN] of best outcome the current player can guarantee from state self.
def rough_outcome(self) -> float:
    # HUYNH YOU PRICK WHY THE FUCK DO YOU MAKE US WRITE THIS SHIT EVEN IT'S NOT USED ANYWHERE
    # pick move based on this may not be optimal but better than random
    # return 1 if win immediately
    # return -1 if all states reachable will result the other player win
    # return 0 if otherwise ??? what the fuck does this mean
    # look two states forward
    pass
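The stub above only states the intended contract in its comments. For illustration only (this sketch is not part of the original dataset row; the is_winner helper and the literal 1/-1 values for WIN and LOSE are assumptions, while get_possible_moves and make_move mirror the GameState API used by the other snippets in this dump), a minimal two-state lookahead matching that contract could look like:

def rough_outcome_sketch(self) -> float:
    # Hypothetical sketch, not the original implementation.
    current = 'p1' if self.p1_turn else 'p2'
    opponent = 'p2' if self.p1_turn else 'p1'
    moves = self.get_possible_moves()
    if not moves:
        return -1  # no moves left: nothing the current player can guarantee
    # 1 (WIN) if some move wins the game immediately.
    for move in moves:
        if self.make_move(move).is_winner(current):
            return 1
    # -1 (LOSE) if every move lets the opponent win on the very next turn.
    if all(any(after.make_move(reply).is_winner(opponent)
               for reply in after.get_possible_moves())
           for after in (self.make_move(move) for move in moves)):
        return -1
    # 0 otherwise: nothing is guaranteed within a two-state lookahead.
    return 0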
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rough_outcome(self) -> float:\n if is_win(self):\n return 1\n elif is_lose(self):\n return -1\n return 0", "def rough_outcome_strategy(game: Any) -> Any:\n current_state = game.current_state\n best_move = None\n best_outcome = -2 # Temporarily -- just so we can replace this easily later\n\n # Get the move that results in the lowest rough_outcome for the opponent\n for move in current_state.get_possible_moves():\n new_state = current_state.make_move(move)\n\n # We multiply the below by -1 since a state that's bad for the opponent\n # is good for us.\n guessed_score = new_state.rough_outcome() * -1\n if guessed_score > best_outcome:\n best_outcome = guessed_score\n best_move = move\n\n # Return the move that resulted in the best rough_outcome\n return best_move", "def rough_outcome_strategy(game: Any) -> Any:\n current_state = game.current_state\n best_move = None\n best_outcome = -2 # Temporarily -- just so we can replace this easily later\n\n # Get the move that results in the lowest rough_outcome for the opponent\n for move in current_state.get_possible_moves():\n new_state = current_state.make_move(move)\n\n # We multiply the below by -1 since a state that's bad for the opponent\n # is good for us.\n guessed_score = new_state.rough_outcome() * -1\n if guessed_score > best_outcome:\n best_outcome = guessed_score\n best_move = move\n\n # Return the move that resulted in the best rough_outcome\n return best_move", "def rough_outcome_strategy(game: Any) -> Any:\n current_state = game.current_state\n best_move = None\n best_outcome = -2 # Temporarily -- just so we can replace this easily later\n\n # Get the move that results in the lowest rough_outcome for the opponent\n for move in current_state.get_possible_moves():\n new_state = current_state.make_move(move)\n\n # We multiply the below by -1 since a state that's bad for the opponent\n # is good for us.\n guessed_score = new_state.rough_outcome() * -1\n if guessed_score > best_outcome:\n best_outcome = guessed_score\n best_move = move\n\n # Return the move that resulted in the best rough_outcome\n return best_move", "def reward(self, winner):\n if winner == self.side:\n return self.win\n elif winner == VALUES.NOT_FINISHED:\n return self.not_finished\n elif winner == VALUES.DRAW:\n return self.draw\n else:\n return self.lose", "def calc_winner(self):\n pass", "def get_game_score(self):\n if self.game_is_tied():\n return 0\n elif self.is_game_won():\n my_available_steps = self.steps_available(self.loc)\n opp_available_steps = self.steps_available(self.opponent_loc)\n my_score = self.my_score - self.penalty_score if my_available_steps == 0 else self.my_score\n opp_score = self.opponent_score - self.penalty_score if opp_available_steps == 0 else self.opponent_score\n return (my_score - opp_score) / (abs(my_score) + abs(opp_score))\n else:\n if abs(self.my_score) + abs(self.opponent_score) == 0:\n return 0\n return (self.my_score - self.opponent_score) / (abs(self.my_score) + abs(self.opponent_score))", "def rough_outcome(self) -> float:\n\n if self.p1_turn:\n name = '2'\n else:\n name = '1'\n\n count = 0\n for i in self.claim:\n if i == name:\n count += 1\n over = (self.get_possible_moves() == []) or \\\n (count >= 0.5 * len(self.claim))\n\n result = []\n if over:\n return -1\n else:\n for move in self.get_possible_moves():\n new_state = self.make_move(move)\n if new_state.rough_outcome() == -1:\n result.append(1)\n else:\n result.append(0)\n if 1 in result:\n return 1\n return -1", "def __status(self):\r\n if self.__currentCell == 
self.storageCell:\r\n return Status.WIN\r\n\r\n if self.__totalReward < self.__rewardThreshold: # force end of game after to much loss\r\n return Status.LOSE\r\n\r\n return Status.PLAYING", "def get_expected_objective(self) -> float:\n # pylint: disable=invalid-name\n obj = 0.\n for gr in self.grounded.values():\n dist = gr.get_expected_dist_to_satisfaction()\n obj += 1 - self.weight * max(0, dist) ** 2\n return obj", "def winner(self):\n state = self._state['visible']\n if state['reserve'][0] < 1:\n return 1\n elif state['reserve'][1] < 1:\n return 0\n return -1", "def winner(self):\n\n if self.home_score > self.away_score:\n return HOME\n elif self.home_score < self.away_score:\n return VISITOR\n else:\n return TIE", "def evaluateWinner(self):\n\t\tif self.pots[-1] == 0:\n\t\t\tself.pots.pop()\n\t\tlivePlayers = self.getLivePlayers()\t\n\t\tfor i in range(len(self.pots)):\n\t\t\tplayers = self.getPlayersInPot(i, livePlayers)\n\t\t\tevaluations = []\n\t\t\tfor x in players:\n\t\t\t\tcombined = x.hand + self.communityCards\n\t\t\t\tevaluations.append((x, self.evaluator.getRankOfSeven(\tcombined[0], \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcombined[1], \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcombined[2], \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcombined[3], \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcombined[4], \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcombined[5], \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcombined[6] )))\n\t\t\twinners = self.getWinners(evaluations, i)\n\t\t\tself.handOutMoney(winners, i)\n\t\t\tself.potwinQ.append(winners[0].name)", "def _determine_outcome(\n self,\n accept: AcceptanceCriterion,\n best: State,\n curr: State,\n cand: State,\n ) -> Outcome:\n outcome = Outcome.REJECT\n\n if accept(self._rnd_state, best, curr, cand): # accept candidate\n outcome = Outcome.ACCEPT\n\n if cand.objective() < curr.objective():\n outcome = Outcome.BETTER\n\n if cand.objective() < best.objective(): # candidate is new best\n logger.info(f\"New best with objective {cand.objective():.2f}.\")\n outcome = Outcome.BEST\n\n return outcome", "def custom_score_2(game, player):\n \n # get avaliable moves for each player\n own_moves = len(game.get_legal_moves(player))\n opp_moves = len(game.get_legal_moves(game.get_opponent(player)))\n \n # shortcut to definite state:\n # 1. my agent win -> return very high score\n if opp_moves == 0:\n return float(\"inf\")\n # 2. 
opponenent's agent win -> return very low score\n elif own_moves == 0:\n return float(\"-inf\")\n\n # score: avaliable moves ratio\n return float(own_moves/opp_moves)", "def predict_winner(self):\n\t\tif len(self.players) > 1:\n\t\t\t# TODO: convert to using of max() function\n\t\t\twinner = self.players[0]\n\t\t\tfor player in self.players:\n\t\t\t\tif player.wr > winner.wr:\n\t\t\t\t\twinner = player\n\t\t\treturn winner\n\t\telse:\n\t\t\treturn None", "def Pwin(state):\n # Assumes opponent also plays with optimal strategy\n p, me, you, pending = state\n if me + pending >= goal:\n return 1\n elif you >= goal:\n return 0\n else:\n return max(Q_pig(state, action, Pwin) for action in pig_actions(state))", "def utility(self, state, player):\n if state.isWin() or state.isLose():\n return state.getScore()\n\n # In case of cycle.\n if player == PACMAN:\n return INFINITY\n else:\n return -INFINITY", "def get_winner(self) -> int:\n return self._win_state", "def best(self):\n alpha = -1\n beta = +1\n move = self.__negamax(alpha, beta, tt=DictTT())\n return move[1]", "def winner(self):\n\n if self.game_ended():\n return self.winning()\n else:\n return 0", "def showBestGainWon(self) :\n bestGainWon = 0\n for level in self.level_history :\n bestGainWon = level.profit if bestGainWon < level.profit else bestGainWon\n Scenario.messageGetBestGainWon(bestGainWon)", "def Pwin(state):\n # Assumes opponent also plays with optimal strategy.\n (p, me, you, pending) = state\n if me + pending >= goal:\n return 1\n elif you >= goal:\n return 0\n else:\n return max(Q_pig(state, action, Pwin)\n for action in pig_actions(state))", "def evaluate(self, mode=0):\r\n winner = self.determine_winner()\r\n if winner:\r\n return winner * self.WIN_SCORE\r\n\r\n if mode == 1:\r\n return self.centre_priority_evaluate()\r\n elif mode == 2:\r\n return 0.5 * (self.centre_priority_evaluate() + self.piece_evaluate())\r\n else:\r\n return self.piece_evaluate()", "def evaluate_state(state):\n\n my_score = get_action_score(state.action[0], state.action[1], state.action_player, state.occupied)\n other_score = get_action_score(state.action[0], state.action[1], state.player, state.occupied)\n \n return max(my_score, other_score)", "def showWorstGainWon(self) :\n worstGainWon = self.level_history[0].profit\n for level in self.level_history :\n worstGainWon = level.profit if ((worstGainWon > level.profit) and (level.result == 1)) else worstGainWon\n Scenario.messageGetWorstGainWon(worstGainWon)", "def get_winner(state):\n state_val = get_action_score(state.action[0], state.action[1], state.action_player, state.occupied)\n if state_val == 100:\n return state.action_player\n elif len(state.available_moves) == 0:\n return 0\n else:\n return -1", "def eval(self):\n\n ratio_player_win = self.player_wins / self.num_test\n ratio_opponent_win = self.opponent_wins / self.num_test\n ratio_tie = 1.0 - ratio_player_win - ratio_opponent_win\n\n print(\"\\nPlayer Test Results:\")\n print(\"\\tWins {0:.2f}%\".format(100.0 * ratio_player_win))\n print(\"\\tLosses {0:.2f}%\".format(100.0 * ratio_opponent_win))\n print(\"\\tTie {0:.2f}%\".format(100.0 * ratio_tie))\n\n ratio_optimal_win = self.optimal_wins / self.num_test\n ratio_optimal_loose = self.optimal_losses / self.num_test\n ratio_optimal_tie = 1.0 - ratio_optimal_win - ratio_optimal_loose\n\n print(\"\\nOptimal Results:\")\n print(\"\\tPlayer {0:.2f}%\".format(100.0 * ratio_optimal_win))\n print(\"\\tOpponent {0:.2f}%\".format(100.0 * ratio_optimal_loose))\n print(\"\\tTie {0:.2f}%\".format(100.0 * 
ratio_optimal_tie))\n\n # Ratio of win, loss diff between player and optimal\n # positive if the player beats opponent\n relative_result = ((ratio_player_win - ratio_opponent_win) /\n (ratio_optimal_win - ratio_optimal_loose))\n\n print(\"\\nResults Player Relative Optimal:\")\n print(\"\\tWins {0:.2f}%\".format(100.0 * ratio_player_win / ratio_optimal_win))\n print(\"\\tLosses {0:.2f}%\".format(100.0 * ratio_opponent_win / ratio_optimal_loose))\n print(\"\\tScore {0:.2f}%\".format(100.0 * relative_result))\n\n if self.last_test is not None:\n print(\"Diff from last test score is {0:.2f}%\".format(100.0 * (relative_result - self.last_test)))\n self.last_test = relative_result", "def evaluate_board(self, board):\n \n win_score = 100\n win_or_loss_score = 50\n lose_score = 0\n \n if board.win_for(self.opponent()):\n return lose_score\n if board.win_for(self.side):\n return win_score\n if not board.win_for(self.side) or not board.win_for(self.opponent()):\n return win_or_loss_score", "def minimax_decision(gameState):\n value = -sys.maxsize\n best_value = -sys.maxsize\n best_move = None\n legal_moves = gameState.get_legal_moves()\n for move in legal_moves:\n game = gameState.forecast_move(move)\n value = max(value, min_value(game))\n if value > best_value:\n best_value = value\n best_move = move\n return best_move", "def custom_score_2(game, player):\n if game.is_loser(player):\n return float(\"-inf\")\n if game.is_winner(player):\n return float(\"inf\")\n\n # Aim to maximise your own available moves vs the opponent (Factor 2)\n\n opponent = game.get_opponent(player)\n return float(len(game.get_legal_moves(player)))-2.0*float(len(game.get_legal_moves(opponent)))", "def best_move(self, state, curr_player):\n\t\t# determine opponent's color\n\t\tif curr_player == self.colors[0]:\n\t\t\topp_player = self.colors[1]\n\t\telse:\n\t\t\topp_player = self.colors[0]\n\n\t\treturn self.value(state, curr_player)", "def get_best_score_and_time(self):\n\n best_time = 10000\n best_score = 0\n\n for game in self.games:\n if game.status == \"won\":\n if best_time > game.timing:\n best_time = game.timing\n if best_score < game.score:\n best_score = game.score\n\n if best_time == 10000:\n best_time = 0\n\n return (best_score, best_time)", "def get_winner(self):\n diff = self.home_score - self.away_score\n if diff > 0:\n return self.home_team\n elif diff < 0:\n return self.away_team\n else:\n return None", "def winner(self) -> Player:\n return max(self.players, key=lambda player: player.score)", "def counter_opponent_win(self):\n\n # get essential values\n board = self.get_game_space()\n affinity = self.get_opponent().get_affinity()\n \n # pick the right check for the game we are playing\n if isinstance(board, Gomoku):\n \n # get the possible ways for the opponent to win\n possible_wins = board.get_wins(affinity)\n winning_blocks = board.get_winning_blocks(affinity)\n best_move = None\n\n # sort the best win to counter \n for win in possible_wins:\n if best_move is None: best_move = win\n elif win[0] <= best_move[0]: \n if win[1] >= best_move[1]:\n best_move = win\n if best_move is not None: possible_wins.remove(best_move)\n return best_move", "def get_win_percentage(self) -> float:\n if self.wins == 0:\n return 0.0\n else:\n return round((self.wins / (self.wins + self.losses)) * 100, 2)", "def worst_score(self):\r\n pass", "def best_response(self, opponents_actions, tie_breaking='smallest',\n payoff_perturbation=None, tol=None, random_state=None):\n if tol is None:\n tol = self.tol\n\n payoff_vector = 
self.payoff_vector(opponents_actions)\n if payoff_perturbation is not None:\n try:\n payoff_vector += payoff_perturbation\n except TypeError: # type mismatch\n payoff_vector = payoff_vector + payoff_perturbation\n\n best_responses = \\\n np.where(payoff_vector >= payoff_vector.max() - tol)[0]\n\n if tie_breaking == 'smallest':\n return best_responses[0]\n elif tie_breaking == 'random':\n return self.random_choice(best_responses,\n random_state=random_state)\n elif tie_breaking is False:\n return best_responses\n else:\n msg = \"tie_breaking must be one of 'smallest', 'random', or False\"\n raise ValueError(msg)", "def best_value(self):\r\n return self._best_value", "def winning_percentage(self):\n return float(len(self.wins))/len((self.wins+self.losses))", "def _get_lip_best(self) -> float:\n pass", "def winner(self):\n state = self.state\n if state == State.X_WON:\n return Square.X\n if state == State.O_WON:\n return Square.O\n return None", "def get_winners(self):\n\n if self.optimal is not None:\n return self.optimal\n clean_proposals = self.cleaner.create_scenarios(self.proposals)\n self.optimal = self.optimizer.optimize(clean_proposals)\n return self.optimal", "def _minimax_decision(gameState):\n # The built in `max()` function can be used as argmax!\n return max(gameState.get_legal_moves(),\n key=lambda m: min_value(gameState.forecast_move(m)))", "def calcul_max_loss(self, percent_allowable_loss):\n if self.capital * percent_allowable_loss / 100 > self.minimal_buy:\n return self.capital * percent_allowable_loss / 100\n else:\n return self.minimal_buy", "def _max(self, board: Board) -> (float, int):\n\n #\n # First we check if we have seen this board position before, and if yes just return the cached value\n #\n board_hash = board.hash_value()\n if board_hash in self.cache:\n return self.cache[board_hash]\n\n #\n # Init the min value as well as action. Min value is set to DRAW as this value will pass through in case\n # of a draw\n #\n max_value = self.DRAW_VALUE\n action = -1\n\n #\n # If the game has already finished we return. 
Otherwise we look at possible continuations\n #\n winner = board.who_won()\n if winner == self.side:\n max_value = self.WIN_VALUE\n action = -1\n elif winner == board.other_side(self.side):\n max_value = self.LOSS_VALUE\n action = -1\n else:\n for index in [i for i, e in enumerate(board.state) if board.state[i] == EMPTY]:\n b = Board(board.state)\n b.move(index, self.side)\n\n res, _ = self._min(b)\n if res > max_value or action == -1:\n max_value = res\n action = index\n\n # Shortcut: Can't get better than that, so abort here and return this move\n if max_value == self.WIN_VALUE:\n self.cache[board_hash] = (max_value, action)\n return max_value, action\n\n self.cache[board_hash] = (max_value, action)\n return max_value, action", "def min_value(self, game, depth):\n if self.time_left() < self.TIMER_THRESHOLD: # Timeout check\n raise SearchTimeout()\n\n if game.is_loser(self) or game.is_winner(self) or depth == 0: # Terminal test, checks base cases\n return self.score(game,self) # returns the score, UTILITY of the current state\n legal_moves = game.get_legal_moves() # obtain all legal moves for game, ACTIONs that can be taken\n best_score = math.inf # abstraction assignment of infinity(highest possible value for MIN score)\n for m in legal_moves: # iterate through all available actions\n new_state = game.forecast_move(m) # for each available move, forecast the resulting state from that ACTION\n # RESULT of ACTION\n score = self.max_value(new_state, depth - 1) # recursively uses the new state\n best_score = min(best_score,score) # calculates the minimizing score between the states\n return best_score # propagates minimizing score for given state", "def negamax(self):\n if self.check_winner():\n return 1\n elif self.full():\n return 0\n else:\n bestScore = -10\n for r, c in self.empty_cells():\n self.grid[r][c] = self.player\n self.next_player() \n score = -self.negamax()\n if score > bestScore:\n bestScore = score\n self.grid[r][c] = GameModel.EMPTY\n self.next_player()\n return bestScore", "def get_winner(self):\n return self.winner", "def max_value(self, game, depth):\n if self.time_left() < self.TIMER_THRESHOLD: # Timeout check\n raise SearchTimeout()\n\n if game.is_loser(self) or game.is_winner(self) or depth == 0: # Terminal test, checks base cases\n return self.score(game,self) # returns the score, UTILITY of the current state\n legal_moves = game.get_legal_moves() # obtain all legal moves for game, ACTIONs that can be taken\n best_score = -math.inf # abstraction assignment of neg. 
infinity(lowest possible value for MAX score)\n for m in legal_moves: # iterate through all available actions\n new_state = game.forecast_move(m) # for each available move, forecast the resulting state from that ACTION\n # RESULT of ACTION\n score = self.max_value(new_state, depth - 1) # recursively uses the new state\n best_score = max(best_score,score) # calculates the minimizing score between the states\n return best_score # propagates minimizing score for given state", "def reward(self, player, winning_state):\n if winning_state == \"Tie\":\n return 1\n elif winning_state == \"Resume\":\n return -1\n else:\n if player == \"agent\":\n return 10\n else:\n return -10", "def _game_winner(self):\n return self._env.get_winner()", "def strategy(hand, num_die_sides):\r\n \r\n best_hold = (0.0, ())\r\n current_score = 0\r\n \r\n for held_dice in gen_all_holds(hand):\r\n score = expected_value(held_dice, num_die_sides, len(hand) - len(held_dice))\r\n if score > current_score:\r\n current_score = score\r\n best_hold = (current_score, held_dice)\r\n \r\n return best_hold", "def mostCloseOpp(self, opponents):\n opps=opponents\n distMin = self.width\n numDistMin = None\n i=0\n for opp in opps:\n if self.distMe_Players(opp)<distMin:\n distMin=self.distMe_Players(opp)\n numDistMin=i\n i=i+1\n\n if (numDistMin==None):\n return\n return opps[numDistMin]", "def evaluate(state):\n if wins(state, COMP):\n score = -1\n elif wins(state, HUMAN):\n score = 1\n else:\n score = 0\n\n return score", "def calculate_my_win_strength(self):\n self.winStrength = self.strategy(deepcopy(self.currentBoardState))", "def _policy(self, gameboard):\r\n valid_moves = self._all_valid_moves(gameboard)\r\n _reflex_ = Reflex(self.color)\r\n best_move = None\r\n moves = []\r\n \r\n # step 1, check going to win\r\n for x in range(gameboard.height):\r\n for y in range(gameboard.width):\r\n position = (x, y)\r\n temp = _reflex_.check_going_to_win(position, gameboard)\r\n if len(temp) != 0:\r\n moves += temp\r\n\r\n if len(moves) > 0:\r\n idx = np.random.choice(len(moves), 1)[0]\r\n best_move = moves[idx]\r\n return best_move\r\n \r\n # step 2, check opponent 4\r\n for x in range(gameboard.height):\r\n for y in range(gameboard.width):\r\n position = (x, y)\r\n temp = _reflex_._alter_check_opponent_4(position, gameboard)\r\n if len(temp) != 0:\r\n moves += temp\r\n \r\n if len(moves) > 0:\r\n idx = np.random.choice(len(moves), 1)[0]\r\n best_move = moves[idx]\r\n return best_move\r\n\r\n # step 3, check opponent 3\r\n for x in range(gameboard.height):\r\n for y in range(gameboard.width):\r\n position = (x, y)\r\n temp = _reflex_.check_opponent_3(position, gameboard)\r\n if len(temp) != 0:\r\n moves += temp\r\n \r\n if len(moves) > 0:\r\n idx = np.random.choice(len(moves), 1)[0]\r\n best_move = moves[idx]\r\n return best_move\r\n\r\n # step 4, winning blocks\r\n for x in range(gameboard.height):\r\n for y in range(gameboard.width):\r\n position = (x, y)\r\n temp = _reflex_.check_winning_blocks(position, gameboard)\r\n if len(temp) != 0:\r\n moves += temp\r\n\r\n if len(moves) > 0:\r\n moves = list(set(moves))\r\n moves.sort(key=lambda x: x[2], reverse=True)\r\n max_count = moves[0][2]\r\n new_moves = []\r\n\r\n for t in moves:\r\n if t[2] < max_count:\r\n break\r\n else:\r\n new_moves.append((t[0], t[1]))\r\n\r\n moves = new_moves.copy()\r\n\r\n if len(moves) > 0:\r\n idx = np.random.choice(len(moves), 1)[0]\r\n best_move = moves[idx]\r\n return best_move\r\n\r\n # step 5, random pick one\r\n idx = np.random.choice(len(valid_moves), 
1)[0]\r\n return valid_moves[idx]", "def strategy(hand, num_die_sides):\n best_move = (0.0, ())\n all_holds = gen_all_holds(hand)\n for hold in all_holds:\n # hand can be less than 5\n num_free_dice = len(hand) - len(hold)\n expected = expected_value(hold, num_die_sides, num_free_dice)\n if expected > best_move[0]:\n best_move = (expected, hold)\n return best_move", "def showBestBetUse(self) :\n bestBetUse = 0\n for level in self.level_history :\n bestBetUse = level.bet if bestBetUse < level.bet else bestBetUse\n Scenario.messageGetBestBetUse(bestBetUse)", "def _get_reward(self):\n if self.is_game_done:\n return self.price - 1\n else:\n return 0.0", "def _best_action(self, state):\n actions_rewards = list(self.Q[state].items())\n return max(actions_rewards, key=lambda x: x[1])[0]", "def getHighestRank_Naive(self):\n\n # filter out low confidences\n maxConfidence = max(self.Predictors, key=operator.attrgetter('confidence'))\n p = [p for p in self.Predictors if p.confidence >= maxConfidence.confidence]\n \n if len(p) == 1:\n # only one predictor has high confidence\n chosenPredictor = p[0]\n elif len(p) > 1:\n # many predictors has high confidence. look for highest wins\n maxScore = max(p, key=operator.attrgetter('scoreWins'))\n \n# maxScore = 0\n# for pred in p:\n# maxScore = max(maxScore, pred.scoreWins - pred.scoreLosts) \n \n predictors = p\n p = [p for p in predictors if p.scoreWins >= maxScore.scoreWins]\n \n if len(p) == 1:\n chosenPredictor = p[0]\n elif len(p) > 1:\n # there are ties. look for lowest losts\n maxScore = min(p, key=operator.attrgetter('scoreLosts'))\n predictors = p\n p = [p for p in predictors if p.scoreLosts == maxScore]\n \n if len(p) == 1:\n chosenPredictor = p[-1]\n elif len(p) > 1:\n # choose at random\n random = rps.random() % len(p)\n chosenPredictor = p[random]\n \n if len(p) == 0:\n maxConfidence = max(self.Predictors, key=operator.attrgetter('confidence'))\n p = [p for p in self.Predictors if p.confidence >= maxConfidence.confidence]\n \n random = rps.random() % len(p)\n chosenPredictor = p[random]\n else:\n # confidences are low. 
look for highest wins\n maxScore = max(self.Predictors, key=operator.attrgetter('scoreWins'))\n p = [p for p in self.Predictors if p.scoreWins == maxScore]\n \n if len(p) == 1:\n chosenPredictor = p[0]\n elif len(p) > 1:\n # choose at random\n random = rps.random() % len(p)\n chosenPredictor = p[random]\n else:\n # choose at random\n random = rps.random() % len(self.Predictors)\n chosenPredictor = self.Predictors[random]\n \n if Debug:\n maxScore = max([p.scoreWins for p in self.Predictors]) \n print(\"max score: %f \" % (maxScore), end=\"\") \n maxScore = max([p.confidence for p in self.Predictors]) \n print(\"max confidence: %f \" % (maxScore), end=\"\") \n print(\"chosen predictor: %s\" % (chosenPredictor.name))\n #input()\n\n \n rankConfidence = chosenPredictor.confidence\n return chosenPredictor, rankConfidence", "def next_choice(self, opponent: 'Player') -> str:\n\n if self.adaptive_ai:\n # this is an adaptive_ai player, so see if it has collected\n # enough stats about the current opponent yet:\n if sum(self.opponent_choices[opponent.name].values()) > 5:\n # has enough samples to start adapting to the opponent\n print(' {} is trying to guess the opponent\\'s choice...'.format(self.name))\n\n # AI algorithm 1:\n # simply find the most-frequent selection by the opponent and\n # choose its killer.\n\n guess = self.opponent_choices[opponent.name].most_common(1)[0][0]\n ai_choice = weapon_to_beat(guess)\n print(' ', opponent.name, 'most often chose', guess, 'so he/she chose', ai_choice)\n return ai_choice\n\n # use the standard tendency distribution to choose a weapon:\n n = randint(1, self.randmax)\n if n <= self.tendency[0]:\n return 'rock'\n elif n <= self.tendency[0] + self.tendency[1]:\n return 'paper'\n else:\n return 'scissors'", "def eval_strategy_range(make_strategy, lower_bound, upper_bound):\r\n best_value, best_win_rate = 0, 0\r\n value = lower_bound\r\n while value <= upper_bound:\r\n strategy = make_strategy(value)\r\n win_rate = compare_strategies(strategy)\r\n print('Win rate against the baseline using', value, 'value:', win_rate)\r\n if win_rate > best_win_rate:\r\n best_win_rate, best_value = win_rate, value\r\n value += 1\r\n return best_value", "def get_penalty(state, action, winrate_predictor):\n if violate_rule(state, action):\n return -1 \n return 0", "def get_winner(self):\n return self._winner", "def get_winner(self):\n return self._winner", "def _get_reward(self, player_score, opponent_score):\n return player_score - opponent_score", "def get_result(state, winrate_predictor):\n teamA_picks = state[:, TEAM_A_PICK_INDICES]\n teamB_picks = state[:, TEAM_B_PICK_INDICES]\n team_comp = torch.cat((teamA_picks, teamB_picks), dim=1)\n winrate = winrate_predictor(team_comp)[0, 0]\n \n if winrate >= 0.5:\n return 0\n return 1", "def win(self) -> int:\n earnings: int = self._bet\n\n # If it has a blackjack, the earnings increments 1.5 times\n for hand in self._hands:\n if hand.has_blackjack():\n earnings = round(earnings * 1.5)\n\n self._actual_money += earnings\n return earnings", "def final_value(player, board):\n diff = Othello.score(player, board)\n if diff < 0:\n return MIN_VALUE\n elif diff > 0:\n return MAX_VALUE\n return diff", "def getHighestRank_LowerWilson(self, higherBound = True):\n \n if len(self.Predictors) == 1:\n # there is only one predictor. 
choose that immediately\n predictor = self.Predictors[0]\n return (predictor, predictor.confidence)\n \n # grab the top 3 wins, top 3 wins-lost, top 3 confidences\n# maxWins = sorted(self.Predictors, key=lambda i: i.scoreWins)\n# maxDiff = sorted(self.Predictors, key=lambda i: i.scoreWins - i.scoreLosts)\n# maxConfidence = sorted(self.Predictors, key=lambda i: i.confidence)\n \n # grab the top predictors by wins, diffs and confidence.\n # on test, this has worse effect on ranking. (need more testing for confirmation)\n filteredPredictors = self.Predictors # no union\n\n # warning: set is non-deterministic\n #filteredPredictors = set(maxWins[:3]) | set(maxDiff[:3]) | set(maxConfidence[:3]) # union\n #filteredPredictors = set(maxWins[:5]) | set(maxDiff[:5]) | set(maxConfidence[:5]) # union\n #filteredPredictors = list(filteredPredictors)\n \n##############\n##todo: add treshold instead?\n#########\n \n predictorScores = []\n for i, predictor in enumerate(filteredPredictors):\n\n if useScoreBuffer == False:\n positiveRatings = predictor.scoreWins\n negativeRatings = predictor.scoreLosts\n totalRatings = predictor.totalTurns\n totalRatings = positiveRatings + negativeRatings\n else:\n positiveRatings = predictor.scoreBuffer.count(scoreBufferWin)\n negativeRatings = predictor.scoreBuffer.count(scoreBufferLost)\n totalRatings = len(predictor.scoreBuffer)\n totalRatings = positiveRatings + negativeRatings\n \n confidence = predictor.confidence\n \n # experiment: what happens if we use our score as confidence in self?\n \n# if confidence >= 1: # possible DNA\n# predictorScores.append((1.0, predictor))\n# continue\n \n if positiveRatings <= 0 or totalRatings <= 0:\n continue\n \n if 1:\n #confidence = 1 - confidence\n maxPredictionRating = 0.99 # possible DNA\n #maxPredictionRating = 1 # possible DNA\n \n if confidence > maxPredictionRating: confidence = maxPredictionRating\n if confidence < 0.0: confidence = 0.0\n\n ratings = rps.binconf(positiveRatings, negativeRatings, confidence)\n #ratings = binconf(positiveRatings, negativeRatings, confidence)\n \n if higherBound:\n rating = float(ratings[1])\n else:\n rating = float(ratings[0])\n \n #rating += (ratings[1] - ratings[0]) / 2\n \n if math.isnan(rating): rating = 0\n \n rating = round(rating,3) # fix for conversion from C float to Python float \n else:\n maxPredictionRating = 0.99 # possible DNA\n #maxPredictionRating = 1 # possible DNA\n if confidence > maxPredictionRating: confidence = maxPredictionRating\n if confidence < 0.0: confidence = 0.0\n \n #z = 1.96 # hardcorded for confidence=95%\n #z = 1.0 # 1.44=85% 1.96=95%\n p = 1 - 0.5 * (1 - confidence)\n z = cached_normcdfi(p)\n #z = rps.normcdfi(p)\n \n phat = float(positiveRatings) / totalRatings\n n = totalRatings\n \n rating = (phat + z*z/(2*n) - z * math.sqrt((phat*(1-phat)+z*z/(4*n))/n))/(1+z*z/n)\n \n #rating = round(rating, 3) # round to the nearest 3 decimals. experiment\n \n predictor.rankingConfidence = rating\n predictorScores.append((rating, predictor))\n\n if len(predictorScores) > 1:\n # filter out predictors that does not tie with the maximum rating, for optimization purposes\n maxRating = max(predictorScores, key=lambda i: i[0])[0]\n p = [p for p in predictorScores if p[0] == maxRating]\n\n if predictorScores[0] != maxRating:\n assert(\"Something is wrong. 
We filtered out predictions that is not the maximum but we got some here\") \n \n predictorScores = p\n elif len(predictorScores) == 1:\n rating, chosenPredictor = predictorScores[0]\n return chosenPredictor, rating\n else:\n random = rps.random() % len(filteredPredictors)\n chosenPredictor = filteredPredictors[random]\n rating = 0\n return chosenPredictor, rating\n \n # there are multiple predictors with the same rating.\n # let's choose the one with the biggest score (positive - negative)\n if useScoreBuffer == False:\n highestScorers = max(predictorScores, key=lambda i: i[1].scoreWins)\n else:\n highestScorers = max(predictorScores, key=lambda i: i[1].scoreBuffer.count(scoreBufferWin))\n predictorScores = [p for p in predictorScores if p[0] == highestScorers[0]]\n\n # tally the moves and choose the move with the most tally\n \n tally = [0, 0, 0]\n for p in predictorScores:\n # tally[p[1].moveLastTurn] += 1\n if p[1].moveLastTurn == 0: tally[0] += 1\n if p[1].moveLastTurn == 1: tally[1] += 1\n if p[1].moveLastTurn == 2: tally[2] += 1\n \n # let's choose a move at random between them \n # Filter predictorScores to only include the predictors with the maximum tally.\n maxTally = max(tally)\n talliedScorers = []\n if tally[0] == maxTally: \n rocks = [talliedScorers.append(p) for p in predictorScores if p[1].moveLastTurn == 0]\n if tally[1] == maxTally: \n papers = [talliedScorers.append(p) for p in predictorScores if p[1].moveLastTurn == 1]\n if tally[2] == maxTally: \n scissors = [talliedScorers.append(p) for p in predictorScores if p[1].moveLastTurn == 2] \n \n if len(talliedScorers) == 1:\n # in practice, this doesn't happen, but we put in this option to try to minimize bugs\n rating, chosenPredictor = talliedScorers[0]\n else:\n # play the move with the highest score\n finalChoice = None\n \n if tally[0] and tally[0] > tally[1] and tally[0] > tally[2]:\n Rmoves = [p for p in talliedScorers if p[1].moveLastTurn == 0]\n finalChoice = Rmoves[0]\n elif tally[1] and tally[1] > tally[0] and tally[1] > tally[2]:\n Pmoves = [p for p in talliedScorers if p[1].moveLastTurn == 1]\n finalChoice = Pmoves[0]\n elif tally[2] and tally[2] > tally[0] and tally[2] > tally[1]:\n Smoves = [p for p in talliedScorers if p[1].moveLastTurn == 2]\n finalChoice = Smoves[0]\n else: \n # there are still ties so we choose at random\n random = rps.random() % len(talliedScorers)\n finalChoice = talliedScorers[random]\n \n chosenPredictor = finalChoice[1]\n rating = finalChoice[0] \n \n if Debug:\n currentTurn = rps.getTurn()\n print(\"currentTurn\", currentTurn)\n for p in talliedScorers:\n print (\"%s (%i) Wilson Rating: %.3f. Confidence: %.3f Score +%i/-%i\" % (p[1].name, p[1].moveLastTurn, p[0], p[1].confidence, p[1].scoreWins, p[1].scoreLosts))\n \n input() \n\n return chosenPredictor, rating", "def _best_individual(self):\n return max(self._population, key=attrgetter(\"fitness\"))", "def _estimate_strength_from_results(\n number_of_games: int, number_of_wins: int, opponent_rating: float\n) -> Tuple[float, Tuple[float, float]]:\n n, p = number_of_games, number_of_wins / number_of_games\n q = 1 - p\n\n if n * p * q < 9: # Cannot apply normal approximation of binomial distribution\n raise ValueError(\n \"The results obtained in evaluate_player are too extreme to obtain an \"\n \"accurate player evaluation. You can try to solve this issue by increasing\"\n \" the total number of battles. 
Obtained results: %d victories out of %d\"\n \" games.\" % (p * n, n)\n )\n\n estimate = opponent_rating * p / q\n error = (\n math.sqrt(n * p * q) / n * 1.96\n ) # 95% confidence interval for normal distribution\n\n lower_bound = max(0, p - error)\n lower_bound = opponent_rating * lower_bound / (1 - lower_bound)\n\n higher_bound = min(1, p + error)\n\n if higher_bound == 1:\n higher_bound = math.inf\n else:\n higher_bound = opponent_rating * higher_bound / (1 - higher_bound)\n\n return estimate, (lower_bound, higher_bound)", "def get_best_move(self, curr_player, orig_player, depth=0, alpha=-100, beta=100):\n\n best_move, best_score = None, 0\n is_over, winner = self.is_over()\n\n current_board_state = self.get_board_state()\n current_board_state_best_move_score = self.state_best_move_map.get(current_board_state, {}).get(curr_player)\n\n if current_board_state_best_move_score:\n return current_board_state_best_move_score\n\n # If game is over return the score\n if is_over:\n self.LEAF_COUNT += 1\n if winner:\n best_score = (10 - depth) * (1 if winner == orig_player else -1)\n return best_move, best_score\n\n opponent = self.human if curr_player == self.comp else self.comp\n\n # Else find the move with best score.\n for move in list(self.available_moves):\n\n self.play_move(move, curr_player)\n _, score = self.get_best_move(opponent, orig_player, depth=depth+1, alpha=alpha, beta=beta)\n\n # If move is for orig-player, max score move will be best move.\n if best_move is None or (curr_player == orig_player and score > best_score):\n best_move, best_score = move, score\n alpha = max(alpha, best_score)\n # If move is not for orig-player, min score move will be best move.\n elif best_move is None or (curr_player != orig_player and score < best_score):\n best_move, best_score = move, score\n beta = min(beta, best_score)\n\n self.undo_move(move)\n\n # If the maximum harm (-ve score) the opponent can do is less than the minimum benefit (+ve score) the\n # current player has, no need to check other moves as the opponent can not do better.\n if beta <= alpha:\n break\n\n self.state_best_move_map[current_board_state] = self.state_best_move_map.get(current_board_state) or {}\n self.state_best_move_map[current_board_state][curr_player] = best_move, best_score\n return best_move, best_score", "def best_reward(self, observation, sess, weighted=False):\n if weighted:\n return self.weighted_choice(observation, sess)[1]\n else:\n return self.best_choice(observation, sess)[1]", "def host_result_value(winner: Winner) -> float:\n if winner == Winner.HOME:\n return 1\n if winner == Winner.AWAY:\n return 0\n return 0.5", "def strategy(hand, num_die_sides):\r\n \r\n best_value = 0.0\r\n best_hold = ()\r\n \r\n possible_holds = gen_all_holds(hand)\r\n \r\n for hold in possible_holds:\r\n current_value = expected_value(hold, num_die_sides, len(hand) - len(hold))\r\n if current_value > best_value:\r\n best_value = current_value\r\n best_hold = hold\r\n \r\n return (best_value, best_hold)", "def optimal_max(board):\n # Board full?\n if terminal(board):\n return [None, utility(board)]\n\n available_actions = list(actions(board))\n\n # Naive baseline comparison is negative infinity\n global_optimum = [None, -math.inf]\n\n # For each move, what would opponent do next? 
Update best move.\n for action in available_actions:\n # Anticipates optimal adversarial moves\n local_optimum = optimal_min(result(board, action))\n\n # Compares local vs global optima\n if global_optimum[1] <= local_optimum[1]:\n global_optimum = [action, local_optimum[1]]\n\n return global_optimum", "def getReward(self):\n# def evaluateFitness(self):\n fitness = 0.0\n distance = self.env.getDistance()\n speed = self.env.getSpeed()\n theta = self.env.getOrientation()\n\n ## implementation 101\n timeBonus = (self.maxTime - self.t)/self.maxTime\n alpha = 1.0/((1+distance)*(1+fabs(theta))*(speed+1));\n if distance < 0.5*self.env.init_distance :\n if(distance < self.env.vicinity_distance and\n abs(theta) < self.env.vicinity_orientation and\n speed < self.env.vicinity_speed ):\n fitness = 1 + timeBonus; \n else:\n fitness = alpha;\n else: fitness = 0\n self.lastFitness = fitness\n if fitness > self.bestFitness : \n self.bestFitness = fitness \n\n return fitness", "def determine_round_winner(self):\n\n if self.getX() + self.SIZE[0] < 0:\n # point for player two\n return 2\n elif self.getX() > Configuration.windowWidth:\n # point for player one\n return 1", "def _get_reward(self, action):\n HIRE_COST = 1 # TODO 7/29/20 - Determine significance of this value\n\n # Lookup the state representation using the cur_state index. Then we\n # can get the candidate productivity score.\n obs = self.observation_function[self.cur_state]\n prod_score = obs[1]\n r = action*(prod_score - HIRE_COST)\n return r", "def improved_score(game, player):\n if game.is_loser(player):\n return float(\"-inf\")\n\n if game.is_winner(player):\n return float(\"inf\")\n\n own_moves = len(game.get_legal_moves(player))\n opp_moves = len(game.get_legal_moves(game.get_opponent(player)))\n return float(own_moves - opp_moves)", "def __get_next_greedy_move(self, game_state): \n best_move = None\n best_score = None\n for free_seat in self.__get_free_seats(game_state):\n next_game_state_score = self.__get_score(game_state, free_seat)\n if best_score is None:\n best_score = next_game_state_score\n best_move = free_seat\n continue\n if next_game_state_score > best_score:\n best_score = next_game_state_score\n best_move = free_seat\n return best_move", "def evaluate(state):\r\n if wins(state, COMP):\r\n score = +1\r\n elif wins(state, HUMAN):\r\n score = -1\r\n else:\r\n score = 0\r\n\r\n return score", "def get_winner(self):\n return AIError(\"Must be implemented in child class!\")", "def showWorstBetUse(self) :\n worstBetUse = self.level_history[0].bet\n for level in self.level_history :\n worstBetUse = level.bet if worstBetUse > level.bet else worstBetUse\n Scenario.messageGetWorstBetUse(worstBetUse)", "def best_move(self) -> tuple:\n if self.root_state.winner != GameMeta.PLAYERS['none']:\n return GameMeta.GAME_OVER\n\n # choose the move of the most simulated node breaking ties randomly\n max_value = max(self.root.children.values(), key=lambda n: n.N).N\n max_nodes = [n for n in self.root.children.values() if n.N == max_value]\n bestchild = choice(max_nodes)\n return bestchild.move", "def value(self):\n #import pdb; pdb.set_trace()\n return ((self.team1.get_cur_hp() / self.team1.get_total_hp()) - \n (self.team2.get_cur_hp() / self.team2.get_total_hp()))", "def get_winner(state):\n\n if", "def propagate(self, game_state, maximizing, max_hitherto, depth, max_depth):\n \n self.moves += 1\n \n\n if depth >= max_depth:\n rating = self.rate_game_state(game_state)\n return (rating, None, None)\n \n else:\n \n color = self.color if maximizing 
else (1 if self.color==0 else 0)\n\n self.timer.start(\"\")\n choices = []\n for line in self.get_all_lines(game_state, color):\n for vector in LEGAL_VECTORS:\n\n new_game_state = self.get_new_game_state(game_state, line, vector, color)\n if new_game_state is not None:\n choices.append( (new_game_state, self.rate_game_state(new_game_state), line, vector) )\n\n \n\n choices = sorted(choices, key=lambda x: x[1], reverse=maximizing)\n \n final_choice = choices[0][1:]\n del choices[0]\n\n for choice in choices:\n\n new_game_state, estimated_rating, line, vector = choice\n actual_rating, _, _ = self.propagate(new_game_state, not maximizing, final_choice[0], depth+1, max_depth)\n \n if maximizing:\n if final_choice[0] < actual_rating:\n final_choice = (actual_rating, line, vector)\n\n else: #minimizing\n if (max_hitherto is not None) and (actual_rating < max_hitherto): return (actual_rating, line, vector)\n\n if actual_rating < final_choice[0]:\n final_choice = (actual_rating, line, vector)\n\n return final_choice", "def max_value(self, game, depth, alpha, beta):\n if self.time_left() < self.TIMER_THRESHOLD: # Timeout check\n raise SearchTimeout()\n\n if game.is_loser(self) or game.is_winner(self) or depth == 0: # Terminal test, checks base cases\n return self.score(game,self) # returns the score, UTILITY of the current state\n\n legal_moves = game.get_legal_moves() # obtain all the available moves on the board\n best_score = -math.inf # abstraction assignment of neg. infinity\n\n for m in legal_moves: # iterate through available moves - ACTIONS available to the state\n new_state = game.forecast_move(m)\n # for each move - ACTION, create the outcome of that move - RESULT of each ACTION resulting in a new state\n score = self.min_value(new_state, depth - 1, alpha, beta) # recursive call to min - using new state, alpha and beta\n best_score = max(best_score, score) # calculate max between best_score and score\n if best_score >= beta: # check if best score is greater than or equal to beta\n return best_score # return best score\n alpha = max(alpha, best_score) # calculate max between alpha and best_score\n return best_score # propagate max and return its value", "def max_diffs(state):\n # your code here\n return best_action(state, pig_actions, Q_pig, win_diff)", "def calculate_utility(state, player):\n thisPlayer = player\n \n if state.winner() == (not thisPlayer):\n return -BigInitialValue\n if state.winner() == thisPlayer:\n return BigInitialValue\n return calculate_possible_fours(state, thisPlayer) - calculate_possible_fours(state, not thisPlayer)", "def custom_score_5(game, player):\n \"\"\"custom_score_5 heuristic function defines chances heuristics\n \"\"\"\n if game.is_loser(player):\n return float(\"-inf\")\n\n if game.is_winner(player):\n return float(\"inf\")\n\n length_my_player_moves = len(game.get_legal_moves(player)) #Calculate length of available moves for myPlayer\n length_opp_payer_moves = len(game.get_legal_moves(game.get_opponent(player)))#Calculate length of available moves for the oppositePlayer\n return float(length_my_player_moves*length_my_player_moves - length_opp_payer_moves*length_opp_payer_moves)", "def chooseAction(self, gameState):\n probabilities = self.assignProbablities(gameState)\n #print probabilities\n prob, bestProbabilityAction = max(probabilities)\n return bestProbabilityAction", "def utility(self, state):\n \n if state.cachedWin and state.cachedWinner:\n return 10000\n elif state.cachedWin and not state.cachedWinner:\n return -10000\n else:\n return 0", "def 
get_winning_votes(self):\n try:\n votes = self.get_winner().votes\n except ValueError:\n votes = -1\n return votes", "def get_winner(self) -> None:\n if not self.get_game_ending_hands():\n if max(self.user.hand.value) > max(self.house.hand.value): # Values above 21 are omitted\n self.event_player_wins()\n elif max(self.user.hand.value) == max(self.house.hand.value):\n self.event_player_push()\n else:\n self.event_house_wins()" ]
[ "0.7338502", "0.7059583", "0.7059583", "0.7059583", "0.7020939", "0.6829376", "0.6702647", "0.6694379", "0.66560304", "0.6624065", "0.6616302", "0.6588654", "0.6566183", "0.6555869", "0.6547012", "0.6531186", "0.6518109", "0.6501664", "0.6488798", "0.6458663", "0.6427033", "0.64219874", "0.6408247", "0.6403664", "0.6400599", "0.63933843", "0.6387703", "0.6387276", "0.6378572", "0.6365341", "0.63526493", "0.63522595", "0.6320776", "0.63045466", "0.63044447", "0.62970275", "0.62770706", "0.62698764", "0.6265379", "0.6259836", "0.62554413", "0.62536514", "0.62492454", "0.62428117", "0.62145287", "0.62128955", "0.62047863", "0.6200659", "0.61960906", "0.6195706", "0.61770946", "0.61745733", "0.61745656", "0.61732703", "0.6169824", "0.61681336", "0.61500615", "0.61478907", "0.61459243", "0.61403286", "0.6137025", "0.6128683", "0.6119463", "0.6119103", "0.61137277", "0.6109533", "0.610441", "0.610441", "0.61017287", "0.6097846", "0.6095148", "0.6090347", "0.608117", "0.6074002", "0.607357", "0.60706824", "0.6066748", "0.60424846", "0.60401636", "0.60395163", "0.6032667", "0.60324055", "0.6031868", "0.60304254", "0.6024928", "0.6024075", "0.6022076", "0.60133594", "0.6011935", "0.6004465", "0.6004015", "0.5996286", "0.5992162", "0.59888303", "0.5983072", "0.59799534", "0.5979876", "0.59766966", "0.597524", "0.5972028" ]
0.73625016
0
Initialize this Game, using p1_starts to find who the first player is.
def __init__(self, p1_starts: bool) -> None: side_length = int(input("Enter the side length of the board: ")) self.current_state = StonehengeState(p1_starts, side_length)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def start(self):\n self.__init__()\n self.set_n_players()\n self.init_players()\n self.init_territory_selection_phase()\n self.init_troop_deployment_phase()\n # self.game_phase()", "def initGame(self):\n self.map = {}\n self.blocks = Group()\n self.Coins =Group()\n self.players = Group()\n self.player1 = Player(1525,75,2)\n self.players.add(self.player1)\n if self.playernum == 2:\n self.player2 = Player(75,825,1)\n self.players.add(self.player2)\n else:\n self.player2 = False", "def bcp_game_start(self, **kargs):\n self.bcp_player_add(number=1)\n self.bcp_player_turn_start(player=1)\n self.events.post('game_started', **kargs)", "def startGame(self):\n\n\t\tfor name in self.players.keys():\n\t\t\tself.startPlayerGame((name, 0))\n\t\tself.setupGuiSignals()", "def start_of_game(self):\n pass", "def set_first_player(self):\n if self.player2.won_previous:\n self.current_player = self.player2\n else: self.current_player = self.player1", "def start(self):\n self.player = Player()\n self.dealer = Dealer()\n self.pot = 0\n self.side_bet = 0\n start_game()", "def __init__(self):\n self.__grid = create_grid(\n Settings.SIZE_X, Settings.SIZE_Y, MarkerType.NONE)\n\n self.__turn = 0\n self.__state = GameState.PLAYING\n self.__winner = MarkerType.NONE\n self.__loser = MarkerType.NONE\n\n # Separate counter for turns, because __turn depends on starting player\n self.__turns_played = 0", "def init_game(self):\n self.view.carregar_jogadores_possiveis(self._possible_players_list())\n self.view.put_view_in_main_loop()", "def start_game(self):\n self.code = code.get_random_num()\n self.Player1 = self.get_player(1)\n self.Player2 = self.get_player(2)\n attempt = self.Player1.make_guess()\n guess.guess_lists(attempt, self.code)\n right_answer_list = guess.return_answer()\n num_guessed_list = guess.return_player_guess()\n check.check(num_guessed_list, right_answer_list)\n attempt = self.Player2.make_guess()\n guess.guess_lists(attempt, self.code)\n right_answer_list = guess.return_answer()\n num_guessed_list = guess.return_player_guess()\n output = check.check(num_guessed_list, right_answer_list)\n play = end_game.end_game(output)\n if play == True:\n self.keep_playing()", "def start(self):\n self.save_checkpoint(\"setup\")\n\n logging.info(\"Starting game...\")\n body = render_message(\n \"welcome.html\",\n game_name=self.name,\n night_end=self.night_end.strftime(\"%I:%M %p\"),\n day_end=self.day_end.strftime(\"%I:%M %p\"),\n players=self.game.players,\n )\n self.send_message(mafia.events.PUBLIC, \"%s: Start\" % self.name, body)\n self.game.begin()\n self.started = True\n\n self.save_checkpoint(\"start\")", "def start_game(self):\n\n\t\tpass", "def __init__(self):\n self.played_pos = []\n self.grid = [['-', '-', '-'],\n ['-', '-', '-'],\n ['-', '-', '-']]\n self.player_played_pos = {'p1': set(), 'p2': set()}", "def init(self, start):\r\n\t\tself.start = start\r\n\t\tself.time = 0\r\n\t\tself.t = []\r\n\t\tself.ch = []", "def initializeGame(self):\n # Fill deck with cards and shuffle it\n self.deck.fill(104)\n self.deck.shuffle()\n #print \"Deck initialized\"\n\n # Initialize the field\n self.field.initialize(self.deck.draw(4))\n self.field.sortField()\n #self.field.printField()\n\n # Set players to initial state again\n # Distribute cards and set bulls to 0\n for p in self.players:\n p.bulls = 0\n p.setHand(self.deck.draw(10))", "def start_game(self) -> None:\n self.init_game()\n self.play()", "def __init__(self, player1AI = False, player2AI = False):\n\t\tself.tick = 0\n\t\tself.player1AI = 
player1AI\n\t\tself.player2AI = player2AI\n\t\tself.selectionIndex = [0, 0]\n\t\tself.colours = [\"#ff6363\", \"#ffc163\", \"#88de68\", \"#63c6ff\", \"#ffffff\", \"#000000\"]\n\t\tself.playerReady = [False, False]\n\t\tself.playerKeys = {0: [\"W\", \"S\", \"SPACE\"], 1: [\"⭡\", \"⭣\", \"ENTER\"]}\n\t\tself.timeSinceReady = 0\n\t\tself.headings = [\n\t\t\t\"Player 1:\" if not self.player1AI else \"Computer:\",\n\t\t\t\"Player 2:\" if not self.player2AI else \"Computer:\"\n\t\t]\n\t\tself.itemSpacing = 0.15", "def __init__(self, players):\n\n self._players = players\n self._game = None", "def __init__(self, player1, player2, state_machine, restore = False):\r\n super().__init__()\r\n self.__players[0] = player1\r\n self.__players[1] = player2\r\n self.__player_names[player1] = 'Human'\r\n self.__player_names[player2] = 'Bot'\r\n self.__state_machine = state_machine", "def _init_game(self):\n state, player_id = self.game.init_game()\n if self.record_action:\n self.action_recorder = []\n return self._extract_state(state), player_id", "def _prepare_first_step(self):\n if self.townhalls:\n self._game_info.player_start_location = self.townhalls.first.position\n self._game_info.map_ramps, self._game_info.vision_blockers = self._game_info._find_ramps_and_vision_blockers()", "def __init__(self, players=None):\n self.game = Game()\n if players:\n self.player1 = players[0]\n self.player2 = players[1]\n else:\n self.player1 = Player('X')\n self.player2 = Player('O')\n self.record = Record()\n self.winning_moves = []", "def start_game(self, **kwargs):\n\n success, info = self.gms.start_game(\n player=kwargs.get('player', 'x'),\n first_turn=raw_input('Would you like to go first? y/n\\n') == 'y'\n )\n if success:\n if info['status_code'] == core_constants.GAME_STATUS_HUMAN_MOVE_REQUIRED:\n print(self.gms.game.get_board_state_pretty())\n self.play_human_move()\n else:\n print(info['messages'][0])", "def __init__(self, player1, player2):\n self.players = [player1, player2]\n self.tokens = {\n ' ': ' ',\n player1: 'X',\n player2: 'O',\n }\n self.score = {\n player1: 0,\n player2: 0,\n }\n self.moves = None\n self.winner = None\n self.turn = ''\n self.reset()", "def start(self):\n for game_object in self.game_objects:\n game_object.start()\n # end for\n self.time = time.time()\n self.paused = False\n self.running = True\n print 'GAME STARTED'", "def start(self):\n\n # Call the protected _turn method to start the game\n self._turn()", "def initialize(self):\n self._start = time.time()", "def __init__(self, players):\n\n # Define the players\n self.players = players\n\n # Define who starts the game\n self.nplayer = 1 \n\n # Define the board\n self.board = [0] * 9", "def start_new_game(self):\r\n\r\n self.initialize_game_params()\r\n self.timer = Timer(self.screen)\r\n self.mine_counter = MineCounter(self.num_of_mines, self.screen)\r\n self.reset_button = ResetButton(self.screen)\r\n self.high_score = HighScore(self.rows, self.cols, self.num_of_mines, self.screen)\r\n self.board = Board(self.rows, self.cols, self.num_of_mines, self.screen)\r\n self.play_game()", "def __init__(self, player_1):\n self.die = Die()\n self.player_1 = player_1\n self.current_player = self.player_1\n self.turns = 1", "def initialize(self):\n\n # --------- BEGIN YOUR CODE ----------\n\n # This is exactly the same as Human.initialize, just copy the code over\n\n # --------- END YOUR CODE ----------\n pass", "def initialize(self):\r\n if self.method == \"naive\":\r\n self.teams = TeamsNaive(\r\n self.data_path, self.season_to_play, 
self.season_data\r\n )\r\n self.gsim = GameNaive(False)\r\n if not self.playoffs_only:\r\n self.season = Season(self.season_calendar, self.teams_info, self.gsim)", "def spawn_players(self) -> None:\n # Initialise the players\n self.player1 = Player(self.sensitivity, self.screen_width, self.screen_height, self.screen_width // 2, 50,\n self.player_lives, self.fps, self.player1_bullet, Direction.DOWN, self.debug)\n self.player2 = Player(self.sensitivity, self.screen_width, self.screen_height, self.screen_width // 2,\n self.screen_height - 50, self.player_lives, self.fps, self.player2_bullet, Direction.UP,\n self.debug)\n\n # Rotate the image of the player at the top\n self.player1.rotate(180)", "def __init__(self,player1: Player = ManualPlayer(\"P1\"),\\\r\n player2: Player = ManualPlayer(\"P2\")):\r\n\r\n self.board = np.zeros((BOARD_SIZE,BOARD_SIZE)\\\r\n ,dtype=np.int8)\r\n self.board[3,3] = '2'\r\n self.board[4,4] = '2'\r\n self.board[3,4] = '1'\r\n self.board[4,3] = '1' \r\n\r\n self.players = []\r\n self.players.append(player1)\r\n self.players.append(player2)\r\n self.turn = 1\r\n self.count = 0", "def initialize(self):\n self.currState = self.startState", "def _initialize_player_stats(self):\r\n self.reset()\r\n for x in range(self.num_players):\r\n self.player_draws[f'{x}']=[]\r\n self.player_points[f'{x}']= 0", "def __init__(self, noPlayers, noLaps):\n\t\tself.noPlayers = noPlayers\n\t\tself.noLaps = noLaps\n\t\tself.players = []\n\t\tself.laps = dict()\n\t\tfor pNo in range(0, noPlayers-1):\n\t\t\tplayer = Player()\n\t\t\tself.players.append(player)\n\t\t\tself.laps[id(player)] = 0", "def setup_new_game(self):\r\n self._player = Player()\r\n self._stats = GameStats(self._bb_settings)\r\n self._scoreboard = Scoreboard(self._bb_settings, self._screen)", "def start(self, timed):\n\n if timed:\n self._game = TimedGame(self._players)\n else:\n self._game = Game(self._players)\n \n self._game.start()", "def __init__(self, player_raw: dict, time_per_player_game: int,\n start_side: int, board_size: int, n_players: int):\n self.is_ai = player_raw[\"is_ai\"]\n self.id_ = player_raw[\"id_\"]\n self.name = player_raw[\"name\"]\n self.executable = player_raw[\"executable\"]\n self.color = player_raw[\"color\"]\n self.time_per_game = time_per_player_game\n self.start_side = start_side\n self.board_size = board_size\n self.n_players = n_players\n self.coordinates = self.get_starting_coordinates(\n board_size, start_side)\n\n self.goals: List[Coordinates] = self.get_goal_states(\n self.coordinates, self.board_size)\n\n if self.is_ai and os.path.isfile(self.executable):\n self.proc: Optional[Popen] = self.start_ai(\n self.executable,\n self.n_players,\n self.board_size,\n self.time_per_game,\n self.start_side,\n )", "def start(self):\n self._state = STATE_INACTIVE\n self._game = None\n self._last_key_press = False\n self._last_n_press = False\n self._last_lose_life = False\n self._mssg = (GLabel(text=START_MSSG, x=GAME_WIDTH/2, y=GAME_HEIGHT/2, font_size=24))\n self.time = None\n self._points_mssg = None\n self._falling_points = []\n self._FP_mssg = None", "def initialize(self):\n self._startTime = time.time()", "async def start(self):\n await self.on_start()\n valid = await self.get_participants()\n\n if valid:\n await asyncio.sleep(1)\n await self.prepare()\n await self.game()\n\n del started[started.index(self.channel.id)]", "def __init__(self, min_player_count):\n self.min_player_count = min_player_count", "def __initPicks(self):\n picks = self.teamparser.getPlayerPicks()\n try:\n self.picks = 
picks[(self.team, self.position)] #players picks\n except KeyError, err:\n self.picks = []\n raise TypeError, \"Invalid Team/Position: \" + self.team", "def __init__(self):\n\n self.frameCount = 0\n self._initScreen()\n self._initObjects()\n self._initControls()\n self._initLevel()\n self._start()\n print \"DEBUG: Initializing Game\"\n pass", "def start_play(self, player1, player2, start_player=0, is_shown=1):\n if start_player not in (0, 1):\n raise Exception('start_player should be either 0 (player1 first) '\n 'or 1 (player2 first)')\n self.board.init_board(start_player)\n p1, p2 = self.board.players\n player1.set_player_ind(p1)\n player2.set_player_ind(p2)\n players = {p1: player1, p2: player2}\n if is_shown:\n self.graphic(self.board, player1.player, player2.player)\n while True:\n move = -1\n current_player = self.board.current_player\n player_in_turn = players[current_player]\n if len(self.board.availables) != 0:\n move = player_in_turn.get_action(self.board)\n self.board.do_move(move)\n if is_shown:\n self.graphic(self.board, player1.player, player2.player)\n end, winner = self.board.game_end()\n if end:\n if is_shown:\n if winner != -1:\n print(\"Game end. Winner is\", players[winner])\n else:\n print(\"Game end. Tie\")\n return winner", "def __init__(self):\n self.games = {} # Dict from gameIDs to game objects. Initially empty.\n self.players = {} # Dict from playerID to player name\n self._version = __version__ # Used in version check during un-pickling\n\n # Log initialization\n TournamentSystem._logger.debug(\"Initialized\")", "def start_21game(self):\n self.is_game_start = True\n self.already_has_a_winner = False\n self.player_point = {}\n self.generate_21game_number()\n self.boardcast(self.game_msg)", "def bcp_player_turn_start(self, player, **kwargs):\n\n if ((self.player and self.player.number != player) or\n not self.player):\n\n self.player = self.player_list[int(player)-1]", "def __init__(self, players, piles=None):\n self.players = players\n self.piles = piles if (piles != None) else [5, 5, 5, 5]\n self.nplayer = 1 # player 1 starts.", "def __init__(self, is_p1_turn, current_total):\n self.p1_turn = is_p1_turn\n self.current_total = current_total", "def __init__(self, start_time=None):\n if start_time is None:\n self.started = time.time()\n else:\n self.started = start_time", "def _prepare_start(self, client, player_id, game_info, game_data, realtime: bool = False):\n self._client: Client = client\n self.player_id: int = player_id\n self._game_info: GameInfo = game_info\n self._game_data: GameData = game_data\n self.realtime: bool = realtime", "def setUp(self):\n d = self.deck = TestDeck()\n self.game = test_setup.two_player_lead('Laborer', deck=d)\n self.p1, self.p2 = self.game.players", "def start_game(self) -> None:\n if self.started and not self.finished:\n self.finish_game()\n \n self.started = True\n self.finished = False\n\n self.game_count += 1\n self.games_list[self.game_index] = {\n \"total_kills\": 0,\n \"players\": [],\n \"kills\": {}\n }\n\n return", "def game_start(self):\r\n\t\tself._comm_server.broadcast_message(\"game-start\")\r\n\t\tself._is_game_started = True\r\n\t\tself._handlers[\"game-start\"].invoke()\r\n\t\t_logger.info(\"Game is started.\")", "def __init__(self):\n\n super().__init__()\n self.setup_janggi_game()\n self._game_state = 'UNFINISHED'\n self._player_turn = 'BLUE'", "def spawn_players(self) -> None:\n #Create the player\n self.player1 = Player(self.sensitivity, self.screen_width, self.screen_height, self.screen_width//(3/2), 
self.screen_height-50, self.player_lives, self.fps, self.player1_bullet, Direction.UP, self.debug)\n\n #Create the AI\n self.player2 = AIPlayer(self.sensitivity, self.screen_width, self.screen_height, self.screen_width//3, self.screen_height-50, self.player_lives, self.fps, self.player2_bullet, Direction.UP, 1, True, self.debug)", "def game_start():\n herolist = Hero_List(hots_db)\n heroclasses = []\n for item in herolist:\n heroclasses.append(Item(item, 'hero'))\n curgame = Game(Team('home'), Team('enemy'), Team('hero_pool', heroclasses), '')\n return curgame", "def set_plays(self) -> None:\n player1 = self._get_input('What is the name of player 1?')\n player2 = self._get_input('What is the name of player 2?')\n self.state = State(player1, player2)", "def __init__(self):\n self.players = {1: [\"Player_a\", \"\\u25CF\"], 2: [\"Player_b\", \"\\u25CB\"]}\n self.current_player = 1\n self.playing_player = self.players[1]\n self.grid = [[\" \"] * 6 for x in range(7)]", "def start_new_game(player1, player2):\n return {\n 'player1': \"X\",\n 'player2': \"O\",\n 'board': [\n [\"-\", \"-\", \"-\"],\n [\"-\", \"-\", \"-\"],\n [\"-\", \"-\", \"-\"],\n ],\n 'next_turn': \"X\",\n 'winner': None\n }", "def start(self):\n # Call the protected _turn method to start the game\n self._end_time = time.time() + 60\n self._turn()", "def game_initialise(self):\n super().game_initialise()\n #\n # It might be unnecessary that the terminate lock is a threading.Lock()\n # instance. An ordinary property might do just as well, because it only\n # gets set to True.\n self._terminateLock = threading.Lock()\n self._mainLock = threading.Lock()\n self._tickLock = threading.Lock()\n self._tickRaise = None\n self._skippedTicks = 0\n #\n # Reference time for when the game engine was started.\n self._gameInitialisePerf = time.perf_counter()\n self._tickPerf = 0.0", "def __init__(self, players):\n\n self._players = players\n self._current_player = players.get()", "def start(self, board, player1, player2):\n # Check to see who goes first\n if random.randint(0, 1) == 0:\n self.printGoFirst(player1)\n\n while True:\n # Make a move and check if that move wins/draws the game\n board.makeMove(player1.getNextMove(board, self.letter1), self.letter1)\n board.printBoard()\n if board.isWinner(self.letter1):\n self.printWinner(player1)\n break\n if board.isFull():\n print \"The game is drawn\"\n break\n\n # Make a move and check if that move wins/draws the game\n board.makeMove(player2.getNextMove(board, self.letter2), self.letter2)\n board.printBoard()\n if board.isWinner(self.letter2):\n self.printWinner(player2)\n break\n if board.isFull():\n print \"The game is drawn\"\n break\n else:\n self.printGoFirst(player2)\n\n while True:\n # Make a move and check if that move wins/draws the game\n board.makeMove(player2.getNextMove(board, self.letter1), self.letter1)\n board.printBoard()\n if board.isWinner(self.letter1):\n self.printWinner(player2)\n break\n if board.isFull():\n print \"The game is drawn\"\n break\n\n # Make a move and check if that move wins/draws the game\n board.makeMove(player1.getNextMove(board, self.letter2), self.letter2)\n board.printBoard()\n if board.isWinner(self.letter2):\n self.printWinner(player1)\n break\n if board.isFull():\n print \"The game is drawn\"\n break", "def reset(self, starting_player):\n self.__turn = starting_player.value\n self.__turns_played = 0\n self.__winner = MarkerType.NONE\n self.__loser = MarkerType.NONE\n\n for y in range(Settings.SIZE_Y):\n for x in range(Settings.SIZE_X):\n 
self.__grid[y][x] = MarkerType.NONE\n\n self.__state = GameState.PLAYING", "def start(self):\n self.timeStart = pygame.time.get_ticks()", "def set_player_start_position(self):\n if self.field_size.x() == 0: return\n \n parts = len(self.player_list)\n y_list = []\n for p in range(1,parts+1):\n y_list.append(self.field_size.y()*p/(parts+1))\n\n for i,p in enumerate(self.player_list):\n p1 = Qt.QPoint(self.start_y,y_list[i])\n p2 = Qt.QPoint(self.start_y+self.start_length,y_list[i])\n p.set_start_position([p1,p2])\n p.status_remove = False\n p.override_direction(0)", "def init_new_game(self):\n self.game = get_new_game(self.game_config)", "def at_start(self):\n if not self.db.started:\n self.player.start()\n self.db.started = True", "async def on_start(self):\n m = \"**{}** has started a game of {}! To participate, say `I`! **{} players needed.**\".format(\n self.message.author.display_name, self.name, self.num)\n await client.say(self.message, m)", "def start(self):\n self.playing = True\n self.table = table.Table(self.players)", "def __init__(self):\n super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)\n self.player_count: int = None\n self.player_hand_0: arcade.SpriteList = None\n self.player_hand_1: arcade.SpriteList = None\n self.deck: arcade.SpriteList = None\n self.pile: arcade.SpriteList = None", "def __init__(self):\r\n self.players = {}", "def initGameState(self):\n print(\"Setting game state: \")\n self.playGUI = GUI()\n self.playGUI.drawBoard(self.player)", "def __init__(self):\n self.deck = Deck()\n self.player1 = Player(INITIAL_CHIPS)\n self.player2 = Player(INITIAL_CHIPS)\n self.flop = []\n self.turn = None\n self.river = None\n\n self.this_player = self.player2\n self.other_player = self.player1 # 一局开始前会对换一次玩家\n\n self.last_action = None", "def __init__(self):\n # Current player\n self.player = X\n\n # Board\n self.board = [\n [None, None, None],\n [None, None, None],\n [None, None, None]\n ]\n\n # Winner\n self.winner = None\n\n # Game over\n self._gameover = False", "def __init__(self, players):\n\n # Instantiate a Players object with the players queue\n self._players = Players(players)\n # Instantiate the Die to be used for the current game\n self._die = Die()\n # Track the game status\n self._active_turn = True\n self._end_game = False", "def __init__(self):\r\n threading.Thread.__init__(self)\r\n self.wait_to_start = True\r\n self.id = 0\r\n self.players = None\r\n self.socket = None", "def __init__(self):\n self.numShipsPerPlayer = 0\n self.playerType = 1 # Whether P2 is a human (1) or AI (2-4 for difficulty)\n \n self.grid = Grid()\n self.shipDir = 0 # Direction of the ship currently being placed (index of c.DIRS)\n self.lenShip = 1 # Length of the ship to place next\n \n self.p1Ships = []\n self.p2Ships = []\n \n # Number of special shots each player has (gain one every 10 rounds)\n self.round = 0\n self.p1_special_shots = 0\n self.p2_special_shots = 0\n \n self.is_P1_turn = False\n self.is_placing = False\n self.is_shooting = False\n self.in_transition = False\n \n self.msg = \"\" # Message to display below game board", "def start_game(self):\n self._add_mines()", "def start(self):\n\t\tself._start = time.clock()\n\t\tif self._initial is None:\n\t\t\tself._initial = self._start\n\t\treturn self", "def initialize_game_params(self):\r\n\r\n self.is_new_game = True\r\n self.is_game_over = False\r\n self.is_game_lost = False\r\n self.is_left_mouse_down = False\r\n self.is_right_mouse_down = False\r\n self.num_of_hidden_non_mines_tiles = self.rows * self.cols - 
self.num_of_mines", "def start(self):\n \n if self.started:\n return\n \n self.clock_time = 0.\n self.virtual_time = 0.\n self.game_time = 0.\n self.game_frame_count = 0\n self.real_time_passed = 0.\n \n self.real_time = self.get_real_time()\n self.started = True\n \n self.fps = 0.0\n self.fps_sample_start_time = self.real_time\n self.fps_sample_count = 0", "def init_players(self):\n complain = \"\"\n players_turn = random.sample(range(self.n_players), self.n_players)\n players_created = {}\n picked_colors = []\n for x in range(self.n_players):\n while True:\n clear_output()\n try:\n color = input(\n f\"{complain}Player {x+1}, please type in one of the following colors: ({', '.join([x.capitalize() for x in self.world.player_colors if x not in picked_colors])}):\\n\").lower()\n if color in self.world.player_colors and color not in picked_colors:\n picked_colors.append(color)\n players_created[players_turn[x]] = Player(\n color.capitalize(), self.start_troops)\n break\n else:\n complain = \"Please enter a valid color\\n\"\n except:\n pass\n\n self.players = [players_created[y] for x in range(\n self.n_players) for y in players_created.keys() if int(y) == x]", "def init_players(self):\n self.spaceships.append(self.player)\n SoundManager.add_sfx(\n self.player.states['exploded']['sfx'],\n self.player\n )", "def initialise(self):\n # Can take quite a lot of time due to the homing\n print(\"Initialising spectrograph.\")\n err = self._dll.ShamrockInitialize()\n self.status(\"Initialisation\", err)", "def start_game(self):\n self.board = Board(num_tableaus=self.tableau_qty, num_decks=self.decks, deal_3=self.deal_3)\n self.board.init_move_dict()\n self.board.deal(self.deck)\n\n if self.api_use:\n self.init_game_api()\n elif self.commandline:\n self.init_cl_game()\n else:\n self.init_pygame()", "def start(self):\n running = True\n while running:\n k=self.Game.playgame()\n if k=='Exit':\n running = False\n continue\n elif k=='resume':\n continue\n elif k=='GameOver':\n o=self.gameover()\n if o=='newgame':\n self.Game=Game(self.Display)\n else:\n running = False\n while k=='Won':\n o=self.won()\n if o=='newgame':\n self.Game=Game(self.Display)\n break\n elif o==\"Exit\":\n output = self.Game.popup()\n if output == 'resume':\n self.Game.GameBoard.display()\n continue\n else:\n running = True\n break", "def prep_game1(self):\n game_str1 = str(self.stats.player1_game)\n self.game_image1 = self.font.render(game_str1, True, self.text_colour, self.settings.bg_colour)\n\n # Position the level to the bottom of the screen\n self.game_rect1 = self.game_image1.get_rect()\n width, height = self.game_rect1.size\n self.game_rect1.x = self.settings.screen_width/3\n self.game_rect1.y = self.settings.screen_height - height", "def maybe_start(self):\r\n\t\tif not [p for p in self.players if not p.ready]\\\r\n\t\t and len(self.players) == self.max_players \\\r\n\t\t and not self.started:\r\n\t\t\tself.start()", "def start(self, player: Player) -> Game:\n\n board_payload = dict(rows=self.rows, cols=self.cols)\n initial_slots = self._get_initial_slots(**board_payload)\n board_db = self.repo.boards.add(\n {**board_payload, \"slots\": initial_slots, \"mines\": 0}\n )\n board = Board.from_orm(board_db)\n\n board.set_mines(mines=self.mines)\n board_db = self.repo.boards.update(board_db, board)\n\n game_payload = dict(\n player_id=player.id,\n board_id=board.id,\n status=GameStatusEnum.ongoing,\n start_time=datetime.utcnow(),\n )\n game_db = self.repo.games.add(game_payload)\n game = Game.from_orm(game_db)\n return game", "def 
init_player():\n global active_track_idx\n global track_last_slided_pos\n global track_last_paused_pos\n global track_total_play_time \n\n # INITIALIZE Player\n active_track_idx = -1\n cancel_update_play_time_loop()\n cancel_track_end_event_loop()\n track_status.set(\"---\")\n track_title.set(\"--- : \")\n play_pause_btn.configure(image=play_img)\n track_last_slided_pos = 0\n track_last_paused_pos = 0\n track_total_play_time = 0\n track_pos_label.configure(text=\"00:00\")\n track_length_label.configure(text=\"00:00\")\n track_pos_slider.configure(state=\"disabled\")\n track_pos.set(0)", "def __init__(self):\n self.name = None\n self.time_stamp = time()\n self.last_win = None\n self.wins = 0", "def __init__(self, initial=[1, 3, 5, 7]):\n self.piles = initial.copy()\n self.player = 0\n self.winner = None", "def __init__(self, initial=[1, 3, 5, 7]):\n self.piles = initial.copy()\n self.player = 0\n self.winner = None", "def initial_move(self):\n\n # Make the first move based on the game we\n # are currently playing, otherwise return\n if isinstance(self.get_game_space(), Gomoku):\n\n # play one stone in the bottom left-hand corner\n self.get_game_space().set_tile(0,6,self.get_affinity())\n\n # the agents are now in play \n self.set_play_status(True)\n self.get_opponent().set_play_status(True)\n\n else:\n print('Unknown game. Returning')\n return None", "def startup(self):\n self.prev_gray = None\n self.frame_idx = 1\n self.tracks = []\n self.fps = []\n self.vid_info = None\n self.track_new_points_count = 0" ]
[ "0.6787327", "0.64845306", "0.64357686", "0.6349507", "0.62722546", "0.626561", "0.6244119", "0.61926365", "0.61749655", "0.61128175", "0.6096133", "0.6081823", "0.606986", "0.6065711", "0.6050611", "0.5989051", "0.59888893", "0.59368724", "0.592886", "0.5927789", "0.59158814", "0.5914303", "0.5874678", "0.5868876", "0.5853849", "0.5851166", "0.5850582", "0.5842807", "0.5837797", "0.5831502", "0.5817212", "0.58074933", "0.57977486", "0.57952875", "0.57936114", "0.5789931", "0.57886076", "0.5786284", "0.5780355", "0.5778418", "0.57759595", "0.5770308", "0.5769199", "0.57684404", "0.5754577", "0.57509786", "0.5743635", "0.57414883", "0.5737975", "0.5729662", "0.5723277", "0.57120204", "0.57052463", "0.56894577", "0.568942", "0.5687262", "0.5686119", "0.5681899", "0.5649867", "0.56464803", "0.5639712", "0.56094325", "0.56069815", "0.5606245", "0.5605215", "0.55893475", "0.5588507", "0.5585616", "0.5581774", "0.55694896", "0.5567104", "0.55657476", "0.55620205", "0.5553403", "0.5549834", "0.5537477", "0.5532297", "0.5520924", "0.55160004", "0.5507507", "0.5504904", "0.5504579", "0.55043346", "0.55013174", "0.5496003", "0.5492302", "0.54846543", "0.54787046", "0.5463119", "0.54612446", "0.545009", "0.5438682", "0.5438471", "0.54351896", "0.54310876", "0.54278344", "0.54256177", "0.54256177", "0.54213345", "0.5418299" ]
0.5549503
75
Return the instructions for this Game.
def get_instructions(self) -> str:
    instruction = "Duck need to fill this blank _____, which I have no idea what it is #$%^&*"
    return instruction
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_instructions(self) -> str:\n instructions = \"Players take turns to occupy available positions \" \\\n \"on the \" \\\n \"board. Once half or more of a ley-line has been \" \\\n \"occupied\" \\\n \"one player, that ley-line is entirely captured by \" \\\n \"said player. The winner is the person who captures \" \\\n \"half\" \\\n \"or more of the ley-lines first.\"\n return instructions", "def get_instructions(self):\n return \"A non-negative whole number is chosen as the starting \\n\" \\\n \"valueby some neutral entity. In our case, a player will \\n\" \\\n \"choose it (i.e. through the use of input. The player whose \\n\" \\\n \"turn it is chooses some square of a positive whole number (\\n\" \\\n \"such as 1, 4, 9, 16, . . . ) to subtract from the \\n\" \\\n \"value, provided the chosen square is not larger. After \\n\" \\\n \"subtracting, we have a new value and the next player \\n\" \\\n \"chooses a square to ubtract from it. Play continues\\n\" \\\n \" to alternate between the two players until no moves are\\n\" \\\n \" possible. Whoever is about to play at that point loses!\"", "def getInstructions(self):\n\t\treturn \"\"", "def instructions(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"instructions\")", "def instructions(self) -> List[str]:\n return list(self._map.keys())", "def get_instructions(self):\n tmp_ins = []\n idx = 0\n for i in self.method.get_instructions():\n if idx >= self.start and idx < self.end:\n tmp_ins.append(i)\n\n idx += i.get_length()\n return tmp_ins", "def instructions(self):\n return \"\\n\".join(\n [i for i in self.schema.instructions().split(\"\\n\") if i != \"Oppskrift\"]\n )", "def plot_instructions(self):\n return self.__plot_instructions", "def print_instructions(self):\n\t\tprint('\\n\\n==========================================================================')\n\t\tprint('==========================================================================\\n')\n\t\tprint('Welcome to Tic Tac Toe, the came you know and love. \\nThe rules are the same ones you know and love. \\nTo make a move just type the coordinates of the spot like so - row,column. \\nNo spaces please! Lets go ahead and start! Here is a picuter of the board with some coordinates just in case!\\n')\n\t\tprint('=====================')\n\t\tprint('|| 0,0 | 0,1 | 0,2 ||')\n\t\tprint(' -----------------')\n\t\tprint('|| 1,0 | 1,1 | 1,2 ||')\n\t\tprint(' -----------------')\n\t\tprint('|| 2,0 | 2,1 | 2,2 ||')\n\t\tprint('=====================')\n\t\tprint('\\n==========================================================================')\n\t\tprint('==========================================================================\\n\\n')", "def showInstructions():\n print(\"\"\"\n RPG Game\n ========\n Commands:\n go [direction]\n get [item]\n\n\t\"\"\")", "def get_instructions(prog):\n insts = {}\n for i in range(prog.InstructionCount()):\n insts[i] = prog.setParam(i)\n return insts", "def instructions():\n\t\n\tprint \\\n\t\"\"\"\n\tToday we will play the perennial favorite game of...\n\tRock! Paper!! Scissors!!!.\n\tThe objective of the game is to outthink your opponent (in this case me) and defeat.\n\tThe rules are very simple\n\t1. Paper covers the Rock\n\t2. Rock breaks the Scissors\n\t3. Scissors cut the Paper\n\t\n\tChoose your move from the following:\n\t1. Paper (p)\n\t2. Rock (r)\n\t3. Scissors (s)\n\t\n\tAre you ready? 
Alright then, let's play...\n\t\"\"\"", "def draw_instructions_page(self):\n arcade.draw_rectangle_filled(SCREEN_WIDTH // 2, SCREEN_HEIGHT // 2,\n SCREEN_WIDTH,\n SCREEN_HEIGHT + 1000, arcade.color.BLACK)\n\n # title, instructions\n arcade.draw_rectangle_filled(670, 570, 1035, 100, arcade.color.RED)\n arcade.draw_rectangle_filled(450, 350, 470, 250, arcade.color.WHITE)\n arcade.draw_rectangle_filled(450, 350, 447, 230, arcade.color.BLACK)\n arcade.draw_text(\"STREET RACER XTREME\", 168, 525, arcade.color.BLACK, 85)\n arcade.draw_text(\"CLICK TO START GAME!\", 760, 330, arcade.color.WHITE, 35)\n arcade.draw_text(\"Coins are 10 points each\", 320, 400, arcade.color.WHITE, 20)\n arcade.draw_text(\"Press space to use nitrous\", 320, 340, arcade.color.WHITE, 20)\n arcade.draw_text(\"Move with the arrow keys\", 320, 280, arcade.color.WHITE, 20)\n arcade.draw_text(\"!! DONT CRASH INTO ANYBODY !!\", 350, 100, arcade.color.RED, 40)", "def get_current_instruction(self) -> Dict:\n\n instructions = self.environment.code.instruction_list\n return instructions[self.mstate.pc]", "def instructions(self):\n yield self.inst\n for inst in self.arguments[:]:\n yield inst\n for basic_block in self.basic_blocks[:]:\n if basic_block.function is not None:\n yield basic_block.inst\n for inst in basic_block.insts[:]:\n yield inst\n yield self.end_inst", "def instructions():\n\n instructions_text = 'Here is how to use this calculator:<br>'\n instructions_text += 'http://localhost:8080/ => These instructions<br>'\n instructions_text += 'To add: http://localhost:8080/add/23/42 => 65<br>'\n instructions_text += 'To subtract: http://localhost:8080/subtract/23/42 => -19<br>'\n instructions_text += 'To multiply: http://localhost:8080/multiply/3/5 => 15<br>'\n instructions_text += 'To divide: http://localhost:8080/divide/22/11 => 2'\n\n return instructions_text", "def make_instructions(self):\n #de, aux, vers = self.rods\n de, aux, vers = 0, 1, 2\n n = self.num_rings\n\n self.recur(n, de, aux, vers)\n\n ### Add dummy tuple at end so I can look one move ahead on states\n self.instructions.append((0, 0, 0))", "def get_alt_commands(self):\n return self.altcmds", "def get_commands():\n return \"Commands:\\n 1 [Find shortest path between given cities]\\n 2 [Find shortest path between random cities]\\n 3 [See list of cities]\\n 4 [Close application]\\n\"", "def instruction_names(self):\n return list(self.instruction_pool.keys())", "def instruct(self):\n\t\tretval = \" `% Instructions\\r\\n\"\n\t\tretval += \"`0-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-\\r\\n\"\n\t\tretval += \" `9WELCOME TO THE ADVENTURE OF A LIFETIME!\\r\\n\\r\\n\"\n\t\tretval += \" `0** `9Full Multi-Node Support.\\r\\n\"\n\t\tretval += \" `0** `9This game is FINISHABLE! (If the sysop chooses)\\r\\n\"\n\t\tretval += \" `0** `9Real Time Online Messages And Battles.\\r\\n\"\n\t\tretval += \" `0** `9Marrage And other 'Real Life' Options.\\r\\n\"\n\t\tretval += \" `0** `9RIP & In-Game Downloading Of Icons File Support. (Both are Auto Detect)\\r\\n\"\n\t\tretval += \" `0** `9Auto Reincarnation If A Player Is Dead For Two Days.\\r\\n\\r\\n\\r\\n\"\n\t\tretval += \" `2This is multi player battle game, created for BBS's, it is the\\r\\n\"\n\t\tretval += \" `2type of game where you kill other players, get stronger and stronger\\r\\n\"\n\t\tretval += \" `2and your number one goal is to stay #1 in the player rankings! 
Of\\r\\n\"\n\t\tretval += \" `2course, killing the Dreaded Red Dragon will make you a hero, and your\\r\\n\"\n\t\tretval += \" `2name will be immortalized in the Hall Of Honor.\\r\\n\\r\\n\"\n\t\tretval += \" `2Each day, you are given a certain amount of fights per day, once you\\r\\n\"\n\t\tretval += \" `2use them, you can no longer do battle that day, you must call back\\r\\n\"\n\t\tretval += \" `2the NEXT day to be 'refilled'.\\r\\n\\r\\n\"\n\t\tretval += \" `2Stay at the Inn, and you will be safe from `0MOST`2 attackers...If they\\r\\n\"\n\t\tretval += \" `2want to kill you bad enough, they may find a way...However costly.\\r\\n\\r\\n\"\n\t\tretval += \" `2Be sure to buy better armour and weapons when possible, it really makes\\r\\n\"\n\t\tretval += \" `2a LARGE difference. \\r\\n\\r\\n\"\n\t\tretval += \" `2Be sure to take advantage of the advanced mail writing functions\\r\\n\"\n\t\tretval += \" `2avaible, they are very fast and easy to use, and you will have LOADS\\r\\n\"\n\t\tretval += \" `2more fun when you get to `0KNOW`2 who you are killing!\\r\\n\\r\\n\"\n\t\tretval += \" `2Particapate in conversation at The Bar, interacting with real people\\r\\n\"\n\t\tretval += \" `2is what makes BBS games so enjoyable, and this game is loaded with ways\\r\\n\"\n\t\tretval += \" `2to do that... From insulting people in the Daily Happenings, to \\r\\n\"\n\t\tretval += \" `2slaughtering them in cold blood, then sending them mail gloating over\\r\\n\"\n\t\tretval += \" `2the victory, this game will let you have some fun!\\r\\n\"\n\t\tretval += \" `2The game is pretty self explanatory, so I will let you, the player, \\r\\n\"\n\t\tretval += \" `2explore on your own. Just hit '`0?`2' when you're not sure, and you will\\r\\n\"\n\t\tretval += \" `2get a menu. For starters, try visiting the Inn.\\r\\n\"\n\t\tretval += \" `2If you are male, try your hand at Flirting with Violet...If you\\r\\n\"\n\t\tretval += \" `2are female, you can try your luck with The Bard.\\r\\n\\r\\n\"\n\t\tretval += \" `2If someone else attacks you and loses, you will get the experience\\r\\n\"\n\t\tretval += \" `2just as if you killed them yourself. 
(You will be mailed on the\\r\\n\"\n\t\tretval += \" `2details of the battle)\\r\\n\\r\\n\"\n\t\tretval += \" `9NOTE: This game contains some mature subject matter.\\r\\n\\r\\n\"\n\t\tretval += \" `0GOOD LUCK AND HAPPY GAMING!`9\\r\\n\"\n\t\treturn retval", "def _extract_instructions(self, xmltree):\r\n return get_instructions(xmltree)", "def _extract_instructions(self, xmltree):\r\n return get_instructions(xmltree)", "def _extract_instructions(self, xmltree):\r\n return get_instructions(xmltree)", "def instruction_iter(self):\n for ins in self.instructions:\n yield ins", "def instructions():\n running = True\n while running:\n for evnt in event.get():\n if evnt.type == KEYDOWN:\n if evnt.key == K_RETURN:\n return \"menu\"\n if evnt.type == QUIT:\n return \"exit\"\n # Drawing all of the instructions screen text and background\n screen.blit(backgroundPics[0],(0,0))\n screen.blit(instructHelp,(235,40))\n screen.blit(moveRightHelp,(80,130))\n screen.blit(moveLeftHelp,(80,170))\n screen.blit(jumpHelp,(80,210))\n screen.blit(crouchHelp,(80,250))\n screen.blit(pauseHelp,(80,290))\n screen.blit(musicPauseHelp,(80,330))\n screen.blit(backTextHelp,(650,450))\n screen.blit(titleSelect,(610,445))\n screen.blit(brickSprites[0][3], (375,400))\n display.flip()\n fpsCounter.tick(60)\n return \"menu\"", "def instructions(self):\n for inst in self.global_insts[:]:\n yield inst\n for function in self.functions[:]:\n for inst in function.instructions():\n yield inst", "def printInstructions(self):\n print(\"\"\"•\tAim of the Game is to be the first to lose all of your chips\n•\tPlayers are put in order of the lowest to \nhighest based on their first roll\n(This is done automatically when you enter your name)\n• You start out with 5 chips.\n• When it is your turn you roll the die.\n\\t•\tIf the space with the same number as the die is empty (value of 0),\n\\t\\tput a chip there.\n\\t•\tbut if there already is a chip there (value of 1), you must take it.\n\\t•\tIf you roll a 6, you always put one of your chips on the space number 6 – \n\\t\\tregardless of how many chips are there already. \n\\t\\tChips on space number 6 are out of the game,\n\\t\\tand you never pick these up again.\n\"\"\")", "def instruction():\n print('- - - - - - - - - - - - - - - - - - - - -')\n print(\"this is instruction for tic tac toe game\".upper())\n print('- - - - - - - - - - - - - - - - - - - - -')\n print('This is game for two players')\n print('Each player can choose a number between 1 and 9')\n print('Numbers represent the fields on the board')\n print('You can choose only numbers that are not taken by any player')\n list_of_symbols = ['1', '2', '3', '4', '5', '6', '7', '8', '9']\n print_board(list_of_symbols)\n print('You win the game if you have 3 symbols in column, row or diagonally')\n print('- - - - - - - - - - - - - - - - - - - - -')\n\n begin_game()", "def instructions():\n txt_naming = ConfigReader.texture_naming_dict()\n\n text = \"<b>Texture naming rules:</b><br>(put an underscore _ at the end of file name; \" \\\n \"you can enumerate textures using two digits after texture type without any other character\" \\\n \"<br> e.g. 
_normal01 or in general _normalXX)\" \\\n \"<br>\"\n\n for key, value in txt_naming.iteritems():\n text += \"<br>- {0}: {1}\".format(key, ', '.join(a for a in value['text']))\n\n text += \"<br>\"\n text += \"<br><b>File formats:</b>\"\n text += \"<br>Meshes:\"\n text += ConfigReader.generate_file_filter()\n text += \"<br>Textures:\"\n text += ConfigReader.generate_texture_filter()\n\n return text", "def __repr__(self):\n s = \"\"\n for y in range(0,HEIGHT):\n temp=\"\"\n for x in range(0,WIDTH):\n temp = temp+ str(self.gameState[x,y])\n s += temp+\"\\n\"\n return s", "def GetInstructionList():\n return [i[0] for i in ida_idp.ph_get_instruc() if i[0]]", "def actions(self):\r\n return self.puzzle.actions", "def get_possible_exploit(self):\n return 'exploit', self.exploit.run()", "def instruction_steps(self) -> Sequence['outputs.CodelessUiConnectorConfigPropertiesResponseInstructionSteps']:\n return pulumi.get(self, \"instruction_steps\")", "def getCommands(self):", "def getGameState(self):\n peg1 = ()\n peg2 = ()\n peg3 = ()\n onStatement = Statement()\n onTerm1 = Term('?x')\n onTerm2 = Term('?y')\n onStatement.terms = (onTerm1, onTerm2)\n onStatement.predicate = 'on'\n for fact in self.kb.facts:\n if match(fact.statement, onStatement):\n if fact.statement.terms[0] == Term(Constant('disk1')):\n disk = 1\n elif fact.statement.terms[0] == Term(Constant('disk2')):\n disk = 2\n elif fact.statement.terms[0] == Term(Constant('disk3')):\n disk = 3\n elif fact.statement.terms[0] == Term(Constant('disk4')):\n disk = 4\n elif fact.statement.terms[0] == Term(Constant('disk5')):\n disk = 5\n if fact.statement.terms[1] == Term(Constant('peg1')):\n peg1 = peg1 + (disk,)\n elif fact.statement.terms[1] == Term(Constant('peg2')):\n peg2 = peg2 + (disk,)\n elif fact.statement.terms[1] == Term(Constant('peg3')):\n peg3 = peg3 + (disk,)\n\n peg1 = tuple(sorted(peg1))\n peg2 = tuple(sorted(peg2))\n peg3 = tuple(sorted(peg3))\n result = (peg1, peg2, peg3)\n return result\n ### student code goes here", "def __str__(self):\n\n instructions = []\n if self.directory:\n instructions.append(comment('OpenMPI'))\n else:\n instructions.append(comment(\n 'OpenMPI version {}'.format(self.version)))\n instructions.append(packages(ospackages=self.__ospackages))\n if self.directory:\n # Use source from local build context\n instructions.append(\n copy(src=self.directory,\n dest=os.path.join(self.__wd, self.directory)))\n instructions.append(shell(commands=self.__commands))\n if self.__environment_variables:\n instructions.append(environment(\n variables=self.__environment_variables))\n\n return '\\n'.join(str(x) for x in instructions)", "def __repr__(self):\n return \"\\nSprite info: \" + self.name + \"\\nx = {0}\\ny = {1}\\nhealth = {2}\\nstrength = {3}\\nloot = {4}\\n\"\\\n .format(self.x, self.y, self.health, self.strength, self.loot)", "def draw_instruction():\r\n arcade.draw_text(\r\n \"This is a game of Santa, Reindeer, Snowman\", 0, 50, arcade.color.WHITE, 15\r\n )\r\n arcade.draw_text(\r\n \"Santa beats snowman, snowman beats reindeer, reindeer beats santa\",\r\n 0,\r\n 30,\r\n arcade.color.WHITE,\r\n 13,\r\n )\r\n arcade.draw_text(\r\n \"Press button 1 for santa, 2 for reindeer, and 3 for snowman\",\r\n 0,\r\n 10,\r\n arcade.color.WHITE,\r\n 15,\r\n )\r\n arcade.draw_text(\r\n \"User Choice\", WINDOW_WIDTH - 175, WINDOW_HEIGHT - 60, arcade.color.WHITE, 15\r\n )\r\n arcade.draw_text(\"CPU Choice\", 75, WINDOW_HEIGHT - 60, arcade.color.WHITE, 15)", "def __repr__(self):\n s = ''\n nums = (self.mines + 1) * self.clicked\n 
for row in range(0, self.rows):\n for col in range(0, self.cols):\n num = nums[row, col]\n if self.isFlagged(row, col):\n s += str(Board.FLAGGED) + ' '\n elif num == 0:\n s += str(Board.HIDDEN) + ' '\n else:\n if self.isMine(row, col):\n s += str(Board.MINE) + ' '\n else:\n s += str(int(num - 1)) + ' '\n s += '\\n'\n return s", "def get_instructions(self, robot):\n d('ROBO_%s requires his new instructions' % str(robot.id))\n self.instructions_lock.acquire()\n try:\n d('Deciding new instruction for ROBO_%s' % str(robot.id))\n if self.input_users:\n input_user = self.input_users.pop()\n d('Do _login(username, password)')\n self.expect_target_ids = True\n robot.perform('_login', (input_user['username'].rstrip(), input_user['password'].rstrip()))\n \n elif self.expect_target_ids:\n d('Expecting target ids')\n if not len(self.users_queue.pending_pages):\n d('Waiting for target ids')\n with self.target_ids_cond:\n self.target_ids_cond.wait()\n \n page_to_visit = self.users_queue.pop()\n robot.perform('_visit', (page_to_visit,))\n \n else:\n robot.perform('end_robot', ())\n\n d('Finish Deciding for ROBO_%s' % str(robot.id))\n\n except:\n traceback.print_exc()\n return", "def get_equipment(self):\n s = ''\n for i in range(12, 16):\n s += ' ' + str(self.dna[i])\n return s", "def instruction():\n return render_template(\n 'instruction.html',\n title='说明',\n year=datetime.now().year,\n message='Instruction'\n )", "def _get_mnemonic(self, opcode):\n try:\n return {\n # Math Operations: +, -. *. /.\n 0x0: ('ADD', 'W'),\n 0x1: ('SUB', 'W'),\n 0x2: ('MUL', 'W'),\n 0x3: ('IDIV', 'W'),\n # Stack operations\n 0x4: ('PUSH', 'W'),\n 0x5: ('POP', '-'),\n 0x6: ('DUP', '-'),\n 0x7: ('STCK_RD', 'W'),\n 0x8: ('STCK_WR', 'W'),\n # Global data (VM program input) operatiaons\n 0x9: ('LDR', 'W'),\n 0xA: ('STR', 'W'),\n # Jumps and syscalls (SVCs)\n 0xB: ('JZ', 'W'),\n 0xC: ('JMP', 'W'),\n 0xD: ('SVC', 'W'),\n # Miscellaneous\n 0xE: ('SPNLCK', '-'),\n 0xF: ('UNKNWN', '-')\n }[opcode]\n except KeyError:\n raise Exception(f'Illegal instruction with opcode: {opcode:X}h')", "def move_info(self):\n print(\"\\n\" + self.name + \" Moves: \")\n\n #Light attack\n print(\"-- \" + self.moves[0] + \" --\")\n print(\"\\tAn efficient attack...\")\n print(\"\\tGuaranteed to do damage within a range of 15 to 25 damage points.\")\n #Heavy attack\n print(\"-- \" + self.moves[1] + \" --\")\n print(\"\\tAn risky attack...\")\n print(\"\\tCould deal damage up to 50 damage points or as little as 0 damage points.\")\n #Restore move\n print(\"-- \" + self.moves[2] + \" --\")\n print(\"\\tA restorative move...\")\n print(\"\\tGuaranteed to heal your Pykemon 15 to 25 damage points.\")\n #Special attack\n print(\"-- \" + self.moves[3] + \" --\")\n print(\"\\tA powerful GRASS based attack...\")\n print(\"\\tGuaranteed to deal MASSIVE damage to WATER type Pykemon.\")", "def __str__(self):\n s=\"\"\n for y in range(0,HEIGHT):\n for x in range(0,WIDTH):\n s+=str(self.gameState[x,y])\n return s", "def get_commands(self):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\ts = ''\n\t\tfor c in self.build['shutit_command_history']:\n\t\t\tif isinstance(c, str):\n\t\t\t\t#Ignore commands with leading spaces\n\t\t\t\tif c and c[0] != ' ':\n\t\t\t\t\ts += c + '\\n'\n\t\treturn s", "def move_info(self):\n print(\"\\n\" + self.name + \" Moves: \")\n\n #Light attack\n print(\"-- \" + self.moves[0] + \" --\")\n print(\"\\tAn efficient attack...\")\n print(\"\\tGuaranteed to do damage within a range of 15 to 25 damage points.\")\n #Heavy attack\n 
print(\"-- \" + self.moves[1] + \" --\")\n print(\"\\tAn risky attack...\")\n print(\"\\tCould deal damage up to 50 damage points or as little as 0 damage points.\")\n #Restore move\n print(\"-- \" + self.moves[2] + \" --\")\n print(\"\\tA restorative move...\")\n print(\"\\tGuaranteed to heal your Pykemon 15 to 25 damage points.\")\n #Special attack\n print(\"-- \" + self.moves[3] + \" --\")\n print(\"\\tA powerful WATER based attack...\")\n print(\"\\tGuaranteed to deal MASSIVE damage to FIRE type Pykemon.\")", "def move_info(self):\n print(\"\\n\" + self.name + \" Moves: \")\n\n #Light attack\n print(\"-- \" + self.moves[0] + \" --\")\n print(\"\\tAn efficient attack...\")\n print(\"\\tGuaranteed to do damage within a range of 15 to 25 damage points.\")\n #Heavy attack\n print(\"-- \" + self.moves[1] + \" --\")\n print(\"\\tAn risky attack...\")\n print(\"\\tCould deal damage up to 50 damage points or as little as 0 damage points.\")\n #Restore move\n print(\"-- \" + self.moves[2] + \" --\")\n print(\"\\tA restorative move...\")\n print(\"\\tGuaranteed to heal your Pykemon 15 to 25 damage points.\")\n #Special attack\n print(\"-- \" + self.moves[3] + \" --\")\n print(\"\\tA powerful FIRE based attack...\")\n print(\"\\tGuaranteed to deal MASSIVE damage to GRASS type Pykemon.\")", "def __instructions(self):\n\n self += comment('Arm Allinea Studio version {}'.format(self.__version))\n\n if self.__ospackages:\n self += packages(ospackages=self.__ospackages)\n\n if self.__tarball:\n self += copy(src=self.__tarball, dest=self.__wd)\n\n self += shell(commands=self.__commands)\n self += environment(variables=self.environment_step())", "def options(self):\n if self._state == GameState.PLAY_OR_DRAW:\n return [NopAction(), DrawAction()] + self._play_options()\n elif self._state == GameState.PLAY:\n return [NopAction()] + self._play_options()\n elif self._state == GameState.PLAY_DRAWN:\n res = [NopAction()]\n if self._can_play(self._current_hand()[-1]):\n res += [PlayCardAction(len(self._current_hand()) - 1)]\n return res\n elif self._state == GameState.PICK_COLOR or self._state == GameState.PICK_COLOR_INIT:\n return [PickColorAction(c) for c in [Color.RED, Color.ORANGE, Color.GREEN, Color.BLUE]]\n elif self._state == GameState.CHALLENGE_VALID or self._state == GameState.CHALLENGE_INVALID:\n return [NopAction(), ChallengeAction()]\n raise RuntimeError('invalid state')", "def to_asm(self) -> str:\n try:\n build_instruction = self._handlers_map()[self.segment]\n return clean_instructions(build_instruction())\n except KeyError:\n pass\n try:\n build_instruction = self._handlers_map()[self.cmd]\n return clean_instructions(build_instruction())\n except KeyError:\n raise ValueError(\"Unsupported command.\")", "def cmdline(self, args=()):\r\n cmds = [self._interpreter.binary]\r\n cmds.append(self._pex)\r\n cmds.extend(args)\r\n return cmds", "def get_map_instructions(start_lng, start_lat, end_lng, end_lat):\r\n directions_resp = requests.get(\r\n f\"https://api.mapbox.com/directions/v5/mapbox/walking/{start_lng},{start_lat};{end_lng},{end_lat}\",\r\n params={\r\n \"access_token\": MAPBOX_TOKEN,\r\n \"geometries\": \"geojson\",\r\n \"steps\": \"true\",\r\n \"alternatives\": \"true\",\r\n },\r\n )\r\n instructions=[]\r\n for step in directions_resp.json()['routes'][0]['legs'][0]['steps']:\r\n instructions.append(f\"{step['maneuver']['instruction']}\")\r\n #listToStr = '<br>'.join(map(str, instruction))\r\n return instructions", "def getGameState(self):\n row1 = [0, 0, 0]\n row2 = [0, 0, 0]\n row3 = [0, 0, 
0]\n tilePosStatement = Statement()\n posTerm1 = Term('?x')\n posTerm2 = Term('?y')\n posTerm3 = Term('?tile')\n tilePosStatement.terms = (posTerm1, posTerm2, posTerm3)\n tilePosStatement.predicate = 'tilePos'\n for fact in self.kb.facts:\n if match(fact.statement, tilePosStatement):\n if fact.statement.terms[2] == Term(Constant('tile1')):\n term = 1\n if fact.statement.terms[2] == Term(Constant('tile2')):\n term = 2\n if fact.statement.terms[2] == Term(Constant('tile3')):\n term = 3\n if fact.statement.terms[2] == Term(Constant('tile4')):\n term = 4\n if fact.statement.terms[2] == Term(Constant('tile5')):\n term = 5\n if fact.statement.terms[2] == Term(Constant('tile6')):\n term = 6\n if fact.statement.terms[2] == Term(Constant('tile7')):\n term = 7\n if fact.statement.terms[2] == Term(Constant('tile8')):\n term = 8\n if fact.statement.terms[2] == Term(Constant('empty')):\n term = -1\n if fact.statement.terms[0] == Term(Constant('pos1')):\n col = 0\n elif fact.statement.terms[0] == Term(Constant('pos2')):\n col = 1\n elif fact.statement.terms[0] == Term(Constant('pos3')):\n col = 2\n if fact.statement.terms[1] == Term(Constant('pos1')):\n row1[col] = term\n\n elif fact.statement.terms[1] == Term(Constant('pos2')):\n row2[col] = term\n\n elif fact.statement.terms[1] == Term(Constant('pos3')):\n row3[col] = term\n\n row1 = tuple(row1)\n row2 = tuple(row2)\n row3 = tuple(row3)\n result = (row1, row2, row3)\n return result\n\n ### Student code goes here", "def __str__(self):\n return str((self.instruction_pointer, self.program,))", "def cmdline(self, args=()):\n cmds = [self._interpreter.binary]\n cmds.append(self._pex)\n cmds.extend(args)\n return cmds", "def assemble(self):\n machineCodeLength = len(self.instructionList)\n # Adds all of the data lengths to the length\n for symbol in self.symbolTable:\n if symbol[\"type\"] == \"DATA\":\n machineCodeLength += symbol[\"length\"]\n # Stores the machine code instructions\n machineCode = [0 for i in range(machineCodeLength)]\n # Adds all DATA symbols to the machineCode\n dataOffset = len(self.instructionList) # Stores the offset into the machine code for the current data symbol\n for symbol in self.symbolTable:\n if symbol[\"type\"] == \"DATA\":\n # Stores the operand into the memory\n\n # Stores the memory location of the data\n symbol[\"pointer\"] = dataOffset\n dataOffset += symbol[\"length\"]\n\n # Assembles every instruction\n for i in range(len(self.instructionList)):\n ins = self.instructionList[i]\n # Constructs the machine code instruction\n machineCode[i] |= (ins['controlBits'] & 0x3F) << 26\n machineCode[i] |= (ins['code'] & 0xFF) << 18\n # Looks through all of the awaiting in the operand and fills in the output for each\n for sym in ins['operand']['awaiting']:\n symType = \"DATA\" if \"DATA\" in sym else \"LABEL\"\n symbolName = sym[symType]['symbol']\n destination = sym[symType]['output']\n # Searches through the symbol table for the symbol\n for symbol in self.symbolTable:\n # Checks if it is a valid symbol\n if symbol[\"type\"] == symType and symbol[\"name\"] == symbolName:\n if symbol[\"type\"] == \"LABEL\":\n ins[\"operand\"][destination] = symbol[\"pointer\"]\n elif symbol[\"type\"] == \"DATA\":\n ins[\"operand\"][destination] = symbol[\"pointer\"]\n ins['operand']['awaiting'] = []\n print(ins)\n # Gets the main operand value\n if ins['operand']:\n if 'operand' in ins['operand']:\n if ins['operand']['operandType'] == 'int':\n machineCode[i] |= (1 << 18) # Sets value mode for the operand\n value = 
ins['operand']['operand'].to_bytes(4, \"big\")\n machineCode[i] |= value[0] << 12\n machineCode[i] |= value[1] << 8\n machineCode[i] |= value[2] << 4\n machineCode[i] |= value[3]\n elif ins['operand']['operandType'] == 'float':\n machineCode[i] |= (1 << 18) # Sets value mode for the operand\n value = struct.pack('>f', ins['operand']['operand'])\n machineCode[i] |= value[0] << 12\n machineCode[i] |= value[1] << 8\n machineCode[i] |= value[2] << 4\n machineCode[i] |= value[3]\n elif ins['operand']['operandType'] == 'register':\n machineCode[i] |= (ins['operand']['operand'] & 0xF) << 4\n if 'Rin' in ins['operand']: \n # Clears the bits at the location\n machineCode[i] &= 0xFFFFF0FF\n machineCode[i] |= (ins['operand']['Rin'] & 0xF) << 8\n elif 'address' in ins['operand']:\n if ins['operand']['addressingMode'] == \"Absolute\" or ins['operand']['addressingMode'] == \"Indirect\":\n addr = ins['operand']['address'].to_bytes(4, \"big\")\n machineCode[i] |= addr[0] << 12\n machineCode[i] |= addr[1] << 8\n machineCode[i] |= addr[2] << 4\n machineCode[i] |= addr[3]\n if ins['operand']['addressingMode'] == \"Absolute\": machineCode[i] |= 0x0 << 16\n elif ins['operand']['addressingMode'] == \"Indirect\": machineCode[i] |= 0x1 << 16\n\n if ins['operand']['addressingMode'] == \"Register\":\n machineCode[i] |= 0x2 << 16\n machineCode[i] |= ins['operand']['offset']\n if 'Rout' in ins['operand']:\n # Clears the bits at the location\n machineCode[i] &= 0xFFFFF0FF\n machineCode[i] |= (ins['operand']['Rin'] & 0xF) << 8\n else:\n # Clears the bits at the location\n machineCode[i] &= 0xFFFF0FFF\n machineCode[i] |= (ins['operand']['Rin'] & 0xF) << 12\n elif ins['operand']['addressingMode'] == \"RegisterOffset\": \n machineCode[i] |= 0x3 << 16\n\n if 'Rout' in ins['operand']:\n # Clears the bits at the location\n machineCode[i] &= 0xFFFF0FFF\n machineCode[i] |= (ins['operand']['Rout'] & 0xF) << 12\n print(machineCode[i])", "def __str__(self):\n s = \"\"\n for r in range(1,self.size+1):\n for c in range(1,self.size+1):\n s += str(self.gameState[r,c])\n return s", "def help(self):\n return {\n 'text': 'Available Commands: \\n `/ranti my-task e.g /ranti my-task` \\n To get task assigned to you.\\n'\n ' \\n `/ranti show-task [date]{dth-month-year} e.g /ranti show-task 5th-june-2018` \\n Show all tasks for a particular date \\n'\n '\\n `/ranti show-task [today] e.g /ranti show-task today` \\n Show all tasks for today \\n'\n '\\n `/ranti show-task [tomorrow] e.g /ranti show-task tomorrow` \\n Show all tasks for tomorrow \\n'\n '\\n `/ranti help` \\n This help information \\n \\n Ranti ver: 1.0'\n }", "def silkscreen_commands(self):\n return self.pcb_layers[\"silkscreen\"].commands", "def _get_legal_actions(self):\n return self.game.get_legal_actions()", "def actions(self):\n return {0, 1, 2, 3, 4, 5, 11, 12}", "def getGameState(self):\n ### Student code goes here\n row1 = [-1, -1, -1]\n row2 = [-1, -1, -1]\n row3 = [-1, -1, -1]\n for i in self.kb.kb_ask(parse_input(\"fact: (pos ?t ?px ?py\")):\n if str(i.bindings_dict['?t'])=='empty':\n t = -1\n else:\n t = int(i.bindings_dict['?t'][4])\n xpx = int(i.bindings_dict['?px'][3])\n xpy = int(i.bindings_dict['?py'][3])\n if xpy == 1:\n row1[xpx-1] = t\n elif xpy == 2:\n row2[xpx-1] = t\n elif xpy == 3:\n row3[xpx-1] = t\n return tuple((tuple(row1),tuple(row2),tuple(row3)))", "def __repr__(self) -> Any:\n game_board = self.__str__() + \"\\n\"\n current_player_info = \"Is p1 the current player? 
\" + str(self.p1_turn)\n result = game_board + current_player_info\n return result", "def _help_actions(self):\n actions_str = \"\"\n for (key, value) in self.actions_help.items():\n actions_str += \"command: %s\\n%s\\n\\n\" % (key, value)\n print(actions_str)\n sys.exit(0)", "def instruction_probabilities(self):\n return list(self.instruction_pool.values())", "def read_instr(self):\n label = self.nr\n opcode = self.read_word()\n if opcode not in Instrs:\n raise ValueError(\"Unknown opcode %s\" % opcode)\n\n name, arg_names = Instrs[opcode]\n if opcode == Opcode.SWITCH:\n n = self.read_word()\n size_tag = n >> 16\n size_long = n & 0xFFFF\n size = size_tag + size_long\n tab = []\n for _ in range(size):\n tab.append(self.read_word())\n args = [n, tab]\n elif opcode == Opcode.CLOSUREREC:\n f = self.read_word()\n v = self.read_word()\n o = self.read_word()\n t = []\n for _ in range(f - 2):\n t.append(self.read_word())\n args = [f, v, o, t]\n else:\n # Normal opcode:\n args = []\n for arg_name in arg_names:\n # if arg_name in ['n', 's', 'ofs', 's', 't', 'p']:\n if True:\n arg = self.read_word()\n else:\n raise NotImplementedError(arg_name)\n args.append(arg)\n # print(label, name, args)\n ins = Instruction(opcode, name, args)\n ins.label = label\n return ins", "def help_text(cls):\n ret = (\"I currently answer these burning questions, \" +\n \"but only when you address me by name:\\n\" +\n \"\\\"tell me about server `(server_id|server_name)`\\\"\\n\" +\n \"\\\"tell me about ip `ip_address`\\\"\\n\" +\n \"\\\"tell me about group `(group_id|group_name)`\\\"\\n\" +\n \"\\\"list all servers\\\"\\n\" +\n \"\\\"list server groups\\\"\\n\" +\n \"\\\"servers with CVE `cve_id`\\\"\\n\" +\n \"\\\"servers in group `(group_id|group_name)`\\\"\\n\" +\n \"\\\"group firewall `(group_id|group_name)`\\\"\\n\" +\n \"\\\"ec2 halo footprint csv\\\"\\n\" +\n \"\\\"version\\\"\\n\" +\n \"\\\"tasks\\\"\\n\" +\n \"\\\"config\\\"\\n\")\n return ret", "def directions(self):\n return self.piece_behavior.directions", "def generateAction(self):\n # make a game action\n self.gameEnv.performAction(self.gameNetwork)\n # get the game action\n x, y = self.current\n gameAction = self.game.toSinglePos(x, y)\n # make a piece action\n net = self.toNetInput()\n pieceAction = self.selectAction(self.internalNetwork, net)\n # return the actions\n return pieceAction, gameAction", "def help(self):\n help = ''\n cmds = [(x, y) for x, y in Commands.__dict__.iteritems()]\n cmds.sort(key=lambda x: x[0])\n for name, member in cmds:\n if name.startswith('cmd_') and callable(member):\n help += ' %s\\n' % ' '.join([name[4:]] +\n ['<%s>' % x for x in\n inspect.getargspec(member).args[1:]])\n if member.__doc__:\n help += ' %s\\n' % member.__doc__.splitlines()[0]\n return 'Available commands:\\n%s' % help", "def _find_processing_instructions(self):\n pass", "def g(self):\n return self.moves", "def game_input(self):\n self._flush_print_buffer()\n last_output = player._program_output[-1]\n command = self.get_command()\n return command", "def target_iops(self):\n return self._target_iops", "def _printable(self):\n toPrint = \"Command Header. 
Qubit ID: \" + str(self.qubit_id) + \" \"\n toPrint = toPrint + \"Instruction: \" + str(self.instr) + \" \"\n toPrint = toPrint + \"Notify: \" + str(self.notify) + \" \"\n toPrint = toPrint + \"Block: \" + str(self.block) + \" \"\n toPrint = toPrint + \"Action: \" + str(self.action)\n return toPrint", "def get_instruction_string(steps, show_substeps=False): \n\n string = \"\"\n n = 1\n for step in steps:\n instruction = \"\"\n if step[\"travel_mode\"] == \"TRANSIT\":\n instruction = (\n f'Step {n}: At {step[\"departure_time\"]} take the {step[\"instruction\"]}\\n'\n f'Bus Name: {step[\"bus_name\"]}\\n'\n f'Get off stop {step[\"departure_stop\"]} at {step[\"arrival_time\"]} after riding {step[\"num_stops\"]} stops\\n'\n f'\\n'\n )\n string += instruction\n else:\n instruction = (\n f'Step {n}: {step[\"instruction\"]}\\n'\n f'Approx {step[\"duration\"]} and {step[\"distance\"]}\\n'\n f'\\n'\n )\n string += instruction\n\n if show_substeps:\n for step2 in step[\"substeps\"]:\n instruction = (\n f'{step2[\"instruction\"]}\\n'\n )\n string += instruction + '\\n'\n n += 1\n \n return string", "def fetch_instruction(self) -> dict:\n instruction = self.__ROM.read(self.regs[\"ip\"].read())\n self.regs[\"ip\"].inc()\n return self.disassembler.decode_instruction(instruction)", "def __repr__(self):\r\n s = 'Player ' + str(self.checker)\r\n v = ' ('+ self.tiebreak+', '+str(self.lookahead)+')'\r\n s += v\r\n return s", "def script_commands(self):\n return self.get_data(\"script_commands\")", "def encode(self):\n instruction_iter = itertools.chain([self.opcode], self.args)\n\n elems = ARG_SEP.join(self.encode_arg(arg) for arg in instruction_iter)\n\n return elems + INST_TERM", "def get_commands(self, options):\n molecule = ['molecule']\n molecule.extend(options.get_global_opts())\n molecule.extend(['test', '-s', self.scenario.name])\n tox = Tox()\n molecule.extend(tox.posargs)\n return [molecule]", "def additional_instruction_text(self):\r\n return ''", "def help(self):\n print(\"GAME HELP\")\n print(\"Command\\t\\t\\t\\tDescription\\n\")\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(GO, \"direction\", \"Move through an exit.\"))\n print(\"{0} <{1}>:\\t\\t{2}\".format(GO, \"exit description\", \"Move through an exit.\"))\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(TAKE, \"item\", \"Take an item.\"))\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(DROP, \"item\", \"Drop an item.\"))\n print(\"{0} <{1}>:\\t\\t{2}\".format(TALK, \"character\", \"Talk to a character.\"))\n print(\"{0}:\\t\\t\\t\\t{1}\".format(LOOK, \"Print the current space description again.\"))\n print(\"{0}:\\t\\t\\t{1}\".format(SAVEGAME, \"Save your current game.\"))\n print(\"{0}:\\t\\t\\t\\t{1}\".format(QUIT, \"Quit the game.\"))\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(LOOK_AT, \"item\", \"Look more closely at an item.\"))\n print(\"{0}:\\t\\t\\t\\t{1}\".format(LISTEN, \"Listen more closely to the sounds around you.\"))\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(PULL, \"item\", \"Pull an item.\"))\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(PUSH, \"item\", \"Push an item.\"))\n print(\"{0}:\\t\\t\\t{1}\".format(CHARGE, \"Charge your batteries in a charger.\"))\n print(\"{0} <{1}>:\\t\\t\\t{2}\".format(USE, \"item\", \"Use an item you are carrying.\"))\n print(\"{0}:\\t\\t\\t\\t{1}\".format(WAIT, \"Wait for something to happen.\"))\n print(\"{0}:\\t\\t\\t\\t{1}\".format(HELP, \"Print this help message.\"))\n print(\"{0}:\\t\\t\\t{1}\".format(INVENTORY, \"Print the items you are currently carrying.\"))\n print(\"{0}:\\t\\t\\t{1}\".format(LOADGAME, 
\"Load a previously saved game.\"))", "def disassemble(self, script):\n return ' '.join(self.opcode_list(script))", "def helpme(self):\n\n print(\"{}{}{}\".format(' ', 'Commands', ' '))\n print(\"{}{}{}\".format(' ', '--------', ' '))\n print(\"{} {} {}\".format('help ', '|', 'Display all usable commands'))\n print(\"{} {} {}\".format('look ', '|', 'Explore the room to find current location, exits and potential items.'))\n print(\"{} {} {}\".format('go ', '|', 'The prefix required to navigate your player.'))\n print(\"{} {} {}\".format('get ', '|', 'The prefix for picking up items.'))\n print(\"{} {} {}\".format('drop ', '|', 'The prefix for dropping items.'))\n print(\"{} {} {}\".format('inv ', '|', 'Displays the player inventory'))\n print(\"{} {} {}\".format('health ', '|', 'Displays player health'))\n print(\"{} {} {}\".format('eat ', '|', 'Allows the player to use consumables to gain health'))\n print(\"{} {} {}\".format('equip ', '|', 'Equip a weapon in your inventory'))\n print(\"{} {} {}\".format('unequip', '|', 'Unequip a current weapon'))\n print(\"{} {} {}\".format('attack ', '|', 'Allows the player to attack a non-player'))\n print(\"{} {} {}\".format('push ', '|', 'Returns NPC to spawn'))\n print(\"{} {} {}\".format('save ', '|', 'Save current player progress'))\n print(\"{} {} {}\".format('load ', '|', 'Load a previous character'))", "def _commands(self) -> Dict[str, List[str]]:\r\n pass", "def get_actions(self):\r\n return -4,4", "def __str__(self):\r\n\t\toutStr = \"\"\r\n\t\toutStr += \"Heuristic Level: \" + str(self.heuristic)\r\n\t\toutStr += \"\\n-\" + \"-----\"*self.n\r\n\t\tfor row in self.board:\r\n\t\t\ttempStr = (\"\\n|\" + \" %2d |\" * self.n)\r\n\t\t\toutStr += tempStr % tuple(row)\r\n\t\t\toutStr += \"\\n-\" + \"-----\"*self.n\r\n\r\n\t\treturn outStr", "def getOutput(self):\n text = \"\"\n text += \"*\"*self.getLevel() + \" \"\n if self.isTODO():\n text += \"TODO \"\n if self.isDONE():\n text += \"DONE \"\n text += self.getTitle()\n return text", "def getGameState(self):\n ### Student code goes here\n row1 = ()\n row2 = ()\n row3 = ()\n for currRow in range(1,4):\n for currCol in range(1,4):\n tileFound = False\n for fact in self.kb.facts:\n if fact.statement.predicate == \"located\":\n tile = fact.statement.terms[0].term.element\n column = fact.statement.terms[1].term.element\n row = fact.statement.terms[2].term.element\n\n tileNumber = int(tile[-1])\n columnNumber = int(column[-1])\n rowNumber = int(row[-1])\n\n if rowNumber == currRow and columnNumber == currCol:\n tileFound = True\n if rowNumber == 1:\n row1 += tuple([tileNumber])\n elif rowNumber == 2:\n row2 += tuple([tileNumber])\n elif rowNumber == 3:\n row3 += tuple([tileNumber])\n \n break\n\n if not tileFound:\n if currRow == 1:\n row1 += tuple([-1])\n elif currRow == 2:\n row2 += tuple([-1])\n elif currRow == 3:\n row3 += tuple([-1])\n\n\n return (row1, row2, row3)", "def __repr__(self):\r\n c = \"Player \" + self.checker + \" (\" + self.tiebreak + \", \" + str(self.lookahead) + \")\"\r\n return c", "def details(self) -> str:\n return f\"- **language**: [{self.language}]\\n\" \\\n f\"- **opengame**: [{self.opengame}]\\n\" \\\n f\"- **system**: [{self.system}]\\n\" \\\n f\"- **mode**: [{self.mode}]\\n\" \\\n f\"- **attributes**: [{self.attributes}]\\n \" \\\n f\"- **score_threshold**: [{self.score_threshold}]\\n \" \\\n f\"- **monsters**: [{self.monsters}]\\n\"", "def get_summon_spaces(self, grid): # Wip function.\r\n summonSpace = \"\"\"\r\nSTEP 1\r\nHOP X -1 Y -1\r\nHOP X 1 Y -1\r\nHOP X -1 Y 
1\r\nHOP X 1 Y 1\"\"\"\r\n lines = summonSpace.splitlines()\r\n summon_options = []\r\n for line in lines:\r\n summon_options.extend(\r\n grid.get_all_movements_in_range(self.position, line))\r\n return summon_options", "def get_debug_strings(self):\n debug_strings = []\n debug_strings.append(f\"program counter: {self.program_counter:#06x}\")\n debug_strings.append(f\"index register: {self.index_register:#06x}\")\n debug_strings.append(f\"word: {self._current_word:#06x}\")\n debug_strings.append(f\"op: {self._current_operation.__class__.__name__}\")\n debug_strings.append(f\"sound timer: {self.sound_timer:#06x}\")\n debug_strings.append(f\"delay timer: {self.delay_timer:#06x}\")\n\n for i in range(16):\n debug_strings.append(f\"register V{i}: {self.general_purpose_registers[i]:#06x}\")\n\n return debug_strings", "def info(self):\n return (f\"Match id: {self._id}\\n\"\n f\"dire_score: {self.dire_score}\\n\"\n f\"dire_team: {self.dire_team}\\n\"\n f\"duration: {self.duration}\\n\"\n f\"game_mode: {self.game_mode}\\n\"\n f\"patch: {self.patch}\\n\"\n f\"radiant_score: {self.radiant_score}\\n\"\n f\"radiant_team: {self.radiant_team}\\n\"\n f\"radiant_win: {self.radiant_win}\\n\"\n f\"skill: {self.skill}\\n\"\n f\"start_time: {self.start_time}\\n\")", "def output(self):\n acts = []\n for i in self.actions:\n acts.append(i.output())\n return {\n \"delay\": self.delay,\n \"actions\": acts\n }", "def get_symbols(self):\n\n raise NotImplementedError('''\n Must implement get_symbols. Call help() for details.\n ''')" ]
[ "0.77113837", "0.7536874", "0.74031675", "0.6759023", "0.6670727", "0.6315557", "0.5983526", "0.5832342", "0.5808417", "0.57887095", "0.5746708", "0.5729109", "0.56965", "0.5680344", "0.5596628", "0.55811346", "0.55728334", "0.5472555", "0.5458725", "0.54577094", "0.5449291", "0.54386926", "0.54386926", "0.54386926", "0.5436416", "0.54153734", "0.5398837", "0.5392579", "0.53712666", "0.5335539", "0.5314593", "0.5280593", "0.52608645", "0.5226544", "0.5213324", "0.52024883", "0.5183347", "0.5179725", "0.5154974", "0.51366186", "0.5133411", "0.5131106", "0.5124509", "0.5112427", "0.5111433", "0.5105754", "0.51024103", "0.51012236", "0.5095845", "0.5094338", "0.5089163", "0.5080882", "0.50771976", "0.5076325", "0.5038946", "0.50373363", "0.5032397", "0.5031308", "0.50180435", "0.5010093", "0.5005008", "0.4992047", "0.49728265", "0.49605265", "0.49393466", "0.4931778", "0.4912262", "0.49079642", "0.490397", "0.48971906", "0.48770756", "0.48759604", "0.48742172", "0.48684093", "0.48571768", "0.48531678", "0.48531532", "0.48439708", "0.48418188", "0.4840933", "0.4822519", "0.48218137", "0.4815806", "0.48112634", "0.48031324", "0.47926667", "0.47772574", "0.47765753", "0.47597912", "0.47596622", "0.47574666", "0.47539636", "0.47503862", "0.47485015", "0.4745556", "0.4741734", "0.47406003", "0.47320944", "0.47320035", "0.4725796" ]
0.66112
5
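Each row in this dump follows the layout seen above: a natural-language query, the code snippet that answers it, a metadata block whose objective names the (query, document, negatives) triplet, a list of negative snippets, and their scores. As a minimal sketch of how rows like these could be iterated — the file name and the exact serialized field names are assumptions, not taken from this dump:

import json

# Minimal sketch (assumed file name and field names): iterate a JSON-lines dump
# whose rows pair a query with a positive code document and negative documents.
with open("retrieval_rows.jsonl", encoding="utf-8") as handle:
    for line in handle:
        row = json.loads(line)
        query = row["query"]
        positive = row["document"]
        negatives = row.get("negatives", [])
        print(f"{query!r}: 1 positive, {len(negatives)} negative snippets")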
Return whether or not this game is over at state.
def is_over(self, state: StonehengeState) -> bool:
    # Pool every ley-line marker on the board.
    total_result = state.hori_result + state.left_result + state.right_result
    total_line = len(total_result)
    p1_taken = 0
    p2_taken = 0
    # Count how many ley-lines each player has claimed.
    for item in total_result:
        if item == '1':
            p1_taken += 1
        elif item == '2':
            p2_taken += 1
    # The game is over once either player holds at least half of the ley-lines.
    return float(p1_taken) >= total_line / 2 or float(p2_taken) >= total_line / 2
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def game_over(self) -> bool:\n return self.rstate.game_over()", "def is_over(self):\n return self.game.is_over()", "def is_game_over(self) -> bool:\n return self._is_game_over", "def game_over(self):\n # TODO: Define the game over condition for Adventure.\n # use self.over to determine if the game if over\n return self.over", "def is_game_over(self):\r\n\r\n if self.winner != 0:\r\n return True\r\n\r\n return False", "def is_over(self, state):\n return state.current_total == 0", "def is_game_over(self):\n return self.state.all_avatars_placed() and self.state.is_game_over()", "def game_over(self):\n return bool(self.last_round and self.last_player == self.current_player)", "def __game_is_over(self):\n return not (self.__playing and self.__bricks_total > 0 and self.__num_lives > 0)", "def is_game_over(self):\n if (self.check_win(HexBoard.RED) or self.check_win(HexBoard.BLUE) or \n len(self.get_move_list())==0):\n self.game_over = True\n return self.game_over", "def gameover(self):\n if self._gameover:\n return True\n \n if self.terminal():\n self._gameover = True\n return True\n \n return False", "def is_over(self):\n alive_players = [1 if p.status == \"alive\" else 0 for p in self.players]\n # If only one player is alive, the game is over.\n if sum(alive_players) == 1:\n return True\n\n # If all rounds are finshed\n if self.round_counter >= 2:\n return True\n return False", "def game_over(self):\n return self.lives() < 0", "def gameOver(self):\n\t\treturn self.lives == 0", "def _check_game_over(self):\n return self.game_board.check_game_over()", "def is_over(self):\n return (self.possible_moves() == []) or self.loss_condition()", "def is_winning_state(self):\n return self.game.is_winning_state()", "def game_over(state):\r\n return wins(state, HUMAN) or wins(state, COMP)", "def isGameOver(self):\n pass", "def game_over(state):\n return wins(state, HUMAN) or wins(state, COMP)", "def is_over(self):\n winner = TictactoeMatch.get_winner(self.inputs_)\n if winner:\n self.result_ = winner\n if Config.USER['debug']['enabled']:\n print \"It is over! Player \"+str(self.result_)+\" (\"+str(self.player_label_[self.result_])+\") wins!\"\n return True\n for value in self.inputs_:\n if value == TictactoeMatch.EMPTY:\n if Config.USER['debug']['enabled']:\n print \"Go!\"\n return False\n self.result_ = TictactoeMatch.DRAW\n if Config.USER['debug']['enabled']:\n print \"It is over! 
Draw!\"\n return True", "def is_game_over(self):\n if self.just_cheated_a or self.just_cheated_b:\n return False\n if self.game_stage == 3:\n return (self.die_a.current_value == \"5\" and self.die_b.current_value == \"6\" or\n self.die_a.current_value == \"6\" and self.die_b.current_value == \"5\")\n else:\n return False", "def is_game_over(self):\n\n if len(self.next_pieces) == 0:\n return True", "def is_over(self):\n winner = self.get_winner()\n status = bool(winner or not self.available_moves)\n return status, winner", "def is_over(self):\n return self.is_dead", "def is_over(self):\n return self.is_dead", "def is_over(self):\n return self.is_dead", "def is_game_over(cls):\n cls.record_winner()\n cls.record_tie()", "def isOpen(self):\n\t\treturn not self.endgame", "def is_over(self):\n for el1, el2, el3 in self.WINNING_POSITIONS:\n if self.board[el1] == self.board[el2] == self.board[el3]:\n if self.board[el1] == 0:\n continue\n\n self.winner = self.board[el1]\n return True\n\n if self.__class__.EMPTY_POSITION_COUNTER not in self.board:\n return True\n\n return False", "def is_over(self):\n return self._is_dead", "def is_over(self):\n return self._is_dead", "def game_over(self):\n\n if self._number_of_moves == 9:\n return True\n\n return self._number_of_moves == 9 or self.winner_found()", "def game_over(self):\n self.over = True", "def is_over(self, board):\n if _winner(board) != 0:\n return True\n return False", "def game_is_over(self) -> models.Conclusion:\n raise NotImplementedError", "def is_game_over(self):\n bk = False\n wk = False\n\n # Find the kings\n for row in range(8):\n for col in range(8):\n if self.board.squares[row][col] == ChessPiece.B_KING: # Black king symbol\n bk = True\n break\n if self.board.squares[row][col] == ChessPiece.W_KING: # Black king symbol\n wk = True\n break\n\n # If a king is missing, end the game. 
This fixes a bug we were having\n if bk == False:\n return 1\n if wk == False:\n return 2\n\n if self.white_wins():\n return 1\n elif self.black_wins():\n return 2\n elif self.tie():\n return 3\n else:\n return 0", "def check_if_over(self):\n if self.remainingBalls == 0:\n self.check_if_won()\n self.game_over = True", "def get_gameover_state(self) -> State:\n return State.TWO_PLAYER_GAMEOVER", "def isGameOver(self):\n for row in range(0, self.rows):\n for col in range(0, self.cols):\n if self.isMine(row, col) and self.isClicked(row, col):\n return True\n return False", "def IsGameOver(self):\n return any(c.cX + c.width >= self.end_location for c in self.enemies)", "def __game_is_over(self, x, y):\n\t\tif np.count_nonzero(self.board) >= 42:\n\t\t\treturn True\n\n\t\tlines = self.__extract_lines(x, y)\n\n\t\tfor line in lines:\n\t\t\tif self.__winner_in_line(line) != 0:\n\t\t\t\treturn True\n\n\t\treturn False", "def has_a_winner(self):\n return self.state in {State.X_WON, State.O_WON}", "def is_game_won(self):\n return True", "def is_over(self, state) -> bool:\n\n p1_count = 0\n p2_count = 0\n ley_line_total = (state.side_length + 1) * 3\n for itype in state.current_ley_lines:\n for line in itype:\n if line[0] == '1':\n p1_count += 1\n if line[0] == '2':\n p2_count += 1\n\n if p1_count >= ley_line_total/2 or p2_count >= ley_line_total/2:\n return True\n return False", "def check_over(self):\n if self.board.has_winner() == 1:\n return 1\n elif self.board.has_winner() == 2:\n return 2\n elif self.board.check_cats_game():\n return 0\n else:\n return -1", "def is_game_over(board):\n winner = check_winner(board)\n draw = check_draw(winner, board)\n return True if winner or draw else False", "def is_over(self):\n # If menu is over reset the offset\n if self.is_dead:\n Drawable.WINDOW_OFFSET = self._old_offset\n return self.is_dead", "def game_over(_user_id):\n _board = boards[_user_id]\n return _board.is_game_over()", "def is_game_win(self):\n return not self.deck and not self.hand", "def isGameOver(self):\n for i in range(self.rows):\n for j in range(self.columns):\n if self.grid[i][j].face == 'down':\n return False\n #if here then all cards must be face up\n return True", "def game_over(self) -> bool:\n for row in range(9):\n for col in range(9):\n if self._grid_sol[row][col] != self.get_cell(row, col):\n return False\n return True", "def check_game_over(self):\n red, blue = self.board.count_piece()\n if blue == 0:\n self.ui.show_result(\"RED WIN!\")\n self.turn = RED\n elif red == 0:\n self.ui.show_result(\"BLUE WIN!\")\n self.turn = BLUE\n elif red == blue == 1:\n self.ui.show_result(\"DRAW!\")", "def determine_game_state(self):\n if self.board == BLANK_BOARD:\n return GameState.GAME_NOT_STARTED\n\n # check for three of the same symbol across or down.\n for r in range(3):\n offset = r*3\n if self.board[offset] == self.board[offset+1] == self.board[offset+2]:\n if self.board[offset] == X_SYMBOL:\n return GameState.GAME_OVER_X_WINS\n elif self.board[offset] == O_SYMBOL:\n return GameState.GAME_OVER_O_WINS\n if self.board[r] == self.board[3 + r] == self.board[6 + r]:\n if self.board[r] == X_SYMBOL:\n return GameState.GAME_OVER_X_WINS\n elif self.board[r] == O_SYMBOL:\n return GameState.GAME_OVER_O_WINS\n\n # check for diagonal wins\n if ((self.board[0] == self.board[4] == self.board[8]) or\n (self.board[2] == self.board[4] == self.board[6])):\n if self.board[4] == X_SYMBOL:\n return GameState.GAME_OVER_X_WINS\n elif self.board[4] == O_SYMBOL:\n return GameState.GAME_OVER_O_WINS\n \n # check for 
tie.\n if not self.board.count(EMPTY_SYMBOL):\n return GameState.GAME_OVER_DRAW\n\n return GameState.GAME_IN_PROGRESS", "def is_game_over(self):\n\n # This checks whether or not the board is full...\n if len(self.board.values()) == 100 and \\\n 0 not in self.board.values():\n p1 = self._longest_chain(1)\n p2 = self._longest_chain(2)\n if len(p1) > len(p2):\n return 1\n elif len(p2) > len(p1):\n return 2\n else:\n return 0\n\n # If it's not full. We check for boxes\n else:\n for x in range(self.width-1):\n for y in range(self.height-1):\n slice = self._slice((x,y), (2,2))\n if 0 not in slice[0] and 0 not in slice[1]:\n # is this slice a box?\n if slice[0][0] == slice[0][1] and \\\n slice[0][1] == slice[1][0] and \\\n slice[1][0] == slice[1][1]:\n return slice[0][0] # winner\n\n return -1 # game is not over", "def is_endgame_state(self) :\n raise NotImplementedError", "def endState(self):\n return not(self.state.winner() == None and len(self.state.get_actions()) > 0)", "def gameOver():\n if len(p1)==0 and len(p1winnings)==0:\n return True\n elif len(p2)==0 and len(p2winnings)==0:\n return True\n return False", "def check_game_over(self):\n for piece in self.pieces:\n if not piece.destroyed:\n return False\n print(\"Signal.END\")\n return True", "def checkGameState(self, fpsclock, screen):\n if self.isWin() or self.isLost():\n if self.exitMenu(fpsclock, screen):\n return True\n return False", "def is_end_game(self):\n win = self.is_game_won()\n tie = self.game_is_tied()\n return win or tie", "def is_game_over(self):\n board = list(self.board)\n for wins in self.WINNING:\n # Create a tuple\n w = (board[wins[0]], board[wins[1]], board[wins[2]])\n if w == ('X', 'X', 'X'):\n return 'X'\n if w == ('O', 'O', 'O'):\n return 'O'\n # Check for stalemate\n if ' ' in board:\n return None\n return ' '", "def handle(self) -> State:\n # Check if both players are destroyed\n if self.player2.is_destroyed():\n # Mark the game as over\n self.over = True\n # adds 1 to the number of games played\n self.update_trackers()\n # Return the gameover state\n return self.get_gameover_state()\n\n # Otherwise return the current state\n return super().handle()", "def is_game_over(self):\n if max([max(row) for row in self.grid]) == 2 ** (self.grid_size ** 2):\n raise GameException('Congrats, You won !')\n\n # If there is a zero then the game is not over\n for row in self.grid:\n if 0 in row:\n return False\n\n # Check if two consecutive number (vertically or horizontally) are\n # equal. 
In this case the game is not over.\n for i in range(self.grid_size):\n for j in range(self.grid_size):\n # horizontal check\n if (i < self.grid_size - 1 and\n self.grid[i][j] == self.grid[i + 1][j]):\n return False\n # vertical check\n if (j < self.grid_size - 1 and\n self.grid[i][j] == self.grid[i][j + 1]):\n return False\n\n return True", "def is_over(self, time):\n over = (not self.enable_loop()) and (time >= self.get_duration())\n return over", "def notify_game_over(self):\n self.is_game_over = True", "def verify_ending(self):\n self._fast_forward_to_penultimate_play()\n if self.game_status.game_over:\n # Game shouldn't be over quite yet!\n self.reset()\n return False\n\n self.apply_next_event()\n game_over = self.game_status.game_over\n excess_outs = self.game_status.excess_outs\n self.reset()\n return game_over and not excess_outs", "def overtime(self):\n if self._overtime != '':\n return True\n return False", "def check(self):\n\n if (sum(self.game_state) == 0):\n return 1\n elif (self.game_state[-1] >=1 ):\n return -1\n else:\n return 0", "def game_over(self):\n raise NotImplementedError(\"Abstract method\") # no mercy for stooges", "def _checkRoundOver(self):\n\n if not any(player.isAlive() for player in self.teams[0].players):\n self.endGame()", "def over(self):\n return self._over", "def check_win(self):\n return UNEXPOSED not in self.get_game() and self.get_game().count(FLAG) == len(self.get_pokemon_location)", "def check_if_game_over():\n check_for_winner()\n check_for_tie()", "def verify_winning_state(self, state):\n return self.game.verify_winning_state(state)", "def is_end_game(state):\n if YoteRules.is_boring(state) or YoteRules.is_player_stuck(state, state.get_next_player()):\n return True\n latest_player_score = state.score[state.get_latest_player()]\n if latest_player_score >= MAX_SCORE:\n return True\n return False", "def game_over(self):\n red_minion = 0\n blue_minion = 0\n red_master = 0\n blue_master = 0\n only_masters = True\n for row in self.board:\n for piece in row:\n if piece != 0:\n if not piece.master:\n if piece.player:\n blue_minion += 1\n else:\n red_minion += 1\n only_masters = False\n else:\n if piece.player:\n blue_master += 1\n else:\n red_master += 1\n if blue_minion + blue_master == 0:\n self.winner = \"Red\"\n self.red_victories += 1\n self.number_of_games +=1\n self.game_over_screen()\n return True\n elif red_minion + red_master == 0:\n self.winner = \"Blue\"\n self.blue_victories += 1\n self.number_of_games +=1\n self.game_over_screen()\n return True\n elif only_masters:\n if red_master > blue_master:\n self.winner = \"Red\"\n self.red_victories += 1\n elif blue_master > red_master:\n self.winner = \"Blue\"\n self.blue_victories += 1\n else:\n self.winner = \"Nobody\"\n self.number_of_games +=1\n self.game_over_screen()\n return True\n \n return False", "def isOver(self):\n\t\tisFull = Piece.BLANK\n\t\tfor a,b,c in [[0, 1, 2], [3, 4, 5], [6, 7, 8], [0, 3, 6], [1, 4, 7], [2, 5, 8], [0, 4, 8], [2, 4, 6]]:\n\t\t\tif (self.board[a] is self.board[b] is self.board[c] and self.board[a] is not Piece.BLANK):\n\t\t\t\treturn self.board[a]\n\t\t\tif (self.board[a] is Piece.BLANK or self.board[b] is Piece.BLANK or self.board[c] is Piece.BLANK):\n\t\t\t\tisFull = False\n\t\treturn isFull", "def game_on(self):\n doc = self.documentation\n return (self.draw.accepted or doc[len(doc)-1].accepted) and (self.board.stones_set < self.board.max_nr_stones) and (self.board.score[opponent(self.draw.player)] > 0)", "def check_loss(self):\n return POKEMON in 
self.get_game()", "def is_game_lost(self):\n values = [self.hand[i]._lvalue + self.hand[i]._rvalue for i in range(len(self.hand))]\n return not sum_in_list_dyn(values, self.number_point)", "def isGoalState(self, state):\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and \\\n abs(diff.x) + abs(diff.z) == 2 and \\\n state.get_block(self._player_loc + diff/2 + _Vec3(0, -1, 0)) not in \\\n (_AIR, _LAVA, _WATER)", "def is_on_ground(self):\n return bool(self.ground_sprites())", "def is_in_board(self):\n return self.is_alive()", "def isGameOver(self, boards):\n return self.deadTest(boards[0]) and self.deadTest(boards[1]) and self.deadTest(boards[2])", "def isGameOver(self, boards):\n return self.deadTest(boards[0]) and self.deadTest(boards[1]) and self.deadTest(boards[2])", "def game_over(players):\n active_players = players_with_decks(players)\n if not active_players or len(active_players) == 1:\n return True\n return False", "def check_if_game_over():\n # Calling check for winners.\n check_for_winner()\n # Calling check it's tie or not.\n check_if_tie()", "async def check_game_over(self, game_id):\n game = await self.get_game(game_id)\n player1_stand = await self.check_player_standing(game[1])\n player2_stand = await self.check_player_standing(game[2])\n if player1_stand and player2_stand:\n return True\n else:\n return False", "def _checkRoundOver(self):\n\n # if we already ended it doesn't matter\n if self.hasEnded():\n return\n\n if not any(player.isAlive() for player in self.teams[0].players):\n # allow continuing after wave 1\n if self._wave > 1:\n self.continueOrEndGame()\n else:\n self.endGame()", "def isAlive(self):\n return self.state", "def isAlive(self):\n return self.state", "def active(self):\n return self.home is not None and self.away is not None and self.winner is None", "def in_state(self, state):\n if state == self.get_state():\n return True\n return False", "def is_over(state, distribution):\n mask, heads, pos = state\n\n exist_move = False\n\n for i in range(4):\n # Player `i` doesn't have any piece left\n if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:\n return True\n\n for j in range(7):\n if ((mask >> (i * 7 + j)) & 1) and intersect(distribution[i][j], heads):\n exist_move = True\n\n return not exist_move", "def is_game_won(self):\n if self.game_is_tied():\n return False\n my_available_steps = self.steps_available(self.loc)\n opp_available_steps = self.steps_available(self.opponent_loc)\n if my_available_steps == 0 or opp_available_steps == 0:\n return True\n else:\n return False", "def get_game_on_status(self) -> bool:\n return self._game_on.get()", "def stats_change(self):\n return True if self.board.prev_state != self.board.shot_count else False", "def cell_is_game_over(self, y, x, map_data):\n # check for water\n if map_data[y][x] == self.WATER_SYMBOL:\n return True\n\n # check for anti-tank\n # up direction\n for i in range(y, -1, -1):\n if map_data[i][x] == self.ANTI_TANK_DOWN_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(i, x, map_data):\n break\n\n # down direction\n for i in range(y, self.y_size):\n if map_data[i][x] == self.ANTI_TANK_UP_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(i, x, map_data):\n break\n\n # left direction\n for i in range(x, -1, -1):\n if map_data[y][i] == self.ANTI_TANK_RIGHT_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(y, i, map_data):\n 
break\n\n # right direction\n for i in range(x, self.x_size):\n if map_data[y][i] == self.ANTI_TANK_LEFT_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(y, i, map_data):\n break\n\n # no water or anti-tank danger\n return False", "def is_reached(self, vehicle_state) -> bool:\n return False" ]
[ "0.8707897", "0.8566029", "0.8421653", "0.8119031", "0.8112403", "0.81087774", "0.80789787", "0.79621136", "0.7939655", "0.78865045", "0.7864895", "0.7863609", "0.784667", "0.7836293", "0.78253675", "0.7812383", "0.76985806", "0.7698057", "0.7644452", "0.76256573", "0.76249146", "0.76190174", "0.75497705", "0.75321925", "0.7519197", "0.7519197", "0.7519197", "0.7513014", "0.7480331", "0.745088", "0.7423712", "0.7423712", "0.7409058", "0.7399793", "0.7358372", "0.7216192", "0.721501", "0.72140807", "0.7187278", "0.71651167", "0.71304107", "0.7107452", "0.708976", "0.7086775", "0.7062085", "0.7043208", "0.70395845", "0.70047045", "0.6952524", "0.6949994", "0.6888251", "0.686858", "0.6864257", "0.6835734", "0.680932", "0.6802263", "0.68013966", "0.6786406", "0.6784776", "0.6783516", "0.6782658", "0.6776429", "0.6775859", "0.6764319", "0.67174494", "0.67168295", "0.6711513", "0.66927683", "0.66891295", "0.66844535", "0.66693956", "0.66505355", "0.6626597", "0.6623235", "0.661248", "0.6584144", "0.6567466", "0.6552337", "0.65469253", "0.6523224", "0.6519008", "0.6511903", "0.6510881", "0.650557", "0.6495245", "0.6495245", "0.6492588", "0.6473148", "0.64726937", "0.6443373", "0.6440636", "0.6440636", "0.643227", "0.6426176", "0.6424237", "0.6422176", "0.63971615", "0.63927984", "0.63834894", "0.63759685" ]
0.67353415
64
Return whether player has won the game.
def is_winner(self, player: str) -> bool:
    total_result = self.current_state.hori_result + \
        self.current_state.left_result + self.current_state.right_result
    total_line = len(total_result)
    p1_taken = 0
    p2_taken = 0
    for item in total_result:
        if item == '1':
            p1_taken += 1
        elif item == '2':
            p2_taken += 1
    if player == "p1":
        return float(p1_taken) >= total_line / 2
    return float(p2_taken) >= total_line / 2
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_game_won(self):\n return True", "def has_won(board, player):\r\n return False", "def has_won(board, player):\n return False", "def player_has_won(self):\n return len(self._words_guessed) == self._num_words", "def is_game_won(self):\n if self.game_is_tied():\n return False\n my_available_steps = self.steps_available(self.loc)\n opp_available_steps = self.steps_available(self.opponent_loc)\n if my_available_steps == 0 or opp_available_steps == 0:\n return True\n else:\n return False", "def has_won(self):\n return len(self.hand) == 0", "def won_game(self):\n for player in self.players:\n if len(player.cards) == 0:\n\n return True\n return False", "def is_game_over(self):\r\n\r\n if self.winner != 0:\r\n return True\r\n\r\n return False", "def is_game_won(self) -> int:\n\n b = self.board\n for c1, c2, c3, c4 in _WINDOWS:\n if b[c1] and (b[c1] == b[c2] == b[c3] == b[c4]):\n print(\"win\", c1, c2, c3, c4)\n return b[c1]", "def won(self):\n if self.current_room.name == \"Victory\":\n return True\n else:\n return False", "def check_if_won(self):\n if self.player_points > self.enemy_points:\n self.bHasWon = True\n else:\n self.bHasWon = False", "def game_won(self):\n return all((foundation.is_full() for foundation in self.foundations.values()))", "def is_game_won(board, player):\n\n\tis_won = False\n\n\tif (\n\t\tboard[0] == board[1] == board[2] == player or\n\t\tboard[3] == board[4] == board[5] == player or\n\t\tboard[6] == board[7] == board[8] == player or\n\t\tboard[0] == board[3] == board[6] == player or\n\t\tboard[1] == board[4] == board[7] == player or\n\t\tboard[2] == board[5] == board[8] == player or\n\t\tboard[0] == board[4] == board[8] == player or\n\t\tboard[2] == board[4] == board[6] == player\n\t):\n\t\tis_won = True\n\n\treturn is_won", "def hasWin(self) :\n comparison = self.compareNumberUser()\n if (comparison == 'equal') :\n return True\n else :\n return False", "def has_a_winner(self):\n return self.state in {State.X_WON, State.O_WON}", "def game_over(self):\n\n if self._number_of_moves == 9:\n return True\n\n return self._number_of_moves == 9 or self.winner_found()", "def check_game(self):\n gameOver = None\n if self.turn > 4:\n gameOver = self.check_x_won()\n if gameOver is True:\n self.game_x_won()\n return\n\n gameOver = None\n if self.turn > 5:\n gameOver = self.check_o_won()\n if gameOver is True:\n self.game_o_won()\n return\n\n if self.turn >= 9:\n self.game_tie()\n return", "def is_winning_state(self):\n return self.game.is_winning_state()", "def won(self):\n return self.roster.won", "def is_over(self):\n alive_players = [1 if p.status == \"alive\" else 0 for p in self.players]\n # If only one player is alive, the game is over.\n if sum(alive_players) == 1:\n return True\n\n # If all rounds are finshed\n if self.round_counter >= 2:\n return True\n return False", "def is_game_win(self):\n return not self.deck and not self.hand", "def game_over(self):\n return bool(self.last_round and self.last_player == self.current_player)", "def has_won(self):\n coders_card = self.get_coders().get_amount()\n if coders_card > 3:\n return True\n else:\n return False", "def check_for_game_won(self):\n all_moscuvites_captured = True\n king_captured = True\n king_escaped = True\n for piece in self.game_pieces:\n if piece.player == 2:\n all_moscuvites_captured = False\n elif piece.player == 3:\n king_captured = False\n king_coords = (piece.x,piece.y)\n escape_coords = [(0, 0), (0, 8),\n (8, 0), (8, 8)]\n if king_coords not in escape_coords:\n king_escaped = False\n if 
king_captured:\n return 2\n elif king_escaped or all_moscuvites_captured:\n return 1\n else:\n return 0", "def is_end_game(self):\n win = self.is_game_won()\n tie = self.game_is_tied()\n return win or tie", "def is_winner(self, player) -> bool:\n return (self.current_state.get_current_player_name() != player\n and self.is_over(self.current_state))", "def is_won(self):\n return self.position == self.proposition.outcome and self.proposition.is_paid", "def winFor(self,player):\n if(self.cachedWin == False):\n won = False;\n if(player==WHITE):\n for x in range(0,WIDTH):\n if(self.gameState[x,0]==WHITE):\n won = True\n \n elif(player==BLACK):\n for x in range(0,WIDTH):\n if(self.gameState[x,HEIGHT-1]==BLACK):\n won = True\n \n if(len(self.successors()) == 0):#IF there are no available moves for both players\n bCount = self.count(BLACK) #check who has the most pawns\n wCount = self.count(BLACK)\n if(bCount>wCount):\n self.cachedWin = True\n self.cachedWinner = player\n return True\n if(wCount>bCount):\n self.cachedWin = True\n self.cachedWinner = player\n return True\n \n if(won):\n self.cachedWin = True\n self.cachedWinner = player\n return True\n else:\n return False\n else:\n return player == self.cachedWinner", "def check_player_reached():\n global round_start_timer, round_over\n\n if player1.alive and player1.rect.top < (platform_width // 2):\n add_time_points()\n reset_players()\n player1.wins += 1\n return True\n\n elif player2.alive and (player2.rect.top + player2.image.get_height()) > \\\n (SCREEN_HEIGHT - platform_width):\n player2.wins += 1\n round_over = True\n add_time_points()\n reset_players()\n return True", "def is_winner(self, player):\n return (self.current_state.get_current_player_name() != player\n and self.is_over(self.current_state))", "def game_tie(self):\n\n shape = self.board.shape\n if np.count_nonzero(self.board) == (shape[0] * shape[1]):\n # The board is full\n player = 0\n return True\n else:\n return False", "def check_win(self):\n return UNEXPOSED not in self.get_game() and self.get_game().count(FLAG) == len(self.get_pokemon_location)", "def winner(self):\n\n if self.game_ended():\n return self.winning()\n else:\n return 0", "def game_over(players):\n active_players = players_with_decks(players)\n if not active_players or len(active_players) == 1:\n return True\n return False", "def check_win(self):\r\n wins = [self.check_rows(), self.check_cols(), self.check_diag()]\r\n for case, pos in wins:\r\n if case != -1:\r\n print('Game over!')\r\n if self.grid[case][-1] == self.computer:\r\n print('The computer won!')\r\n return (True, pos)\r\n print('The player won!')\r\n return (True, pos)\r\n\r\n return (self.check_draw(), None)", "def winner(self):\n return self._fetch_element('winner') == 'true'", "def game_over(self) -> bool:\n return self.rstate.game_over()", "def checkForWin(self):\n w = self.getWinner()\n if w == PLAYER or w == AI:\n # self.printBoard()\n # print('%d'%w + ' won!')\n return\n if w == Tie:\n # print('Tie')\n return", "def is_winner(self):\n return self.winner", "def check_win(self, player):\n def check_row_win(player):\n for row in self.game_state:\n if player == row[0] == row[1] == row[2]:\n return True\n return False\n\n def check_column_win(player):\n # For doing a column check, transpose the grid and do a row check\n trans_game_state = numpy.transpose(self.game_state)\n for row in trans_game_state:\n if player == row[0] == row[1] == row[2]:\n return True\n return False\n\n def check_diag_win(player):\n # Left to right diagonal\n if player == 
self.game_state[0][0] == self.game_state[1][1] == self.game_state[2][2]:\n return True\n # Right to left diagonal\n if player == self.game_state[0][2] == self.game_state[1][1] == self.game_state[2][0]:\n return True\n return False\n\n if check_column_win(player) or check_diag_win(player) or check_row_win(player):\n return True\n return False", "def will_player_win_after_n(self):\n clone_state = self._state.clone()\n clone_state.play('n')\n won_columns = 0\n for won_column in clone_state.finished_columns:\n if self._state.player_turn == won_column[1]:\n won_columns += 1\n # This means if the player stop playing now, they will win the game\n if won_columns == 3:\n return True\n else:\n return False", "def win(self, player):\n if player == 1:\n a = self.player_one.moves\n else:\n a = self.player_two.moves\n winning_moves = []\n for i in range(1, 9, 3):\n winning_moves.append(range(i, i + 3))\n for i in range(1, 4):\n winning_moves.append(range(i, i + 7, 3))\n winning_moves.append([1, 5, 9])\n winning_moves.append([3, 5, 7])\n for move in winning_moves:\n flg = True\n for index in move:\n if index not in a:\n flg = False\n break\n if flg:\n return True, player\n if len(self.player_one.moves) + len(self.player_two.moves) == 9:\n self.print_space()\n self.display_board()\n self.print_space()\n print \" Games is drawn\"\n self.logging.debug(\"Game is draw, nobody won\")\n self.logging.debug(\"Enjoy the game again :)\")\n sys.exit(100)\n return False, player", "def verify_winner(self):\r\n return self.count_pegs() == 1", "def gameOver(self):\n\t\treturn self.lives == 0", "def is_over(self):\n winner = TictactoeMatch.get_winner(self.inputs_)\n if winner:\n self.result_ = winner\n if Config.USER['debug']['enabled']:\n print \"It is over! Player \"+str(self.result_)+\" (\"+str(self.player_label_[self.result_])+\") wins!\"\n return True\n for value in self.inputs_:\n if value == TictactoeMatch.EMPTY:\n if Config.USER['debug']['enabled']:\n print \"Go!\"\n return False\n self.result_ = TictactoeMatch.DRAW\n if Config.USER['debug']['enabled']:\n print \"It is over! 
Draw!\"\n return True", "def still_playing_game(self):\n for player in self.players:\n if player.is_playing:\n return True\n return False", "def gameOver():\n if len(p1)==0 and len(p1winnings)==0:\n return True\n elif len(p2)==0 and len(p2winnings)==0:\n return True\n return False", "def is_win(self, roster):\n player = roster.get_current()\n guess = player.get_move().get_guess()\n if guess == self._code:\n return True\n else:\n return False", "def is_game_over(self) -> bool:\n return self._is_game_over", "def player_has_won(board, player):\n check = 0b000000000\n for index in range(9):\n if board[index] == player:\n check |= (1 << index)\n for pattern in config.WINNING_PATTERNS:\n if pattern & check == pattern:\n return True\n return False", "def gameWon(self):\n \n wins = [ threeInARow( self.squares[0], self.squares[1], self.squares[2] ),\n threeInARow( self.squares[3], self.squares[4], self.squares[5] ),\n threeInARow( self.squares[6], self.squares[7], self.squares[8] ),\n threeInARow( self.squares[0], self.squares[3], self.squares[6] ),\n threeInARow( self.squares[1], self.squares[4], self.squares[7] ),\n threeInARow( self.squares[2], self.squares[5], self.squares[8] ),\n threeInARow( self.squares[0], self.squares[4], self.squares[8] ),\n threeInARow( self.squares[2], self.squares[4], self.squares[6] ) ]\n \n return any(wins)", "def game_on(self):\n doc = self.documentation\n return (self.draw.accepted or doc[len(doc)-1].accepted) and (self.board.stones_set < self.board.max_nr_stones) and (self.board.score[opponent(self.draw.player)] > 0)", "def check_for_end_of_game(self):\n return self.player_1.score + self.player_2.score >= self.number_of_cells", "def check_winner(self):\n\t\tif self.check_diagonals() or self.check_rows() or self.check_columns():\n\t\t\treturn True\n\t\telif self.board_is_full():\n\t\t\tprint(\"There was a draw, everyone lost\")\n\t\t\treturn None\n\t\treturn False", "def _check_for_win(self):\n slots_available = any(\n [slot.available for slot in self.board.iter_slots() if not slot.mine]\n )\n if not slots_available:\n self.status = GameStatusEnum.won\n self.end_time = datetime.utcnow()", "def is_over(self):\n return self.game.is_over()", "def checkWinner(self, surface):\r\n winner = True\r\n \r\n # Checks for winner\r\n for point in self.points:\r\n if point.getTeam() == self.getTurn():\r\n winner = False\r\n \r\n # Displays winner message if there is a winner\r\n if winner:\r\n self.surface.fill(BLACK)\r\n winText = graphicalObjects.Text(self.getCurrentString() + ' wins!', WIN_CENTER, 20)\r\n winText.draw(self.surface)\r\n pygame.display.flip()\r\n self.won = True", "def is_game_over(self):\n\n if len(self.next_pieces) == 0:\n return True", "def is_won(self):\n for tile in self:\n if not tile.is_mine and tile.visibility != 1:\n return False\n return True", "def won(self):\n return self.mines_found == self.mines", "def won(self):\n return self.mines_found == self.mines", "def won(self):\n return self.mines_found == self.mines", "def won(self):\n return self.mines_found == self.mines", "def won(self):\n return self.mines_found == self.mines", "def won(self):\n return self.mines_found == self.mines", "def checkWin(self, board):\n for w in self.wins:\n if board[w[0]] != ' ' and (board[w[0]] == board[w[1]] == board[w[2]]):\n self.winner = board[w[0]]\n return True", "def check_win(self, player):\n for win_pos in TicTacToe.win_pos:\n # for each winning position defined we take the set difference to the positions played be player\n # if there are not elements left after 
resulting set after difference operator\n # we get False as return. ie he has placed his marker in the winning positions which in turn makes him\n # the winner\n if not win_pos.difference(self.player_played_pos[player]):\n return True\n\n # if after checking for every winning positions if the control still reaches here,\n # the player has not marked the winning positions. returns False\n return False", "def is_game_over(self):\n bk = False\n wk = False\n\n # Find the kings\n for row in range(8):\n for col in range(8):\n if self.board.squares[row][col] == ChessPiece.B_KING: # Black king symbol\n bk = True\n break\n if self.board.squares[row][col] == ChessPiece.W_KING: # Black king symbol\n wk = True\n break\n\n # If a king is missing, end the game. This fixes a bug we were having\n if bk == False:\n return 1\n if wk == False:\n return 2\n\n if self.white_wins():\n return 1\n elif self.black_wins():\n return 2\n elif self.tie():\n return 3\n else:\n return 0", "def have_won(self):\n raise NotImplemented", "def is_game_over(cls):\n cls.record_winner()\n cls.record_tie()", "def has_winner(self):\n\n if self.num_black_pieces == 0 or len(self.get_all_valid_moves(Player.black)) == 0:\n return Player.white\n elif self.num_white_pieces == 0 or len(self.get_all_valid_moves(Player.white)) == 0:\n return Player.black\n elif self.repetition_happened() or self.passive_game():\n return \"Tie\"\n else:\n return None", "def is_winner(self):\n return self._winner != self.NEUTRAL_PLAYER", "def is_over(self):\n winner = self.get_winner()\n status = bool(winner or not self.available_moves)\n return status, winner", "def wins(self):\n return self._wins", "def check_won(board,player):\n # X axis\n if (\n (len(set(board[1:4])) == 1 and ' ' not in set(board[1:4])) or\n (len(set(board[4:7])) == 1 and ' ' not in set(board[4:7])) or\n (len(set(board[7:10])) == 1 and ' ' not in set(board[7:10]))\n ):\n print('Player %s, you win!' % player)\n display_board(board)\n return True\n # Y axis\n if (\n (len(set(board[1::3])) == 1 and ' ' not in set(board[1::3])) or\n (len(set(board[2::3])) == 1 and ' ' not in set(board[2::3])) or\n (len(set(board[3::3])) == 1 and ' ' not in set(board[3::3]))\n ):\n print('Player %s, you win!' % player)\n display_board(board)\n return True\n # Diagonals\n if (\n (len(set(board[1::4])) == 1 and ' ' not in set(board[1::4])) or\n (len(set(board[3:9:2])) == 1 and ' ' not in set(board[3:9:2]))\n ):\n print('Player %s, you win!' 
% player)\n display_board(board)\n return True\n\n return False", "def check_if_user_won(self, board, pos, player_no):\n\n has_player_got_4 = set()\n has_player_got_4.add(pos)\n\n self.check_horizontal(has_player_got_4, board, pos, player_no)\n\n if len(has_player_got_4) >= 4:\n return True\n\n has_player_got_4 = set()\n has_player_got_4.add(pos)\n\n self.check_vertical(has_player_got_4, board, pos, player_no)\n\n if len(has_player_got_4) >= 4:\n return True\n\n has_player_got_4 = set()\n has_player_got_4.add(pos)\n\n self.check_diagonal(has_player_got_4, board, pos, player_no)\n\n if len(has_player_got_4) >= 4:\n return True\n\n has_player_got_4 = set()\n has_player_got_4.add(pos)\n\n self.check_inverted_diagonal(has_player_got_4, board, pos, player_no)\n\n if len(has_player_got_4) >= 4:\n return True\n\n if self.check_if_board_full(board):\n self.draw = True\n return True", "def is_game_over(board):\n winner = check_winner(board)\n draw = check_draw(winner, board)\n return True if winner or draw else False", "def all_players_finish(self):\n return len(self.game_winners) == len(self.players)", "def game_end(self):\n win, winner = self.has_a_winner()\n if win:\n return True, winner\n elif not len(self.availables): #\n return True, -1\n\n return False, -1", "def is_game_over(self):\n if (self.check_win(HexBoard.RED) or self.check_win(HexBoard.BLUE) or \n len(self.get_move_list())==0):\n self.game_over = True\n return self.game_over", "def is_game_complete(game):\n game_round = min(len(game.creator_scores), len(game.invitee_scores))\n creator_score = sum(game.creator_scores[:game_round])\n invitee_score = sum(game.invitee_scores[:game_round])\n return creator_score >= GAME_SCORE_TO_WIN or invitee_score >= GAME_SCORE_TO_WIN", "def __game_is_over(self, x, y):\n\t\tif np.count_nonzero(self.board) >= 42:\n\t\t\treturn True\n\n\t\tlines = self.__extract_lines(x, y)\n\n\t\tfor line in lines:\n\t\t\tif self.__winner_in_line(line) != 0:\n\t\t\t\treturn True\n\n\t\treturn False", "def game_over(self):\n return self.lives() < 0", "async def check_game_over(self, game_id):\n game = await self.get_game(game_id)\n player1_stand = await self.check_player_standing(game[1])\n player2_stand = await self.check_player_standing(game[2])\n if player1_stand and player2_stand:\n return True\n else:\n return False", "def _check_game_over(self):\n return self.game_board.check_game_over()", "def winning_game_player(players):\n\n # in order for there to be a winner, the game must\n # be over\n if not game_over(players):\n return None\n\n # if the game is over, it could be that there is no\n # winner\n active_players = players_with_decks(players)\n if not active_players:\n return False\n\n # if the game is over than find the winner\n return players_with_decks(players)[0]", "def is_player_ready(self):\n player = self.base.game_instance['player_ref']\n if (player\n and base.player_states[\"is_alive\"]\n and base.player_states[\"is_idle\"]\n and not base.player_states[\"is_moving\"]\n and not base.player_states[\"is_running\"]\n and not base.player_states[\"is_crouch_moving\"]\n and not base.player_states[\"is_crouching\"]\n and not base.player_states[\"is_standing\"]\n and not base.player_states[\"is_jumping\"]\n and not base.player_states[\"is_h_kicking\"]\n and not base.player_states[\"is_f_kicking\"]\n and not base.player_states[\"is_using\"]\n and not base.player_states[\"is_attacked\"]\n and not base.player_states[\"is_busy\"]\n and not base.player_states[\"is_turning\"]\n and not 
base.player_states[\"is_mounted\"]\n and not base.player_states[\"horse_riding\"]\n and not self.base.game_instance[\"is_player_sitting\"]\n and not player.get_python_tag(\"is_on_horse\")\n ):\n return True\n else:\n return False", "def is_over(self):\n for el1, el2, el3 in self.WINNING_POSITIONS:\n if self.board[el1] == self.board[el2] == self.board[el3]:\n if self.board[el1] == 0:\n continue\n\n self.winner = self.board[el1]\n return True\n\n if self.__class__.EMPTY_POSITION_COUNTER not in self.board:\n return True\n\n return False", "def game_is_tied(self):\n tie_score = False\n if self.my_score == self.opponent_score:\n tie_score = True\n my_moves = self.steps_available(self.loc)\n opponent_moves = self.steps_available(self.opponent_loc)\n if my_moves == 0 and opponent_moves == 0 and tie_score:\n return True\n else:\n penalty = self.penalty_score\n if my_moves == 0 and opponent_moves != 0:\n return (self.my_score - penalty) == self.opponent_score\n elif my_moves != 0 and opponent_moves == 0:\n return self.my_score == (self.opponent_score - penalty)\n else:\n return False", "def game_end(self):\n win, winner = self.has_a_winner()\n if win:\n return True, winner\n elif not len(self.availables):\n return True, -1\n return False, -1", "def game_end(self):\n win, winner = self.has_a_winner()\n if win:\n return True, winner\n elif not len(self.availables):\n return True, -1\n return False, -1", "def is_complete(self):\n return self.winner is not None", "def has_won(self, disc, coordinates):\n board = self.game[\"board\"]\n for check_method in self.check_has_won_methods:\n if check_method(board, disc, *coordinates) is True:\n return True\n return False", "def is_over(self, board):\n if _winner(board) != 0:\n return True\n return False", "def check_win_lose(self):\n if self.b.get_player_i() == 7: # player got to the bank\n return 1 # win\n if self.b.get_chaser_i() == self.b.get_player_i(): # chaser catch the player\n return 2 # lose\n return 0 # nothing", "def check_game_over(self):\n red, blue = self.board.count_piece()\n if blue == 0:\n self.ui.show_result(\"RED WIN!\")\n self.turn = RED\n elif red == 0:\n self.ui.show_result(\"BLUE WIN!\")\n self.turn = BLUE\n elif red == blue == 1:\n self.ui.show_result(\"DRAW!\")", "def determine_win(self):\n if self.match.radiant_win is True and self.player_slot < 5:\n return True\n if self.match.radiant_win is False and self.player_slot > 5:\n return True\n return False", "def check_if_game_over():\n # Calling check for winners.\n check_for_winner()\n # Calling check it's tie or not.\n check_if_tie()", "def enough_players():\n return True", "def __check_winner(self):\n for i in range(0, 3):\n col = self.__get_col(i)\n if col.get(self.player_char) == 3:\n print('\\nYou win!')\n self.game_ended = True\n return\n if col.get(self.opponent_char) == 3:\n print('\\nYou lose.')\n self.game_ended = True\n return\n row = self.__get_row(i)\n if row.get(self.player_char) == 3:\n print('\\nYou win!')\n self.game_ended = True\n return\n if row.get(self.opponent_char) == 3:\n print('\\nYou lose.')\n self.game_ended = True\n return\n for i in range(0, 2):\n diag = self.__get_diag(i)\n if diag.get(self.player_char) == 3:\n print('\\nYou win!')\n self.game_ended = True\n return\n if diag.get(self.opponent_char) == 3:\n print('\\nYou lose.')\n self.game_ended = True\n return\n if self.state.count(' ') == 0:\n print('\\nDraw!')\n self.game_ended = True" ]
[ "0.8601931", "0.8356279", "0.83288485", "0.81141126", "0.80790204", "0.78687596", "0.78639466", "0.7839697", "0.7838136", "0.76941895", "0.7626411", "0.75731826", "0.754925", "0.74468017", "0.74358803", "0.74049085", "0.7379903", "0.7379802", "0.7361152", "0.7361038", "0.7336999", "0.733275", "0.7278496", "0.7252811", "0.72298825", "0.7222105", "0.7163967", "0.71198744", "0.7118674", "0.71046823", "0.708241", "0.70790076", "0.7071814", "0.7056981", "0.70424235", "0.7040125", "0.7034183", "0.7032303", "0.7008262", "0.7005223", "0.6978853", "0.6978712", "0.69781256", "0.6978117", "0.6977161", "0.69710904", "0.69655854", "0.69591206", "0.69270754", "0.6912101", "0.6897184", "0.68773454", "0.68760586", "0.68661654", "0.6866002", "0.68317026", "0.68275476", "0.68267024", "0.68242425", "0.68147427", "0.68147427", "0.68147427", "0.68147427", "0.68147427", "0.68147427", "0.6812471", "0.6806246", "0.6804701", "0.68035424", "0.67919946", "0.67841494", "0.67681414", "0.67632306", "0.67599374", "0.67464423", "0.6735631", "0.6713064", "0.67120236", "0.6711364", "0.6711266", "0.67065364", "0.66948766", "0.6692027", "0.6685475", "0.668537", "0.66777843", "0.6672777", "0.6661874", "0.666119", "0.66609395", "0.66609395", "0.6651903", "0.6646783", "0.6645227", "0.6638016", "0.6631143", "0.66270584", "0.6623048", "0.66177964", "0.66095746" ]
0.69783485
42
Return the move that string represents. If string is not a move, return some invalid move.
def str_to_move(self, str1: str) -> Any:
    if not str1.strip().isalpha():
        return -1
    return str1.strip()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def str_to_move(self, string):\n if not string.strip().isalpha():\n return -1\n return string.strip()", "def str_to_move(self, string):\n if not string.strip().isdigit():\n return -1\n\n return int(string.strip())", "def get_move() -> str:\n msg = 'Enter a move for that section (C to check, S to swap, R to rotate): '\n move = input(msg)\n while not wf.is_valid_move(move):\n print('Invalid move!')\n move = input(msg) \n return move", "def handle_move(self, move_string):\n def map_move(move):\n col = int(ascii_lowercase.find(move[0])) + 1 # dummy col\n row = int(move[1:])\n # if not 0 < col <= game[\"board_width\"]:\n # raise ValueError('bad coord; invalid col in ' + coord)\n # if not 0 < row <= game[\"board_height\"]:\n # raise ValueError('bad coord; invalid row in ' + coord)\n return row*(self.rules[\"row_len\"]) + col\n move = list(map(map_move,move_string.split(' ')))\n self.turn[\"board\"][move[0]].make_move(*move[1:])\n self.turn[\"half_move_clock\"] += 1\n if self.turn[\"active_player\"] == 1:\n self.turn[\"full_move_clock\"] += 1\n self.turn[\"active_player\"] = (self.turn[\"active_player\"] + 1) % 2\n # self.turn[\"board\"][move_start].make_move(move_end)", "def get_move(character: dict, move_command: str) -> dict:\n\n movelist = get_character_movelist(character)\n\n move = list(filter(lambda x: (move_simplifier(x['Command'].replace(\"\\\\\",\"\"))\n == move_simplifier(move_command)), movelist))\n if not move:\n move = list(filter(lambda x: (is_command_in_alias(move_command, x)), movelist))\n\n if move:\n move[0]['Command'] = move[0]['Command'].replace(\"\\\\\",\"\")\n return move[0]\n else:\n return None", "def parse_move(move):\n if not (len(move) == 2):\n return None, None\n try:\n row = ord(move[0].upper()) - 65\n col = int(move[1])\n except:\n return None, None\n return row, col", "def _parse_move_statement(dlstr):\n\n try:\n tokens = dlstr.lower().split()\n if tokens[0] != \"move\":\n raise ValueError(\"Expected 'move' statement\")\n\n mtype, nmove, pfreq, rmin = \\\n tokens[1], int(tokens[2]), int(tokens[3]), float(tokens[4])\n except IndexError:\n raise ValueError(\"Badly formed 'move' statement?\")\n\n return mtype, nmove, pfreq, rmin", "def get_move(self, board):\n\n valid_moves = [move for move in board.legal_moves]\n is_valid_move = False\n while not is_valid_move:\n move = input(\"Enter a valid move in uci format: \").lower()\n if len(move) == 4 or len(move) == 5:\n try:\n player_move = chess.Move.from_uci(move)\n\n if board.is_legal(player_move):\n try:\n board.push(player_move)\n return player_move\n except:\n print(\"invalid move...\")\n else:\n print(\"invalid move...\")\n except:\n print(\"invalid move...\")\n else:\n print(\"invalid move...\")", "def get_move_from_user(self):\n user_input = input(\"Move: \")\n if user_input == 'undo':\n return user_input\n try:\n move_list = user_input.split(\" \")\n move_list[1] = int(move_list[1])\n except:\n move_list = ['XX', 0, 'XX']\n return move_list", "def get_next_move(self):\n return int(input('Enter your move: '))", "def get_move(self):\n if self._difficulty == 0:\n return self._get_easy_move()\n else:\n # Different stategies/difficulties can be attached here\n return", "def get_next_move(self):\n if self.move == 'X':\n return 'O'\n return 'X'", "def get_move(self, find_move_name):\n frame_data = self._get_frame_data()\n sprites = self._get_sprites()\n\n # Need to check both names separately\n for move in frame_data.keys():\n if '\"' in find_move_name:\n temp_move_name = find_move_name.replace('\"', '')\n if 
temp_move_name == move:\n frame_data_name = move\n break\n else:\n continue\n elif find_move_name.lower() == move.lower():\n frame_data_name = move\n break\n\n else:\n for move in frame_data.keys():\n if find_move_name.lower() in move.lower():\n frame_data_name = move\n break\n else:\n raise MoveNotFound\n\n sprite_name = None\n\n # temporary fix for the 214/236B/22x/5AD meme\n if '214b' in frame_data_name.lower() and not '214bc' in frame_data_name.lower():\n for move in sprites.keys():\n if '214A/B' in move:\n sprite_name = move\n break\n elif '236b' in frame_data_name.lower() and not '236bc' in frame_data_name.lower():\n for move in sprites.keys():\n if '236A/B' in move:\n sprite_name = move\n break\n\n elif '22' in frame_data_name.lower():\n for move in sprites.keys():\n if '22A/B' in move and '22c' not in frame_data_name.lower():\n sprite_name = move\n break\n elif '22A/B/C' in move and '22c' in frame_data_name.lower():\n sprite_name = move\n break\n\n elif 'reversal' in frame_data_name.lower():\n for move in sprites.keys():\n if '5AD' in move:\n sprite_name = move\n break\n\n for move in sprites.keys():\n if sprite_name is not None:\n break\n if 'j.' in frame_data_name.lower() and ' ' in frame_data_name:\n for split_name in frame_data_name.split(' '):\n if move.lower() == split_name.lower():\n sprite_name = move\n break\n elif move.lower() == frame_data_name.lower():\n sprite_name = move\n break\n else:\n for move in sprites.keys():\n if sprite_name is not None:\n break\n if 'j.' in frame_data_name.lower() and ' ' in frame_data_name:\n for split_name in frame_data_name.split(' '):\n if move.lower() in split_name.lower():\n sprite_name = move\n break\n elif move.lower() in frame_data_name.lower() and '22' not in find_move_name:\n print('ok')\n sprite_name = move\n break\n elif find_move_name.lower() in move.lower():\n sprite_name = move\n break\n else:\n sprite_name = None\n\n if sprite_name is None:\n sprite = ''\n else:\n sprite = self._get_high_quality_sprite(sprites[sprite_name])\n\n return {\n frame_data_name: {\n 'fd': frame_data[frame_data_name],\n 'sprite': sprite\n }\n }", "def interactive_strategy(game: Game) -> str:\n move = input(\"Enter a move: \")\n return game.str_to_move(move)", "def _get_move(self) -> Tile:\n if not self.game_state:\n raise RuntimeError(\"Cannot call get_move when the game has not started!\")\n if isinstance(self.current_turn, Player):\n return self._get_player_move()\n elif isinstance(self.current_turn, Enemy):\n return self._get_enemy_move()\n else:\n raise TypeError(\"You're trying to move something that isn't a character or an adversary.\")", "def move(self):\r\n their_move = self.last_moves[\"their_move\"]\r\n return (their_move == \"\" and random.choice(moves) or their_move)", "def get_move(moves):\n pass", "def parse_move_to_square(self, uci_move: str):\n chars = utils.split_string_to_chars(uci_move)\n square_from = ''.join(chars[0] + chars[1])\n square_to = ''.join(chars[2] + chars[3])\n return square_from, square_to", "def get_move(state):\n entry = game_states[get_values(state)]\n options = list()\n\n for move in entry:\n move_result = entry[move]\n if move_result == 'Y':\n return move\n elif move_result == 'N':\n continue\n options.extend([move]*move_result)\n return choice(options)", "def from_string(dlstr):\n\n moves_volume = {\"vector\": VolumeVectorMove,\n \"ortho\": VolumeOrthoMove,\n \"cubic\": VolumeCubicMove}\n\n moves_mc = {\"atom\": AtomMove,\n \"molecule\": MoleculeMove,\n \"rotatemol\": RotateMoleculeMove,\n \"gcinsertatom\": 
InsertAtomMove,\n \"gcinsertmol\": InsertMoleculeMove}\n\n lines = dlstr.splitlines()\n tokens = lines[0].lower().split()\n if tokens[0] != \"move\" or len(tokens) < 4:\n raise ValueError(\"Expected: 'move key ...': got {!r}\".format(lines[0]))\n\n key = tokens[1]\n\n # We need to allow for possible DL key abbreviations\n if key.startswith(\"atom\"):\n key = \"atom\"\n if key.startswith(\"molecu\"):\n key = \"molecule\"\n if key.startswith(\"rotatemol\"):\n key = \"rotatemol\"\n\n inst = None\n if key == \"volume\":\n subkey = tokens[2]\n if subkey in moves_volume:\n inst = moves_volume[subkey].from_string(dlstr)\n else:\n if key in moves_mc:\n inst = moves_mc[key].from_string(dlstr)\n\n if inst is None:\n raise ValueError(\"Move unrecognised: {!r}\".format(dlstr))\n\n return inst", "def get_move(self, game_state: BotGameState) -> BotMove:\n return", "def move(self):\r\n move = None\r\n if self.last_move is None:\r\n move = rockyman.move(self)\r\n else:\r\n index = the_moves.index(self.last_move) + 1\r\n if index >= len(the_moves):\r\n index = 0\r\n move = the_moves[index]\r\n self.last_move = move\r\n return move", "def player_move():\n\tmove = None\n\twhile move not in moves:\n\t\tmove = raw_input(\"What is your move %s? --> \" % name)\n\treturn move", "def fix_move(self, invalid_move: QMove):\n\n # TODO: reduce time_per_game second by second\n ERROR_MSG = f\"INVALID_MOVE {invalid_move.to_string()}\"\n\n if self.is_ai and self.proc is not None:\n self.proc.stdin.write(str.encode(ERROR_MSG + '\\n'))\n self.proc.stdin.flush()\n new_move = QMove(os.read(self.proc.stdout.fileno(), 100))\n else:\n new_move = QMove(\n input(\"Move was invalid, enter a valid move:\\n\\t>> \"))\n\n return new_move", "def interactive_strategy(game: Any) -> Any:\n move = input(\"Enter a move: \")\n return game.str_to_move(move)", "def interactive_strategy(game: Any) -> Any:\n move = input(\"Enter a move: \")\n return game.str_to_move(move)", "def interactive_strategy(game: Any) -> Any:\n move = input(\"Enter a move: \")\n return game.str_to_move(move)", "def test_move_initialization():\r\n m = Move('A1', 'B2')\r\n assert m.get_from_str() == 'A1'\r\n assert m.get_to_str() == 'B2'\r\n assert m.get_from_xy() == (7, 0)\r\n assert m.get_to_xy() == (6, 1)", "def update_puzzle(self, move_string):\r\n zero_row, zero_col = self.current_position(0, 0)\r\n for direction in move_string:\r\n if direction == \"l\":\r\n assert zero_col > 0, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]\r\n self._grid[zero_row][zero_col - 1] = 0\r\n zero_col -= 1\r\n elif direction == \"r\":\r\n assert zero_col < self._width - 1, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]\r\n self._grid[zero_row][zero_col + 1] = 0\r\n zero_col += 1\r\n elif direction == \"u\":\r\n assert zero_row > 0, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]\r\n self._grid[zero_row - 1][zero_col] = 0\r\n zero_row -= 1\r\n elif direction == \"d\":\r\n assert zero_row < self._height - 1, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]\r\n self._grid[zero_row + 1][zero_col] = 0\r\n zero_row += 1\r\n else:\r\n assert False, \"invalid direction: \" + direction", "def update_puzzle(self, move_string):\r\n zero_row, zero_col = self.current_position(0, 0)\r\n for direction in move_string:\r\n if direction == \"l\":\r\n assert zero_col > 0, \"move off 
grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]\r\n self._grid[zero_row][zero_col - 1] = 0\r\n zero_col -= 1\r\n elif direction == \"r\":\r\n assert zero_col < self._width - 1, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]\r\n self._grid[zero_row][zero_col + 1] = 0\r\n zero_col += 1\r\n elif direction == \"u\":\r\n assert zero_row > 0, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]\r\n self._grid[zero_row - 1][zero_col] = 0\r\n zero_row -= 1\r\n elif direction == \"d\":\r\n assert zero_row < self._height - 1, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]\r\n self._grid[zero_row + 1][zero_col] = 0\r\n zero_row += 1\r\n else:\r\n assert False, \"invalid direction: \" + direction", "def update_puzzle(self, move_string):\r\n zero_row, zero_col = self.current_position(0, 0)\r\n for direction in move_string:\r\n if direction == \"l\":\r\n assert zero_col > 0, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]\r\n self._grid[zero_row][zero_col - 1] = 0\r\n zero_col -= 1\r\n elif direction == \"r\":\r\n assert zero_col < self._width - 1, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]\r\n self._grid[zero_row][zero_col + 1] = 0\r\n zero_col += 1\r\n elif direction == \"u\":\r\n assert zero_row > 0, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]\r\n self._grid[zero_row - 1][zero_col] = 0\r\n zero_row -= 1\r\n elif direction == \"d\":\r\n assert zero_row < self._height - 1, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]\r\n self._grid[zero_row + 1][zero_col] = 0\r\n zero_row += 1\r\n else:\r\n assert False, \"invalid direction: \" + direction", "def update_puzzle(self, move_string):\n zero_row, zero_col = self.current_position(0, 0)\n for direction in move_string:\n if direction == \"l\":\n assert zero_col > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]\n self._grid[zero_row][zero_col - 1] = 0\n zero_col -= 1\n elif direction == \"r\":\n assert zero_col < self._width - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]\n self._grid[zero_row][zero_col + 1] = 0\n zero_col += 1\n elif direction == \"u\":\n assert zero_row > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]\n self._grid[zero_row - 1][zero_col] = 0\n zero_row -= 1\n elif direction == \"d\":\n assert zero_row < self._height - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]\n self._grid[zero_row + 1][zero_col] = 0\n zero_row += 1\n else:\n assert False, \"invalid direction: \" + direction", "def update_puzzle(self, move_string):\n zero_row, zero_col = self.current_position(0, 0)\n for direction in move_string:\n if direction == \"l\":\n assert zero_col > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]\n self._grid[zero_row][zero_col - 1] = 0\n zero_col -= 1\n elif direction == \"r\":\n assert zero_col < self._width - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]\n 
self._grid[zero_row][zero_col + 1] = 0\n zero_col += 1\n elif direction == \"u\":\n assert zero_row > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]\n self._grid[zero_row - 1][zero_col] = 0\n zero_row -= 1\n elif direction == \"d\":\n assert zero_row < self._height - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]\n self._grid[zero_row + 1][zero_col] = 0\n zero_row += 1\n else:\n assert False, \"invalid direction: \" + direction", "def move(self):\r\n my_move = self.last_moves[\"my_move\"]\r\n return (my_move != \"\" and moves[(moves.index(my_move)+1) % 3] or\r\n random.choice(moves))", "def update_puzzle(self, move_string):\n zero_row, zero_col = self.current_position(0, 0)\n for direction in move_string:\n if direction == \"l\":\n assert zero_col > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col\n - 1]\n self._grid[zero_row][zero_col - 1] = 0\n zero_col -= 1\n elif direction == \"r\":\n assert zero_col < self._width - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col\n + 1]\n self._grid[zero_row][zero_col + 1] = 0\n zero_col += 1\n elif direction == \"u\":\n assert zero_row > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][\n zero_col]\n self._grid[zero_row - 1][zero_col] = 0\n zero_row -= 1\n elif direction == \"d\":\n assert zero_row < self._height - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][\n zero_col]\n self._grid[zero_row + 1][zero_col] = 0\n zero_row += 1\n else:\n assert False, \"invalid direction: \" + direction", "def interactive_strategy(game: Any) -> Any:\r\n move = input(\"Enter a move: \")\r\n return game.str_to_move(move)", "def movee(self):\n\n #return the initial state if he cant move and he's in the initial state\n if not self.move and self.index == 0:\n return self.path[self.index]\n\n #return the goal state if he's at the goal state\n if self.index == len(self.path):\n return self.path[-1]\n\n #return the next move and increments the index attribute\n nextMove = self.path[self.index]\n self.index += 1\n\n return nextMove", "def get_move(self, i):\n # Exception if not (0 <= i < self.length)\n return self._moves[i]", "def update_puzzle(self, move_string):\n zero_row, zero_col = self.current_position(0, 0)\n for direction in move_string:\n if direction == \"l\":\n assert zero_col > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]\n self._grid[zero_row][zero_col - 1] = 0\n zero_col -= 1\n elif direction == \"r\":\n assert zero_col < self._width - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]\n self._grid[zero_row][zero_col + 1] = 0\n zero_col += 1\n elif direction == \"u\":\n assert zero_row > 0, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]\n self._grid[zero_row - 1][zero_col] = 0\n zero_row -= 1\n elif direction == \"d\":\n assert zero_row < self._height - 1, \"move off grid: \" + direction\n self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]\n self._grid[zero_row + 1][zero_col] = 0\n zero_row += 1\n else:\n assert False, \"invalid direction: \" + direction\n for row in self._grid:\n print row\n print", "def human_move(board,player):\r\n \r\n s = input(\"Please input a legal 
move in a format of \\\"current_position-landing_position\\\", if the move is cantering or plain. In case of a capturing move, follow \\\"current_position-landing_position-enemy piece\\\": \")\r\n move = s.split('-')\r\n legal = legal_moves(board,player)\r\n execution(move,legal,board,player)", "def choose_move(self, board):\n if self.opp == Player.HUMAN:\n time.sleep(4)\n if self.type == Player.HUMAN:\n move = input(\"Please enter your move:\")\n while not board.legalMove(self, move):\n print(move, \"is not valid\")\n move = input(\"Please enter your move\")\n return move\n elif self.type == Player.RANDOM:\n move = choice(board.legalMoves(self))\n return move\n elif self.type == Player.MINIMAX:\n val, move = self.minimax_move(board, self.depth * 2,\n Player.MAX_PLAYER)\n board.last_move = move\n return move\n elif self.type == Player.ABPRUNE:\n val, move = self.alpha_beta_move(board, self.depth * 2,\n float('-inf'), float('inf'),\n Player.MAX_PLAYER)\n return move\n elif self.type == Player.CUSTOM:\n move = self.agent.getAction(board)\n self.agent.update_current_state(board, move)\n return move\n elif self.type == Player.MIX:\n return self.mixed_move(board)\n\n else:\n print(\"Unknown player type\")\n return -1", "def _get_move_result(self, unlocked_before_move : bool, err = None):\n if err:\n return Moveresult.INVALID\n elif self.current_turn.entity in self.game_state.get_completed_characters():\n return Moveresult.EXIT\n elif self.game_state.is_character_expelled(self.current_turn.entity):\n return Moveresult.EJECT\n elif self.game_state.is_current_level_unlocked() and not unlocked_before_move:\n return Moveresult.KEY\n else:\n return Moveresult.OK", "def best_move(self):\n if self._move is not None:\n return self._move\n else:\n return self.pass_move", "def takeStrategicMove():\r\n\tglobal move1, move2\r\n\r\n\tif move1==0 or move2==0:\r\n\t\tif validMove(1):\r\n\t\t\treturn 1\r\n\t\telif validMove(5):\r\n\t\t\treturn 5\r\n\telif winningMove():\r\n\t\treturn winningMove()\t\t\r\n\telif blockingMove():\r\n\t\treturn blockingMove()\r\n\telse:\r\n\t\treturn takeNaiveMove()", "def move(self):\r\n if self.last_op_move is None:\r\n return rockyman.move(self)\r\n else:\r\n return self.last_op_move", "def get_move(board, engine, color, move_num, time, **kwargs):\n legal_moves = board.get_legal_moves(color)\n\n if not legal_moves:\n return None\n elif len(legal_moves) == 1:\n return legal_moves[0]\n else:\n try:\n move = engine.get_move(copy.deepcopy(board), color, move_num, time[color], time[-color])\n except Exception, e:\n print traceback.format_exc()\n raise SystemError(color)\n\n if move not in legal_moves:\n print \"legal list\", [move_string(m) for m in legal_moves]\n print \"illegal\", move_string(move), \"=\", move\n raise LookupError(color)\n\n return move", "def move(self, move):\n possible_words = self._possible_words()\n if move not in possible_words:\n raise ValueError\n else:\n return self._extend(move)", "def get_move(state, player, max_time, verbose):\n # We call the player bot in a separate process.This allows us to terminate\n # if the player takes too long.\n manager = Manager()\n result = manager.dict() # result is a variable shared between our process and\n # the player's. 
This allows it to pass the move to us\n\n # Start a process with the function 'call_player' and the given arguments\n process = Process(target=call_player, args=(player, state, result))\n\n # Start the process\n process.start()\n\n # Rejoin at most max_time miliseconds later\n process.join(max_time / 1000)\n\n # Check if the process terminated in time\n move = None\n if process.is_alive():\n pr('! Player {} took too long, no move made.'.format(state.whose_turn()), verbose)\n\n process.terminate()\n process.join()\n\n else:\n # extract the move\n move = result['move']\n\n return move", "def get_move(board, engine, color, move_num, time, **kwargs):\n legal_moves = board.get_legal_moves(color)\n\n if not legal_moves:\n return None\n elif len(legal_moves) == 1:\n return legal_moves[0]\n else:\n try:\n move = engine.get_move(copy.deepcopy(board), color, move_num, time[color], time[-color])\n except Exception as e:\n print(traceback.format_exc())\n raise SystemError(color)\n\n if move not in legal_moves:\n print(\"legal list\", [move_string(m) for m in legal_moves])\n print(\"illegal\", move_string(move), \"=\", move)\n raise LookupError(color)\n\n return move", "def get_move(self, board):\n # First, check if we can win in the next move\n winning_move = self.get_winning_move(board, self.letter)\n if winning_move is not None:\n return winning_move\n # Check if the player could win on their next move, and block them.\n blocking_move = self.get_winning_move(board, self.opponent_letter)\n if blocking_move is not None:\n return blocking_move\n # Try to take one of the corners, if they are free.\n corner_move = self.move_in_a_corner(board)\n if corner_move is not None:\n return corner_move\n # Try to take the center, if it is free.\n if board.size % 2 == 1:\n if board.is_position_availible(board.letters[board.size // 2]\n + board.numbers[board.size // 2]):\n return board.letters[board.size // 2] + board.numbers[board.size // 2]\n # Move on one of the sides.\n return self.choose_random_move_from_list(board, list(board.positions.keys()))", "def get_move(self, last_move: QMove):\n\n # TODO: reduce time_per_game second by second\n if self.is_ai and self.proc is not None:\n self.proc.stdin.write(str.encode(last_move.to_string() + '\\n'))\n self.proc.stdin.flush()\n new_move = QMove(os.read(self.proc.stdout.fileno(), 100))\n else:\n new_move = QMove(input(\"Move, it's your turn:\\n\\t>> \"))\n\n if new_move.type == \"player\":\n self.coordinates = new_move.get_to()\n\n return new_move", "def getMove(self):\n while True:\n try:\n init = tuple(int(str.strip()) for str in raw_input('Choose the initial position of your move: ').split(','))\n break\n except ValueError:\n print(\"Input is not integer.\")\n\n while (len(init) != 2) or (init[0] not in range(1, self.grid.width+1)) or (init[1] not in range(1, self.grid.height+1)):\n print 'Initial position is not valid.'\n init = tuple(int(str.strip()) for str in raw_input('Choose the initial position of your move: ').split(','))\n\n while True:\n try:\n dest = tuple(int(str.strip()) for str in raw_input('Choose the destination position of your move: ').split(','))\n break\n except ValueError:\n print(\"Input is not integer.\")\n\n while (len(dest) != 2) or (dest[0] not in range(1, self.grid.width+1)) or (dest[1] not in range(1, self.grid.height+1)):\n print 'Destination position is not valid.'\n dest = tuple(int(str.strip()) for str in raw_input('Choose the destination position of your move: ').split(','))\n\n return (init, dest)", "def nextMoveGNU(self, 
move=\"go\", board=None):\n # get move\n if self.pawning:\n while not rospy.is_shutdown():\n for row in [2,3,4,5]:\n for col in ['a','b','c','d','e','f','g','h']:\n p1 = board.getPiece(col,row)\n if p1 != None and abs(p1.type) == ChessPiece.WHITE_PAWN:\n p2 = board.getPiece(col,row+1)\n if p2 == None:\n # this is a candidate \n m = col + str(row) + col + str(row+1)\n self.history.append(m)\n return m\n else:\n self.engine.sendline(move) \n if self.engine.expect(['My move is','Illegal move']) == 1:\n return None \n self.engine.expect('([a-h][1-8][a-h][1-8][RrNnBbQq(\\r\\n)])')\n m = self.engine.after.rstrip()\n self.history.append(m)\n return m", "def is_valid_move(self, move):\n if type(move) == str:\n move = int(move)\n\n return move in self.get_possible_moves()", "def get_player_move(board_positions, is_position_availible):\n player_input = None\n move = None\n while move not in board_positions.keys() or is_position_availible(move) is False:\n print(\"What is your next move? Input in the form letter + number Ex. a3\")\n player_input = input().lower()\n letter = player_input[0] + \" \"\n number = player_input[1:]\n if len(number) < 2:\n number = number + \" \"\n move = letter + number\n return move", "def get_move_no(self):\r\n return self.command_manager.get_move_no()", "def nextMoveGNU(self, move=\"go\", board=None):\n # get move\n if self.pawning:\n while not rospy.is_shutdown():\n rows = [2,3,4,5]\n piece = ChessPiece.WHITE_PAWN\n if board.side == board.BLACK:\n rows = [7,6,5,4]\n piece = ChessPiece.BLACK_PAWN\n for row in rows:\n for col in ['a','b','c','d','e','f','g','h']:\n p1 = board.getPiece(col,row)\n if p1 != None and abs(p1.type) == piece:\n p2 = board.getPiece(col,row+1)\n if p2 == None:\n # this is a candidate\n m = col + str(row) + col + str(row+1)\n self.history.append(m)\n return m\n else:\n self.engine.sendline(move)\n if self.engine.expect(['My move is','Illegal move']) == 1:\n return None\n self.engine.expect('([a-h][1-8][a-h][1-8][RrNnBbQq(\\r\\n)])')\n m = self.engine.after.rstrip()\n self.history.append(m)\n return m", "def makeMove(self, moveStr):\r\n\t\tmoveStr = str(moveStr)\r\n\r\n\t\tmoveUci = self._userParseSanToUci(moveStr)\r\n\t\t# print(moveUci)\r\n\r\n\t\tif moveUci is None:\r\n\t\t\treturn\r\n\r\n\t\tresponse = requests.post(f'https://lichess.org/api/board/game/{self.gameId}/move/{moveUci}', headers=self.authHeader)\r\n\r\n\t\tif response.status_code == 200:\r\n\t\t\tlog.debug('Move Successfully Sent')\r\n\r\n\t\telse:\r\n\t\t\tlog.warning(f'Move Unsuccessfully Sent. 
Status Code: {response.status_code}')", "def validate_move(maximum: int, character: list, direction: str) -> list:\n if direction in ('N', 'S'):\n character = move_character(character, maximum, 0, direction)\n elif direction in ('E', 'W'):\n character = move_character(character, maximum, 1, direction)\n return character", "def getMove(self, grid):\n\n cells = grid.getAvailableCells()\n\n while True:\n moveInput = input(\"Enter your move: \")\n \n if re.match(r\"place \\d,\\d\", moveInput) or re.match(r\"erase \\d,\\d\", moveInput):\n move = moveInput.split()\n action = move[0]\n pos = move[1].split(',')\n\n if (action == \"place\" and (int(pos[0]), int(pos[1])) in cells) or (action == \"erase\" and grid.getCellValue((int(pos[0]), int(pos[1]))) != 'T'):\n return [move[0], (int(pos[0]), int(pos[1]))]\n \n elif moveInput == \"restart\":\n return -1\n \n elif moveInput == \"show solution\":\n return 0\n \n print(\"Move not valid\")", "def make_move(self, move):\n if type(move) == str:\n move = int(move)\n\n new_state = SubtractSquareState(not self.p1_turn,\n self.current_total - move)\n return new_state", "def get_next_move(current_player: str, board_values: dict[str, str]) -> str:\n valid_input = False\n while not valid_input:\n # take care of any excess whitespace around the input and converts to lowercase\n raw_input = input(c.NEXT_TURN_MESSAGE.format(player=current_player))\n\n validation_result = get_validation_result(raw_input, board_values)\n\n if not validation_result.is_valid:\n print(validation_result.error_message)\n continue\n\n return validation_result.cleaned_input", "def select_move(self):\r\n while True:\r\n move = random.randint(0,8)\r\n if self.grid[move][-1] == ' ':\r\n return move", "def last_move(self) -> str:\n return self.move_history[-1][0]", "def getFirstMove(self):\n while True:\n try:\n move = tuple(int(str.strip()) for str in raw_input('Choose your first move: ').split(','))\n break\n except ValueError:\n print(\"Input is not a integer.\")\n\n while move not in [(1, 1), (self.grid.width/2, self.grid.height/2), \\\n (self.grid.width/2+1, self.grid.height/2+1), (self.grid.width, self.grid.height)]:\n print 'First move is not valid.'\n move = tuple(int(str.strip()) for str in raw_input('Choose your first move: ').split(','))\n return move", "def human_move(self):\n move = -1\n while move < 1 or move > self.BOARD.COLUMNS:\n try:\n move = input(\"{}: Choose a column>>> \".format(self.NAME))\n\n for i in self.QUIT:\n if str(move) == i:\n return None\n\n move = int(move)\n\n except KeyboardInterrupt:\n exit(0)\n except ValueError:\n pass\n if self.PIECE_COUNT <= 0:\n # cannot do anything\n self.STATE == Spectator.State.INACTIVE\n return None\n else:\n return move", "def make_move(state: str, section_num: int, move: str) -> str:\n if move == wf.CHECK:\n check_result = wf.check_section(state, section_num)\n if check_result:\n print('The section is correct')\n else:\n print('The section is incorrect')\n else:\n state = wf.change_state(state, section_num, move) \n return state", "def get_move_type(clicked_tile_position, blank_position):\n move_type = None # will hold move type\n\n clicked_row = clicked_tile_position[0] # get clicked row number\n clicked_col = clicked_tile_position[1] # get clicked column number\n\n blank_row = blank_position[0] # get blank row number\n blank_col = blank_position[1] # get blank column number\n\n # check UP or DOWN\n if clicked_row > blank_row and clicked_col == blank_col: # DOWN move\n move_type = 'down'\n elif clicked_row < blank_row and 
clicked_col == blank_col: # UP move\n move_type = 'up'\n \n # check LEFT or RIGHT\n if clicked_col > blank_col and clicked_row == blank_row: # RIGHT move\n move_type = 'right'\n elif clicked_col < blank_col and clicked_row == blank_row: # LEFT move\n move_type = 'left'\n \n return move_type", "def CheckMove(self,move):\n\t\tif(move=='w'):\n\t\t\tif(self.x==0):\n\t\t\t\treturn 0\n\t\t\treturn 1\n\t\telif(move=='s'):\n\t\t\tif(self.x==15):\n\t\t\t\treturn 0\n\t\t\treturn 1\n\t\telif(move=='d'):\n\t\t\tif(self.y==35):\n\t\t\t\treturn 0\n\t\t\treturn 1\n\t\telif(move=='a'):\n\t\t\tif(self.y==0):\n\t\t\t\treturn 0\n\t\t\treturn 1", "def api_make_move(self, move_input):\n return self.board.attempt_move(move_input)", "def make_move(self, board: Board) -> int:\n\n move = input()\n move = int(move)\n\n while move not in board.get_valid_moves():\n print(\"That is not a valid move\")\n move = input()\n move = int(move)\n\n return move", "def move(self, AN_str):\n self._move(*self._AN_to_coords(AN_str))", "def getSecondMove(self, firstMove):\n while True:\n try:\n move = tuple(int(str.strip()) for str in raw_input('Choose your second move: ').split(','))\n break\n except ValueError:\n print(\"Input is not a integer.\")\n\n while len(move) != 2 or abs(move[0]-firstMove[0]) + abs(move[1]-firstMove[1]) != 1:\n print 'Second move is not valid.'\n move = tuple(int(str.strip()) for str in raw_input('Choose your second move: ').split(','))\n return move", "def choose_move(self):\r\n \r\n return None", "def match_direction(self, string):\r\n return Direction.query(\r\n or_(Direction.name == string, Direction.short_name == string)\r\n ).first()", "def from_moves(moves):\n if type(moves) is str or type(moves) is unicode:\n moves = moves.split()\n \n branch = tree\n prev = branch[None]\n for move in moves:\n if branch.has_key(move):\n branch = branch[move]\n if branch.has_key(None):\n prev = branch[None]\n else:\n return prev\n \n return prev", "def move_valid(move):\n return True", "def get_move(self, last_result):\n return self.moves.pop()", "def check_one_move(self):\n count_moves = 0\n one_move = None\n for direction in self.directions:\n if self.valid_move(self.loc, direction):\n count_moves += 1\n one_move = direction\n if count_moves != 1:\n return None\n return one_move", "def find_best_move(state: GameState) -> None:", "def set_board(self, move_string):\n next_side = \"X\"\n for col_string in move_string:\n col = int(col_string)\n if col >= 0 and col <= self.width:\n self.add_move(col, next_side)\n if next_side == \"X\":\n next_side = \"O\"\n else:\n next_side = \"X\"", "def get_move(self, legal):\n move = Directions.STOP\n if (self.WEST_KEY in self.keys) and Directions.WEST in legal:\n move = Directions.WEST\n if (self.EAST_KEY in self.keys) and Directions.EAST in legal:\n move = Directions.EAST\n if (self.NORTH_KEY in self.keys) and Directions.NORTH in legal:\n move = Directions.NORTH\n if (self.SOUTH_KEY in self.keys) and Directions.SOUTH in legal:\n move = Directions.SOUTH\n return move", "def getMove(self, board):\n pass", "def _AN_to_coords(self, move: str):\n\n orig_move = move\n\n extra_info = \"\"\n\n # remove all characters that don't matter when parsing\n for pointless_char in \"x+#\":\n move = move.replace(pointless_char, \"\")\n\n # Handle castling\n if CASTLE_QUEENSIDE in move:\n row = self._get_castling_row()\n return (row, 4), (row, 2), CASTLE_QUEENSIDE\n elif CASTLE_KINGSIDE in move:\n row = self._get_castling_row()\n return (row, 4), (row, 6), CASTLE_KINGSIDE\n\n # Pawn promotion\n if 
move[-2] == \"=\":\n extra_info = move[-1] if self.white_to_move else move[-1].lower()\n move = move[:-2]\n\n # Destination of move, this is the only guaranteed substring in the move\n dest_str = move[-2:]\n dest = State._EAN_coords_to_board_coords(dest_str)\n move = move[:-2]\n\n # Deduce what piece actually made the move, if there is no shown there is no pawn\n # Note in AN pieces are always uppercase and location is lowercase,\n # so this makes it simple to check if we have a piece or a location\n piece = \"P\"\n if move and move[0].isupper():\n piece = move[0]\n move = move[1:]\n if not self.white_to_move:\n piece = piece.lower()\n\n # At this point the only info the move should contain is a hint on where the piece is coming from\n loc_hint = move\n\n possible_moves = self.get_all_moves()\n possible_moves = filter(lambda x: dest_str in x, possible_moves) # Filter to only moves that land on the right destination\n possible_moves = list(filter(lambda x: loc_hint in x[0:2], possible_moves)) # Filter to only moves that match the hint in the algebraic notation\n for possible_move in possible_moves:\n row, col = State._EAN_coords_to_board_coords(possible_move[0:2])\n if self.board[row][col] == piece:\n return (row, col), dest, extra_info\n\n raise ValueError(\"Algebraic notation parsing failed, no valid move found matching the given move \" + orig_move\n + \" with board state\\n\" + str(self))", "def _parse_move(origin, destination, axis):\n # If only one set of coordinates is defined, make sure it's used to move things\n if destination is None:\n destination = origin\n origin = [0, 0]\n\n d = _parse_coordinate(destination)\n o = _parse_coordinate(origin)\n if axis == \"x\":\n d = (d[0], o[1])\n if axis == \"y\":\n d = (o[0], d[1])\n dx, dy = np.array(d) - o\n\n return dx, dy", "def GetMove(self, board):\n move = None\n while True:\n move = input(\"Enter coordinates as XY (e.g. 
21): \")\n if board[Game.GetIndexFromCoords(*move)] == \" \":\n return Game.GetIndexFromCoords(*move)\n else:\n print(\"Space occupied.\")", "def GetNextMove(board, index, teams, mover):\n\tif teams[mover] == 'H':\n\t\twhile True:\n\t\t\tmove = int(input('Tell me your move, {}: '.format(mover)))\n\t\t\tresult = ValidateMove(board, mover, move)\n\t\t\tif result == MoveValidation.Valid:\n\t\t\t\treturn move\n\telse:\n\t\treturn GetComputerMove(board, index, mover)", "def get_current_move(self):\n x_count = self.game_board.count('X')\n o_count = self.game_board.count('O')\n if x_count <= o_count:\n return 'X'\n return 'O'", "def move_simplifier(move_input) -> str:\n short_input = move_input.strip().lower()\n short_input = short_input.replace(\"in rage\", \"\")\n\n for old, new in const.REPLACE.items():\n short_input = short_input.replace(old, new)\n\n # cd works, ewgf doesn't, for some reason\n if short_input[:2].lower() == 'cd' and short_input[:3].lower() != 'cds':\n short_input = short_input.lower().replace('cd', 'fnddf')\n if short_input[:2].lower() == 'wr':\n short_input = short_input.lower().replace('wr', 'fff')\n return short_input", "def decide_next_move(self):\n pass", "def last_move(self):\n if len(self.moves) > 0:\n return self.moves[-1]\n else:\n return None", "def get_next_move(board, player):\n print(\"Player '{}' please enter your next move:\".format(player))\n while True:\n move = input(\"> \")\n if '1' <= move <= '9':\n move = int(move)\n if move-1 in valid_moves(board):\n break\n print(\"That is not a valid move, please try again...\")\n return move", "def move(self):\n move = input(\"Pick one weapon - rock, scissors, paper: \").lower()\n while move not in moves:\n \"\"\"Prints out a message to try again\n when something is mistyped or a weapon that is not\n in the moves variable is typed. Will keep repeating\n until a validated move is played.\"\"\"\n move = input(\"You can only use rock, scissors, paper: \").lower()\n return move", "def move(self, find):\n first = self[0].move_number\n last = self[-1].move_number\n fail = \"Move number {} is not in this game. First is {}, last is {}.\".format(find, first, last)\n if first > find:\n raise PGNGameException(fail)\n\n for m in self:\n if find == m.move_number:\n return m\n\n # We haven't found the move\n raise PGNGameException(fail)", "def get_move_seq(self):\n return self._move_seq", "def next_move(board, player):\n \n move_row = \"move\"\n move_column = \"move\"\n\n while not move_row.isnumeric():\n move_row = input(\"{}, pick row to place your {}. > \".format(player.name, player.char))\n while not move_column.isnumeric(): \n move_column = input(\"Pick column in row {} to place your {}. > \".format(move_row, player.char))\n\n move_row = int(move_row)\n move_column = int(move_column)\n\n move = Move(player, (move_row, move_column))\n \n # Check if move is out of bounds\n if (move_row >= len(board.current_board) or\n move_column >= len(board.current_board)):\n print(\"Move out of bounds. Choose a valid move.\")\n return board\n\n # Check if space is already used\n if board.current_board[move_row][move_column] != \"-\":\n print(\"Spot already played. 
Pick an unused space.\")\n return board\n\n board.last_move = player.name\n board.add_move(move)\n\n return board", "def move(self, m):\n if m not in \"RLUD\":\n raise ValueError(\n (\"Not a legal move: '{}', should be one of \" +\n \"the 'RLUD'.\").format(m))\n if m not in self.legal_moves:\n raise ValueError(\n (\"Not a legal move at this state: '{}', \" +\n \"should be one of the '{}'.\").format(m, self.legal_moves))\n\n posdiff = (0, 0)\n if m == 'L':\n posdiff = (0, 1)\n elif m == 'R':\n posdiff = (0, -1)\n elif m == 'U':\n posdiff = (1, 0)\n elif m == 'D':\n posdiff = (-1, 0)\n\n empty_position = self.get_position(0)\n newpuz = self.swap((empty_position[0] - posdiff[0],\n empty_position[1] - posdiff[1]))\n return newpuz", "def decide_move(self, game_state):\n # Get all possible moves\n valid_pos = game_state.get_valid_positions(game_state.pacs_pos[self.pac_id], 'pac')\n # Get the value of the expression tree for each possible move.\n # Feed the calculator the values of G, P, W, F, M instead of\n # recalculating those values each time we hit them in the tree.\n valid_pos_vals = [ self.tree.root.calc([game_state.G(pos),\n game_state.P(pos),\n game_state.W(pos),\n game_state.F(pos),\n game_state.M(pos, pac_id = self.pac_id)]) \\\n for pos in valid_pos ]\n # Find the index of the highest-valued move\n new_pos_idx = valid_pos_vals.index(max(valid_pos_vals))\n # Set the next move\n self.next_move = valid_pos[new_pos_idx]", "def move_piece(self, move: Move, ignore_check=False):\n # no need to check the values of Position as they are guaranteed to be sane.\n\n # sanity: cant move from a position onto itself\n if move.from_pos.to_str == move.to_pos.to_str: # TODO compare by value, not str\n raise InvalidMoveException('cant move a piece onto itself (%s)' % move.from_pos.to_str())\n\n piece = self.rubric(move.from_pos)\n if piece is None:\n raise Exception(\"assert failed: found empty rubric at: %s\" % move.from_pos.to_str())\n\n # sanity: there must be a piece in the start position:\n if piece.piece_type == PieceType.PLACEHOLDER:\n raise InvalidMoveException('cant move from empty rubric (%s)' % (move.from_pos.to_str()))\n\n # sanity: ensure the move is valid for this turn's color\n if piece.color != self._current_side_color:\n raise InvalidMoveException(\"cant move a piece of this color at this turn\")\n\n # sanity: if capturing, pieces must have different colors\n captured_piece = self.rubric(move.to_pos)\n if captured_piece.piece_type != PieceType.PLACEHOLDER:\n if captured_piece.color == piece.color:\n raise InvalidMoveException('cant capture a piece of the same color (start: %s, end: %s)' %\n (move.from_pos.to_str(), move.to_pos.to_str()))\n\n # handle movement in Check\n king_attackers = self.get_king_attackers()\n if not ignore_check and len(king_attackers) > 0:\n # if the king is under attack (== Check), the only valid moves are those that\n # resolve the situation:\n # a) moving the king to an unattacked position\n # b) capturing the attacker by the king, provided its position is NOT attacked by another piece\n # c) capturing the attacker by another piece\n # d) blocking the check by a rook, queen or bishop: placing them between\n # the king and the attacker - NOTE I will not implement this\n\n # if there are no valid moves, the game is over (Checkmate) - this is handled elsewhere\n\n # determine if the move will resolve the Check:\n # create a copy of the board, run the proposed move (ignore the check)\n # and then determine if we're still in check afterwards\n board_copy = 
self.get_board_copy()\n board_copy.move_piece(move, ignore_check=True)\n if len(board_copy.get_king_attackers()) > 0:\n # the move did not resolve the check, so it wasnt valid:\n raise InvalidMoveException(\"move failed to resolve Check\")\n\n if piece.is_valid_move(move, self._rubrics):\n # handle capture scenario\n if captured_piece.piece_type != PieceType.PLACEHOLDER:\n # remove the captured piece\n self.remove_piece(captured_piece)\n\n # move the piece to its destination\n self.set_rubric(piece, move.to_pos)\n\n # set empty placeholder in the origin rubric\n self.set_rubric(PlaceHolder(Position), move.from_pos)\n\n return True", "def sim_move(self, state, move):\n out = ''\n for val in self.moves[move]:\n out += state[val]\n return out" ]
[ "0.7124746", "0.7086057", "0.6942887", "0.6783822", "0.65344757", "0.6471586", "0.64489573", "0.6421416", "0.63550556", "0.62944734", "0.6270465", "0.6264515", "0.6182051", "0.60790235", "0.60481", "0.60470694", "0.60350966", "0.601529", "0.5998565", "0.5966165", "0.59147376", "0.59063315", "0.5905333", "0.586383", "0.58591", "0.58591", "0.58591", "0.5858188", "0.5857648", "0.5857648", "0.5857648", "0.5853057", "0.5853057", "0.58451694", "0.5840431", "0.58166665", "0.5814909", "0.5809009", "0.5760116", "0.5752958", "0.57496506", "0.5745952", "0.57372034", "0.5729608", "0.5711252", "0.57041496", "0.57020056", "0.5696555", "0.5676597", "0.5668578", "0.56635237", "0.5640932", "0.5628595", "0.5614205", "0.5612068", "0.5611726", "0.56027067", "0.559526", "0.5583394", "0.55745983", "0.5562415", "0.5556834", "0.55488545", "0.55469525", "0.55243146", "0.55208963", "0.55114007", "0.55081797", "0.5500907", "0.5486841", "0.5485712", "0.54752564", "0.5468996", "0.54678077", "0.5460112", "0.54311526", "0.54286385", "0.54246104", "0.5417902", "0.54105586", "0.54046607", "0.54008174", "0.53982115", "0.5388232", "0.53832215", "0.5360191", "0.5357924", "0.5347767", "0.5346547", "0.53431535", "0.53415656", "0.53379536", "0.5335571", "0.5328671", "0.53231215", "0.53209394", "0.5318116", "0.5317376", "0.5314692", "0.53115755" ]
0.65770066
4
Test that the transform function works for binary data
def test_CredL2_1b1_transform_smoketest_transformer_stack_binary(self):
        np.random.seed(2525)
        Xdata = np.random.choice(np.array(['A', 'B', 'C', 'D']), size=(100, 5), p=[0.5, 0.3, 0.1, 0.1])
        orig_columns = ['ColA', 'ColB', 'ColC', 'ColD', 'ColE']
        Xdf = pd.DataFrame(Xdata, columns=orig_columns)
        Container_Xdf = Container(Xdf)
        np.random.seed(2525)
        yData = np.random.choice([0, 1], size=(100, 1), p=[0.9, 0.1]).ravel()
        conv = CredL2_1b1('rgl_C=[0.01,0.1];cmin=4;i2w=1')
        Z = Partition(size=100, folds=5, reps=5)
        Z.set(max_reps=1, max_folds=0)
        for weight in [False, True]:
            if weight:
                Container_Xdf.initialize({'weight': pandas.Series(np.ones(100))})
            out = conv.fit(Container_Xdf, yData, Z)
            self.assertIsInstance(out, CredL2_1b1)
            out = conv.transform(Container_Xdf, yData, Z)
            self.assertIsInstance(out, Container)
            p = {'k': -1, 'r': 0}
            self.assertEqual(out(**p).shape[0], Xdf.shape[0])
            self.assertEqual(out(**p).shape[1], 15)
            self.assertEqual(out.colnames(**p), ['DR_cred_' + i for i in orig_columns] + [
                'DR_cred_ColA_XX_ColB', 'DR_cred_ColA_XX_ColC', 'DR_cred_ColA_XX_ColD',
                'DR_cred_ColA_XX_ColE', 'DR_cred_ColB_XX_ColC', 'DR_cred_ColB_XX_ColD',
                'DR_cred_ColB_XX_ColE', 'DR_cred_ColC_XX_ColD', 'DR_cred_ColC_XX_ColE',
                'DR_cred_ColD_XX_ColE'])
            out = conv.transformer_stack(Container_Xdf, yData, Z)
            self.assertIsInstance(out, Container)
            for p in Z:
                self.assertEqual(out(**p).shape[0], Xdf.shape[0])
                self.assertEqual(out(**p).shape[1], 15)
                self.assertEqual(out.colnames(**p), ['DR_cred_' + i for i in orig_columns] + [
                    'DR_cred_ColA_XX_ColB', 'DR_cred_ColA_XX_ColC', 'DR_cred_ColA_XX_ColD',
                    'DR_cred_ColA_XX_ColE', 'DR_cred_ColB_XX_ColC', 'DR_cred_ColB_XX_ColD',
                    'DR_cred_ColB_XX_ColE', 'DR_cred_ColC_XX_ColD', 'DR_cred_ColC_XX_ColE',
                    'DR_cred_ColD_XX_ColE'])
            if weight:
                self.assertEqual(np.all(out.get('weight') == np.ones(100)), True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_transform(self):\n t = OneHotEncode(3)\n assert numpy.all(t.transform(0) == numpy.array((1.0, 0.0, 0.0)))\n assert numpy.all(t.transform(1) == numpy.array((0.0, 1.0, 0.0)))\n assert numpy.all(t.transform(2) == numpy.array((0.0, 0.0, 1.0)))\n with pytest.raises(AssertionError):\n t.transform(4)\n with pytest.raises(AssertionError):\n t.transform(-1)\n with pytest.raises(AssertionError):\n t.transform(2.2)\n assert numpy.all(\n t.transform([[2, 1], [0, 2]])\n == numpy.array(\n [[(0.0, 0.0, 1.0), (0.0, 1.0, 0.0)], [(1.0, 0.0, 0.0), (0.0, 0.0, 1.0)]]\n )\n )\n\n t = OneHotEncode(2)\n assert t.transform(0) == 0.0\n assert t.transform(1) == 1.0\n with pytest.raises(TypeError):\n t.transform(\"ipsi\")\n assert numpy.all(\n t.transform([[1, 1], [0, 1]]) == numpy.array([[1.0, 1.0], [0.0, 1.0]])\n )\n\n # for the crazy enough\n t = OneHotEncode(1)\n assert t.transform(0) == 0.0\n with pytest.raises(TypeError):\n t.transform(\"ipsi\")\n assert numpy.all(t.transform([[0, 0], [0, 0]]) == [[0.0, 0.0], [0.0, 0.0]])", "def test_transform(self):\n t = Identity()\n assert t.transform(\"yo\") == \"yo\"", "def test_convert_logical():", "def transform(self, data):", "def test_inverse_transform(self):", "def test_dummy(self, data):\r\n source, expected = data\r\n result = self.converter.convert(source)\r\n self.assertUnicodeEquals(result, expected)", "def test_convert():", "def test_transform(self):\n t = Quantize()\n assert t.transform(8.6) == 9\n assert t.transform(8.4) == 8\n assert t.transform(5.3) == 5\n assert numpy.all(t.transform([8.6, 5.3]) == numpy.array([9, 5], dtype=int))", "def test_transform(self):\n t = Reverse(Quantize())\n assert t.transform(9) == 9.0\n assert t.transform(5) == 5.0\n assert numpy.all(t.transform([9, 5]) == numpy.array([9.0, 5.0], dtype=float))", "def test__transform_continuous(self):", "def transform():", "def test_fundamental_transforms():\n\n transforms = get_fundamental_transforms(inp_size=(100, 50), pixel_mean=[0.5], pixel_std=[0.3])\n\n try:\n inp_img = Image.fromarray(np.loadtxt(\"proj6_code/proj6_unit_tests/test_data/transform_inp.txt\", dtype=\"uint8\"))\n output_img = transforms(inp_img)\n expected_output = torch.load(\"proj6_code/proj6_unit_tests/test_data/transform_out.pt\")\n\n except:\n inp_img = Image.fromarray(\n np.loadtxt(\"../proj6_code/proj6_unit_tests/test_data/transform_inp.txt\", dtype=\"uint8\")\n )\n output_img = transforms(inp_img)\n expected_output = torch.load(\"../proj6_code/proj6_unit_tests/test_data/transform_out.pt\")\n\n assert torch.allclose(expected_output, output_img)", "def test_transform(self):\n t = Enumerate([2, \"asfa\", \"ipsi\"])\n assert t.transform(2) == 0\n assert t.transform(\"asfa\") == 1\n assert t.transform(\"ipsi\") == 2\n with pytest.raises(KeyError):\n t.transform(\"aafdasfa\")\n assert numpy.all(\n t.transform([[\"ipsi\", \"asfa\"], [2, \"ipsi\"]]) == [[2, 1], [0, 2]]\n )\n\n # for the crazy enough\n t = Enumerate([2])\n assert t.transform(2) == 0\n with pytest.raises(KeyError):\n t.transform(\"aafdasfa\")\n assert numpy.all(t.transform([[2, 2], [2, 2]]) == [[0, 0], [0, 0]])", "def test_converter(self, data):\r\n source, expected = data\r\n result = UpcaseConverter().convert(source)\r\n self.assertEquals(result, expected)", "def _transform(self, dataset):\n raise NotImplementedError()", "def transform(self, data, input_content_type, output_content_type):\n return self.transform_fn(data, input_content_type, output_content_type)", "def test_composed_transforms(config, dummy_input):\n cfg = config\n transforms = 
compose(cfg.dataset.transforms)\n\n # H, W, C\n image, label = dummy_input(image_size=(512, 512, 3),\n label_size=(512, 512, 1))\n _image, _label = transforms(image, label, dtypes=[torch.float, torch.long], elastic_deformation_orders=[3, 0])\n assert _image.dtype == torch.float\n assert _image.size() == (256, 256, image.shape[2])\n assert _label.dtype == torch.long\n assert _label.size() == (256, 256, label.shape[2])\n\n # Test feeding only image\n _image = transforms(image, dtypes=[torch.float])\n assert _image.dtype == torch.float\n assert _image.size() == (256, 256, image.shape[2])", "def image_transform(im_bytes):\n img = [mx.image.imdecode(bytes.fromhex(im.lstrip('0x'))) for im in im_bytes]\n out = gcv.data.transforms.presets.yolo.transform_test(img)\n return out[0]", "def test_base64_to_bytes(self):\n @converters.wrap\n def inner_test(param: bytes):\n \"\"\"Make sure the parameter was converted correctly.\"\"\"\n self.assertEqual(param, b'test')\n inner_test(param='dGVzdA==')", "def test__inverse_transform_continuous(self):", "def testWriteBinaryData(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteBinaryData(b'Binary data')\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = b'Binary data'\n self.assertEqual(output_data, expected_output_data)", "def test_transform(self):\n result = transform((1, 2) ,2, 2)\n self.assertEqual(result, (4 * PIXEL, 3 * PIXEL))", "def test_transform(self):\n X = self.generate_X()\n task = mmRDTR()\n task.fit(X)\n res = task.transform(X)\n # check if Instance\n self.assertIsInstance(res,Container)\n # check if names\n self.assertEqual(np.all(res.colnames()==[str(i) for i in xrange(len(res.colnames()))]),True)\n # check if values as within the range expected\n self.assertEqual(np.all(res().min()>=-1),True)\n self.assertEqual(np.all(res().max()<=1),True)\n for i in range(len(res.colnames())):\n self.assertEqual(round(res()[:,i].mean(),8),0)\n # check with new data\n Y = self.generate_X()\n res = task.transform(Y)\n self.assertEqual(np.all(res.colnames()==[str(i) for i in xrange(len(res.colnames()))]),True)\n self.assertEqual(np.all(res().min()>=-1),True)\n self.assertEqual(np.all(res().max()<=1),True)", "def test_transform(self):\n data = pd.DataFrame({\n \"x\": np.array([0.1, 0.3, 0.5]),\n \"y\": np.array([\"yes\", \"yes\", \"no\"])\n })\n\n transformer = DataTransformer()\n transformer._column_transform_info_list = [\n ColumnTransformInfo(\n column_name=\"x\", column_type=\"continuous\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(1, 'tanh'), SpanInfo(3, 'softmax')],\n output_dimensions=1 + 3\n ),\n ColumnTransformInfo(\n column_name=\"y\", column_type=\"discrete\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(2, 'softmax')],\n output_dimensions=2\n )\n ]\n\n transformer._transform_continuous = Mock()\n selected_normalized_value = np.array([[0.1], [0.3], [0.5]])\n selected_component_onehot = np.array([\n [1, 0, 0],\n [1, 0, 0],\n [1, 0, 0],\n ])\n return_value = (selected_normalized_value, selected_component_onehot)\n transformer._transform_continuous.return_value = return_value\n\n transformer._transform_discrete = Mock()\n transformer._transform_discrete.return_value = [np.array([\n [0, 1],\n [0, 1],\n [1, 0],\n ])]\n\n result = transformer.transform(data)\n transformer._transform_continuous.assert_called_once()\n transformer._transform_discrete.assert_called_once()\n\n expected = np.array([\n [0.1, 1, 0, 0, 0, 
1],\n [0.3, 1, 0, 0, 0, 1],\n [0.5, 1, 0, 0, 1, 0],\n ])\n\n assert result.shape == (3, 6)\n assert (result[:, 0] == expected[:, 0]).all(), \"continuous-cdf\"\n assert (result[:, 1:4] == expected[:, 1:4]).all(), \"continuous-softmax\"\n assert (result[:, 4:6] == expected[:, 4:6]).all(), \"discrete\"", "def test_transform_image_not_upscaled(self):\n self.expect_open_image('SomeBlobKey', (400, 300))\n self.expect_encode_image('SomeImageInJpeg')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', ''))\n self.mox.VerifyAll()", "def test_from_binary_bits_style(self):\n self.assertResult('[0001]', b4('[0001]'))", "def test_from_binary_bits_style(self):\n self.assertResult('[0001 0000]', b8('[0001 0000]'))", "def test_decode():", "def transform():\n pass", "def test_transform(self):\n t = Linearize()\n assert t.transform(numpy.e) == numpy.log(numpy.e)\n t.transform(0)", "def test_binops(self):", "def test_transform(self):\n t = Compose([Enumerate([2, \"asfa\", \"ipsi\"]), OneHotEncode(3)], \"categorical\")\n assert numpy.all(t.transform(2) == numpy.array((1.0, 0.0, 0.0)))\n assert numpy.all(t.transform(\"asfa\") == numpy.array((0.0, 1.0, 0.0)))\n assert numpy.all(t.transform(\"ipsi\") == numpy.array((0.0, 0.0, 1.0)))\n\n with pytest.raises(KeyError):\n t.transform(\"aafdasfa\")\n\n assert numpy.all(\n t.transform([[\"ipsi\", \"asfa\"], [2, \"ipsi\"]])\n == numpy.array(\n [[(0.0, 0.0, 1.0), (0.0, 1.0, 0.0)], [(1.0, 0.0, 0.0), (0.0, 0.0, 1.0)]]\n )\n )\n\n t = Compose([Enumerate([2, \"asfa\"]), OneHotEncode(2)], \"categorical\")\n assert t.transform(2) == 0.0\n assert t.transform(\"asfa\") == 1.0\n with pytest.raises(KeyError):\n t.transform(\"ipsi\")\n assert numpy.all(\n t.transform([[\"asfa\", \"asfa\"], [2, \"asfa\"]])\n == numpy.array([[1.0, 1.0], [0.0, 1.0]])\n )\n\n # for the crazy enough\n t = Compose([Enumerate([2]), OneHotEncode(1)], \"categorical\")\n assert t.transform(2) == 0.0\n with pytest.raises(KeyError):\n t.transform(\"ipsi\")\n assert numpy.all(t.transform([[2, 2], [2, 2]]) == [[0, 0], [0, 0]])", "def test_encode():", "def test_transform_default(self):\n result = transform((1, 2))\n self.assertEqual(result, (2 * PIXEL, 1 * PIXEL))", "def test_binary_helpers(self, number, expected):\n self.assertEqual(positional.from_binary(expected), number)\n self.assertEqual(positional.to_binary(number), expected)\n self.assertEqual(positional.to_binary(str(number)), expected)", "def test_FiducialTransform1(self):\n\n self.delayDisplay(\"Starting the test\")\n #\n # first, get some data\n #\n import urllib\n downloads = (\n ('http://slicer.kitware.com/midas3/download?items=5767', 'FA.nrrd', slicer.util.loadVolume),\n )\n\n for url,name,loader in downloads:\n filePath = slicer.app.temporaryPath + '/' + name\n if not os.path.exists(filePath) or os.stat(filePath).st_size == 0:\n logging.info('Requesting download %s from %s...\\n' % (name, url))\n urllib.urlretrieve(url, filePath)\n if loader:\n logging.info('Loading %s...' 
% (name,))\n loader(filePath)\n self.delayDisplay('Finished with download and loading')\n\n volumeNode = slicer.util.getNode(pattern=\"FA\")\n logic = FiducialTransformLogic()\n self.assertIsNotNone( logic.hasImageData(volumeNode) )\n self.delayDisplay('Test passed!')", "def test_on_merlin_image_binary(self):\n im = diffread(TEST_MIB)\n self.assertEqual(im.shape, (256, 256))\n self.assertEqual(im.dtype, np.dtype(\">u2\"))", "def test_file_bin_readwrite(self):\n FileWriter(self.binary_path).write_bin(self.binary_string)\n bin_data = FileReader(self.binary_path).read_bin()\n self.assertEqual(bin_data, self.binary_string)", "def transform(self, x):", "def test_to_byte_array(self):\n with Image.open(self.subject) as im:\n image = im.convert(\"RGB\")\n\n byte_array = image_helper.to_byte_array(image)\n\n self.assertGreater(len(byte_array), 0)", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def transform(self, X):\n ...", "def test_bytes_to_bytes(self):\n @converters.wrap\n def inner_test(param: bytes):\n \"\"\"Make sure the parameter was converted correctly.\"\"\"\n self.assertEqual(param, b'Test bytes.')\n inner_test(param=b'Test bytes.')", "def test_transform_image_no_resize_tiff(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='TIFF')\n self.expect_resize(blob_image._DEFAULT_SERVING_SIZE)\n # TIFF is not servable, so we transcode to JPEG.\n self.expect_encode_image('SomeImageInJpeg')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', ''))\n self.mox.VerifyAll()", "def test_random_transform():\n # given\n train = pd.read_csv('source/train.csv')\n train['labels'] = train['labels'].map(ast.literal_eval)\n image_path = os.path.join('source', train.iloc[0].path)\n all_labels = train.iloc[0]['labels']\n for label in all_labels:\n if label['class'] == 'whiteboard':\n break\n xn = [int(float(x)) for x in label['xn'].split(';')][:4]\n yn = [int(float(y)) for y in label['yn'].split(';')][:4]\n labels = np.zeros((4, 2))\n for i in range(4):\n labels[i, 0] = xn[i]\n labels[i, 1] = yn[i]\n img = cv2.imread(image_path)\n kw = dict(rotation_range=15,\n height_shift_range=0.2,\n width_shift_range=0.2,\n shear_range=0.3,\n channel_shift_range=0.2,\n horizontal_flip=True,\n vertical_flip=True,\n dim_ordering='tf',\n seed=1313)\n # when\n rimg, rlabels = image_generator.random_transform(img, labels, **kw)\n\n # then just assert transformation isn't changed much\n assert MultiPoint([[224.91875347, 58.05657097],\n [673.57648317, 189.27244333],\n [544.23308452, 381.12743459],\n [70.73339963, 312.7359806]]\n ).equals_exact(rlabels, 5)", "def transform(self):", "def test_bin_data_type(self):\n\n with pytest.raises(TypeError) as verr:\n avg.median2D(self.testInst, ['1', 'a', '23', '10'], 'longitude',\n ['0', 'd', '24', 'c'], 'mlt',\n ['dummy1', 'dummy2', 'dummy3'], auto_bin=False)\n\n estr = \"Cannot cast array data from\"\n assert str(verr).find(estr) >= 0\n\n return", "def test_transform_image_original_size(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200))\n self.expect_encode_image('SomeImageInJpeg')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's0'))\n self.mox.VerifyAll()", "def transform(self, data):\n return self.fit_transform(data, fitting=False)", "def 
test_unfitted_inverse_transform():\n df = cudf.Series(np.random.choice(10, (10,)))\n le = LabelEncoder()\n assert not le._fitted\n\n with pytest.raises(NotFittedError):\n le.transform(df)", "def _check_binary_data(data):\n if not np.array_equal(data, data.astype(bool)):\n raise ValueError(\n \"This mechanism works with binary data, \"\n \"but input is not binary.\")", "def _get_binary(raw_data):\n #try:\n # gray_data = _skimage.color.rgb2gray(raw_data)\n #except:\n # gray_data = raw_data\n\n try:\n thresh = _skimage.filters.threshold_otsu(raw_data[:, :, 0])\n binary = raw_data[:, :, 0] > thresh\n #binary = binary[:, :, 0]\n except ValueError:\n print('valueerror')\n binary = _np.ones(raw_data.shape).astype('bool')\n\n return binary", "def test_data_augmentation_transforms():\n\n transforms_list = get_data_augmentation_transforms(inp_size=(100, 50), pixel_mean=[0.5], pixel_std=[0.3]).transforms\n\n assert len(transforms_list) > 3\n\n # last 3 should be fundamental\n augmentation_transforms = Compose(transforms_list[:-3])\n\n try:\n inp_img = Image.fromarray(np.loadtxt(\"proj6_code/proj6_unit_tests/test_data/transform_inp.txt\", dtype=\"uint8\"))\n\n except:\n inp_img = Image.fromarray(\n np.loadtxt(\"../proj6_code/proj6_unit_tests/test_data/transform_inp.txt\", dtype=\"uint8\")\n )\n augmented_img = augmentation_transforms(inp_img)\n assert isinstance(augmented_img, type(inp_img))\n assert not np.array_equal(augmented_img, inp_img)", "def test_interface(transform, example_tsds: TSDataset):\n start_columnns = example_tsds.columns\n example_tsds.fit_transform(transforms=[transform])\n assert np.all(start_columnns == example_tsds.columns)", "def test_encode_decode(self):\n assert self._test == pybinn.loads(pybinn.dumps(self._test))", "def transform(self, dataset: NumpyOrPandas) -> NumpyDataset:\n # checks here\n super().transform(dataset)\n # convert to accepted dtype and get attributes\n dataset = dataset.to_pandas()\n df = dataset.data\n\n # transform\n roles = NumericRole()\n outputs = []\n\n for n, conlumn_name in enumerate(df.columns):\n if self.cache_dir is not None:\n full_hash = get_textarr_hash(df[conlumn_name]) + get_textarr_hash(self.dicts[conlumn_name][\"feats\"])\n fname = os.path.join(self.cache_dir, full_hash + \".pkl\")\n\n if os.path.exists(fname):\n logger.info3(f\"Load saved dataset for {conlumn_name}\")\n\n with open(fname, \"rb\") as f:\n new_arr = pickle.load(f)\n\n else:\n new_arr = self.dicts[conlumn_name][\"transformer\"].transform(df[conlumn_name])\n with open(fname, \"wb\") as f:\n pickle.dump(new_arr, f)\n else:\n new_arr = self.dicts[conlumn_name][\"transformer\"].transform(df[conlumn_name])\n\n output = dataset.empty().to_numpy()\n output.set_data(new_arr, self.dicts[conlumn_name][\"feats\"], roles)\n outputs.append(output)\n logger.info3(f\"Feature {conlumn_name} transformed\")\n # create resulted\n return dataset.empty().to_numpy().concat(outputs)", "def test_get_file_binary_content(self):\n content = image_helper.get_file_binary_content(self.subject)\n\n self.assertGreater(len(content), 0)\n\n with open(self.subject, \"rb\") as f:\n original_content = f.read()\n\n self.assertEqual(content, original_content)", "def transform(self, x):\n return self._test_transform(x)", "def test_class_encoder_numeric():\n\n encoder = ClassEncoder()\n encoder.fit([1, 2, 2, 6])\n\n result_transf = encoder.transform([1, 1, 2, 6, encoder.unk_label])\n result_inv = encoder.inverse_transform([0, 0, 1, 2, -1])\n\n assert np.array_equal(encoder.classes_, np.array([1, 2, 6, 
encoder.unk_label]))\n assert encoder.classes_dict == {encoder.unk_label: -1, 1: 0, 2: 1, 6: 2}\n assert np.array_equal(result_transf, np.array([0, 0, 1, 2, -1]))\n assert np.array_equal(result_inv, np.array([1, 1, 2, 6, encoder.unk_label]))", "def test_compress_2_idenctical_char(self):\n text = 'aa'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'aa')\n self.assertEqual(actual, expected)", "def test_transform(self):\n t = Precision(precision=4)\n assert t.transform(8.654321098) == 8.654\n assert t.transform(0.000123456789) == 0.0001235\n assert numpy.all(\n t.transform([8.654321098, 0.000123456789])\n == numpy.array([8.654, 0.0001235], dtype=float)\n )", "def test_consitency_convert(self):\n name = os.path.basename(self.cbf_filename)\n obj = fabio.open(self.cbf_filename)\n new = obj.convert(\"cbf\")\n new.write(os.path.join(self.tempdir, name))\n other = fabio.open(os.path.join(self.tempdir, name))\n self.assertEqual(abs(obj.data - other.data).max(), 0, \"data are the same\")\n for key in obj.header:\n if key in[ \"filename\", \"X-Binary-Size-Padding\"]:\n continue\n self.assertTrue(key in other.header, \"Key %s is in header\" % key)\n self.assertEqual(obj.header[key], other.header[key], \"value are the same for key %s [%s|%s]\" % (key, obj.header[key], other.header[key]))", "def transform(self, X):\n return np.vectorize(lambda x: self.encode.get(x, 0))(X)", "def test_transcoder(self, raw, value):\n assert DPTSceneNumber.to_knx(value) == DPTArray(raw)\n assert DPTSceneNumber.from_knx(DPTArray(raw)) == value", "def _convert_to_binary(data: List[int], mode: DATA_MODE, version: int, ec: EC_LEVEL) -> str:\n\n binary_data = \"\"\n\n # Group data and convert to binary\n if mode == DATA_MODE.Numeric:\n # Group data in groups of 3 digits\n for i in range(0, len(data), 3):\n group = int(\"\".join([str(i) for i in data[i:i+3]])) # converts the list of digits to an int\n length = len(str(group))\n if length == 3:\n # 3 digits\n binary_data += \"{0:010b}\".format(group)\n elif length == 2:\n # 2 digits\n binary_data += \"{0:07b}\".format(group)\n else:\n # 1 digits\n binary_data += \"{0:04b}\".format(group)\n elif mode == DATA_MODE.Alphanumeric:\n # Group data in groups of 2 chars\n for i in range(0, len(data), 2):\n group = data[i:i+2]\n if len(group) == 2:\n # normal group of 2 chars, convert from pseudo-base 45 to base 10 to binary\n number = group[0] * 45 + group[1]\n binary_data += \"{0:011b}\".format(number)\n else:\n # group of 1 digit, use 6-bit binary\n binary_data += \"{0:06b}\".format(group[0])\n elif mode == DATA_MODE.Byte:\n # Characters already in 0-255 range, just convert to 8-bit binary\n for char in data:\n binary_data += \"{0:08b}\".format(char)\n\n # add CCI\n cci = \"{0:0b}\".format(len(data))\n cci = \"0\" * (CCI_LENGTH.get(mode)[version] - len(cci)) + cci # pad start with 0s to get required CCI length\n binary_data = cci + binary_data\n\n # add data mode to start of string\n binary_data = \"{0:04b}\".format(mode.value) + binary_data\n\n # add terminator byte (4 bits or less if less than 4 bits available based on version) at end of string\n # accessing specific value in dictionary by combining version and EC level (1 and \"M\" -> \"1M\")\n empty_space = STREAM_LENGTH.get(str(version) + ec.name) - len(binary_data)\n pad_length = 4 if empty_space > 4 else empty_space\n binary_data += \"0\" * pad_length\n\n # add bit-padding (fill up last codeword)\n missing = 8 - (len(binary_data) % 8)\n missing = 0 if missing == 8 else missing # if 8 bits are \"missing\", 
already multiple of 8 so no padding needed\n binary_data += \"0\" * missing\n\n # add byte-padding (fill string up to full size)\n words = [\"11101100\", \"00010001\"]\n i = 0\n while len(binary_data) < STREAM_LENGTH.get(str(version) + ec.name):\n binary_data += words[i]\n i ^= 1 # xoring i with 1 to alternate between first and second padding word\n\n return binary_data", "def test_no_conversion(pdf, config, hs, exported):\n get_info = partial(\n _get_bti_info,\n rotation_x=0.0,\n translation=(0.0, 0.02, 0.11),\n convert=False,\n ecg_ch=\"E31\",\n eog_ch=(\"E63\", \"E64\"),\n rename_channels=False,\n sort_by_ch_name=False,\n )\n\n raw_info, _ = get_info(pdf, config, hs, convert=False)\n raw_info_con = read_raw_bti(\n pdf_fname=pdf,\n config_fname=config,\n head_shape_fname=hs,\n convert=True,\n preload=False,\n ).info\n\n pick_info(\n raw_info_con, pick_types(raw_info_con, meg=True, ref_meg=True), copy=False\n )\n pick_info(raw_info, pick_types(raw_info, meg=True, ref_meg=True), copy=False)\n bti_info = _read_bti_header(pdf, config)\n dev_ctf_t = _correct_trans(bti_info[\"bti_transform\"][0])\n assert_array_equal(dev_ctf_t, raw_info[\"dev_ctf_t\"][\"trans\"])\n assert_array_equal(raw_info[\"dev_head_t\"][\"trans\"], np.eye(4))\n assert_array_equal(raw_info[\"ctf_head_t\"][\"trans\"], np.eye(4))\n\n nasion, lpa, rpa, hpi, dig_points = _read_head_shape(hs)\n dig, t, _ = _make_bti_dig_points(\n nasion, lpa, rpa, hpi, dig_points, convert=False, use_hpi=False\n )\n\n assert_array_equal(t[\"trans\"], np.eye(4))\n\n for ii, (old, new, con) in enumerate(\n zip(dig, raw_info[\"dig\"], raw_info_con[\"dig\"])\n ):\n assert_equal(old[\"ident\"], new[\"ident\"])\n assert_array_equal(old[\"r\"], new[\"r\"])\n assert not np.allclose(old[\"r\"], con[\"r\"])\n\n if ii > 10:\n break\n\n ch_map = {ch[\"chan_label\"]: ch[\"loc\"] for ch in bti_info[\"chs\"]}\n\n for ii, ch_label in enumerate(raw_info[\"ch_names\"]):\n if not ch_label.startswith(\"A\"):\n continue\n t1 = ch_map[ch_label] # correction already performed in bti_info\n t2 = raw_info[\"chs\"][ii][\"loc\"]\n t3 = raw_info_con[\"chs\"][ii][\"loc\"]\n assert_allclose(t1, t2, atol=1e-15)\n assert not np.allclose(t1, t3)\n idx_a = raw_info_con[\"ch_names\"].index(\"MEG 001\")\n idx_b = raw_info[\"ch_names\"].index(\"A22\")\n assert_equal(raw_info_con[\"chs\"][idx_a][\"coord_frame\"], FIFF.FIFFV_COORD_DEVICE)\n assert_equal(\n raw_info[\"chs\"][idx_b][\"coord_frame\"], FIFF.FIFFV_MNE_COORD_4D_HEAD\n )", "def transform_data(dataset_train, dataset_test):\n\n #transform dataset using fit_transform\n dataset_train = scaler.fit_transform(dataset_train)\n\n #transform dataset using transform (does not influence teaching)\n dataset_test = scaler.transform(dataset_test)\n\n return dataset_train, dataset_test", "def test_data_format_binary(sdc_builder, sdc_executor, azure):\n container_name = get_random_string(string.ascii_lowercase, 10)\n event_hub_name = get_random_string(string.ascii_lowercase, 10)\n\n MESSAGES = [b'message 1', b'message 2']\n EXPECTED_OUTPUT = \"[b'message 1', b'message 2']\"\n\n builder = sdc_builder.get_pipeline_builder()\n\n azure_iot_event_hub_consumer = _get_azure_eventhub_consumer_stage(builder, container_name, event_hub_name, 'BINARY')\n\n wiretap = builder.add_wiretap()\n\n azure_iot_event_hub_consumer >> wiretap.destination\n\n consumer_origin_pipeline = builder.build().configure_for_environment(azure)\n sdc_executor.add_pipeline(consumer_origin_pipeline)\n\n create_blob_container(azure, container_name)\n\n try:\n eh_service_bus = 
azure.event_hubs.service_bus\n\n logger.info('Creating event hub %s under event hub namespace %s', event_hub_name, azure.event_hubs.namespace)\n assert eh_service_bus.create_event_hub(event_hub_name)\n\n eh_service_bus.send_event(event_hub_name, MESSAGES)\n\n sdc_executor.start_pipeline(consumer_origin_pipeline)\n sdc_executor.wait_for_pipeline_metric(consumer_origin_pipeline, 'input_record_count', 1, timeout_sec=120)\n sdc_executor.stop_pipeline(consumer_origin_pipeline)\n\n results = wiretap.output_records[0].field.value.decode()\n assert results == EXPECTED_OUTPUT\n finally:\n try:\n if sdc_executor.get_pipeline_status(consumer_origin_pipeline).response.json().get('status') == 'RUNNING':\n sdc_executor.stop_pipeline(consumer_origin_pipeline)\n except Exception as err:\n logger.error('Could not stop pipeline. Reason found: %s', err)\n\n try:\n logger.info('Deleting event hub %s under event hub namespace %s', event_hub_name, azure.event_hubs.namespace)\n event_hub_exists = True\n while event_hub_exists:\n eh_service_bus.delete_event_hub(event_hub_name)\n try:\n eh_service_bus.get_event_hub(event_hub_name)\n except Exception:\n event_hub_exists = False\n except Exception as err:\n logger.error('Failure deleting event hub %s. Reason found: %s', event_hub_name, err)\n\n try:\n logger.info('Deleting container %s on storage account %s', container_name, azure.storage.account_name)\n azure.storage.delete_blob_container(container_name)\n except Exception as err:\n logger.error('Failure deleting container %s. Reason found: %s', container_name, err)", "def test_make_binary_and_fp(self):\n output_mask = boundary_mask(df=os.path.join(data_dir, 'sample.csv'),\n geom_col=\"PolygonWKT_Pix\")\n truth_mask = skimage.io.imread(os.path.join(data_dir,\n 'sample_b_mask_inner.tif'))\n\n assert np.array_equal(output_mask, truth_mask)", "def test_example(self, example_dataset, expected_result):\n\n transformer = PreprocessFeatures()\n result = transformer.fit_transform(example_dataset)\n\n assert (result == expected_result).all()", "def test_data(self):\n\n self.assertIsInstance(self.image.data, np.ndarray)", "def test_right_binary_to_bytes(self):\n binary_string = '00000101000110100010100011010010'\n byte_string = utils.binary_to_bytes(binary_string, little_endian=False)\n expected_byte_string = b'\\x05\\x1A\\x28\\xD2'\n self.assertEqual(byte_string, expected_byte_string)", "def test_binary_reg_fn():\n inputs = Variable(torch.Tensor([0, .5, 1]))\n outputs = binary_reg_fn(inputs).data\n expected = torch.Tensor([0.0029409, 1, 0.0029409])\n assert is_close(outputs, expected).all(), \\\n \"{} != {}\".format(outputs.tolist(), expected.tolist())", "def _transform_data(self, *args, **kwargs) -> None:\n raise NotImplementedError", "def test_reshape_plain_text_conversion(self):\n\n class Net(torch.nn.Module):\n def __init__(self):\n super().__init__()\n self.fc1 = torch.nn.Linear(4, 4)\n\n def forward(self, x):\n # (1, 4) is stored in a constant module\n out = x.reshape(1, 4)\n out = self.fc1(out)\n return out\n\n model = Net()\n x = torch.ones(2, 2)\n x_enc = crypten.cryptensor(x)\n y = model(x)\n model_crypten = onnx_converter.from_pytorch(model, torch.empty(x.shape))\n\n model_crypten.encrypt()\n y_enc = model_crypten(x_enc)\n self.assertTrue(y_enc.shape == y.shape)", "def test_compress_1_char(self):\n text = 'a'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'a')\n self.assertEqual(actual, expected)", "def Transform(self, record):\n pass", "def fit_transform(self, data):\n return 
self.transform(data)", "def test_serialises_and_deserialises_hs00_message_correctly_when_int_input_is_not_ndarray(\n self,\n ):\n original_hist = {\n \"source\": \"some_source\",\n \"timestamp\": 123456,\n \"current_shape\": [2, 5],\n \"dim_metadata\": [\n {\n \"length\": 2,\n \"unit\": \"b\",\n \"label\": \"y\",\n \"bin_boundaries\": [10, 11, 12],\n },\n {\n \"length\": 5,\n \"unit\": \"m\",\n \"label\": \"x\",\n \"bin_boundaries\": [0, 1, 2, 3, 4, 5],\n },\n ],\n \"last_metadata_timestamp\": 123456,\n \"data\": [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]],\n \"errors\": [[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]],\n \"info\": \"info_string\",\n }\n buf = serialise_hs00(original_hist)\n\n hist = deserialise_hs00(buf)\n assert hist[\"source\"] == original_hist[\"source\"]\n assert hist[\"timestamp\"] == original_hist[\"timestamp\"]\n assert hist[\"current_shape\"] == original_hist[\"current_shape\"]\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][0], original_hist[\"dim_metadata\"][0]\n )\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][1], original_hist[\"dim_metadata\"][1]\n )\n assert np.array_equal(hist[\"data\"], original_hist[\"data\"])\n assert np.array_equal(hist[\"errors\"], original_hist[\"errors\"])\n assert hist[\"info\"] == original_hist[\"info\"]\n assert (\n hist[\"last_metadata_timestamp\"] == original_hist[\"last_metadata_timestamp\"]\n )", "def testEncodeDecodeEmpty(self):\n empty_case = ''\n encoded_result = ASCIITransportFormat.encode_data(empty_case)\n decoded_result = ASCIITransportFormat.decode_data(encoded_result)\n self.assertEqual(decoded_result, empty_case)", "def test_decode(self):\n pass # TODO(tlarsen)", "def test_transform_data(self):\n # assemble\n input_data = (\n self.spark\n .read\n .parquet(self.test_data_path + 'employees'))\n\n expected_data = (\n self.spark\n .read\n .parquet(self.test_data_path + 'employees_report'))\n\n expected_cols = len(expected_data.columns)\n expected_rows = expected_data.count()\n expected_avg_steps = (\n expected_data\n .agg(mean('steps_to_desk').alias('avg_steps_to_desk'))\n .collect()[0]\n ['avg_steps_to_desk'])\n\n # act\n data_transformed = transform_data(input_data, 21)\n\n cols = len(expected_data.columns)\n rows = expected_data.count()\n avg_steps = (\n expected_data\n .agg(mean('steps_to_desk').alias('avg_steps_to_desk'))\n .collect()[0]\n ['avg_steps_to_desk'])\n\n # assert\n self.assertEqual(expected_cols, cols)\n self.assertEqual(expected_rows, rows)\n self.assertEqual(expected_avg_steps, avg_steps)\n self.assertTrue([col in expected_data.columns\n for col in data_transformed.columns])", "def test_process_mono(self):\n self.encoder = StreamEncoder(**self.default_kwargs)\n test_samples = np.random.rand(DEFAULT_BLOCKSIZE, 1).astype('int16')\n self.encoder.process(test_samples)\n self.encoder.finish()\n self.assertTrue(self.write_callback_called)", "def test_byte_array_conversion():\n ob = ConversionTest()\n\n assert ob.ByteArrayField is None\n\n ob.ByteArrayField = [0, 1, 2, 3, 4]\n array = ob.ByteArrayField\n assert len(array) == 5\n assert array[0] == 0\n assert array[4] == 4\n\n value = b\"testing\"\n ob.ByteArrayField = value\n array = ob.ByteArrayField\n for i, _ in enumerate(value):\n assert array[i] == operator.getitem(value, i)", "def test_to_tensor(dummy_input):\n # Test the 2D image: B, H, W, C\n image, label = dummy_input(image_size=(512, 512, 3),\n label_size=(512, 512, 1))\n transform = ToTensor()\n _image, _label = transform(image, label, dtypes=[torch.float, torch.long])\n assert 
_image.dtype == torch.float\n assert _label.dtype == torch.long\n\n # Test the 3D image: B, H, W, D, C\n image, label = dummy_input(image_size=(512, 512, 20, 3),\n label_size=(512, 512, 20, 1))\n transform = ToTensor()\n _image, _label = transform(image, label, dtypes=[torch.float, torch.long])\n assert _image.dtype == torch.float\n assert _label.dtype == torch.long", "def transform_fn(model, data, input_content_type, output_content_type):\n net, image_transform, batchify = model\n batch = json.loads(data)\n model_input = batchify(image_transform(batch))\n\n x = net(model_input)\n return x[0].asnumpy().tolist()", "def test_load_full_transform(self):\n self.add_transform(cond_artist=True, cond_album=True, cond_title=True,\n cond_ensemble=True, cond_composer=True, cond_conductor=True,\n change_artist=True, change_album=True, change_title=True,\n change_ensemble=True, change_composer=True, change_conductor=True,\n pattern_artist='Artist', pattern_album='Album', pattern_title='Title',\n pattern_ensemble='Ensemble', pattern_composer='Composer', pattern_conductor='Conductor',\n to_artist='Artist 2', to_album='Album 2', to_title='Title 2',\n to_ensemble='Ensemble 2', to_composer='Composer 2', to_conductor='Conductor 2')\n self.app.load_data()\n self.assertEqual(len(self.app.transforms), 1)\n transform = self.app.transforms.transforms[1]\n self.assertEqual(transform.cond_artist, True)\n self.assertEqual(transform.cond_album, True)\n self.assertEqual(transform.cond_title, True)\n self.assertEqual(transform.cond_ensemble, True)\n self.assertEqual(transform.cond_composer, True)\n self.assertEqual(transform.cond_conductor, True)\n self.assertEqual(transform.change_artist, True)\n self.assertEqual(transform.change_album, True)\n self.assertEqual(transform.change_title, True)\n self.assertEqual(transform.change_ensemble, True)\n self.assertEqual(transform.change_composer, True)\n self.assertEqual(transform.change_conductor, True)\n self.assertEqual(transform.pattern_artist, 'Artist')\n self.assertEqual(transform.pattern_album, 'Album')\n self.assertEqual(transform.pattern_title, 'Title')\n self.assertEqual(transform.pattern_ensemble, 'Ensemble')\n self.assertEqual(transform.pattern_composer, 'Composer')\n self.assertEqual(transform.pattern_conductor, 'Conductor')\n self.assertEqual(transform.to_artist, 'Artist 2')\n self.assertEqual(transform.to_album, 'Album 2')\n self.assertEqual(transform.to_title, 'Title 2')\n self.assertEqual(transform.to_ensemble, 'Ensemble 2')\n self.assertEqual(transform.to_composer, 'Composer 2')\n self.assertEqual(transform.to_conductor, 'Conductor 2')", "def test_from_binary_0b_style(self):\n self.assertResult('[0001]', b4('0b0001'))\n with self.assertRaises(OverflowError):\n b4('0b10001')", "def test_encode(self):\n pass # TODO(tlarsen)", "def test_bytes_io(pdf, config, hs, exported):\n raw = read_raw_bti(pdf, config, hs, convert=True, preload=False)\n\n with open(pdf, \"rb\") as fid:\n pdf = BytesIO(fid.read())\n with open(config, \"rb\") as fid:\n config = BytesIO(fid.read())\n with open(hs, \"rb\") as fid:\n hs = BytesIO(fid.read())\n\n raw2 = read_raw_bti(pdf, config, hs, convert=True, preload=False)\n repr(raw2)\n assert_array_equal(raw[:][0], raw2[:][0])", "def testBinizeUnbinize(self):\n console.terse(\"{0}\\n\".format(self.testBinizeUnbinize.__doc__))\n\n n = 5\n u = aiding.binize(n, 8)\n self.assertEqual(u, '00000101')\n n = aiding.unbinize(u)\n self.assertEqual(n, 5)", "def _is_binary(bytes_data):\n # From: https://stackoverflow.com/a/7392391\n return 
bool(bytes_data.translate(None, _TEXTCHARS))", "def test_serialises_and_deserialises_hs00_message_correctly_for_int_array_data(\n self,\n ):\n original_hist = {\n \"source\": \"some_source\",\n \"timestamp\": 123456,\n \"current_shape\": [5],\n \"dim_metadata\": [\n {\n \"length\": 5,\n \"unit\": \"m\",\n \"label\": \"some_label\",\n \"bin_boundaries\": np.array([0, 1, 2, 3, 4, 5]),\n }\n ],\n \"last_metadata_timestamp\": 123456,\n \"data\": np.array([1, 2, 3, 4, 5]),\n \"errors\": np.array([5, 4, 3, 2, 1]),\n \"info\": \"info_string\",\n }\n\n buf = serialise_hs00(original_hist)\n hist = deserialise_hs00(buf)\n\n assert hist[\"source\"] == original_hist[\"source\"]\n assert hist[\"timestamp\"] == original_hist[\"timestamp\"]\n assert hist[\"current_shape\"] == original_hist[\"current_shape\"]\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][0], original_hist[\"dim_metadata\"][0]\n )\n assert np.array_equal(hist[\"data\"], original_hist[\"data\"])\n assert np.array_equal(hist[\"errors\"], original_hist[\"errors\"])\n assert hist[\"info\"] == original_hist[\"info\"]\n assert (\n hist[\"last_metadata_timestamp\"] == original_hist[\"last_metadata_timestamp\"]\n )", "def test_conversion(backend):\n\n x = np.random.rand(10, 10)\n x_b = backend.from_numpy(x)\n x_c = backend.to_numpy(x_b)\n\n assert np.all(np.isclose(x, x_c))", "def test_canConvert(string, cast, expected):\n assert canConvert(string, cast) == expected", "def test_transform_output(argument_pair):\n ..." ]
[ "0.68057483", "0.65271544", "0.64854556", "0.6451207", "0.637159", "0.6202632", "0.6188656", "0.6060267", "0.60558635", "0.60416764", "0.60387063", "0.60317004", "0.6017776", "0.59574693", "0.59446365", "0.59305394", "0.590178", "0.5877073", "0.5847465", "0.5844245", "0.5818709", "0.58172584", "0.5810405", "0.5799555", "0.5799382", "0.5772222", "0.57628465", "0.57428956", "0.5739929", "0.57337296", "0.5730107", "0.5721681", "0.56947196", "0.5687434", "0.5685653", "0.5678328", "0.56744874", "0.56637096", "0.5634384", "0.56219685", "0.5619983", "0.5619983", "0.5619983", "0.5619983", "0.5619983", "0.5619983", "0.5619983", "0.5615926", "0.5608962", "0.5603862", "0.5596891", "0.55936164", "0.559307", "0.5569102", "0.5562239", "0.55490744", "0.5546603", "0.55465055", "0.5545672", "0.55353", "0.5530631", "0.55138654", "0.55088675", "0.5501976", "0.54844236", "0.5483991", "0.54798293", "0.5479388", "0.54689384", "0.545109", "0.54431456", "0.5440335", "0.5430608", "0.5430484", "0.5429305", "0.5423426", "0.54157084", "0.5407295", "0.5396614", "0.5395537", "0.5393371", "0.53928244", "0.5387876", "0.5376489", "0.53733426", "0.5372031", "0.53718746", "0.536237", "0.53588015", "0.5357767", "0.5355128", "0.53521204", "0.5336163", "0.53349954", "0.53111887", "0.53094107", "0.53061825", "0.53043365", "0.5286663", "0.528569", "0.52850395" ]
0.0
-1
Test that the transform functions work for regression data
def test_CredL2_1b1_smoketest_transformer_stack_regression(self): np.random.seed(2525) Xdata = np.random.choice(np.array(['A', 'B', 'C', 'D']), size=(100, 5), p=[0.5, 0.3, 0.1, 0.1]) orig_columns = ['ColA', 'ColB', 'ColC', 'ColD', 'ColE'] Xdf = pd.DataFrame(Xdata, columns=orig_columns) Container_Xdf = Container(Xdf) np.random.seed(2525) yData = np.random.choice([0, 1, 2, 3, 4, 5, 6, 7], size=100, p =[0.7/2.22, 0.5/2.22, 0.35/2.22, 0.25/2.22, 0.17/2.22, 0.12/2.22, 0.08/2.22, 0.05/2.22]) conv = CredL2_1b1('rgl_a=[0.01,0.1];cmin=4') Z = Partition(size=100, folds=5, reps=5) Z.set(max_reps=1, max_folds=0) for weight in [False, True]: if weight: Container_Xdf.initialize({'weight': pandas.Series(np.ones(100))}) Container_Xdf.initialize({'weight': pandas.Series(np.ones(100))}) out = conv.fit(Container_Xdf, yData, Z) self.assertIsInstance(out, CredL2_1b1) out = conv.transform(Container_Xdf, yData, Z) self.assertIsInstance(out, Container) for p in Z: self.assertEqual(out(**p).shape[0], Xdf.shape[0]) self.assertEqual(out(**p).shape[1], len(orig_columns)) self.assertEqual(out.colnames(**p), ['DR_cred_' + i for i in orig_columns]) out = conv.transformer_stack(Container_Xdf, yData, Z) self.assertIsInstance(out, Container) for p in Z: self.assertEqual(out(**p).shape[0], Xdf.shape[0]) self.assertEqual(out(**p).shape[1], len(orig_columns)) self.assertEqual(out.colnames(**p), ['DR_cred_' + i for i in orig_columns]) if weight: self.assertEqual(np.all(out.get('weight') == np.ones(100)), True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_transform_interface_repr(example_tsds: TSDataset) -> None:\n trend_transform = TrendTransform(in_column=\"target\", detrend_model=LinearRegression(), model=\"rbf\")\n out_column = f\"regressor_{trend_transform.__repr__()}\"\n result = trend_transform.fit_transform(example_tsds.df)\n for seg in result.columns.get_level_values(0).unique():\n assert out_column in result[seg].columns", "def test_transE_display():\n testing_function('transe', display=True)", "def test_predictor():", "def evaluate_regression(x_test,t_test,basis,bias,w,degree=1,mu=None,s=1):\n \n phi = design_matrix(x_test,basis,degree,bias,mu,s)\n pred_test=phi@w\n # Measure root mean squared error on testing data.\n t_est = pred_test\n #print(\"deleteeeeeeeeeee\",t_est)\n #print(np.shape(t_est))\n err = np.sqrt((np.square(pred_test-t_test)).mean())\n \n \n\n return (t_est, err)", "def test_transform():\n args = get_layer('transform', 'manual', 'temporal', False, False, window=2, step_size=3)\n run_layer(*args)", "def test_multifield_regression(self):\n\n self.model.fit(self.text_data_train, [np.random.random() for _ in self.train_targets])\n self.assertTrue(not self.model.is_classification)\n predictions = self.model.predict(self.text_data_valid)\n self.model.save(self.save_file)\n model = LanguageModelGeneralAPI.load(self.save_file)\n new_predictions = model.predict(self.text_data_valid)\n for new_pred, old_pred in zip(new_predictions, predictions):\n self.assertEqual(new_pred, old_pred)", "def evaluate_regression(x, t, w, basis, degree):\n \t# TO DO:: Compute t_est and err \n #w_tranpose=w.T\n\n\n # My logic goes as follows:\n # Definition of test error is when you run the trained\n # model against a dataset that it hasn't been exposed to\n # this dataset is known as the testset \n\n # As such the basic algorithm goes as follows:\n # We do not need to recompute the weights but we need to recompute\n # phi for our test data\n\n # As such, we are interested in how well our trained weights\n # estimate against the test data so we matrix multiply our\n # weights against the phi from our test data\n # thus t_est = w_train.T*phi(x) since we want to know how well our\n # trained model estimates against the training data\n # but in implementation we do phi(x)*w_train\n # to match array dimensions \n\n\n #Compute design matrix from test data \n phi=design_matrix(x,basis,degree)\n phi_cross=np.linalg.pinv(phi)\n\n # Compute testing weights // just in case we require this variable\n #if(t is not None):\n #w_test=phi_cross.dot(t)\n #w_test=phi_cross.dot(t)\n\n # We want to be able to index into our target vector\n\n #t_est=phi.dot(w_test)\n #if (t is not None):\n # testing_estimate=phi.dot(w_test)\n #testing_estimate=phi.dot(w_test)\n\n # Estimate of our targets according to test data against learned \n # coefficients\n t_est=phi.dot(w)\n #print(\"t_est\",t_est)\n #t_est = None\n\n # We calculate the RMS error as follows\n # Take equation 3.12 of PRML and modify as follows\n # My logic:\n # The equation given in PRML gives the SSE (sum of squares error)\n # By definition the MSE (mean squared error) takes the SSE and divides \n # it by population size, we also preserve the 1/2 constant \n # throughout our calcuations \n # Afterwards we take our MSE and square root it.\n\n # Compute difference between target and estimate\n\n if(t is not None):\n \n diff=t-t_est\n # Square all observations\n diff_squared=np.power(diff,2)\n # Sum up all the observations in our vector\n sig_squared=diff_squared.sum()\n 
half_sig_squared=0.5*(sig_squared)\n # Calculate population size\n population_size=t.shape[0]\n rmse=np.sqrt(half_sig_squared/population_size)\n err=rmse\n else:\n err=None\n\n #diff=t-t_est\n\n\n # Square all observations \n #diff_squared=np.power(diff,2)\n\n # Sum up all the observations in our vector\n #sig_squared=diff_squared.sum()\n\n #half_sig_squared=0.5*(sig_squared)\n\n # Calculate population size\n #population_size=t.shape[0]\n\n #rmse=np.sqrt(half_sig_squared/population_size)\n #err = rmse\n #print(\"err inside function\",err)\n #err=rmse\n return (t_est, err)", "def test_transform_inverse_transform(example_tsds: TSDataset) -> None:\n trend_transform = TrendTransform(in_column=\"target\", detrend_model=LinearRegression(), model=\"rbf\")\n example_tsds.fit_transform([trend_transform])\n original = example_tsds.df.copy()\n example_tsds.inverse_transform()\n assert (example_tsds.df == original).all().all()", "def test_transform(self):\n t = Linearize()\n assert t.transform(numpy.e) == numpy.log(numpy.e)\n t.transform(0)", "def compare_coefficients1():\n directory = \"C:\\\\Users\\\\Casper\\\\Projects\\\\MasterScriptie\\\\custom_projects\\\\editing\\\\PHT_Preprocessing\\\\out\\\\{}\\\\data.csv\".format(dataset)\n\n X = pd.read_csv(directory)[var_list].to_numpy()[:datapoints_amount]\n y = np.squeeze(pd.read_csv(directory)[target_list].to_numpy())[:datapoints_amount]\n \n X_mean = np.mean(X, axis=0)\n X_std = np.std(X, axis=0)\n \n X_standardized = standardize(X, X_mean, X_std)\n \n model = LogisticRegression().fit(X, y) \n model_standardized = LogisticRegression().fit(X_standardized, y) \n \n print(\"coefficients \", model.coef_)\n print(\"beta coefficients \", model_standardized.coef_)\n \n for tuple_ in zip(model.coef_[0], X_std):\n standardized_coef = unstd_to_std_coef2_log(*tuple_)\n print(standardized_coef)\n \n for tuple_ in zip(model_standardized.coef_[0], X_std):\n unstd_coef = std_to_unstd_coef_log(*tuple_)\n print(unstd_coef)\n \n print(\"\\nintercept \", model.intercept_)\n print(\"coef \", unstd_coef)\n print(\"xmean \", X_mean)", "def do_scikit_learn_regression(data, verbose = False):\n \n \n regr = linear_model.LinearRegression()\n\n x = data['c'].values.reshape(100,1)\n y = data['f'].values.reshape(100,1)\n \n regr.fit(x, y)\n \n if verbose:\n\n string = '\\n'.join((\n f'Coefficient of {regr.coef_[0][0]} compared to actual {9/5}',\n f'Intercept of {regr.intercept_[0]} compared to actual {32}'\n ))\n\n print (string)\n\n return regr.coef_[0][0], regr.intercept_[0]", "def linear_regression(X, Y, Xs_test, Ys_test):\n\n ## YOUR CODE HERE\n #################\n return 0", "def ttest():\n # open test results and perform regression analysis\n alphas = []\n betas = []\n iterations = {}\n with open(f\"Results/conclusion2.csv\") as f:\n csv_reader = csv.reader(f, delimiter=',')\n\n for run in csv_reader:\n max, max_i = get_max_run(run)\n if int(run[0]) not in iterations:\n iterations[int(run[0])] = {100 - int(run[1])-1: int(max)}\n else:\n iterations[int(run[0])][100 - int(run[1])-1] = int(max)\n\n for iteration in iterations:\n mono_levels = list(iterations[iteration].keys())\n pop_sizes = [iterations[iteration][i] for i in mono_levels]\n\n regress_result = regress(pop_sizes, mono_levels)\n alphas += [regress_result[1]]\n betas += [regress_result[0]]\n\n # plot scatter and regression line\n avg_alpha = sum(alphas)/len(alphas)\n avg_beta = sum(betas)/len(betas)\n stddev_beta = np.std(betas)\n vis.scatter_mono(iterations, avg_alpha, avg_beta)\n\n # perform t-test\n ttest_result = 
stats.ttest_ind(betas, [0 for i in betas], equal_var=True)\n t_stat = ttest_result[0]\n p_value = ttest_result[1]\n print(f'Results from t-test:')\n print(f'Avg beta: {avg_beta}, stddev beta: {stddev_beta}.')\n print(f't-stat: {t_stat}, p-value: {p_value}.')", "def test__transform_continuous(self):", "def fit_test(self):", "def test_input_pandas(multiple_linear_regression_data):\n X, y = multiple_linear_regression_data\n x = pd.DataFrame(X)\n y = pd.DataFrame(y)\n out = multiple_linear_regression(x, y)\n assert out[\"SE\"] == approx(0.03206, abs=1e-5)", "def test_transform(self):\n t = Reverse(Quantize())\n assert t.transform(9) == 9.0\n assert t.transform(5) == 5.0\n assert numpy.all(t.transform([9, 5]) == numpy.array([9.0, 5.0], dtype=float))", "def test_regressor_age_estimator(x, reg_model):\n return reg_model.predict(x)", "def linear_regression(x_train, t_train, basis, bias,reg_lambda=0, degree=1, mu=0, s=1):\n \n # Construct the design matrix.\n # Pass the required parameters to this function\n \n phi = design_matrix(x_train,basis,degree,bias,mu,s) \n #print(x_train.shape) \n # Learning Coefficients\n if reg_lambda > 0:\n I=np.identity((phi.shape[1]),dtype=int)\n inv = np.linalg.inv((reg_lambda*I)+(phi.T@phi))\n w = inv@(phi.T@t_train) \n # regularized regression\n else:\n # no regularization \n w = np.linalg.pinv(phi)@t_train\n \n pred_train=phi@w\n train_err = np.sqrt((np.square(pred_train-t_train)).mean())\n return (w, train_err)", "def test_transform(self):\n X = self.generate_X()\n task = mmRDTR()\n task.fit(X)\n res = task.transform(X)\n # check if Instance\n self.assertIsInstance(res,Container)\n # check if names\n self.assertEqual(np.all(res.colnames()==[str(i) for i in xrange(len(res.colnames()))]),True)\n # check if values as within the range expected\n self.assertEqual(np.all(res().min()>=-1),True)\n self.assertEqual(np.all(res().max()<=1),True)\n for i in range(len(res.colnames())):\n self.assertEqual(round(res()[:,i].mean(),8),0)\n # check with new data\n Y = self.generate_X()\n res = task.transform(Y)\n self.assertEqual(np.all(res.colnames()==[str(i) for i in xrange(len(res.colnames()))]),True)\n self.assertEqual(np.all(res().min()>=-1),True)\n self.assertEqual(np.all(res().max()<=1),True)", "def test_transform(self):\n X,Y,Z = self.generate_data()\n\n p={'k':-1,'r':0}\n\n task = mmSCHPOLY('sc2=0.5')\n res= task.fit_transform(X,Y,Z)\n # check if Instance\n self.assertIsInstance(res,Container)\n # check if expected response\n print res.colnames(**p)\n self.assertEqual(res.colnames(**p)==['0','Pol1stTerm_1','Pol2ndTerm_1'],True)\n self.assertEqual(np.all(res(**p)[:,0]-X()[:,0]==0),True)\n self.assertEqual(np.all(res(**p)[:,1]-X()[:,1]==0),True)\n self.assertEqual(np.all(res(**p)[:,2]-0.5*X()[:,1]**2==0),True)\n\n task = mmSCH2W('sc=0.1')\n res= task.fit_transform(X,Y,Z)\n # check if Instance\n self.assertIsInstance(res,Container)\n # check if expected response\n print res.colnames(**p)\n self.assertEqual(res.colnames(**p)==['Product_1_2'],True)\n self.assertEqual(np.all(res(**p)[:,0]-0.1*X()[:,1]*X()[:,2]==0),True)", "def test_input_transposed_vector(multiple_linear_regression_data):\n X, y = multiple_linear_regression_data\n x = X.copy().T\n y = pd.DataFrame(y)\n\n # There is a difference with a transposed array\n with pytest.raises(\n AssertionError, match=r\"N >= K: You need at least as many rows .*\"\n ):\n _ = multiple_linear_regression(x, y)", "def test_tmle_fit(continuous_dataset_fixture):\n\n tmle = TMLE(\n treatment_grid_bins=[22.1, 30, 40, 50, 60, 70, 80.1],\n 
random_seed=100,\n verbose=True,\n )\n tmle.fit(\n T=continuous_dataset_fixture[\"treatment\"],\n X=continuous_dataset_fixture[[\"x1\", \"x2\"]],\n y=continuous_dataset_fixture[\"outcome\"],\n )\n\n assert tmle.n_obs == 72\n assert len(tmle.psi_list) == 5\n assert len(tmle.std_error_ic_list) == 5", "def test_fn(df, fn):\n\n y_pred = []\n y_true = []\n\n for key in df.index:\n y_t, *inputs = df.loc[key]\n y_true.append(y_t)\n y_p = fn(*inputs)\n y_pred.append(y_p)\n\n # linear regression without intercept\n c = np.mean(y_true) / np.mean(y_pred)\n y_pred = np.multiply(y_pred, c)\n\n rmse = np.sqrt(np.mean(np.subtract(y_pred, y_true) ** 2))\n return rmse, y_pred, y_true, c", "def mk_test(input_data):\r\n\r\n\ttrend, h, p, z, Tau, s, var_s, slope, intercept = mk.original_test(input_data)\r\n\r\n\treturn trend, h, p, z, Tau, s, var_s, slope, intercept", "def test_transform(self):\n t = Quantize()\n assert t.transform(8.6) == 9\n assert t.transform(8.4) == 8\n assert t.transform(5.3) == 5\n assert numpy.all(t.transform([8.6, 5.3]) == numpy.array([9, 5], dtype=int))", "def nnRegression(data):", "def test_single_linear_regression_data_passing_correctly(\n single_linear_regression_model, single_linear_regression_data\n):\n assert (\n single_linear_regression_model.predictor_vars_train.all()\n == single_linear_regression_data[\"predictor_vars\"].all()\n )\n assert (\n single_linear_regression_model.response_var_train.all()\n == single_linear_regression_data[\"response_var\"].all()\n )\n assert type(single_linear_regression_model.predictor_vars_train) == np.ndarray\n assert type(single_linear_regression_model.response_var_train) == np.ndarray", "def regression(x_train, y_train, x_test=[], y_test=[], D=2, plot=False):\n Ntrain = len(x_train)\n Ntest = len(x_test)\n Xtrain = np.asmatrix(x_train)\n Ytrain = np.asmatrix(y_train).transpose()\n Xtest = np.asmatrix(x_test)\n Ytest = np.asmatrix(y_test).transpose() \n \n X = get_feature_matrix(Ntrain, Xtrain, D)\n X_test = get_feature_matrix(Ntest, Xtest, D)\n\n w = np.linalg.solve(np.dot(X.T, X), np.dot(X.T, Ytrain))\n w = w.reshape((w.shape[0],)).tolist()[0]\n \n predicted_Y = X.dot(w).T\n Rtrain = np.linalg.norm(predicted_Y - Ytrain) #training error\n\n predicted_Y_test = X_test.dot(w).T\n Rtest = np.linalg.norm(predicted_Y_test - Ytest) #test error \n\n average_training_error = (Rtrain**2) / Ntrain\n average_test_error = (Rtest**2) / Ntest\n\n if plot:\n # plots\n x = np.linspace(-5, 5, 1000)\n y = predict_y(x, w)\n plt.subplot(211)\n plt.scatter(x_train, y_train)\n plt.plot(x, y)\n plt.title('Training samples and regression')\n plt.grid(True)\n\n x = np.linspace(-5, 5, 1000)\n y = predict_y(x, w)\n plt.subplot(212)\n plt.scatter(x_test, y_test)\n plt.plot(x,y)\n plt.title('Test samples and regression')\n plt.grid(True) \n\n plt.show()\n \n return {'weights': w, \n 'average_training_error': average_training_error,\n 'average_test_error': average_test_error,\n }", "def test_interface(transform, example_tsds: TSDataset):\n start_columnns = example_tsds.columns\n example_tsds.fit_transform(transforms=[transform])\n assert np.all(start_columnns == example_tsds.columns)", "def test_T0():", "def _test_trend(degree, with_intercept):\n y = make_forecasting_problem()\n forecaster = PolynomialTrendForecaster(degree=degree, with_intercept=with_intercept)\n forecaster.fit(y)\n\n # check coefficients\n # intercept is added in reverse order\n actual = forecaster.regressor_.steps[-1][1].coef_[::-1]\n expected = get_expected_polynomial_coefs(y, degree, with_intercept)\n 
np.testing.assert_allclose(actual, expected)", "def test_example(self, example_dataset, expected_result):\n\n transformer = PreprocessFeatures()\n result = transformer.fit_transform(example_dataset)\n\n assert (result == expected_result).all()", "def test_linear_activation(self):\n self.assertEqual([0.5, 0.6], af.Linear().output([0.5, 0.6]))\n self.assertEqual([1, 1], list(\n af.Linear().derivative(np.array([0.5, 0.6]))))", "def linear_regression_sklearn(data):\n# Split the data into training/testing sets\n dataset = np.array(data)\n\n X_train = dataset[:,0].reshape(-1,1)\n y_train = dataset[:,1]\n\n# Create linear regression object\n regr = linear_model.LinearRegression()\n\n# Train the model using the training sets\n regr.fit(X_train, y_train)\n\n return (regr.coef_[0], regr.intercept_)", "def test_analytical_vs_numerical():\n pass", "def test_from_pytorch_training_regression(self):\n import torch.nn as nn\n import torch.nn.functional as F\n\n class FeedForward(nn.Module):\n def __init__(self):\n super(FeedForward, self).__init__()\n self.fc1 = nn.Linear(3, 10)\n self.fc2 = nn.Linear(10, 1)\n\n def forward(self, x):\n out = self.fc1(x)\n out = F.relu(out)\n out = self.fc2(out)\n return out\n\n model_plaintext = FeedForward()\n batch_size = 5\n\n x_orig = get_random_test_tensor(size=(batch_size, 3), is_float=True)\n dummy_input = torch.empty((1, 3))\n # y is a linear combo of features 1 and 3\n y_orig = 2 * x_orig[:, 0] + 3 * x_orig[:, 2]\n\n x_train = crypten.cryptensor(x_orig, requires_grad=True)\n y_train = crypten.cryptensor(y_orig.unsqueeze(-1))\n\n # create encrypted model\n model = crypten.nn.from_pytorch(model_plaintext, dummy_input)\n model.train()\n model.encrypt()\n\n self._check_training(model, x_train, y_train, \"MSELoss\")\n self._check_model_export(model, x_train)", "def test_inverse_transform(self):", "def test_models_regression(model):\n atom = ATOMRegressor(X_reg, y_reg, test_size=0.24, random_state=1)\n atom.run(\n models=model,\n metric=\"neg_mean_absolute_error\",\n n_calls=2,\n n_initial_points=1,\n bo_params={\"base_estimator\": \"gbrt\", \"cv\": 1},\n )\n assert not atom.errors\n assert hasattr(atom, model)", "def linear_regression(x, t, basis, reg_lambda=0, degree=0):\n\n # TO DO:: Complete the design_matrix function.\n # e.g. phi = design_matrix(x,basis, degree)\n \n phi=design_matrix(x,basis,degree)\n phi_cross=np.linalg.pinv(phi)\n #t_tranpose=t.T\n # TO DO:: Compute coefficients using phi matrix\n if(reg_lambda==0):\n w=phi_cross.dot(t)\n if(reg_lambda!=0):\n # print(\"Inside lambda if: \")\n n_col=phi.shape[1]\n #r=phi.T.dot(phi) + reg_lambda * np.identity(n_col)\n r=reg_lambda*np.identity(n_col)+phi.T.dot(phi)\n r=np.linalg.inv(r)\n\t#r=np.linalg.inv(r)\n [email protected]\n w=z@t\n #w = phi_cross.dot(t)\n\n # Measure root mean squared error on training data.\n # Basic algorithim goes as follows:\n # \t1. 
We take Equation 3.12 * 1/n \n # Then we math.sqrt( of the equation obtained in 1.)\n\n # t_est: variable for estimation of targets\n t_est= phi.dot(w)\n \n # variable to calculate the difference between our target and estimate\n # target is the left operand, estimate is right operand\n diff=t-t_est\n \n # Square all the elements\n diff_squared=np.power(diff,2)\n\n # Sum up all the elements of diff_squared, i.e take square of\n # all elements then sum them up\n\n sig_squared=diff_squared.sum()\n\n # multiply by 1/2 as specified in PRML\n\n half_sig_squared=0.5*(sig_squared)\n\n # Divide by population size and square root\n population_size= t.shape[0]\n\n rmse_bforesqrt=half_sig_squared/population_size\n\n train_err = np.sqrt(rmse_bforesqrt)\n\n return (w, train_err)", "def experiment_models(train, test, train_target, test_target):\n # Linear models\n linear_models = [(LinearRegression, {\"n_jobs\": -1}),\n (Lasso, {\"alpha\": 3}),\n (Ridge, {\"alpha\": 3}),\n (LinearSVR, {\"random_state\": 0, \"tol\": 1e-5})]\n\n # Add polynomial features\n poly = preprocessing.PolynomialFeatures(2)\n\n # scaler\n scaler = preprocessing.StandardScaler().fit(train)\n\n print(\"Use linear models with linear features\")\n for model_ in linear_models:\n scaled_train = scaler.transform(train)\n scaled_test = scaler.transform(test)\n model = model_[0](**model_[1])\n model.fit(scaled_train, train_target.to_numpy())\n train_pred = model.predict(scaled_train)\n valid_pred = model.predict(scaled_test)\n print(\"=========================================\")\n print(f\"Model : {model_}\")\n compute_metrics(train_pred, train_target, valid_pred, test_target)\n print(\"=========================================\")\n\n print(\"Use linear models with polynomial features\")\n train = poly.fit_transform(train)\n test = poly.transform(test)\n scaler = preprocessing.StandardScaler().fit(train)\n for model_ in linear_models:\n scaled_train = scaler.transform(train)\n scaled_test = scaler.transform(test)\n model = model_[0](**model_[1])\n model.fit(scaled_train, train_target.to_numpy())\n train_pred = model.predict(scaled_train)\n valid_pred = model.predict(scaled_test)\n print(\"=========================================\")\n print(f\"Model : {model_}\")\n compute_metrics(train_pred, train_target, valid_pred, test_target)\n print(\"=========================================\")", "def test_linear_regression(subtests):\n test_cases = [{\n \"description\": \"Empty stdin\",\n \"expected_status_code\": 1,\n \"expected_stderr\": \"No standard input provided to Linear Regression algorithm, exiting\\n\",\n \"expected_stdout\": \"\",\n \"stdin\": \"\"\n }, {\n \"description\": \"Invalid JSON stdin\",\n \"expected_status_code\": 1,\n \"expected_stderr\": \"Invalid JSON provided: Expecting value: line 1 column 1 (char 0), exiting\\n\",\n \"expected_stdout\": \"\",\n \"stdin\": \"invalid\"\n }, {\n \"description\":\n \"JSON stdin missing 'lookAhead'\",\n \"expected_status_code\":\n 1,\n \"expected_stderr\":\n \"Invalid JSON provided: missing 'look_ahead', exiting\\n\",\n \"expected_stdout\":\n \"\",\n \"stdin\":\n \"\"\"{\n \"replicaHistory\": [\n {\n \"time\": \"2020-02-01T00:55:33Z\",\n \"replicas\": 2\n }\n ]\n }\"\"\"\n }, {\n \"description\":\n \"Invalid timestamp provided\",\n \"expected_status_code\":\n 1,\n \"expected_stderr\":\n \"Invalid datetime format: time data 'invalid' does not match format \" + \"'%Y-%m-%dT%H:%M:%SZ'\\n\",\n \"expected_stdout\":\n \"\",\n \"stdin\":\n \"\"\"{\n \"lookAhead\": 10,\n \"replicaHistory\": [\n {\n 
\"time\": \"invalid\",\n \"replicas\": 2\n }\n ]\n }\"\"\"\n }, {\n \"description\":\n \"Invalid current time provided\",\n \"expected_status_code\":\n 1,\n \"expected_stderr\":\n \"Invalid datetime format: time data 'invalid' does not match format \" + \"'%Y-%m-%dT%H:%M:%SZ'\\n\",\n \"expected_stdout\":\n \"\",\n \"stdin\":\n \"\"\"{\n \"lookAhead\": 15000,\n \"currentTime\": \"invalid\",\n \"replicaHistory\": []\n }\"\"\"\n }, {\n \"description\":\n \"Successful prediction, now\",\n \"expected_status_code\":\n 0,\n \"expected_stderr\":\n \"\",\n \"expected_stdout\":\n \"5\",\n \"stdin\":\n \"\"\"{\n \"lookAhead\": 0,\n \"currentTime\": \"2020-02-01T00:56:12Z\",\n \"replicaHistory\": [\n {\n \"replicas\": 1,\n \"time\": \"2020-02-01T00:55:33Z\"\n },\n {\n \"replicas\": 2,\n \"time\": \"2020-02-01T00:55:43Z\"\n },\n {\n \"replicas\": 3,\n \"time\": \"2020-02-01T00:55:53Z\"\n },\n {\n \"replicas\": 4,\n \"time\": \"2020-02-01T00:56:03Z\"\n }\n ]\n }\"\"\"\n }, {\n \"description\":\n \"Successful prediction, 10 seconds in the future\",\n \"expected_status_code\":\n 0,\n \"expected_stderr\":\n \"\",\n \"expected_stdout\":\n \"6\",\n \"stdin\":\n \"\"\"{\n \"lookAhead\": 10000,\n \"currentTime\": \"2020-02-01T00:56:12Z\",\n \"replicaHistory\": [\n {\n \"replicas\": 1,\n \"time\": \"2020-02-01T00:55:33Z\"\n },\n {\n \"replicas\": 2,\n \"time\": \"2020-02-01T00:55:43Z\"\n },\n {\n \"replicas\": 3,\n \"time\": \"2020-02-01T00:55:53Z\"\n },\n {\n \"replicas\": 4,\n \"time\": \"2020-02-01T00:56:03Z\"\n }\n ]\n }\"\"\"\n }, {\n \"description\":\n \"Successful prediction, 15 seconds in the future\",\n \"expected_status_code\":\n 0,\n \"expected_stderr\":\n \"\",\n \"expected_stdout\":\n \"7\",\n \"stdin\":\n \"\"\"{\n \"lookAhead\": 15000,\n \"currentTime\": \"2020-02-01T00:56:12Z\",\n \"replicaHistory\": [\n {\n \"replicas\": 1,\n \"time\": \"2020-02-01T00:55:33Z\"\n },\n {\n \"replicas\": 2,\n \"time\": \"2020-02-01T00:55:43Z\"\n },\n {\n \"replicas\": 3,\n \"time\": \"2020-02-01T00:55:53Z\"\n },\n {\n \"replicas\": 4,\n \"time\": \"2020-02-01T00:56:03Z\"\n }\n ]\n }\"\"\"\n }]\n\n for i, test_case in enumerate(test_cases):\n with subtests.test(msg=test_case[\"description\"], i=i):\n result = subprocess.run([\"python\", \"./algorithms/linear_regression/linear_regression.py\"],\n input=test_case[\"stdin\"].encode(\"utf-8\"),\n capture_output=True,\n check=False)\n\n stderr = result.stderr\n if stderr is not None:\n stderr = stderr.decode(\"utf-8\")\n\n stdout = result.stdout\n if stdout is not None:\n stdout = stdout.decode(\"utf-8\")\n\n assert test_case[\"expected_status_code\"] == result.returncode\n assert test_case[\"expected_stderr\"] == stderr\n assert test_case[\"expected_stdout\"] == stdout", "def test_transform_interface_out_column(example_tsds: TSDataset) -> None:\n out_column = \"regressor_test\"\n trend_transform = TrendTransform(\n in_column=\"target\", detrend_model=LinearRegression(), model=\"rbf\", out_column=out_column\n )\n result = trend_transform.fit_transform(example_tsds.df)\n for seg in result.columns.get_level_values(0).unique():\n assert out_column in result[seg].columns", "def test_test_regression_model(self):\n model = RegressionTestModel()\n example = {'x_1': 3, 'x_2': 2}\n pred = list(model.predict([example]))[0]\n self.assertEqual(pred['score'], 20)", "def test__inverse_transform_continuous(self):", "def test_fit(self):\n X = self.generate_X()\n task = mmRDTR()\n fit_result = task.fit(X)", "def polytrans(features,features_test,features_oos,poly): \n \n 
features['FEMA_21'] = poly.fit_transform(np.nan_to_num(features.FEMA_21.astype(np.float32)).reshape(-1, 1))\n features['FEMA_8'] = poly.fit_transform(np.nan_to_num(features.FEMA_8.astype(np.float32)).reshape(-1, 1))\n features['FADRLo'] = poly.fit_transform(np.nan_to_num(features.FADRLo.astype(np.float32)).reshape(-1, 1))\n features['FADRHi'] = poly.fit_transform(np.nan_to_num(features.FADRHi.astype(np.float32)).reshape(-1, 1))\n features['FRVI40'] = poly.fit_transform(np.nan_to_num(features.FRVI40.astype(np.float32)).reshape(-1, 1))\n features['FRVI60'] = poly.fit_transform(np.nan_to_num(features.FRVI60.astype(np.float32)).reshape(-1, 1))\n features['FONLOSMA5'] = poly.fit_transform(np.nan_to_num(features.FONLOSMA5.astype(np.float32)).reshape(-1, 1))\n features['FONHISMA5'] = poly.fit_transform(np.nan_to_num(features.FONHISMA5.astype(np.float32)).reshape(-1, 1))\n features['FONLOSMA21'] = poly.fit_transform(np.nan_to_num(features.FONLOSMA21.astype(np.float32)).reshape(-1, 1))\n features['FONHISMA21'] = poly.fit_transform(np.nan_to_num(features.FONHISMA21.astype(np.float32)).reshape(-1, 1))\n features['FONLOSMA34'] = poly.fit_transform(np.nan_to_num(features.FONLOSMA34.astype(np.float32)).reshape(-1, 1))\n features['FSBGAMMA'] = poly.fit_transform(np.nan_to_num(features.FSBGAMMA.astype(np.float32)).reshape(-1, 1))\n features['FOPENWEEKLY'] = poly.fit_transform(np.nan_to_num(features.FOPENWEEKLY.astype(np.float32)).reshape(-1, 1))\n features['FHIGHWEEKLY'] = poly.fit_transform(np.nan_to_num(features.FHIGHWEEKLY.astype(np.float32)).reshape(-1, 1))\n features['FLOWWEEKLY'] = poly.fit_transform(np.nan_to_num(features.FLOWWEEKLY.astype(np.float32)).reshape(-1, 1))\n features['FCLOSEWEEKLY'] = poly.fit_transform(np.nan_to_num(features.FCLOSEWEEKLY.astype(np.float32)).reshape(-1, 1))\n features['FOPENDAILY'] = poly.fit_transform(np.nan_to_num(features.FOPENDAILY.astype(np.float32)).reshape(-1, 1))\n features['FHIGHDAILY'] = poly.fit_transform(np.nan_to_num(features.FHIGHDAILY.astype(np.float32)).reshape(-1, 1))\n features['FLOWDAILY'] = poly.fit_transform(np.nan_to_num(features.FLOWDAILY.astype(np.float32)).reshape(-1, 1))\n features['FCLOSEDAILY'] = poly.fit_transform(np.nan_to_num(features.FCLOSEDAILY.astype(np.float32)).reshape(-1, 1))\n features['FOPENHOURLY'] = poly.fit_transform(np.nan_to_num(features.FOPENHOURLY.astype(np.float32)).reshape(-1, 1))\n features['FHIGHHOURLY'] = poly.fit_transform(np.nan_to_num(features.FHIGHHOURLY.astype(np.float32)).reshape(-1, 1))\n features['FLOWHOURLY'] = poly.fit_transform(np.nan_to_num(features.FLOWHOURLY.astype(np.float32)).reshape(-1, 1))\n features['FCLOSEHOURLY'] = poly.fit_transform(np.nan_to_num(features.FCLOSEHOURLY.astype(np.float32)).reshape(-1, 1))\n features['FSMA200'] = poly.fit_transform(np.nan_to_num(features.FSMA200.astype(np.float32)).reshape(-1, 1))\n features['FBOLUP20'] = poly.fit_transform(np.nan_to_num(features.FBOLUP20.astype(np.float32)).reshape(-1, 1))\n features['FPP'] = poly.fit_transform(np.nan_to_num(features.FPP.astype(np.float32)).reshape(-1, 1))\n features['FS38'] = poly.fit_transform(np.nan_to_num(features.FS38.astype(np.float32)).reshape(-1, 1))\n features['FS62'] = poly.fit_transform(np.nan_to_num(features.FS62.astype(np.float32)).reshape(-1, 1))\n features['FS100'] = poly.fit_transform(np.nan_to_num(features.FS100.astype(np.float32)).reshape(-1, 1))\n features['FS138'] = poly.fit_transform(np.nan_to_num(features.FS138.astype(np.float32)).reshape(-1, 1))\n features['FR162'] = 
poly.fit_transform(np.nan_to_num(features.FS162.astype(np.float32)).reshape(-1, 1))\n features['FS200'] = poly.fit_transform(np.nan_to_num(features.FS200.astype(np.float32)).reshape(-1, 1))\n features['FR38'] = poly.fit_transform(np.nan_to_num(features.FR38.astype(np.float32)).reshape(-1, 1))\n features['FR62'] = poly.fit_transform(np.nan_to_num(features.FR62.astype(np.float32)).reshape(-1, 1))\n features['FR100'] = poly.fit_transform(np.nan_to_num(features.FR100.astype(np.float32)).reshape(-1, 1))\n features['FR138'] = poly.fit_transform(np.nan_to_num(features.FR138.astype(np.float32)).reshape(-1, 1))\n features['FR162'] = poly.fit_transform(np.nan_to_num(features.FR162.astype(np.float32)).reshape(-1, 1))\n features['FR200'] = poly.fit_transform(np.nan_to_num(features.FR200.astype(np.float32)).reshape(-1, 1))\n features['SBATR'] = poly.fit_transform(np.nan_to_num(features.SBATR.astype(np.float32)).reshape(-1, 1))\n \n features_test['FEMA_21'] = poly.fit_transform(np.nan_to_num(features_test.FEMA_21.astype(np.float32)).reshape(-1, 1))\n features_test['FEMA_8'] = poly.fit_transform(np.nan_to_num(features_test.FEMA_8.astype(np.float32)).reshape(-1, 1))\n features_test['FADRLo'] = poly.fit_transform(np.nan_to_num(features_test.FADRLo.astype(np.float32)).reshape(-1, 1))\n features_test['FADRHi'] = poly.fit_transform(np.nan_to_num(features_test.FADRHi.astype(np.float32)).reshape(-1, 1))\n features_test['FRVI40'] = poly.fit_transform(np.nan_to_num(features_test.FRVI40.astype(np.float32)).reshape(-1, 1))\n features_test['FRVI60'] = poly.fit_transform(np.nan_to_num(features_test.FRVI60.astype(np.float32)).reshape(-1, 1))\n features_test['FONLOSMA5'] = poly.fit_transform(np.nan_to_num(features_test.FONLOSMA5.astype(np.float32)).reshape(-1, 1))\n features_test['FONHISMA5'] = poly.fit_transform(np.nan_to_num(features_test.FONHISMA5.astype(np.float32)).reshape(-1, 1))\n features_test['FONLOSMA21'] = poly.fit_transform(np.nan_to_num(features_test.FONLOSMA21.astype(np.float32)).reshape(-1, 1))\n features_test['FONHISMA21'] = poly.fit_transform(np.nan_to_num(features_test.FONHISMA21.astype(np.float32)).reshape(-1, 1))\n features_test['FONLOSMA34'] = poly.fit_transform(np.nan_to_num(features_test.FONLOSMA34.astype(np.float32)).reshape(-1, 1))\n features_test['FSBGAMMA'] = poly.fit_transform(np.nan_to_num(features_test.FSBGAMMA.astype(np.float32)).reshape(-1, 1))\n features_test['FOPENWEEKLY'] = poly.fit_transform(np.nan_to_num(features_test.FOPENWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_test['FHIGHWEEKLY'] = poly.fit_transform(np.nan_to_num(features_test.FHIGHWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_test['FLOWWEEKLY'] = poly.fit_transform(np.nan_to_num(features_test.FLOWWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_test['FCLOSEWEEKLY'] = poly.fit_transform(np.nan_to_num(features_test.FCLOSEWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_test['FOPENDAILY'] = poly.fit_transform(np.nan_to_num(features_test.FOPENDAILY.astype(np.float32)).reshape(-1, 1))\n features_test['FHIGHDAILY'] = poly.fit_transform(np.nan_to_num(features_test.FHIGHDAILY.astype(np.float32)).reshape(-1, 1))\n features_test['FLOWDAILY'] = poly.fit_transform(np.nan_to_num(features_test.FLOWDAILY.astype(np.float32)).reshape(-1, 1))\n features_test['FCLOSEDAILY'] = poly.fit_transform(np.nan_to_num(features_test.FCLOSEDAILY.astype(np.float32)).reshape(-1, 1))\n features_test['FOPENHOURLY'] = poly.fit_transform(np.nan_to_num(features_test.FOPENHOURLY.astype(np.float32)).reshape(-1, 1))\n 
features_test['FHIGHHOURLY'] = poly.fit_transform(np.nan_to_num(features_test.FHIGHHOURLY.astype(np.float32)).reshape(-1, 1))\n features_test['FLOWHOURLY'] = poly.fit_transform(np.nan_to_num(features_test.FLOWHOURLY.astype(np.float32)).reshape(-1, 1))\n features_test['FCLOSEHOURLY'] = poly.fit_transform(np.nan_to_num(features_test.FCLOSEHOURLY.astype(np.float32)).reshape(-1, 1))\n features_test['FSMA200'] = poly.fit_transform(np.nan_to_num(features_test.FSMA200.astype(np.float32)).reshape(-1, 1))\n features_test['FBOLUP20'] = poly.fit_transform(np.nan_to_num(features_test.FBOLUP20.astype(np.float32)).reshape(-1, 1))\n features_test['FPP'] = poly.fit_transform(np.nan_to_num(features_test.FPP.astype(np.float32)).reshape(-1, 1))\n features_test['FS38'] = poly.fit_transform(np.nan_to_num(features_test.FS38.astype(np.float32)).reshape(-1, 1))\n features_test['FS62'] = poly.fit_transform(np.nan_to_num(features_test.FS62.astype(np.float32)).reshape(-1, 1))\n features_test['FS100'] = poly.fit_transform(np.nan_to_num(features_test.FS100.astype(np.float32)).reshape(-1, 1))\n features_test['FS138'] = poly.fit_transform(np.nan_to_num(features_test.FS138.astype(np.float32)).reshape(-1, 1))\n features_test['FR162'] = poly.fit_transform(np.nan_to_num(features_test.FS162.astype(np.float32)).reshape(-1, 1))\n features_test['FS200'] = poly.fit_transform(np.nan_to_num(features_test.FS200.astype(np.float32)).reshape(-1, 1))\n features_test['FR38'] = poly.fit_transform(np.nan_to_num(features_test.FR38.astype(np.float32)).reshape(-1, 1))\n features_test['FR62'] = poly.fit_transform(np.nan_to_num(features_test.FR62.astype(np.float32)).reshape(-1, 1))\n features_test['FR100'] = poly.fit_transform(np.nan_to_num(features_test.FR100.astype(np.float32)).reshape(-1, 1))\n features_test['FR138'] = poly.fit_transform(np.nan_to_num(features_test.FR138.astype(np.float32)).reshape(-1, 1))\n features_test['FR162'] = poly.fit_transform(np.nan_to_num(features_test.FR162.astype(np.float32)).reshape(-1, 1))\n features_test['FR200'] = poly.fit_transform(np.nan_to_num(features_test.FR200.astype(np.float32)).reshape(-1, 1))\n features_test['SBATR'] = poly.fit_transform(np.nan_to_num(features_test.SBATR.astype(np.float32)).reshape(-1, 1))\n\n features_oos['FEMA_21'] = poly.fit_transform(np.nan_to_num(features_oos.FEMA_21.astype(np.float32)).reshape(-1, 1))\n features_oos['FEMA_8'] = poly.fit_transform(np.nan_to_num(features_oos.FEMA_8.astype(np.float32)).reshape(-1, 1))\n features_oos['FADRLo'] = poly.fit_transform(np.nan_to_num(features_oos.FADRLo.astype(np.float32)).reshape(-1, 1))\n features_oos['FADRHi'] = poly.fit_transform(np.nan_to_num(features_oos.FADRHi.astype(np.float32)).reshape(-1, 1))\n features_oos['FRVI40'] = poly.fit_transform(np.nan_to_num(features_oos.FRVI40.astype(np.float32)).reshape(-1, 1))\n features_oos['FRVI60'] = poly.fit_transform(np.nan_to_num(features_oos.FRVI60.astype(np.float32)).reshape(-1, 1))\n features_oos['FONLOSMA5'] = poly.fit_transform(np.nan_to_num(features_oos.FONLOSMA5.astype(np.float32)).reshape(-1, 1))\n features_oos['FONHISMA5'] = poly.fit_transform(np.nan_to_num(features_oos.FONHISMA5.astype(np.float32)).reshape(-1, 1))\n features_oos['FONLOSMA21'] = poly.fit_transform(np.nan_to_num(features_oos.FONLOSMA21.astype(np.float32)).reshape(-1, 1))\n features_oos['FONHISMA21'] = poly.fit_transform(np.nan_to_num(features_oos.FONHISMA21.astype(np.float32)).reshape(-1, 1))\n features_oos['FONLOSMA34'] = poly.fit_transform(np.nan_to_num(features_oos.FONLOSMA34.astype(np.float32)).reshape(-1, 1))\n 
features_oos['FSBGAMMA'] = poly.fit_transform(np.nan_to_num(features_oos.FSBGAMMA.astype(np.float32)).reshape(-1, 1))\n features_oos['FOPENWEEKLY'] = poly.fit_transform(np.nan_to_num(features_oos.FOPENWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FHIGHWEEKLY'] = poly.fit_transform(np.nan_to_num(features_oos.FHIGHWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FLOWWEEKLY'] = poly.fit_transform(np.nan_to_num(features_oos.FLOWWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FCLOSEWEEKLY'] = poly.fit_transform(np.nan_to_num(features_oos.FCLOSEWEEKLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FOPENDAILY'] = poly.fit_transform(np.nan_to_num(features_oos.FOPENDAILY.astype(np.float32)).reshape(-1, 1))\n features_oos['FHIGHDAILY'] = poly.fit_transform(np.nan_to_num(features_oos.FHIGHDAILY.astype(np.float32)).reshape(-1, 1))\n features_oos['FLOWDAILY'] = poly.fit_transform(np.nan_to_num(features_oos.FLOWDAILY.astype(np.float32)).reshape(-1, 1))\n features_oos['FCLOSEDAILY'] = poly.fit_transform(np.nan_to_num(features_oos.FCLOSEDAILY.astype(np.float32)).reshape(-1, 1))\n features_oos['FOPENHOURLY'] = poly.fit_transform(np.nan_to_num(features_oos.FOPENHOURLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FHIGHHOURLY'] = poly.fit_transform(np.nan_to_num(features_oos.FHIGHHOURLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FLOWHOURLY'] = poly.fit_transform(np.nan_to_num(features_oos.FLOWHOURLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FCLOSEHOURLY'] = poly.fit_transform(np.nan_to_num(features_oos.FCLOSEHOURLY.astype(np.float32)).reshape(-1, 1))\n features_oos['FSMA200'] = poly.fit_transform(np.nan_to_num(features_oos.FSMA200.astype(np.float32)).reshape(-1, 1))\n features_oos['FBOLUP20'] = poly.fit_transform(np.nan_to_num(features_oos.FBOLUP20.astype(np.float32)).reshape(-1, 1))\n features_oos['FPP'] = poly.fit_transform(np.nan_to_num(features_oos.FPP.astype(np.float32)).reshape(-1, 1))\n features_oos['FS38'] = poly.fit_transform(np.nan_to_num(features_oos.FS38.astype(np.float32)).reshape(-1, 1))\n features_oos['FS62'] = poly.fit_transform(np.nan_to_num(features_oos.FS62.astype(np.float32)).reshape(-1, 1))\n features_oos['FS100'] = poly.fit_transform(np.nan_to_num(features_oos.FS100.astype(np.float32)).reshape(-1, 1))\n features_oos['FS138'] = poly.fit_transform(np.nan_to_num(features_oos.FS138.astype(np.float32)).reshape(-1, 1))\n features_oos['FR162'] = poly.fit_transform(np.nan_to_num(features_oos.FS162.astype(np.float32)).reshape(-1, 1))\n features_oos['FS200'] = poly.fit_transform(np.nan_to_num(features_oos.FS200.astype(np.float32)).reshape(-1, 1))\n features_oos['FR38'] = poly.fit_transform(np.nan_to_num(features_oos.FR38.astype(np.float32)).reshape(-1, 1))\n features_oos['FR62'] = poly.fit_transform(np.nan_to_num(features_oos.FR62.astype(np.float32)).reshape(-1, 1))\n features_oos['FR100'] = poly.fit_transform(np.nan_to_num(features_oos.FR100.astype(np.float32)).reshape(-1, 1))\n features_oos['FR138'] = poly.fit_transform(np.nan_to_num(features_oos.FR138.astype(np.float32)).reshape(-1, 1))\n features_oos['FR162'] = poly.fit_transform(np.nan_to_num(features_oos.FR162.astype(np.float32)).reshape(-1, 1))\n features_oos['FR200'] = poly.fit_transform(np.nan_to_num(features_oos.FR200.astype(np.float32)).reshape(-1, 1))\n features_oos['SBATR'] = poly.fit_transform(np.nan_to_num(features_oos.SBATR.astype(np.float32)).reshape(-1, 1))\n\n return(features,features_test,features_oos)", "def test_function_tall(self):\r\n self.assertEquals(\r\n 
preview.latex_preview('f(3^2)', functions=['f']),\r\n r'\\text{f}\\left(3^{2}\\right)'\r\n )", "def testRegression(self):\n cont_features = [\n tf.contrib.layers.real_valued_column('feature', dimension=4)]\n\n regressor = tf.contrib.learn.DNNRegressor(feature_columns=cont_features,\n hidden_units=[3, 3])\n\n regressor.fit(input_fn=_iris_input_multiclass_fn, steps=1000)\n regressor.evaluate(input_fn=_iris_input_multiclass_fn, steps=100)", "def test_unfitted_inverse_transform():\n df = cudf.Series(np.random.choice(10, (10,)))\n le = LabelEncoder()\n assert not le._fitted\n\n with pytest.raises(NotFittedError):\n le.transform(df)", "def test_transform_prep():\n args = get_layer('transform', 'manual', 'temporal', False, True, window=2, step_size=3)\n run_layer(*args)", "def test_naive_forecaster_model_with_regressor_pyfunc_output(\n naive_forecaster_model_with_regressor, model_path, data_longley\n):\n _, _, _, X_test = data_longley\n\n flavor.save_model(sktime_model=naive_forecaster_model_with_regressor, path=model_path)\n loaded_pyfunc = flavor.pyfunc.load_model(model_uri=model_path)\n\n X_test_array = convert(X_test, \"pd.DataFrame\", \"np.ndarray\")\n\n model_predict = naive_forecaster_model_with_regressor.predict(fh=FH, X=X_test)\n predict_conf = pd.DataFrame([{\"fh\": FH, \"predict_method\": \"predict\", \"X\": X_test_array}])\n pyfunc_predict = loaded_pyfunc.predict(predict_conf)\n np.testing.assert_array_equal(model_predict, pyfunc_predict)\n\n model_predict_interval = naive_forecaster_model_with_regressor.predict_interval(\n fh=FH, coverage=COVERAGE, X=X_test\n )\n predict_interval_conf = pd.DataFrame(\n [\n {\n \"fh\": FH,\n \"predict_method\": \"predict_interval\",\n \"coverage\": COVERAGE,\n \"X\": X_test_array,\n }\n ]\n )\n pyfunc_predict_interval = loaded_pyfunc.predict(predict_interval_conf)\n np.testing.assert_array_equal(model_predict_interval.values, pyfunc_predict_interval.values)\n\n model_predict_quantiles = naive_forecaster_model_with_regressor.predict_quantiles(\n fh=FH, alpha=ALPHA, X=X_test\n )\n predict_quantiles_conf = pd.DataFrame(\n [\n {\n \"fh\": FH,\n \"predict_method\": \"predict_quantiles\",\n \"alpha\": ALPHA,\n \"X\": X_test_array,\n }\n ]\n )\n pyfunc_predict_quantiles = loaded_pyfunc.predict(predict_quantiles_conf)\n np.testing.assert_array_equal(model_predict_quantiles.values, pyfunc_predict_quantiles.values)\n\n model_predict_var = naive_forecaster_model_with_regressor.predict_var(fh=FH, cov=COV, X=X_test)\n predict_var_conf = pd.DataFrame(\n [\n {\n \"fh\": FH,\n \"predict_method\": \"predict_var\",\n \"cov\": COV,\n \"X\": X_test_array,\n }\n ]\n )\n pyfunc_predict_var = loaded_pyfunc.predict(predict_var_conf)\n np.testing.assert_array_equal(model_predict_var.values, pyfunc_predict_var.values)", "def testTsysLLSp(self):\n self._runTest('tsys', True, self.tsys_funcs.keys(), 'linear,linear')", "def test_T1():", "def test_T1():", "def fit_and_predict(self, X_train, y_train, X_test, y_test):\n if self.feature_transform_func:\n X_train, X_test = self.feature_transform_func(X_train, X_test)\n\n self.fit(X_train, y_train)\n y_predict = self.predict(X_test)\n return self.Acu_eval(y_predict, y_test)", "def test_extra():\n return {\n 'conversion': functools.partial(\n conversion, is_train=False, is_extrapolation=True),\n }", "def regression_vault():\n return load_regression_tests()", "def test_transform_proba():\n args = get_layer('transform', 'manual', 'temporal', True, False, window=2, step_size=3)\n run_layer(*args)", "def test_T2():", "def test_T2():", "def 
test_gtf(self):\n #TODO write bed tests", "def test_fit() -> None:\n mapie = MapieRegressor()\n mapie.fit(X_toy, y_toy)", "def test_regress(self):\r\n x = [0, 12, 29.5, 43, 53, 62.5, 75.5, 85, 93]\r\n y = [8.98, 8.14, 6.67, 6.08, 5.90, 5.83, 4.68, 4.20, 3.72]\r\n self.assertFloatEqual(regress(x, y), (-0.05322, 8.7038), 0.001)\r\n # higher precision from OpenOffice\r\n self.assertFloatEqual(regress(x, y), (-0.05322215, 8.70402730))\r\n\r\n # add test to confirm no overflow error with large numbers\r\n x = [32119, 33831]\r\n y = [2.28, 2.43]\r\n exp = (8.761682243E-05, -5.341209112E-01)\r\n self.assertFloatEqual(regress(x, y), exp, 0.001)", "def test_fx_spec(self):\n df = self.df.copy()\n normalizer = self.normalizer(f0='f0', f1='f1', f2='f2', f3='f3', **self.kwargs)\n normalizer.normalize(df)\n self.assertListEqual(\n normalizer.params['formants'],\n ['f0', 'f1', 'f2', 'f3'])", "def poly_regression_second(X, Y, Xs_test, Ys_test):\n ## YOUR CODE HERE\n #################\n return 0", "def test_transformations_first():\n atom = ATOMClassifier(X10_str, y10, verbose=2, random_state=1)\n atom.encode(max_onehot=None)\n atom.prune(max_sigma=1.7)\n atom.run(\"Tree\")\n pred_1 = atom.tree.predict(X10_str, pipeline=None)\n pred_2 = atom.tree.predict(X10_str, pipeline=True)\n assert not np.array_equal(pred_1, pred_2)\n\n # Fails because the data is not encoded\n pytest.raises(ValueError, atom.tree.predict, X10_str, pipeline=False)", "def test_T01():", "def test_single_linear_regression_coefficients(single_linear_regression_model):\n print(single_linear_regression_model)\n expected_coefficients = [(0, 151.27), (1, 303.90)]\n no_of_betas = len(single_linear_regression_model.B)\n for n in range(no_of_betas):\n assert single_linear_regression_model.B[n] == pytest.approx(\n expected_coefficients[n][1], 0.001\n )", "def test_load_full_transform(self):\n self.add_transform(cond_artist=True, cond_album=True, cond_title=True,\n cond_ensemble=True, cond_composer=True, cond_conductor=True,\n change_artist=True, change_album=True, change_title=True,\n change_ensemble=True, change_composer=True, change_conductor=True,\n pattern_artist='Artist', pattern_album='Album', pattern_title='Title',\n pattern_ensemble='Ensemble', pattern_composer='Composer', pattern_conductor='Conductor',\n to_artist='Artist 2', to_album='Album 2', to_title='Title 2',\n to_ensemble='Ensemble 2', to_composer='Composer 2', to_conductor='Conductor 2')\n self.app.load_data()\n self.assertEqual(len(self.app.transforms), 1)\n transform = self.app.transforms.transforms[1]\n self.assertEqual(transform.cond_artist, True)\n self.assertEqual(transform.cond_album, True)\n self.assertEqual(transform.cond_title, True)\n self.assertEqual(transform.cond_ensemble, True)\n self.assertEqual(transform.cond_composer, True)\n self.assertEqual(transform.cond_conductor, True)\n self.assertEqual(transform.change_artist, True)\n self.assertEqual(transform.change_album, True)\n self.assertEqual(transform.change_title, True)\n self.assertEqual(transform.change_ensemble, True)\n self.assertEqual(transform.change_composer, True)\n self.assertEqual(transform.change_conductor, True)\n self.assertEqual(transform.pattern_artist, 'Artist')\n self.assertEqual(transform.pattern_album, 'Album')\n self.assertEqual(transform.pattern_title, 'Title')\n self.assertEqual(transform.pattern_ensemble, 'Ensemble')\n self.assertEqual(transform.pattern_composer, 'Composer')\n self.assertEqual(transform.pattern_conductor, 'Conductor')\n self.assertEqual(transform.to_artist, 'Artist 2')\n 
self.assertEqual(transform.to_album, 'Album 2')\n self.assertEqual(transform.to_title, 'Title 2')\n self.assertEqual(transform.to_ensemble, 'Ensemble 2')\n self.assertEqual(transform.to_composer, 'Composer 2')\n self.assertEqual(transform.to_conductor, 'Conductor 2')", "def poly_regression_second(X, Y, Xs_test, Ys_test):\n \n poly = PolynomialFeatures(degree = 2)\n X2 = poly.fit_transform(X)[:,1:]\n X2_test = []\n for X_test in Xs_test:\n X2_test.append(poly.fit_transform(X_test)[:,1:])\n mses = linear_regression(X2, Y, X2_test, Ys_test)\n return mses", "def test_detrend_transform(self):\n snv = SNV(q=50, robust=False, detrend=True)\n X_t = snv.fit_transform(self.X)\n self.assertTrue(np.allclose(X_t, 0.0))", "def linear_regression_forecasting(x_train,y_train,x_valid,y_valid,x_test,y_test):\n y_train = y_train.reshape(TRAINING_BATCH_SIZE,N_PREDICTIONS*N_OUTPUT_FEATURES)\n y_valid = y_valid.reshape(VALIDATION_BATCH_SIZE,N_PREDICTIONS*N_OUTPUT_FEATURES)\n layer1 = keras.layers.Flatten(input_shape=[N_INPUT_STEPS,N_INPUT_FEATURES]) #input layer flattens each batch instance from [n_steps,n_input_features] to [n_steps*n_input_features]\n layer2 = keras.layers.Dense(N_PREDICTIONS*N_OUTPUT_FEATURES) #fully connected layer solves combination of linear equations\n model = keras.models.Sequential([layer1,layer2])\n model.compile(loss=\"mse\",optimizer=\"adam\")\n training_history = model.fit(x_train,y_train,epochs=N_EPOCHS,validation_data=(x_valid,y_valid),verbose=0)\n y_pred = model.predict(x_test, TESTING_BATCH_SIZE)\n y_pred = y_pred.reshape(TESTING_BATCH_SIZE,N_PREDICTIONS,N_OUTPUT_FEATURES)\n return training_history.history, y_pred, model", "def test_run_regressions(test_data, model):\n answer = model._Run_Regressions(\n test_data,\n 100,\n forecasts={\"arima\", \"holtwinters\", \"prophet\", \"arima_r\", \"sarima_r\"}\n )", "def test_T3():", "def test_T3():", "def test_transform(self):\n data = pd.DataFrame({\n \"x\": np.array([0.1, 0.3, 0.5]),\n \"y\": np.array([\"yes\", \"yes\", \"no\"])\n })\n\n transformer = DataTransformer()\n transformer._column_transform_info_list = [\n ColumnTransformInfo(\n column_name=\"x\", column_type=\"continuous\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(1, 'tanh'), SpanInfo(3, 'softmax')],\n output_dimensions=1 + 3\n ),\n ColumnTransformInfo(\n column_name=\"y\", column_type=\"discrete\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(2, 'softmax')],\n output_dimensions=2\n )\n ]\n\n transformer._transform_continuous = Mock()\n selected_normalized_value = np.array([[0.1], [0.3], [0.5]])\n selected_component_onehot = np.array([\n [1, 0, 0],\n [1, 0, 0],\n [1, 0, 0],\n ])\n return_value = (selected_normalized_value, selected_component_onehot)\n transformer._transform_continuous.return_value = return_value\n\n transformer._transform_discrete = Mock()\n transformer._transform_discrete.return_value = [np.array([\n [0, 1],\n [0, 1],\n [1, 0],\n ])]\n\n result = transformer.transform(data)\n transformer._transform_continuous.assert_called_once()\n transformer._transform_discrete.assert_called_once()\n\n expected = np.array([\n [0.1, 1, 0, 0, 0, 1],\n [0.3, 1, 0, 0, 0, 1],\n [0.5, 1, 0, 0, 1, 0],\n ])\n\n assert result.shape == (3, 6)\n assert (result[:, 0] == expected[:, 0]).all(), \"continuous-cdf\"\n assert (result[:, 1:4] == expected[:, 1:4]).all(), \"continuous-softmax\"\n assert (result[:, 4:6] == expected[:, 4:6]).all(), \"discrete\"", "def test_exercise_2():\n dirname = os.path.dirname(os.path.realpath(__file__))\n df = 
pd.read_pickle(f\"{dirname}/material/data-consumption-function.pkl\")\n\n def construct_predicted_values(income, alpha, beta, gamma):\n return alpha + beta * income ** gamma\n\n mock_rslt = [-91.1933, 0.5691, 1.0204]\n income = df[\"realgdp\"].values\n df[\"realcons_pred\"] = construct_predicted_values(income, *mock_rslt)\n\n x = df.index.get_level_values(\"Year\")\n fig, ax = plt.subplots()\n ax.plot(x, df[\"realcons_pred\"], label=\"Predicted\")\n ax.plot(x, df[\"realcons\"], label=\"Observed\")", "def transform_data(dataset_train, dataset_test):\n\n #transform dataset using fit_transform\n dataset_train = scaler.fit_transform(dataset_train)\n\n #transform dataset using transform (does not influence teaching)\n dataset_test = scaler.transform(dataset_test)\n\n return dataset_train, dataset_test", "def run_regression(train_embeds, train_labels, test_embeds, test_labels):\n np.random.seed(1)\n from sklearn.linear_model import SGDClassifier\n from sklearn.dummy import DummyClassifier\n from sklearn.metrics import f1_score\n dummy = DummyClassifier()\n dummy.fit(train_embeds, train_labels)\n log = SGDClassifier(loss=\"log\", n_jobs=10, tol=1e-3)\n log.fit(train_embeds, train_labels)\n print(\"F1 score:\", f1_score(test_labels, log.predict(test_embeds), average=\"micro\"))\n print(\"Random baseline f1 score:\", f1_score(test_labels, dummy.predict(test_embeds), average=\"micro\"))", "def data_transformer(data, trafo, power=1.):\n\n tf = trafo.split('-')\n if tf[0] == 'NA': # only power transform\n return data\n elif tf[0] == 'pow': # log_10 transform\n return data ** power\n elif tf[0] == 'log': # log_10 transform\n return np.log10(data) ** power\n elif tf[0] == 'd1': # first difference over period dx\n i = int(tf[1])\n return (data[i:] - data[:-i]) ** power\n elif tf[0] == 'pch': # percentage change over period px\n i = int(tf[1])\n return (100. 
* (data[i:] - data[:-i]) / data[:-i]) ** power\n elif tf[0] == 'ld': # log difference (approx pch for small changes)\n i = int(tf[1])\n return (100 * np.log(data[i:] / data[:-i])) ** power\n else:\n raise ValueError(\"Invalid transformation value.\")", "def linear_regresstion_action(X_train=\"not defined\", X_test=\"not defined\", y_train=\"not defined\", y_test=\"not defined\",\r\n input_data=\"not defined\"):\r\n if \"not defined\" in [X_train, X_test, y_train, y_test]:\r\n X_train, X_test, y_train, y_test = splitting.splitting_data()\r\n\r\n if input_data == \"not defined\":\r\n raise ValueError(\"please provide input data\")\r\n\r\n linreg = LinearRegression()\r\n grid = {\r\n \"normalize\": [\"True\", \"False\"],\r\n }\r\n\r\n model = RandomizedSearchCV(linreg, grid, random_state=1007486)\r\n model.fit(X_train, y_train)\r\n y_pred = model.predict(X_test)\r\n predicted_units = model.predict(input_data)\r\n\r\n # assert score > 0.6, \"fuck this model is too bad!!!\"\r\n return y_pred, predicted_units", "def test_forfatal_functions(self):\n logging.getLogger(\"tensorflow\").setLevel(logging.ERROR)\n logging.getLogger(\"batchglm\").setLevel(logging.WARNING)\n logging.getLogger(\"diffxpy\").setLevel(logging.WARNING)\n\n num_observations = 10\n num_features = 2\n\n sim = Simulator(num_observations=num_observations, num_features=num_features)\n sim.generate_sample_description(num_batches=0, num_conditions=0)\n sim.generate()\n\n random_sample_description = pd.DataFrame({\n \"pseudotime\": np.random.random(size=sim.nobs),\n \"batch\": np.random.randint(2, size=sim.nobs)\n })\n\n test = de.test.continuous_1d(\n data=sim.X,\n continuous=\"pseudotime\",\n df=3,\n formula_loc=\"~ 1 + pseudotime + batch\",\n formula_scale=\"~ 1\",\n factor_loc_totest=\"pseudotime\",\n test=\"wald\",\n sample_description=random_sample_description,\n quick_scale=True,\n batch_size=None,\n training_strategy=\"DEFAULT\",\n dtype=\"float64\"\n )\n\n summary = test.summary()\n ids = test.gene_ids\n\n # 1. Test all additional functions which depend on model computation:\n # 1.1. Only continuous model:\n temp = test.log_fold_change(genes=ids, nonnumeric=False)\n temp = test.max(genes=ids, nonnumeric=False)\n temp = test.min(genes=ids, nonnumeric=False)\n temp = test.argmax(genes=ids, nonnumeric=False)\n temp = test.argmin(genes=ids, nonnumeric=False)\n temp = test.summary(nonnumeric=False)\n # 1.2. 
Full model:\n temp = test.log_fold_change(genes=ids, nonnumeric=True)\n temp = test.max(genes=ids, nonnumeric=True)\n temp = test.min(genes=ids, nonnumeric=True)\n temp = test.argmax(genes=ids, nonnumeric=True)\n temp = test.argmin(genes=ids, nonnumeric=True)\n temp = test.summary(nonnumeric=True)\n\n return True", "def test_inverse_transform_train(transform, outliers_solid_tsds):\n original_df = outliers_solid_tsds.df.copy()\n outliers_solid_tsds.fit_transform([transform])\n outliers_solid_tsds.inverse_transform()\n\n assert (original_df == outliers_solid_tsds.df).all().all()", "def test_fit(self):\n data = pd.DataFrame({\n \"x\": np.random.random(size=100),\n \"y\": np.random.choice([\"yes\", \"no\"], size=100)\n })\n\n transformer = DataTransformer()\n transformer._fit_continuous = Mock()\n transformer._fit_continuous.return_value = ColumnTransformInfo(\n column_name=\"x\", column_type=\"continuous\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(1, 'tanh'), SpanInfo(3, 'softmax')],\n output_dimensions=1 + 3\n )\n\n transformer._fit_discrete = Mock()\n transformer._fit_discrete.return_value = ColumnTransformInfo(\n column_name=\"y\", column_type=\"discrete\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(2, 'softmax')],\n output_dimensions=2\n )\n\n transformer.fit(data, discrete_columns=[\"y\"])\n\n transformer._fit_discrete.assert_called_once()\n transformer._fit_continuous.assert_called_once()\n assert transformer.output_dimensions == 6", "def test_machine_learning():", "def simple_linear_regression(train, test):\r\n predictions = list()\r\n b0, b1 = coefficients(train)\r\n for row in test:\r\n yhat = b0 + b1 * row[0]\r\n predictions.append(yhat)\r\n return predictions", "def test_variational():\n # iris\n #pres = \"Test pour le data set Iris (facile, classique)\"\n #test_from_func_variational(pres, 15, 10, 3, True, Iris)\n\n # breast cancer\n pres = \"Test pour le data set Breast Cancer (facile, classique)\"\n test_from_func_variational(pres, 15, 10, 3, True, Breast_cancer)\n\n # digits\n # pres = \"Test pour le data set Digits (difficile, classique)\"\n # test_from_func(pres, 10, 10, 10, True, Digits, quantum_instance)\n\n # wine\n # pres = \"Test pour le data set Wine (moyen, classique)\"\n # test_from_func(pres, 15, 10, 5, True, Wine, quantum_instance)\n\n # gaussian\n pres = \"Test pour des données gaussiennes (moyen, classique)\"\n for _ in range(1):\n print(\"\\n\")\n print(\"New iteration\")\n test_from_func_variational(pres, 25, 10, 2, True, Gaussian)\n print(\"\\n\")\n\n # small adn strings\n pres = \"Test pour des séquences ADN courtes (difficile, classique)\"\n test_from_func_variational(pres, 10, 15, 14, True, Sequence)\n\n #Quantum data\n pres = \"Test pour des données générées par ordinateur quantique (facile, quantique)\"\n print(pres)\n _, samp_train, samp_test, labels = ad_hoc_data(15, 10, 2, 0.3, True)\n sample_m, sample_p = stock_get(20, 0.3)\n\n labels_me = [-1, 1]\n samp_train_me = {-1: np.array(sample_m[:15]), 1: np.array(sample_p[:15])}\n samp_test_me = {-1: np.array(sample_m[15:]), 1: np.array(sample_p[15:])}\n print(samp_train)\n print(samp_train_me)\n print(samp_test)\n print(samp_test_me)\n\n my_impl_variational(samp_train, samp_test, labels)\n print(\"Pour autres données quantiques\")\n my_impl_variational(samp_train_me, samp_test_me, labels_me)", "def fun(params, slope, data):\n x, y_true = data\n return y_true - model_fun(params, slope, x)", "def testLogisticRegression_TensorData(self):\n language_column = 
tf.contrib.layers.sparse_column_with_hash_bucket(\n 'language', hash_bucket_size=20)\n feature_columns = [\n tf.contrib.layers.embedding_column(language_column, dimension=1),\n tf.contrib.layers.real_valued_column('age')\n ]\n\n classifier = tf.contrib.learn.DNNClassifier(\n n_classes=2,\n feature_columns=feature_columns,\n hidden_units=[3, 3],\n config=tf.contrib.learn.RunConfig(tf_random_seed=1))\n\n classifier.fit(input_fn=_input_fn, steps=100)\n\n scores = classifier.evaluate(input_fn=_input_fn, steps=1)\n self.assertGreater(scores['accuracy'], 0.9)\n self.assertLess(scores['loss'], 0.3)\n predict_input_fn = functools.partial(_input_fn, num_epochs=1)\n predictions = list(\n classifier.predict(input_fn=predict_input_fn, as_iterable=True))\n self.assertListEqual(predictions, [1, 0, 0])", "def test_super_transform_called(self, mocker):\n\n df = d.create_df_2()\n\n x = ScalingTransformer(columns=[\"a\"], scaler=\"standard\")\n\n x.fit(df)\n\n expected_call_args = {0: {\"args\": (d.create_df_2(),), \"kwargs\": {}}}\n\n with h.assert_function_call(\n mocker,\n tubular.base.BaseTransformer,\n \"transform\",\n expected_call_args,\n return_value=d.create_df_2(),\n ):\n\n x.transform(df)", "def runTest(self):\n self.setUp()\n self.test_FiducialTransform1()", "def test_parse_trflp(self):\r\n\r\n data = \\\r\n \"\"\"\tBin (10bp)\tBin (20bp)\tBin (30bp)\tBin (40 bp)\r\nSamp-le 1\t1000\t2000\t3000\t4000\r\nSample 2\t\t2000\t3000\t4000\r\nSample 3\t\t\t3000\t4000\r\nSample 4\t\t\t\t4000\r\nSample 5\t25\t\t\t\"\"\"\r\n samples, otus, data = parse_trflp(data.split('\\n'))\r\n\r\n samples_exp = [\r\n 'Samp.le.1',\r\n 'Sample.2',\r\n 'Sample.3',\r\n 'Sample.4',\r\n 'Sample.5']\r\n otus_exp = ['Bin__10bp_', 'Bin__20bp_', 'Bin__30bp_', 'Bin__40_bp_']\r\n data_exp = array([[1000, 0, 0, 0, 25],\r\n [2000, 2000, 0, 0, 0],\r\n [3000, 3000, 3000, 0, 0],\r\n [4000, 4000, 4000, 4000, 0]])\r\n\r\n self.assertEqual(samples, samples_exp)\r\n self.assertEqual(otus, otus_exp)\r\n assert_almost_equal(data, data_exp)", "def eval_regression_model(model: NeuralNetwork, X_test: np.ndarray, y_test: np.ndarray):\n preds = model.forward(X_test)\n preds = preds.reshape(-1, 1)\n print(\"Mean absolute error: {:.2f}\".format(mae(preds, y_test)))\n print()\n print(\"Root mean squared error {:.2f}\".format(rmse(preds, y_test)))", "def test_convert():", "def test_TLearner(self):\n # TLearner test\n # Instantiate TLearner\n Y, T, X, _ = ihdp_surface_B()\n est = AutomatedTLearner(models=automl_model_reg())\n\n # Test constant and heterogeneous treatment effect, single and multi output y\n\n est.fit(Y, T, X=X)\n _ = est.effect(X)", "def pred_linreg_fxn(x_train, y_train, x_pred):\n slope_train, intercept_train = linreg_fxn(x_train, y_train)\n y_pred = x_pred*slope_train + intercept_train\n # return y_pred\n return y_pred, slope_train, intercept_train", "def test_domain_and_target_type(self):\n t = Linearize()\n assert t.domain_type == \"real\"\n assert t.target_type == \"real\"", "def dataframe_regression(df, cal_mode, do_or_undo=\"do\"):\n\n # perform linear regression to optimise to matched column\n if do_or_undo == \"do\":\n\n # if temperature is used in calibration\n if \"temp\" in cal_mode:\n\n x_results = sm.OLS(df.X_matched, df[[\"X\", \"intercept\", \"T_dev\"]]).fit()\n y_results = sm.OLS(df.Y_matched, df[[\"Y\", \"intercept\", \"T_dev\"]]).fit()\n z_results = sm.OLS(df.Z_matched, df[[\"Z\", \"intercept\", \"T_dev\"]]).fit()\n # if temperature NOT used in calibration\n else:\n x_results = sm.OLS(df.X_matched, df[[\"X\", 
\"intercept\"]]).fit()\n y_results = sm.OLS(df.Y_matched, df[[\"Y\", \"intercept\"]]).fit()\n z_results = sm.OLS(df.Z_matched, df[[\"Z\", \"intercept\"]]).fit()\n\n # perform linear regression to optimise the transformed x,y,z data back to the original x,y,z data\n elif do_or_undo == \"undo\":\n\n # if temperature was used in calibration\n if \"temp\" in cal_mode:\n x_results = sm.OLS(df[\"X\"], df[[\"X_orig\", \"intercept\", \"T_dev\"]]).fit()\n y_results = sm.OLS(df[\"Y\"], df[[\"Y_orig\", \"intercept\", \"T_dev\"]]).fit()\n z_results = sm.OLS(df[\"Z\"], df[[\"Z_orig\", \"intercept\", \"T_dev\"]]).fit()\n # if temperature was NOT used in calibration\n else:\n x_results = sm.OLS(df[\"X\"], df[[\"X_orig\", \"intercept\"]]).fit()\n y_results = sm.OLS(df[\"Y\"], df[[\"Y_orig\", \"intercept\"]]).fit()\n z_results = sm.OLS(df[\"Z\"], df[[\"Z_orig\", \"intercept\"]]).fit()\n\n return x_results, y_results, z_results", "def test_translate(self, model_path):\n # expected file\n model = pysd.read_vensim(model_path)\n random_vars = [\n \"A B uniform matrix\",\n \"A B uniform matrix 1\",\n \"A B uniform matrix 1 0\",\n \"A B uniform scalar\",\n \"A B uniform vec\",\n \"A B uniform vec 1\",\n \"normal A B uniform matrix\",\n \"normal A B uniform matrix 1\",\n \"normal A B uniform matrix 1 0\",\n \"normal scalar\",\n \"normal vec\",\n \"normal vec 1\",\n \"uniform matrix\",\n \"uniform scalar\",\n \"uniform vec\"\n ]\n out = model.run(return_columns=random_vars, flatten_output=False)\n for var in out.columns:\n if isinstance(out[var].values[0], xr.DataArray):\n values = np.array([a.values for a in out[var].values])\n else:\n values = out[var].values\n # assert all values are different in each dimension and time step\n assert len(np.unique(values)) == np.prod(values.shape)", "def test_tanh_activation(self):\n self.assertEqual([0.099668, 0.099668], list(\n af.TanH().output(np.array([0.1, 0.1]))))\n self.assertEqual([0.990066, 0.990066], list(\n af.TanH().derivative(np.array([0.1, 0.1]))))" ]
[ "0.64185077", "0.6215986", "0.6215151", "0.6212859", "0.6164075", "0.60881317", "0.6073903", "0.60206157", "0.60000205", "0.5982403", "0.5937053", "0.5922052", "0.59196836", "0.5906585", "0.58948827", "0.5866362", "0.5859663", "0.5814201", "0.5783901", "0.57772166", "0.5771462", "0.57689106", "0.5761881", "0.57605153", "0.5736527", "0.57331395", "0.5731471", "0.5717569", "0.57153296", "0.57060456", "0.5698003", "0.56931496", "0.56845534", "0.566602", "0.56630164", "0.5620916", "0.56183773", "0.5610913", "0.56097466", "0.5601796", "0.5596466", "0.5590408", "0.5586627", "0.55857337", "0.55771434", "0.55738175", "0.5545401", "0.55453074", "0.55307925", "0.55285716", "0.5522992", "0.55225194", "0.5520956", "0.55190337", "0.55190337", "0.5511177", "0.5510181", "0.5491237", "0.54909515", "0.54810876", "0.54810876", "0.5478301", "0.5477142", "0.5467714", "0.5466527", "0.54660803", "0.5456164", "0.5452423", "0.54514337", "0.54468894", "0.5444323", "0.54256296", "0.5420508", "0.5411022", "0.540744", "0.540744", "0.54044175", "0.54009837", "0.53984594", "0.53971136", "0.53892916", "0.5377475", "0.5375848", "0.5366765", "0.5356191", "0.53518975", "0.53474903", "0.5336124", "0.5334674", "0.53340983", "0.5325806", "0.53193706", "0.5318558", "0.53135854", "0.53125095", "0.5309659", "0.5305763", "0.5301718", "0.5299736", "0.52911955", "0.52898294" ]
0.0
-1
Reference values taken from NIST database
def test_transport_sanity(self):
    T = 400
    cv_mole, W = 21005.045895231186, 28.014
    species_name = "N2"
    data = ct_properties.ctThermoTransport("gri30.cti", verbose=False)
    data.evaluate_properties()
    i = data.gas.species_index(species_name)
    As, Ts, _, poly_mu, poly_kappa, log_poly_mu, log_poly_kappa = ct2foam_utils.fit_ct_transport(data)
    mu_s = tr_fitter.sutherland(T, As[i], Ts[i])
    kappa_s = tr_fitter.euken(mu_s, cv_mole, W, R)
    mu_logp, kappa_logp = tr_fitter.eval_log_polynomial(log_poly_mu[i, :], log_poly_kappa[i, :], T)
    mu_p, kappa_p = tr_fitter.eval_polynomial(poly_mu[i, :], poly_kappa[i, :], T)
    # rough test whether they are in the same scale...
    mu_ref = 2.2217e-5
    kappa_ref = 0.032205
    self.assertTrue(np.abs(mu_s - mu_ref) / np.abs(mu_ref) < 0.07)
    self.assertTrue(np.abs(mu_p - mu_ref) / np.abs(mu_ref) < 0.01)
    self.assertTrue(np.abs(mu_logp - mu_ref) / np.abs(mu_ref) < 0.01)
    self.assertTrue(np.abs(kappa_s - kappa_ref) / np.abs(kappa_ref) < 0.05)
    self.assertTrue(np.abs(kappa_p - kappa_ref) / np.abs(kappa_ref) < 0.05)
    self.assertTrue(np.abs(kappa_logp - kappa_ref) / np.abs(kappa_ref) < 0.05)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def referenzdaten_einlesen(self):\n self.refdata = np.genfromtxt(self.referencefile, skip_header=1, usecols=np.asarray(self.referenzspalte))\n self.Referencedata = Reference()", "def prep_reference(self):\n\n # if basin\n if self.config.metric == 'basin':\n df = pd.read_csv(self.config.gcam_basin_names_file, usecols=['basin_id'])\n m = sorted(df['basin_id'].tolist())\n\n # if AEZ, use 1 through 18 - this will not change\n elif self.config.metric == 'aez':\n m = list(range(1, 19, 1))\n\n # read in region ids\n rdf = pd.read_csv(self.config.gcam_region_names_file, usecols=['gcam_region_id'])\n r = sorted(rdf['gcam_region_id'].tolist())\n\n return m, r", "def identify_primary_reference_datasets(conn, log):\n\n primary_ref = {}\n\n primary_ref['refimg_id_ip'] = phot_db.find_primary_reference_image_for_field(conn)\n\n query = 'SELECT facility, filter, software FROM reference_images WHERE refimg_id=\"'+str(primary_ref['refimg_id_ip'])+'\"'\n t = phot_db.query_to_astropy_table(conn, query, args=())\n\n primary_ref['facility_id'] = t['facility'][0]\n primary_ref['software_id'] = t['software'][0]\n\n query = 'SELECT filter_id, filter_name FROM filters WHERE filter_name=\"ip\"'\n t = phot_db.query_to_astropy_table(conn, query, args=())\n primary_ref['ip'] = t['filter_id'][0]\n\n for f in ['rp', 'gp']:\n query = 'SELECT filter_id, filter_name FROM filters WHERE filter_name=\"'+f+'\"'\n t = phot_db.query_to_astropy_table(conn, query, args=())\n primary_ref[f] = t['filter_id'][0]\n\n query = 'SELECT refimg_id FROM reference_images WHERE facility=\"'+str(primary_ref['facility_id'])+\\\n '\" AND software=\"'+str(primary_ref['software_id'])+\\\n '\" AND filter=\"'+str(t['filter_id'][0])+'\"'\n qs = phot_db.query_to_astropy_table(conn, query, args=())\n\n if len(qs) > 0:\n primary_ref['refimg_id_'+f] = qs['refimg_id'][0]\n else:\n log.info('WARNING: Database contains no primary reference image data in filter '+f)\n\n log.info('Identified the primary reference datasets for this field as:')\n for key, value in primary_ref.items():\n log.info(str(key)+' = '+str(value))\n\n return primary_ref", "def fetchReferences(self, dataRef, exposure):\n skyMap = dataRef.get(self.dataPrefix + \"skyMap\", immediate=True)\n tractInfo = skyMap[dataRef.dataId[\"tract\"]]\n patch = tuple(int(v) for v in dataRef.dataId[\"patch\"].split(\",\"))\n patchInfo = tractInfo.getPatchInfo(patch)\n references = lsst.afw.table.SourceCatalog(self.references.schema)\n references.extend(self.references.fetchInPatches(dataRef, patchList=[patchInfo]))\n return references", "def lookup():", "def get_reference_data(ref_name):\n\n conn = r.connect(host=HOST, port=PORT, db=DB)\n ref_data = r.table(FINGERPRINT_TABLE).get(ref_name).run(conn)\n\n try:\n return ref_data[\"distance\"], ref_data[\"spl\"], ref_data[\"fingerprint\"]\n except KeyError:\n raise LookupError(\"Database does not have the requested reference\")", "def get_reference(self):\t\t\n\t\treturn self._reference", "def fetchRefSeq(genome = 'hg18',lookupval = 'name'):\n cursor=gbdbConnect(gbdbname=genome)\n select=\"SELECT * FROM refGene\"\n cursor.execute(select)\n rows=cursor.fetchall()\n output={}\n for chr in genomelib.chr_names:\n output[chr]={}\n output[chr]['+']={}\n output[chr]['-']={}\n for row in rows:\n if row['chrom'] in genomelib.chr_names:\n output[row['chrom']][row['strand']][row[lookupval]]=row\n return output", "def values():", "def db_values(self, db):", "def relink_datasets(self, name, ref_product):\n\n list_ds = [(ds[\"name\"], ds[\"reference product\"], 
ds[\"location\"]) for ds in self.db]\n\n for act in self.db:\n for exc in act['exchanges']:\n if \"name\" in exc and \"product\" in exc and exc[\"type\"] == \"technosphere\":\n if (exc['name'], exc.get('product')) == (name, ref_product):\n if (name, ref_product, act[\"location\"]) in list_ds:\n exc[\"location\"] = act[\"location\"]\n else:\n try:\n new_loc = self.geo.ecoinvent_to_iam_location(act[\"location\"])\n except KeyError:\n new_loc = \"\"\n\n if (name, ref_product, new_loc) in list_ds:\n exc[\"location\"] = new_loc\n else:\n # new location in ei3.7, not yet defined in `constructive_geometries`\n if act[\"location\"] in (\"North America without Quebec\", \"US only\"):\n new_loc = self.geo.ecoinvent_to_iam_location(\"US\")\n exc[\"location\"] = new_loc\n\n elif act[\"location\"] in (\"RoW\", \"GLO\"):\n new_loc = self.geo.ecoinvent_to_iam_location(\"CN\")\n exc[\"location\"] = new_loc\n else:\n print(\"Issue with {} used in {}: cannot find the IAM equiavlent for \"\n \"the location {}\".format(name, act[\"name\"], act[\"location\"]))\n\n if \"input\" in exc:\n exc.pop(\"input\")", "def retrieve(dbxrefs, basics=True, hierarchy=True, wikipedia=True, literature=True, cross_references=True, overlaps=True):\n resolved = dbxref.resolver.resolve(dbxrefs, check_existence=False)\n documents = []\n for entry in resolved:\n # Construct URL for retrieve\n json_url = entry['locations']['json'][0]\n logger.debug('URL: %s', json_url)\n r = requests.get(json_url)\n logger.debug('Content: %s', r.text)\n ipro = json.loads(r.text)\n\n # Parse retrieved json file by selected Options\n output = {\"id\": entry[\"dbxref\"]}\n if basics:\n try:\n output.update(accession=ipro[\"metadata\"][\"accession\"], entry_type=ipro[\"metadata\"][\"type\"],\n description=ipro[\"metadata\"][\"description\"], counters=ipro[\"metadata\"][\"counters\"],\n entry_id=ipro[\"metadata\"][\"entry_id\"], name=ipro[\"metadata\"][\"name\"],\n source_database=ipro[\"metadata\"][\"source_database\"])\n except KeyError:\n logger.warning(\"One or more basic information were not available for the given entry. 
Please check your output.\")\n if hierarchy:\n try:\n output.update(hierarchy=ipro[\"metadata\"][\"hierarchy\"])\n except KeyError:\n logger.warning(\"Hierarchy information was not available for the given entry.\")\n if wikipedia:\n try:\n output.update(wikipedia=ipro[\"metadata\"][\"wikipedia\"])\n except KeyError:\n logger.warning(\"Wikipedia articles were not available for the given entry.\")\n if literature:\n try:\n output.update(literature=ipro[\"metadata\"][\"literature\"])\n except KeyError:\n logger.warning(\"Literature was not available for the given entry.\")\n if cross_references:\n try:\n output.update(cross_references=ipro[\"metadata\"][\"cross_references\"])\n except KeyError:\n logger.warning(\"Cross_references were not available for the given entry.\")\n if overlaps:\n try:\n output.update(overlaps=ipro[\"metadata\"][\"overlaps_with\"])\n except KeyError:\n logger.warning(\"Overlap information was not available for the given entry.\")\n documents.append(output)\n return documents", "def _reference(self):\r\n return {1:2, \"key1\":\"value1\", \"key2\":(1,2,3)}", "def __generate_reference__(self, triple_map, **kwargs):\n raw_value = self.source.get(str(triple_map.reference))\n if raw_value is None or len(raw_value) < 1:\n return\n if hasattr(triple_map, \"datatype\"):\n if triple_map.datatype == NS_MGR.xsd.anyURI.rdflib:\n output = rdflib.URIRef(raw_value)\n else:\n output = rdflib.Literal(\n raw_value,\n datatype=triple_map.datatype)\n else:\n output = rdflib.Literal(raw_value)\n return output", "def test_reference(self):\n dset = self.f.create_dataset('x', (1,), dtype=h5py.ref_dtype)\n dset[0] = self.f.ref\n self.assertEqual(type(dset[0]), h5py.Reference)", "def _process_dbxref(self):\n\n raw = '/'.join((self.rawdir, 'dbxref'))\n logger.info(\"processing dbxrefs\")\n line_counter = 0\n\n with open(raw, 'r') as f:\n filereader = csv.reader(f, delimiter='\\t', quotechar='\\\"')\n f.readline() # read the header row; skip\n for line in filereader:\n (dbxref_id, db_id, accession, version, description, url) = line\n # dbxref_id\tdb_id\taccession\tversion\tdescription\turl\n # 1\t2\tSO:0000000\t\"\"\n\n db_ids = { # the databases to fetch\n 50: 'PMID', # pubmed\n 68: 'RO', # obo-rel\n 71: 'FBdv', # FBdv\n 74: 'FBbt', # FBbt\n # 28:, # genbank\n 30: 'OMIM', # MIM\n # 38, # ncbi\n 75: 'ISBN', # ISBN\n 46: 'PMID', # PUBMED\n 51: 'ISBN', # isbn\n 52: 'SO', # so\n # 76, # http\n 77: 'PMID', # PMID\n 80: 'FBcv', # FBcv\n # 95, # MEDLINE\n 98: 'REACT', # Reactome\n 103: 'CHEBI', # Chebi\n 102: 'MESH', # MeSH\n 106: 'OMIM', # OMIM\n 105: 'KEGG-path', # KEGG pathway\n 107: 'DOI', # doi\n 108: 'CL', # CL\n 114: 'CHEBI', # CHEBI\n 115: 'KEGG', # KEGG\n 116: 'PubChem', # PubChem\n # 120, # MA???\n 3: 'GO', # GO\n 4: 'FlyBase', # FlyBase\n # 126, # URL\n 128: 'PATO', # PATO\n # 131, # IMG\n 2: 'SO', # SO\n 136: 'MESH', # MESH\n 139: 'CARO', # CARO\n 140: 'NCBITaxon', # NCBITaxon\n # 151, # MP ???\n 161: 'DOI', # doi\n 36: 'BDGP', # BDGP\n # 55, # DGRC\n # 54, # DRSC\n # 169, # Transgenic RNAi project???\n 231: 'RO', # RO ???\n 180: 'NCBIGene', # entrezgene\n # 192, # Bloomington stock center\n 197: 'UBERON', # Uberon\n 212: 'ENSEMBL', # Ensembl\n # 129, # GenomeRNAi\n 275: 'PMID', # PubMed\n 286: 'PMID', # pmid\n 264: 'HGNC',\n # 265: 'OMIM', # OMIM_Gene\n 266: 'OMIM', # OMIM_Phenotype\n 300: 'DOID', # DOID\n 302: 'MESH', # MSH\n 347: 'PMID', # Pubmed\n }\n\n if accession.strip() != '' and int(db_id) in db_ids:\n # scrub some identifiers here\n m = re.match(\n 
r'(doi|SO|GO|FBcv|FBbt_root|FBdv|FBgn|FBdv_root|FlyBase|FBbt):',\n accession)\n if m:\n accession = re.sub(m.group(1)+r'\\:', '', accession)\n elif re.match(\n r'(FlyBase miscellaneous CV|cell_lineprop|relationship type|FBgn$)',\n accession):\n continue\n elif re.match(r'\\:', accession): # starts with a colon\n accession = re.sub(r'\\:', '', accession)\n elif re.search(r'\\s', accession):\n # skip anything with a space\n # logger.debug(\n # 'dbxref %s accession has a space: %s',\n # dbxref_id, accession)\n continue\n\n if re.match(r'http', accession):\n did = accession.strip()\n else:\n prefix = db_ids.get(int(db_id))\n did = ':'.join((prefix, accession.strip()))\n if re.search(r'\\:', accession) and prefix != 'DOI':\n logger.warning(\n 'id %s may be malformed; skipping', did)\n\n self.dbxrefs[dbxref_id] = {db_id: did}\n\n elif url != '':\n self.dbxrefs[dbxref_id] = {db_id: url.strip()}\n else:\n continue\n\n # the following are some special cases that we scrub\n if int(db_id) == 2 \\\n and accession.strip() == 'transgenic_transposon':\n # transgenic_transposable_element\n self.dbxrefs[dbxref_id] = {db_id: 'SO:0000796'}\n\n line_counter += 1\n\n return", "def test_reference_field(self):\n dt = np.dtype([('a', 'i'),('b', h5py.ref_dtype)])\n\n dset = self.f.create_dataset('x', (1,), dtype=dt)\n dset[0] = (42, self.f['/'].ref)\n\n out = dset[0]\n self.assertEqual(type(out[1]), h5py.Reference) # isinstance does NOT work", "def load_references(self, collections, item):", "def make_reference(self):\r\n d = dict([(\"M\", 1000),\r\n (\"CM\", 800),\r\n (\"D\", 500),\r\n (\"CD\", 300),\r\n (\"C\", 100),\r\n (\"XC\", 80),\r\n (\"L\", 50),\r\n (\"XL\", 30),\r\n (\"X\", 10),\r\n (\"IX\", 8),\r\n (\"V\", 5),\r\n (\"IV\", 3),\r\n (\"I\", 1),])\r\n return d", "def __init__(self, issuer):\n self.reference = {\n REF_COUNTRY: {\n REF_NAME: COUNTRY,\n REF_VALUE: '',\n },\n REF_COMMON_NAME: {\n REF_NAME: COMMON_NAME,\n REF_VALUE: ''\n },\n REF_LOCALITY: {\n REF_NAME: LOCALITY,\n REF_VALUE: ''\n },\n REF_STATE_LOCALITY: {\n REF_NAME: STATE_LOCALITY,\n REF_VALUE: ''\n },\n REF_ORGANIZATION: {\n REF_NAME: ORGANIZATION,\n REF_VALUE: ''\n },\n REF_ORGANIZATION_UNIT: {\n REF_NAME: ORGANIZATION_UNIT,\n REF_VALUE: ''\n },\n REF_EMAIL_ADDRESS: {\n REF_NAME: EMAIL_ADDRESS,\n REF_VALUE: ''\n }\n }\n\n issuer_as_text = issuer.__str__()\n issuer_as_text = issuer_as_text.replace('/', '*')\n issuer_as_text = issuer_as_text.split('*')\n issuer_as_text.pop(0)\n\n i = 0\n for data in issuer_as_text:\n master_key = data.split('=')[0]\n secondary_key = REF_NAME\n secondary_value = data.split('=')[1]\n self.reference[master_key][REF_VALUE] = secondary_value\n i += 1", "def references(name, tag):\n\n gen_refs = \"\\n\".join([''.join([\"J. Bartels, The technique of scaling \",\n \"indices K and Q of geomagnetic activity, \",\n \"Ann. Intern. Geophys. Year 4, 215-226, \",\n \"1957.\"]),\n ''.join([\"J. Bartels,The geomagnetic measures for \",\n \"the time-variations of solar corpuscular \",\n \"radiation, described for use in \",\n \"correlation studies in other geophysical \",\n \"fields, Ann. Intern. Geophys. Year 4, \",\n \"227-236, 1957.\"]),\n ''.join([\"P.N. Mayaud, Derivation, Meaning and Use \",\n \"of Geomagnetic Indices, Geophysical \",\n \"Monograph 22, Am. Geophys. Union, \",\n \"Washington D.C., 1980.\"]),\n ''.join([\"G.K. Rangarajan, Indices of magnetic \",\n \"activity, in Geomagnetism, edited by I.A. \",\n \"Jacobs, Academic, San Diego, 1989.\"]),\n ''.join([\"M. Menvielle and A. 
Berthelier, The \",\n \"K-derived planetary indices: description \",\n \"and availability, Rev. Geophys. 29, 3, \",\n \"415-432, 1991.\"])])\n refs = {'kp': {'': gen_refs, 'forecast': gen_refs, 'recent': gen_refs}}\n\n return refs[name][tag]", "def get_ref_values(sbf):\n\n filename = path.join(REFERENCE_DIR, \"{}.npy\".format(sbf))\n values = np.split(np.load(filename), ANGULAR_POINTS + 1)\n return values[0], values[1:]", "def read_refcopy(f):\n\n d_refcopy = {}\n for n, line in enumerate(open(f)):\n if line.startswith('#'):\n continue\n\n line = line.rstrip()\n if line == '':\n continue\n\n _lis = line.split('\\t')\n taxa, num, = _lis[:2]\n skip = False\n for word in EXCLUDE:\n if word in taxa:\n skip = True\n break\n\n if skip:\n continue \n\n # the parsing of taxa works for both mothur output and this\n taxa = taxa.rstrip(';') # for mothur classfy.seqs output\n lis = taxa.split(';')\n lis2 = []\n for item in lis:\n item = item.strip() # for copyrigher copy table ' ;' separater\n if item.endswith(')'):\n item = item.rsplit('(', 1)[0].strip()\n\n # remove taxon level prefix, e.g. 'p__Firmicutes'\n if '__' in item:\n item = item.split('__', 1)[1]\n\n #item = item.strip('\"')\n\n # green gene taxonomy has sapce\n item = item.replace(' ', '_')\n\n item = item.lower()\n if item in ['', 'unknown', 'other', 'unassigned']:\n item = 'Unclassifed'\n\n item = item.capitalize()\n lis2.append(item)\n\n length = len(lis2)\n assert length <= LEVELS, '> {} levels found ({})'.format(\n LEVELS, length)\n if length != LEVELS:\n lis2 = lis2 + ['Unclassified']*(LEVELS - length)\n\n tu = tuple(lis2)\n d_refcopy[tu] = float(num)\n\n return d_refcopy", "def load_srumid_lookups(database):\n id_lookup = {}\n #Note columns 0 = Type, 1 = Index, 2 = Value\n lookup_table = database.get_table_by_name('SruDbIdMapTable')\n column_lookup = dict([(x.name,index) for index,x in enumerate(lookup_table.columns)]) \n for rec_entry_num in range(lookup_table.number_of_records):\n bin_blob = smart_retrieve(lookup_table,rec_entry_num, column_lookup['IdBlob'])\n if smart_retrieve(lookup_table,rec_entry_num, column_lookup['IdType'])==3:\n bin_blob = BinarySIDtoStringSID(bin_blob)\n elif not bin_blob == \"Empty\":\n bin_blob = blob_to_string(bin_blob)\n id_lookup[smart_retrieve(lookup_table,rec_entry_num, column_lookup['IdIndex'])] = bin_blob\n return id_lookup", "def getReference(self, data: ghidra.program.model.listing.Data, toAddress: ghidra.program.model.address.Address) -> ghidra.program.model.symbol.Reference:\n ...", "def store_wn_lookup():\n syns = list( wn.all_synsets() )\n #syn_str = map(lambda s: str(s).replace(\"Synset\",'').strip('()').strip(\"'\"), syns)\n syn_str = map(lambda s: str(s).replace(\"Synset\",'').strip('()').strip(\"'\").strip('\"'), syns)\n #offsets_list = [(\"n%08d\" % s.offset, s) for s in syns]\n olist = map(lambda a, b: (\"n%08d\" % a.offset, b), syns, syn_str)\n offset_dict = dict(olist)\n pickle.dump(offset_dict, open('/Users/xlx/Documents/proj/imgnet-flickr/db3/wn_offset_dict.pickle', 'wb'))", "def __generate_reference__(self, triple_map, **kwargs):\n pass", "def __generate_reference__(self, triple_map, **kwargs):\n pass", "def get_uid(new_data, metadata, conn):\n for awe in new_data[\"all_wkn_entry\"]:\n for key in [\"wkn\",\"isin\"]:\n if awe[key] == \"\":continue\n if key == \"wkn\":\n s = select([metadata.tables[\"WKN\"]]).where(metadata.tables[\"WKN\"].c.WKN == awe[key]).order_by(\n metadata.tables[\"WKN\"].c.unternehmenId.desc())\n else:\n s = 
select([metadata.tables[\"WKN\"]]).where(metadata.tables[\"WKN\"].c.ISIN == awe[key]).order_by(\n metadata.tables[\"WKN\"].c.unternehmenId.desc())\n result = conn.execute(s)\n try:\n someid = result.fetchone()[0]\n except:\n new_data['reference'] = new_data['unternehmenId']\n new_data['id'] = \"\"\n return 0\n s = select([metadata.tables['MainRelation']]).where(\n metadata.tables['MainRelation'].c.referenz == someid)\n result = conn.execute(s)\n fone = result.fetchall()\n if len(fone) > 0:\n for row in fone:\n new_data['reference'] = row[0]\n new_data['id'] = row[4]\n return 0\n\n s = select([metadata.tables['MainRelation']]).where(\n metadata.tables['MainRelation'].c.weiteresAuftreten == someid)\n result = conn.execute(s)\n fone = result.fetchall()\n if len(fone) > 0:\n for row in fone:\n new_data['reference'] = row[0]\n new_data['id'] = row[4]\n return 0\n new_data['reference'] = new_data['unternehmenId']\n new_data['id'] = \"\"\n return 0\n #TODO-HINT: Be aware its order is descendent by year to avoid apply new WKN's to old ones which get used in early years!", "def get_actual_ref(self, ref):\n while ref in self.reference_map:\n ref = self.reference_map[ref]\n return ref", "def image_reference(self, image_id):\n info = self.image_info[image_id]\n if info[\"source\"] == \"coco\":\n return \"http://cocodataset.org/#explore?id={}\".format(info[\"id\"])\n else:\n super(CocoDataset, self).image_reference(image_id)", "def read_reference_data():\n return {f:read_local_file(f) for f in os.listdir(DATA_DIR)}", "def populate_crossreference_dictionary(self):\n master_crossreference_dictionary = dict()\n\n # If additional crossreferences need to be used to find interactors, they can be added here.\n # Use the crossreference prefix as the dictionary name.\n # Also add a regex entry to the resolve_identifier function.\n master_crossreference_dictionary['UniProtKB'] = dict()\n master_crossreference_dictionary['ENSEMBL'] = dict()\n master_crossreference_dictionary['NCBI_Gene'] = dict()\n master_crossreference_dictionary['RefSeq'] = dict()\n\n for key in master_crossreference_dictionary:\n self.logger.info('Querying for %s cross references.', key)\n with Neo4jHelper().run_single_parameter_query(self.query_xrefs, key) as result:\n for record in result:\n cross_ref_record = None\n # Modify the cross reference ID to match the PSI MITAB format if necessary.\n # So far, this is just converting 'NCBI_Gene' to 'entrez gene/locuslink'.\n if record['cr.globalCrossRefId'].startswith('NCBI_Gene'):\n cross_ref_record_split = record['cr.globalCrossRefId'].split(':')[1]\n cross_ref_record = 'entrez gene/locuslink:' + cross_ref_record_split\n else:\n cross_ref_record = record['cr.globalCrossRefId']\n\n # The crossreference dictionary is a list of genes\n # linked to a single crossreference.\n # Append the gene if the crossref dict entry exists.\n # Otherwise, create a list and append the entry.\n if cross_ref_record.lower() in master_crossreference_dictionary[key]:\n master_crossreference_dictionary[key][cross_ref_record.lower()].append(record['g.primaryKey'])\n else:\n master_crossreference_dictionary[key][cross_ref_record.lower()] = []\n master_crossreference_dictionary[key][cross_ref_record.lower()].append(record['g.primaryKey'])\n\n # The ids in PSI-MITAB files are lower case, hence the .lower() used above.\n\n return master_crossreference_dictionary", "def add_entrez_ids(lookup: pd.DataFrame) -> pd.DataFrame:\n \n celeg_ens2entrez = get_species_ens_entrez_lookup(CELEGANS_DATASET_NAME)\n celeg_ens2entrez.columns = 
['celeg_ensembl_id',\n 'celeg_entrez_id']\n \n droso_ens2entrez = get_species_ens_entrez_lookup(DROSO_DATASET_NAME)\n droso_ens2entrez.columns = ['dmelanogaster_ensembl_id',\n 'dmelanogaster_entrez_id']\n \n lookup_with_entrez = pd.merge(lookup, celeg_ens2entrez,\n left_on=\"Gene stable ID\",\n right_on=\"celeg_ensembl_id\",\n how=\"left\")\n \n lookup_with_entrez = pd.merge(lookup_with_entrez, droso_ens2entrez,\n left_on=\"Drosophila melanogaster gene stable ID\",\n right_on=\"dmelanogaster_ensembl_id\",\n how=\"left\")\n \n lookup_with_entrez.to_csv(LOOKUP_FILENAME, header=True, index=False)\n return lookup_with_entrez", "def get_data(self, reference):\n \n refindex = self.columns[reference]\n refdata = self.data[refindex]\n result = dict()\n for feature, index in self.rows.items():\n result[feature] = refdata[index]\n return result", "def GetValues(self):", "def build_reference(sets_to_reference):\n\n number_to_uuid = {}\n card_reference = {}\n name_to_uuid = {}\n uuid_to_number = {}\n\n print(\"- Building internal Card Reference -\")\n for setName in tqdm(sets_to_reference) :\n # Fix 1 on WIN systems since CON.json is reserved :\n if setName == 'CON':\n setName = 'CON_'\n # End Fix 1\n with open(ROOT_DIR + 'data/sets/' + setName + '.json') as f:\n # Fix 2 on WIN systems since CON.json is reserved :\n if setName == 'CON_':\n setName = 'CON'\n # End Fix 2\n data = json.load(f)\n name_to_uuid[setName]= {} \n number_to_uuid[setName]= {}\n card_reference[setName]= {}\n uuid_to_number[setName]= {}\n for item in data['data']['cards']:\n #print(item)\n number_to_uuid[setName][item['number']] = item['uuid']\n name_to_uuid[setName][item['name']] = item['uuid']\n uuid_to_number[setName][item['uuid']] = item['number']\n foreignName = {}\n for languageData in item['foreignData']:\n if languageData['language'] == 'Spanish' and 'ES' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'ES'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'French' and 'FR' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'FR'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'German' and 'DE' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'DE'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'Italian' and 'IT' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'IT'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'Portuguese' and 'PT' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'PT'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'Japanese' and 'JP' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'JP'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'Korean' and 'KO' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'KO'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'Russian' and 'RU' in languages_to_reference:\n name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'RU'\n foreignName[language] = languageData['name']\n elif languageData['language'] == 'Chinese' and 'ZH' in languages_to_reference:\n 
name_to_uuid[setName][languageData['name']] = item['uuid']\n language = 'ZH'\n foreignName[language] = languageData['name']\n card_reference[setName][item['uuid']] = {'name' : item['name'],\n 'colorIdentity' : item['colorIdentity'],\n 'convertedManaCost' : item['convertedManaCost'],\n 'legalities' : item['legalities'],\n 'foreignName' : foreignName,\n 'number' : item['number'],\n 'rarity' : item['rarity'],\n 'setCode' : item['setCode'],\n 'subtypes' : item['subtypes'],\n 'supertypes' : item['supertypes'],\n 'types' : item['types'],\n 'uuid' : item['uuid'] }\n try :\n card_reference[setName][item['uuid']]['keywords'] = item['keywords']\n except :\n pass\n try :\n card_reference[setName][item['uuid']]['power'] = item['power']\n except :\n pass\n try :\n card_reference[setName][item['uuid']]['toughness'] = item['toughness']\n except :\n pass \n try :\n card_reference[setName][item['uuid']]['manaCost'] = item['manaCost']\n except :\n pass\n # Token version of the set : setname is preceded by 'T' \n name_to_uuid['T'+setName]= {} \n number_to_uuid['T'+setName]= {}\n card_reference['T'+setName]= {}\n uuid_to_number['T'+setName]= {} \n for item in data['data']['tokens']:\n number_to_uuid['T'+setName][item['number']] = item['uuid']\n name_to_uuid['T'+setName][item['name']] = item['uuid']\n uuid_to_number['T'+setName][item['uuid']] = item['number']\n card_reference['T'+setName][item['uuid']] = {'name' : item['name'],\n 'colorIdentity' : item['colorIdentity'],\n 'convertedManaCost' : 0,\n 'number' : item['number'],\n 'setCode' : item['setCode'],\n 'subtypes' : item['subtypes'],\n 'supertypes' : item['supertypes'],\n 'types' : item['types'],\n 'uuid' : item['uuid'] }\n return (card_reference, name_to_uuid, number_to_uuid, uuid_to_number)", "def refseq_based_clustering(self):\n self.refseq_based = Usefuls.NonRedSet.NonRedSetDict()\n for prey in self.ivv_info.Prey_info().preys():\n refseqid = self.get_refseq(prey)\n if refseqid:\n self.refseq_based.append_Dict(refseqid, prey)", "def referencevalue(self, *args, **kwargs):\n return _coordsys.coordsys_referencevalue(self, *args, **kwargs)", "def download_data(self) -> None: # coverage: ignore\n\n navaids = []\n c = requests.get(f\"{base_url}/earth_fix.dat\")\n\n for line in c.iter_lines():\n\n line = line.decode(encoding=\"ascii\", errors=\"ignore\").strip()\n\n # Skip empty lines or comments\n if len(line) < 3 or line[0] == \"#\":\n continue\n\n # Start with valid 2 digit latitude -45. or 52.\n if not ((line[0] == \"-\" and line[3] == \".\") or line[2] == \".\"):\n continue\n\n # Data line => Process fields of this record, separated by a comma\n # Example line:\n # 30.580372 -094.384169 FAREL\n fields = line.split()\n navaids.append(\n Navaid(\n fields[2],\n \"FIX\",\n float(fields[0]),\n float(fields[1]),\n None,\n None,\n None,\n None,\n )\n )\n\n c = requests.get(f\"{base_url}/earth_nav.dat\")\n\n for line in c.iter_lines():\n\n line = line.decode(encoding=\"ascii\", errors=\"ignore\").strip()\n\n # Skip empty lines or comments\n if len(line) == 0 or line[0] == \"#\":\n continue\n\n # Data line => Process fields of this record, separated by a comma\n # Example lines:\n # 2 58.61466599 125.42666626 451 522 30 0.0 A Aldan NDB\n # 3 31.26894444 -085.72630556 334 11120 40 -3.0 OZR CAIRNS VOR-DME\n # type lat lon elev freq ? 
var id desc\n # 0 1 2 3 4 5 6 7 8\n\n fields = line.split()\n\n # Valid line starts with integers\n if not fields[0].isdigit():\n continue # Next line\n\n # Get code for type of navaid\n itype = int(fields[0])\n\n # Type names\n wptypedict = {\n 2: \"NDB\",\n 3: \"VOR\",\n 4: \"ILS\",\n 5: \"LOC\",\n 6: \"GS\",\n 7: \"OM\",\n 8: \"MM\",\n 9: \"IM\",\n 12: \"DME\",\n 13: \"TACAN\",\n }\n\n # Type code never larger than 20\n if itype not in list(wptypedict.keys()):\n continue # Next line\n\n wptype = wptypedict[itype]\n\n # Select types to read\n if wptype not in [\"NDB\", \"VOR\", \"ILS\", \"GS\", \"DME\", \"TACAN\"]:\n continue # Next line\n\n # Find description\n try:\n idesc = line.index(fields[7]) + len(fields[7])\n description: Optional[str] = line[idesc:].strip().upper()\n except Exception:\n description = None\n\n navaids.append(\n Navaid(\n fields[7],\n wptype,\n float(fields[1]),\n float(fields[2]),\n float(fields[3][1:])\n if fields[3].startswith(\"0-\")\n else float(fields[3]),\n float(fields[4])\n if wptype == \"NDB\"\n else float(fields[4]) / 100,\n float(fields[6])\n if wptype in [\"VOR\", \"NDB\", \"ILS\", \"GS\"]\n else None,\n description,\n )\n )\n\n self._data = pd.DataFrame.from_records(\n navaids, columns=NavaidTuple._fields\n )\n\n self._data.to_pickle(self.cache_dir / \"traffic_navaid.pkl\")", "def References(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('references', default)\n return [HEP.ReferenceObject(i) for i in tmp]", "def read_ref_dbase(self, inasdfname, phase='P'):\n indset = pyasdf.ASDFDataSet(inasdfname)\n #--------------------\n # station inventory\n #--------------------\n wavlst = indset.waveforms.list()\n self.inv = indset.waveforms[wavlst[0]].StationXML\n for staid in wavlst[1:]:\n self.inv+= indset.waveforms[staid].StationXML\n self.add_stationxml(self.inv)\n #--------------------\n # ref data\n #--------------------\n for staid in wavlst:\n netcode, stacode = staid.split('.')\n staid_aux = netcode+'_'+stacode+'_'+phase\n if indset.auxiliary_data.RefRHScount[staid_aux].parameters['Nhs'] == 0:\n print 'No harmonic stripping data for '+staid\n continue\n ref_header = {'Nraw': indset.auxiliary_data['RefRHScount'][staid_aux].parameters['Nraw'], \\\n 'Nhs': indset.auxiliary_data['RefRHScount'][staid_aux].parameters['Nhs'], \\\n 'delta': indset.auxiliary_data['RefRHSmodel'][staid_aux]['A0_A1_A2']['A0'].parameters['delta'], \\\n 'npts': indset.auxiliary_data['RefRHSmodel'][staid_aux]['A0_A1_A2']['A0'].parameters['npts']}\n \"\"\"\n 0 - A0 from A0-A1-A2 inversion\n 1 - misfit from raw A0+A1+A2\n 2 - misfit from binned A0+A1+A2\n 3 - weighted misfit from binned A0+A1+A2\n \"\"\"\n data = np.zeros((4, ref_header['npts']))\n data[0, :] = indset.auxiliary_data['RefRHSmodel'][staid_aux]['A0_A1_A2']['A0'].data.value\n data[1, :] = indset.auxiliary_data['RefRHSmodel'][staid_aux]['A0_A1_A2']['mf_A0_A1_A2_obs'].data.value\n data[2, :] = indset.auxiliary_data['RefRHSmodel'][staid_aux]['A0_A1_A2']['mf_A0_A1_A2_bin'].data.value\n data[3, :] = indset.auxiliary_data['RefRHSmodel'][staid_aux]['A0_A1_A2']['wmf_A0_A1_A2_bin'].data.value\n self.add_auxiliary_data(data=data, data_type='RefR', path=staid_aux, parameters=ref_header)\n return", "def values(self):\r\n my_values = []\r\n for sleek_ref in self.data.values():\r\n try:\r\n my_values.append(sleek_ref())\r\n except SleekRefDied:\r\n pass\r\n return my_values", "def _get_reference(self):\n super()._get_reference()\n\n # Additional object references from this env\n self.cube_body_id = 
self.sim.model.body_name2id(\"pot\")\n self.handle_1_site_id = self.sim.model.site_name2id(\"pot_handle_1\")\n self.handle_0_site_id = self.sim.model.site_name2id(\"pot_handle_2\")\n self.table_top_id = self.sim.model.site_name2id(\"table_top\")\n self.pot_center_id = self.sim.model.site_name2id(\"pot_center\")", "def recs_to_lookup(filename):\n d = {\"\": \"\"}\n for flds in nndb_recs(filename, [\"key\", \"val\"]):\n d[flds[\"key\"]] = flds[\"val\"]\n return d", "def find_references(doi):\n if doi is None:\n return None\n\n references = []\n if doi:\n response = requests.get(f\"https://opencitations.net/index/api/v1/references/{doi}\").json()\n if response:\n references = [{\"doi\": r['cited'].replace(\"coci =>\", \"\")} for r in response]\n\n if references:\n return references\n else:\n return None", "def getXRefsFrom(self):\r\n # type: () -> (list[int], list[int])\r\n crefs = []\r\n drefs = []\r\n\r\n\r\n # normalFlow = True\r\n # for ref in idautils.CodeRefsFrom(self.func_ea, normalFlow): # XrefsFrom\r\n # crefs.append(ref)\r\n # for ref in idautils.CodeRefsFrom(self.func_ea, not normalFlow): # XrefsFrom\r\n # crefs.append(ref)\r\n # for ref in idautils.CodeRefsFrom(self.func_ea-1, normalFlow): # XrefsFrom\r\n # crefs.append(ref)\r\n # for ref in idautils.CodeRefsFrom(self.func_ea-1, not normalFlow): # XrefsFrom\r\n # crefs.append(ref)\r\n\r\n # needed to identify pool variables. drefs accessing the pool may access pointers\r\n # in the pool. the pointers should be retrieved instead\r\n size_pool = self.getSize(withPool=True)\r\n # for each instruction\r\n for i in idautils.FuncItems(self.func_ea):\r\n for xref in idautils.XrefsFrom(i, 0):\r\n # if the xref is to a far or near called function\r\n if xref.type == idc.fl_CN or xref.type == idc.fl_CF:\r\n if xref.to not in crefs:\r\n crefs.append(xref.to)\r\n # if the xref is to a read or write data access\r\n if xref.type == idc.dr_W or xref.type == idc.dr_R:\r\n if xref.to not in drefs:\r\n # if xref.to is in the pool, then retrieve content if it's a pointer\r\n if xref.to < self.func_ea + size_pool:\r\n # those are the references found at the pool location\r\n iteratedOnce = False\r\n for poolRef in idautils.XrefsFrom(xref.to, 0):\r\n if iteratedOnce:\r\n raise(FunctionException(\"%08X: there should only be one data xref in pool variable\"\r\n % (self.func_ea)))\r\n # there should only be one in the pool refernce\r\n if poolRef.to not in drefs:\r\n drefs.append(poolRef.to)\r\n iteratedOnce = True\r\n else:\r\n drefs.append(xref.to)\r\n\r\n # for ref in idautils.DataRefsFrom(self.func_ea):\r\n # drefs.append(ref)\r\n # for ref in idautils.DataRefsFrom(self.func_ea - 1):\r\n # drefs.append(ref)\r\n return crefs, drefs", "def get_doc_prov(j, gcis_url, refList):\n gcis_ns = \"https://gcis-search-stage.jpl.net:3000/gcis.owl#\"\n doc = ProvEsDocument()\n bndl = None\n \n#to get people attributed to, you need to grab article -> jornal_identifier -> look up in references\n# for ref in refList:\n# if ref['child_publication'] == j['uri']:\n \n\n\n doc_attrs = [\n (\"prov:type\", 'gcis:Article'),\n (\"prov:label\", j['title']),\n (\"prov:location\", j['uri']),\n #(\"prov:wasAttributedTo\", j['']),\n ]\n doc.entity('bibo:%s' % j['identifier'], doc_attrs)\n\n prov_json = json.loads(doc.serialize())\n\n return prov_json", "def ref_values(x, y):\n check_evaluation_points(x, y)\n values = np.empty((21,x.shape[0]))\n _ap.ap_ref_values(x, y, x.shape[0], values)\n return values", "def build_messy_lookup(source,dest,ref_col):\n la = 
QuickGrid().open(source)\n od = QuickGrid().open(join(\"source_files\",\"local_authority_data_names.csv\"))\n\n lookup = QuickGrid()\n lookup.header = [\"la name\",ref_col]\n\n possible = [\"official-name\",\"alt-name-1\",\"alt-name-2\",\"alt-name-3\"]\n possible = [p for p in possible if p in la.header]\n for r in la:\n for p in possible:\n if r[p]:\n lookup.add([r[p],r[ref_col]])\n \n current_names = [x[0] for x in lookup]\n\n for r in od:\n if r[\"name\"] not in current_names:\n code = r[\"local-authority\"].split(\":\")[1]\n lookup.add([r[\"name\"],code])\n \n lookup.save(dest,force_unicode=True)", "def getReferenceDetails(soup):\n refDict = {}\n refs = soup.find_all('edmx:reference')\n for ref in refs:\n includes = ref.find_all('edmx:include')\n for item in includes:\n if item.get('namespace') is None or ref.get('uri') is None:\n rsvLogger.error(\"Reference incorrect for: \", item)\n continue\n if item.get('alias') is not None:\n refDict[item['alias']] = (item['namespace'], ref['uri'])\n else:\n refDict[item['namespace']] = (item['namespace'], ref['uri'])\n refDict[item['namespace'].split('.')[0]] = (item['namespace'], ref['uri'])\n return refDict", "def dbxref_mapper(self, data: pd.DataFrame, primary_key: str, code_type: str) -> pd.DataFrame:\n\n col_lab = code_type.upper() + '_DBXREF_ONT_' # column labels\n ont_labels = merge_dictionaries(self.ont_dict, 'label', reverse=True)\n\n # convert ontology dictionary to Pandas DataFrame\n ont_df = pd.concat([pd.DataFrame(self.ont_dict[ont]['dbxref'].items(), columns=['CODE', col_lab + 'URI'])\n for ont in self.ont_dict.keys() if len(self.ont_dict[ont]['dbxref']) > 0])\n # normalize source_code prefix values\n ont_df['CODE'] = normalizes_source_codes(ont_df['CODE'].to_frame(), self.source_code_map)\n # merge ontology data and clinical data and run ohdsi ananke approach to specifically pull umls ont mappings\n if self.umls_cui_data is not None:\n dbxrefs = pd.concat(\n [data.merge(ont_df, how='inner', on='CODE').drop_duplicates(),\n ohdsi_ananke(primary_key, list(self.ont_dict.keys()), ont_df.copy(), data, self.umls_cui_data.copy())]\n )\n else:\n dbxrefs = data.merge(ont_df, how='inner', on='CODE').drop_duplicates()\n\n # update content and labels\n dbxrefs[col_lab + 'TYPE'] = dbxrefs[col_lab + 'URI'].apply(lambda x: x.split('/')[-1].split('_')[0])\n dbxrefs[col_lab + 'LABEL'] = dbxrefs[col_lab + 'URI'].apply(lambda x: ont_labels[x])\n # update evidence formatting --> EX: CONCEPTS_DBXREF_UMLS:C0008533\n dbxrefs[col_lab + 'EVIDENCE'] = dbxrefs['CODE'].apply(lambda x: col_lab[0:-4] + x)\n # drop unneeded columns\n dbxrefs = dbxrefs[[primary_key] + [x for x in list(dbxrefs.columns) if x.startswith(col_lab[0:-4])]]\n\n return dbxrefs.drop_duplicates()", "def fetch_data(self):", "def __get_references(self):\n named_references = []\n for usage in self.xml_cache.get_xml_tree(\"usagemodel\"):\n variable_usages = usage.findall(\".//namedReference__VariableUsage\")\n for name in variable_usages:\n named_references.append(name.get(\"referenceName\"))\n return named_references", "def load_absolute_case_numbers():\n client = MongoClient(f'mongodb://{os.getenv(\"USR_\")}:{os.getenv(\"PWD_\")}@{os.getenv(\"REMOTE_HOST\")}:{os.getenv(\"REMOTE_PORT\")}/{os.getenv(\"AUTH_DB\")}')\n db = client[os.getenv(\"MAIN_DB\")]\n rki_collection = db[\"rkidata\"]\n data = pd.DataFrame(list(rki_collection.find()))\n data[\"IdLandkreis\"] = pd.to_numeric(data[\"IdLandkreis\"])\n return data", "def compute_species_from_citation_linking(citelist: list) -> None:\n # for i, 
cite in enumerate(tqdm(citelist)):\n unrecorded_xrefs = []\n recorded_refs = set()\n for i, cite in enumerate(citelist):\n recorded_refs.add(cite.cite_key)\n if cite.actual == \"=\":\n cname = \"\"\n crossnames = collections.Counter()\n for j in range(i): # only look at entries up to the current one\n tmp = citelist[j]\n if (tmp.cite_key == cite.application) and match_num_ref(tmp.name_key, cite.cite_n):\n cname = tmp.name\n crossnames.update([tmp.actual])\n if len(crossnames) == 0:\n unrecorded_xrefs.append([cite.cite_key, cite.application, cite.name_key, cite.cite_n])\n elif len(crossnames) == 1:\n cite.actual = list(crossnames.keys())[0]\n else:\n # find name(s) with largest count\n mcnt = max(crossnames.values())\n keylist = []\n for key in crossnames:\n if crossnames[key] == mcnt:\n keylist.append(key)\n\n if len(keylist) == 1:\n cite.actual = keylist[0]\n else:\n cite_name = cite.name.lower()\n while cite_name.find(\" \") > -1:\n cite_name = cite_name[cite_name.find(\" \")+1:]\n while cname.find(\" \") > -1:\n cname = cname[cname.find(\" \")+1:]\n\n if cite_name in keylist:\n cite.actual = cite_name\n elif cname in keylist:\n cite.actual = cname\n else:\n cite.actual = keylist[0]\n\n if cite.name_note == \".\":\n cite.name_note = \"in part\"\n else:\n cite.name_note = \"in part; \" + cite.name_note\n\n for x in unrecorded_xrefs:\n if x[1] in recorded_refs:\n report_error(\"Reference {} ({}) does not appear until after citation from {} ({})\".format(x[1], x[3],\n x[0], x[2]))", "def _get_uri_reference(self):\n ref_name, ref_val = next(iter(self._choose_reference().items()))\n if ref_name == 'sha1':\n return 'sha1/%s' % ref_val\n else:\n return 'ref/%s' % ref_val", "def get_doc_prov(j, gcis_url, refList, orgList):\n doc = ProvEsDocument()\n \n org = requests.get(j['href']).json()\n \n doc_attrs = [\n (\"prov:type\", 'gcis:organization'),\n (\"prov:label\", j['name']),\n (\"prov:location\", \"%s%s\"%(gcis_url, j['uri'])),\n (\"gcis:organization_type_identifier\", j['organization_type_identifier']),\n (\"gcis:country_code\", j['country_code']),\n ]\n orgID = 'bibo:%s' % j['identifier']\n doc.agent(orgID, doc_attrs)\n\n for child in org['children']:\n cOrgURI = child['organization']\n rel = child['relationship']\n\n cOrg = next(o for o in orgList if o['uri'] == cOrgURI)\n cOrgID = 'bibo:%s'%cOrg['identifier']\n\n #cOrgAttrs = [\n # (\"prov:type\", 'gcis:organization'),\n # (\"prov:label\", cOrg['name']),\n # (\"prov:location\", cOrg['uri']),\n # (\"gcis:organization_type_identifier\", cOrg['organization_type_identifier']),\n # (\"gcis:country_code\", cOrg['country_code']),\n # ]\n #doc.entity(cOrgID, cOrgAttrs)\n #doc.hadMember(orgID, cOrgID)\n #for parent in org['parents']:\n # pOrgURI = parent['organization']\n # rel = parent['relationship']\n # pOrg = next(o for o in orgList if o['uri'] == pOrgURI)\n # pOrgID = 'bibo:%s'%pOrg['identifier']\n # doc.hadMember(pOrgID, orgID)\n\n prov_json = json.loads(doc.serialize())\n\n return prov_json", "def refseq_based_clustering(self):\n self.refseq_based = NonRedSetDict()\n for prey in self.ivv_info.Prey_info().preys():\n refseqid = self.get_refseq(prey)\n if refseqid:\n self.refseq_based.append_Dict(refseqid, prey)", "def get_data(link):\n data = re.get(link)\n jsondata = data.json()\n for weatherstation in jsondata['weatherStations']:\n FetchandStore.sensordict.update({weatherstation[\"id\"]:weatherstation[\"sensorValues\"]})\n for sensorvalue in weatherstation[\"sensorValues\"]:\n FetchandStore.sensors.append({\"id\": 
sensorvalue[\"roadStationId\"], \"name\": sensorvalue[\"oldName\"],\n \"value\": sensorvalue[\"sensorValue\"], \"unit\": sensorvalue[\"sensorUnit\"],\n \"datetime\": sensorvalue[\"measuredTime\"]})\n return FetchandStore.sensors", "def RefsPage(request):\n sources = models.ValueSource.objects.all()\n sorted_sources = sorted(sources, key=lambda s: s.citation)\n template_data = {\"sources\": sorted_sources}\n return render_to_response('data_refs.html', template_data)", "def _get_sup_relations(user):\n global sup_relations, nk_stat\n rel_support_datasets = os.listdir(\"temp/relation_support_datasets\") #gets a list of the relation support datasets\n rel_support_datasets = sorted([i for i in rel_support_datasets if '.csv' in i and user.username in i])\n sup_relations = []\n nk_stat = []\n for f in rel_support_datasets:\n df = pd.read_csv(\"temp/relation_support_datasets/\"+f, engine='python')\n sup_relations.append(\", \".join(list(df['reldescription'].unique())))\n N = df['reldescription'].unique().shape[0]\n K = df[df['reldescription'] == df['reldescription'].loc[0]].shape[0]\n \n nk_stat.append(\"{}-way {}-shot\".format(N, K))", "def fetch(url, header_path, id, ip, dbase, targets_table):\n # url = 'http://esimbad/testGSAV7/reslabo?FENID=resLaboPatDitep&NIP={}' \\\n # '&STARTDATE={}&ENDDATE={}'\n\n # header_path = '~/workspace/data/biology/header.csv'\n # constant names specific to our database\n KEY1 = 'id'\n KEY2 = 'NIP'\n C1J1 = 'C1J1'\n\n header = pd.read_csv(header_path, sep=';', encoding='latin1').columns\n\n\n engine = get_engine(id, ip, dbase)\n\n df_ids = sql2df(engine, targets_table)[[KEY1, 'nip', C1J1]]\n df_ids.rename({'nip': KEY2}, inplace=True, axis=1)\n df_ids['patient_id'] = df_ids[KEY1]\n\n cols = [KEY2, 'Analyse', 'Resultat', 'Date prelvt']\n df_res = pd.DataFrame(data=None, columns=cols)\n\n for index, row in df_ids.iterrows():\n nip = row[KEY2].replace(' ', '')\n patient_id = row['patient_id']\n c1j1_date = row[C1J1].date()\n start_date = c1j1_date - timedelta(weeks=8)\n\n c1j1 = str(c1j1_date).replace('-', '')\n start = str(start_date).replace('-', '')\n\n req = requests.get(url.format(nip, start, c1j1))\n values = BeautifulSoup(req.content, 'html.parser').body.text\n\n new_df = pd.read_csv(StringIO(values), sep=';', header=None,\n index_col=False, names=header)\n new_df = new_df.loc[:, cols + ['LC']] # remove LC\n\n # normalize nip\n new_df[KEY2] = row[KEY2]\n # new_df[KEY2] = new_df[KEY2].map(str)\n # new_df[KEY2] = [nip[:4] + '-' + nip[4:] for nip in new_df[KEY2]]\n\n new_df.drop('LC', axis=1, inplace=True)\n\n df_res = pd.concat([df_res, new_df], axis=0,\n sort=False, ignore_index=True)\n\n return df_res", "def references(md5):\n u = Upload.objects.filter(md5=md5).first()\n if not u:\n abort(404)\n # first, is this searchable?\n is_searchable = False\n count = elastic.count('page', filter={'md5': md5})\n if count > 0:\n is_searchable = True\n #annotations = Reference.objects.filter(upload=u, ref_url__exists=True)\n annotations = Reference.objects.filter(upload=u).order_by('ref_pos')\n # create a list of referenced things\n references = {'references':[], 'searchable': is_searchable}\n for a in annotations:\n try:\n references['references'].append({\n 'pos_x': a.pos_x, \n 'pos': a.pos, \n 'ref': a.ref_upload.md5, \n 'ref_pos': a.ref_pos\n })\n except:\n pass\n return jsonify(references)", "def _ref_dc(self):\n val_ref = self.meta[globals._ref_ds_attr]\n ref_dc = parse(globals._ds_short_name_attr, val_ref)[0]\n return ref_dc", "def Values(self) -> _n_1_t_4:", "def 
_get_references_data(wit_path):\n\n with open(os.path.join(wit_path, '.wit', 'references.txt'), 'r') as data:\n info = {'None': 'None'}\n info.update({'HEAD': data.readline().split('=')[-1].strip('\\n')})\n info.update({'master': data.readline().split('=')[-1].strip('\\n')})\n for row in data.readlines():\n name, commit_id = row.split('=')\n info.update({name.strip('\\n'): commit_id.strip('\\n')})\n\n return info", "def recompute_xref():\n\n import cross_references\n cross_references.r.recompute_xref()", "def __init__(self,lookup_data_file=None,lookup_data_fp=None,\n cols=(0,3), major_delimiter='\\t',minor_delimiter=',',\n null_ids=('NA')):\n # Initialise\n self.__lookup = {}\n self.__reverse_lookup = {}\n # Open file and read in data\n if lookup_data_fp is None:\n fp = io.open(lookup_data_file,'rt')\n else:\n fp = lookup_data_fp\n for line in fp:\n # Split into columns on major delimiter\n data = line.strip().split(major_delimiter)\n # Get the data\n key = data[cols[0]]\n # Split again on minor delimiter\n values = []\n if minor_delimiter:\n for item in data[cols[1]].strip().split(minor_delimiter):\n values.append(item)\n else:\n values.append(data[cols[1]])\n for value in values:\n # Check for 'null' values\n if value in null_ids:\n continue\n # Store the data\n try:\n self.__lookup[key].append(value)\n except KeyError:\n self.__lookup[key] = [value]\n try:\n self.__reverse_lookup[value].append(key)\n except KeyError:\n self.__reverse_lookup[value] = [key]\n # Finished - close the file\n if lookup_data_fp is None:\n fp.close()", "def load_rentedout():", "def references(self):\n return self.header('References', '').split()", "def test_scalar(self):\n dset = self.f.create_dataset('x', (), dtype=h5py.ref_dtype)\n dset[()] = self.f.ref\n self.assertEqual(type(dset[()]), h5py.Reference)", "def load_nist(ion):\n import glob\n # Find file\n srch_file = os.path.join(data.Paths.nist, f'{ion}_vacuum.ascii')\n nist_file = glob.glob(srch_file)\n if len(nist_file) == 0:\n raise IOError(f\"Cannot find NIST file {srch_file}\")\n # Read\n nist_tbl = Table.read(nist_file[0], format='ascii.fixed_width')\n gdrow = nist_tbl['Observed'] > 0. 
# Eliminate dummy lines\n nist_tbl = nist_tbl[gdrow]\n # Now unique values only (no duplicates)\n uniq, indices = np.unique(nist_tbl['Observed'],return_index=True)\n nist_tbl = nist_tbl[indices]\n # Deal with Rel\n agdrel = []\n for row in nist_tbl:\n try:\n gdrel = int(row['Rel.'])\n except:\n try:\n gdrel = int(row['Rel.'][:-1])\n except:\n gdrel = 0\n agdrel.append(gdrel)\n agdrel = np.array(agdrel)\n # Remove and add\n nist_tbl.remove_column('Rel.')\n nist_tbl.remove_column('Ritz')\n nist_tbl['RelInt'] = agdrel\n #nist_tbl.add_column(Column([ion]*len(nist_tbl), name='Ion', dtype='S5'))\n nist_tbl.add_column(Column([ion]*len(nist_tbl), name='Ion', dtype='U5'))\n nist_tbl.rename_column('Observed','wave')\n # Return\n return nist_tbl", "def load_values(self):\n # TODO: Add self.prefix and extension\n NetworkTables.loadEntries(self.file.get_filename(), prefix='/vision/' + self.name + '_')", "def update_refseq_info(ensembl_info, word, value):\n if \"refseq\" in word:\n if \"mrna\" in word:\n if \"predicted\" in word:\n ensembl_info[\"refseq_mrna_predicted\"] = value\n else:\n ensembl_info[\"refseq_mrna\"] = value\n\n if \"ncrna\" in word:\n ensembl_info[\"refseq_ncrna\"] = value\n return ensembl_info", "def main2():\n\t\n\tcu_locations = cu_locations_data();\n\t\n\tfor row in cu_locations:\n\t\tprint \"INSERT INTO contact ('ref_id') VALUES (%s);\" % ( row['location_id'] );", "def mk_dsinfo(self, val):\n dsinfo = {}\n atags = self.ds_atags()\n # dsinfo['description'] = ''\n dsinfo['dimensions'] = {}\n dsinfo['dimdef'] = {}\n # dsinfo['ref'] = ''\n dsinfo['dtype'] = '' # type actually present in val, e.g. 'int32'\n dsinfo['data_type'] = '' # type specified in definition, e.g. int, float, number, text\n dsinfo['shape'] = '' # shape of array or string 'scalar'\n # dsinfo['unit'] = ''\n # dsinfo['semantic_type'] = '' \n dsinfo['atags'] = {}\n df = self.sdef['df']\n # save all referenced atags\n for tag in atags:\n if tag in df and tag != 'description': # don't save descriptions by default\n dsinfo['atags'][atags[tag]['atname']] = {\n 'data_type': atags[tag]['data_type'],\n 'description': atags[tag]['description'],\n 'value': df[tag],} \n if self.link_info:\n # setting this dataset to another dataset by a link\n # get previously saved info about dataset linking to\n # import pdb; pdb.set_trace()\n if 'node' in self.link_info:\n # linking to node in current file\n node = self.link_info['node']\n dsinfo['shape'] = node.dsinfo['shape']\n dsinfo['dtype'] = node.dsinfo['dtype']\n elif 'extlink' in self.link_info:\n # linking to external file. Cannot do validation of datatype\n # leave dsinfo['shape'] and dsinfo['dtype'] empty to indicate both are unknown\n pass\n else:\n raise SystemError(\"** Error: invalid key in link_info %s\" % self.link_info)\n else:\n dsinfo['dtype'], dsinfo['shape'] = self.get_dtype_and_shape(val)\n if 'dimensions' in df.keys():\n dsinfo['dimensions'] = df['dimensions']\n if dsinfo['shape'] == 'scalar':\n print (\"** Warning, expecting array value because dimensions defined\"\n \" for dataset, but value assigned is scalar with type '%s'.\"\n \" Dimensions are:\" % dsinfo['dtype'])\n # pp.pprint(df['dimensions'])\n # print('Scalar value is:')\n # pp.pprint(val)\n # traceback.print_stack()\n # sys.exit(1)\n else: \n\t\t\t\tif dsinfo['shape'] and len(dsinfo['dimensions']) != len(dsinfo['shape']):\n\t\t\t\t\tprint (\"** Warning, %i dimensions defined in data set, but number of\"\n\t\t\t\t\t\t\" dimensions in value assigned is %i. 
Shape is:\") % (\n\t\t\t\t\t\tlen(dsinfo['dimensions']), len(dsinfo['shape']))\n\t\t\t\t\tpp.pprint(dsinfo['shape']);\n\t\t\t\t\t# print \"if dimensions are Nx1 and using MatLab, consider transpose (') to make 1xN\"; \n\t\t\t\t\t# traceback.print_stack()\n\t\t\t\t\t# sys.exit(1)\n\t\t\t\telse: \n\t\t\t\t\t# check for any dimensions defined in dataset\n\t\t\t\t\ti = 0\n\t\t\t\t\tfor dim in dsinfo['dimensions']:\n\t\t\t\t\t\tif dim.endswith('^'):\n\t\t\t\t\t\t\tscope = 'global'\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tscope = 'local'\n\t\t\t\t\t\tdsinfo['dimdef'][dim] = {'scope':scope, 'len': dsinfo['shape'][i]}\n\t\t\t\t\t\tif dim in df.keys():\n\t\t\t\t\t\t\tdsinfo['dimdef'][dim].update(df[dim])\n\t\t\t\t\t\ti = i + 1\n if 'attributes' in df.keys():\n pass # do nothing here, attributes moved to self.attributes \n if 'data_type' in df.keys():\n dsinfo['data_type'] = df['data_type']\n else:\n if not df:\n # nothing specified for dataset definition. Must be custom dataset\n # (being created by \"set_custom_dataset\"). Do no validation\n return dsinfo\n print \"** Error: 'data_type' not specified in dataset definition\"\n print \"definition is:\"\n pp.pprint(df)\n traceback.print_stack()\n sys.exit(1)\n # Now, some simple validation\n if dsinfo['dtype'] and not valid_dtype(dsinfo['data_type'], dsinfo['dtype']):\n raise ValueError((\"** Error, expecting type '%s' assinged to dataset, but\"\n \" value being stored is type '%s'\") % (dsinfo['data_type'], dsinfo['dtype'])) \n # make sure everything defined in dataset definition is valid\n for key in df.keys():\n if (key in ('dimensions', 'data_type', 'attributes') or\n key in atags or key in dsinfo['dimensions']):\n continue\n print \"** Error, invalid key (%s) in dataset definition\" % key\n print \"dataset definition is:\"\n pp.pprint(df)\n traceback.print_stack()\n sys.exit(1) \n return dsinfo", "def __init__(self, referent=None, address=0):\n self._referent = referent #value to store\n self._address = address #address to store at", "def _getRef(self):\n for param in self.context.fixup:\n if param.name == \"ref\":\n return eval(param.defaultValue)", "def get_data(self):", "def __local_rn(soup):\n return __get_local_g1_news(soup)", "def __local_rn(soup):\n return __get_local_g1_news(soup)", "def connect_refs_to_species(species: list, citelist: list) -> dict:\n # create a dictionary with empty reference lists\n species_refs = {s.species: set() for s in species}\n # go through all citations\n # for c in tqdm(citelist):\n for c in citelist:\n if c.actual in species_refs:\n reflist = species_refs[c.actual]\n reflist |= {c.cite_key}\n return species_refs", "def Reference(self, default={}):\n return HEP.ReferenceHEPObject(self.data.get('reference', default))", "def getUniChemData(self, inchiKeyList):\n mapD = {\n 1: {\"name\": \"chembl\", \"baseUrl\": \"https://www.ebi.ac.uk/chembl/\", \"entryUrl\": \"https://www.ebi.ac.uk/chembldb/compound/inspect/\"},\n 3: {\"name\": \"pdb\", \"baseUrl\": \"http://www.ebi.ac.uk/pdbe/\", \"entryUrl\": \"http://www.ebi.ac.uk/pdbe-srv/pdbechem/chemicalCompound/show/\"},\n 2: {\"name\": \"drugbank\", \"baseUrl\": \"http://drugbank.ca/\", \"entryUrl\": \"http://www.drugbank.ca/drugs/\"},\n 5: {\"name\": \"pubchem_dotf\", \"baseUrl\": \"http://pubchem.ncbi.nlm.nih.gov/sources/sources.cgi\", \"entryUrl\": \"http://pubchem.ncbi.nlm.nih.gov/substance/\"},\n 4: {\"name\": \"gtopdb\", \"baseUrl\": \"http://www.guidetopharmacology.org\", \"entryUrl\": \"http://www.guidetopharmacology.org/GRAC/LigandDisplayForward?ligandId=\"},\n 11: 
{\"name\": \"ibm\", \"baseUrl\": \"http://www-935.ibm.com/services/us/gbs/bao/siip/nih/\", \"entryUrl\": \"http://www-935.ibm.com/services/us/gbs/bao/siip/nih/?sid=\"},\n 6: {\"name\": \"kegg_ligand\", \"baseUrl\": \"http://www.genome.jp/kegg/ligand.html\", \"entryUrl\": \"http://www.genome.jp/dbget-bin/www_bget?\"},\n 9: {\"name\": \"zinc\", \"baseUrl\": \"http://zinc15.docking.org\", \"entryUrl\": \"http://zinc15.docking.org/substances/\"},\n 8: {\"name\": \"nih_ncc\", \"baseUrl\": \"http://nihsmr.evotec.com/evotec/\", \"entryUrl\": \"\"},\n 10: {\"name\": \"emolecules\", \"baseUrl\": \"https://www.emolecules.com/\", \"entryUrl\": \"https://www.emolecules.com/cgi-bin/more?vid=\"},\n 12: {\"name\": \"atlas\", \"baseUrl\": \"http://www.ebi.ac.uk/gxa/home\", \"entryUrl\": \"http://www.ebi.ac.uk/gxa/query?conditionQuery=\"},\n 7: {\"name\": \"chebi\", \"baseUrl\": \"http://www.ebi.ac.uk/chebi/downloadsForward.do\", \"entryUrl\": \"http://www.ebi.ac.uk/chebi/searchId.do?chebiId=CHEBI%3A\"},\n 14: {\n \"name\": \"fdasrs\",\n \"baseUrl\": \"http://fdasis.nlm.nih.gov/srs/srs.jsp\",\n \"entryUrl\": \"http://fdasis.nlm.nih.gov/srs/ProxyServlet?mergeData=true&objectHandle=DBMaint&APPLICATION_NAME=fdasrs&actionHandle=default&nextPage=jsp/srs/ResultScreen.jsp&TXTSUPERLISTID=\",\n },\n 15: {\"name\": \"surechembl\", \"baseUrl\": \"https://www.surechembl.org/search/\", \"entryUrl\": \"https://www.surechembl.org/chemical/\"},\n 21: {\"name\": \"pubchem_tpharma\", \"baseUrl\": \"http://www.thomson-pharma.com/\", \"entryUrl\": \"http://pubchem.ncbi.nlm.nih.gov/substance/\"},\n 22: {\"name\": \"pubchem\", \"baseUrl\": \"http://pubchem.ncbi.nlm.nih.gov\", \"entryUrl\": \"http://pubchem.ncbi.nlm.nih.gov/compound/\"},\n 27: {\"name\": \"recon\", \"baseUrl\": \"https://vmh.uni.lu\", \"entryUrl\": \"https://vmh.uni.lu/\"},\n 28: {\"name\": \"molport\", \"baseUrl\": \"https://www.molport.com/shop/index\", \"entryUrl\": \"https://www.molport.com/shop/molecule-link/\"},\n 31: {\n \"name\": \"bindingdb\",\n \"baseUrl\": \"https://www.bindingdb.org/bind/index.jsp\",\n \"entryUrl\": \"http://www.bindingdb.org/bind/chemsearch/marvin/MolStructure.jsp?monomerid=\",\n },\n 41: {\"name\": \"swisslipids\", \"baseUrl\": \"http://www.swisslipids.org/\", \"entryUrl\": \"http://www.swisslipids.org/\"},\n 29: {\"name\": \"nikkaji\", \"baseUrl\": \"http://jglobal.jst.go.jp/en/\", \"entryUrl\": \"http://jglobal.jst.go.jp/en/redirect?Nikkaji_No=\"},\n 32: {\"name\": \"comptox\", \"baseUrl\": \"https://comptox.epa.gov/dashboard/\", \"entryUrl\": \"https://comptox.epa.gov/dashboard/\"},\n 33: {\"name\": \"lipidmaps\", \"baseUrl\": \"http://www.lipidmaps.org\", \"entryUrl\": \"http://www.lipidmaps.org/data/LMSDRecord.php?LMID=\"},\n 35: {\"name\": \"carotenoiddb\", \"baseUrl\": \"http://carotenoiddb.jp/index.html\", \"entryUrl\": \"http://carotenoiddb.jp/Entries/\"},\n 36: {\"name\": \"metabolights\", \"baseUrl\": \"http://www.ebi.ac.uk/metabolights/\", \"entryUrl\": \"http://www.ebi.ac.uk/metabolights/\"},\n 37: {\"name\": \"brenda\", \"baseUrl\": \"https://www.brenda-enzymes.org/index.php\", \"entryUrl\": \"https://www.brenda-enzymes.org/ligand.php?brenda_ligand_id=\"},\n 17: {\"name\": \"pharmgkb\", \"baseUrl\": \"https://www.pharmgkb.org\", \"entryUrl\": \"https://www.pharmgkb.org/drug/\"},\n 18: {\"name\": \"hmdb\", \"baseUrl\": \"http://www.hmdb.ca\", \"entryUrl\": \"http://www.hmdb.ca/metabolites/\"},\n 24: {\n \"name\": \"nmrshiftdb2\",\n \"baseUrl\": 
\"http://nmrshiftdb.nmr.uni-koeln.de/portal/media-type/html/user/anon/page/default.psml/js_pane/P-Home\",\n \"entryUrl\": \"http://nmrshiftdb.org/molecule/\",\n },\n 25: {\"name\": \"lincs\", \"baseUrl\": \"http://www.lincsproject.org/\", \"entryUrl\": \"http://identifiers.org/lincs.smallmolecule/\"},\n 39: {\"name\": \"chemicalbook\", \"baseUrl\": \"https://www.chemicalbook.com\", \"entryUrl\": \"https://www.chemicalbook.com/ChemicalProductProperty_EN_\"},\n 20: {\"name\": \"selleck\", \"baseUrl\": \"http://www.selleckchem.com\", \"entryUrl\": \"http://www.selleckchem.com/products/\"},\n 23: {\"name\": \"mcule\", \"baseUrl\": \"https://mcule.com\", \"entryUrl\": \"https://mcule.com/\"},\n 26: {\"name\": \"actor\", \"baseUrl\": \"https://actor.epa.gov\", \"entryUrl\": \"http://actor.epa.gov/actor/chemical.xhtml?casrn=\"},\n 34: {\"name\": \"drugcentral\", \"baseUrl\": \"http://drugcentral.org\", \"entryUrl\": \"http://drugcentral.org/drugcard/\"},\n 38: {\"name\": \"rhea\", \"baseUrl\": \"http://www.rhea-db.org\", \"entryUrl\": \"http://www.rhea-db.org/searchresults?q=CHEBI:\"},\n }\n oD = {}\n try:\n for ky in inchiKeyList:\n unc = unichem_client # pylint: disable=no-member\n # unc.set_format(\"json\")\n uDL = unc.get(ky)\n if uDL:\n qD = {}\n for uD in uDL:\n if \"src_id\" in uD and int(uD[\"src_id\"]) in mapD:\n qD[mapD[int(uD[\"src_id\"])][\"name\"]] = uD[\"src_compound_id\"]\n if qD:\n oD[ky] = qD\n\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n return oD", "def read_core_vref(self) -> float:", "def RawRefs(self, default=[{}]):\n tmp = self.data.get('raw_refs', default)\n return [HEP.RawReferenceObject(i) for i in tmp]", "def test_ref_field(self):\n PurchaseOrder = self.new_state.apps.get_model('order', 'purchaseorder')\n SalesOrder = self.new_state.apps.get_model('order', 'salesorder')\n\n for ii in range(10):\n\n po = PurchaseOrder.objects.get(reference=f\"{ii}-abcde\")\n so = SalesOrder.objects.get(reference=f\"{ii}-xyz\")\n\n # The integer reference field must have been correctly updated\n self.assertEqual(po.reference_int, ii)\n self.assertEqual(so.reference_int, ii)\n\n # Tests for orders with overly large reference values\n po = PurchaseOrder.objects.get(pk=self.po_pk)\n self.assertEqual(po.reference, '999999999999999999999999999999999')\n self.assertEqual(po.reference_int, 0x7fffffff)\n\n so = SalesOrder.objects.get(pk=self.so_pk)\n self.assertEqual(so.reference, '999999999999999999999999999999999')\n self.assertEqual(so.reference_int, 0x7fffffff)", "def locationsFromDBS(self, dbs, dataItems):\n result = defaultdict(set)\n for dataItem in dataItems:\n try:\n if isDataset(dataItem):\n phedexNodeNames = dbs.listDatasetLocation(dataItem)\n else:\n phedexNodeNames = dbs.listFileBlockLocation(dataItem)\n result[dataItem].update(phedexNodeNames)\n except Exception as ex:\n self.logger.error('Error getting block location from dbs for %s: %s', dataItem, str(ex))\n\n # convert the sets to lists\n for name, nodes in viewitems(result):\n psns = set()\n psns.update(self.cric.PNNstoPSNs(nodes))\n result[name] = list(psns)\n\n return result", "def test2(self):\n field1 = self.get_first_record_for_name(\"I5\")\n lons, lats = self.get_longitudes_and_latitudes_for_the_last_read_rec()", "def get_ref_of_region(self, contig, site):\n return self.FastaFile.fetch(region=contig+site).upper()", "def ref_analyzer(citation_informations, initialresult, initial_citationlist,\n initial_referencelist,config, updated_rec_list ):\n function = \"\"\n try:\n function = 
config.get(\"rank_method\", \"function\")\n except:\n register_exception(prefix=\"cfg section [rank_method] has no attr function\", alert_admin=True)\n return {}\n\n pubrefntag = \"\"\n try:\n pubrefntag = config.get(function, \"reference_via_report_number\")\n except:\n register_exception(prefix=\"cfg section \"+function+\" has no attr reference_via_report_number\", alert_admin=True)\n return {}\n\n pubreftag = \"\"\n try:\n pubreftag = config.get(function, \"reference_via_pubinfo\")\n except:\n register_exception(prefix=\"cfg section \"+function+\" has no attr reference_via_pubinfo\", alert_admin=True)\n return {}\n\n #pubrefntag is often 999C5r, pubreftag 999C5s\n if task_get_task_param('verbose') >= 9:\n write_message(\"pubrefntag \"+pubrefntag)\n write_message(\"pubreftag \"+pubreftag)\n\n citation_list = initial_citationlist\n reference_list = initial_referencelist\n result = initialresult\n d_reports_numbers = citation_informations[0] #dict of recid -> institute_give_publ_id\n d_references_report_numbers = citation_informations[1] #dict of recid -> ['astro-ph/xyz'..]\n d_references_s = citation_informations[2]\n #dict of recid -> publication_infos_in_its_bibliography\n d_records_s = citation_informations[3] #recid -> its publication inf\n t1 = os.times()[4]\n\n write_message(\"Phase 0: temporarily remove changed records from citation dictionaries; they will be filled later\")\n for somerecid in updated_rec_list:\n try:\n del citation_list[somerecid]\n except KeyError:\n pass\n try:\n del reference_list[somerecid]\n except KeyError:\n pass\n\n write_message(\"Phase 1: d_references_report_numbers\")\n #d_references_report_numbers: e.g 8 -> ([astro-ph/9889],[hep-ph/768])\n #meaning: rec 8 contains these in bibliography\n\n done = 0\n numrecs = len(d_references_report_numbers)\n for thisrecid, refnumbers in d_references_report_numbers.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_references_report_numbers done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n task_sleep_now_if_required()\n done = done+1\n\n for refnumber in refnumbers:\n if refnumber:\n p = refnumber\n f = 'reportnumber'\n #sanitise p\n p.replace(\"\\n\",'')\n #search for \"hep-th/5644654 or such\" in existing records\n rec_ids = get_recids_matching_query(p, f)\n\n if len(rec_ids) > 1:\n msg = \"Whoops: record '%d' report number value '%s' \" \\\n \"matches many records; taking only the first one. %s\" % \\\n (thisrecid, p, repr(rec_ids))\n write_message(msg, stream=sys.stderr)\n try:\n raise ValueError(msg)\n except ValueError:\n register_exception(alert_admin=True)\n\n if rec_ids and rec_ids[0]:\n write_citer_cited(thisrecid, rec_ids[0])\n remove_from_missing(p)\n if not result.has_key(rec_ids[0]):\n result[rec_ids[0]] = 0\n # Citation list should have rec_ids[0] but check anyway\n if not citation_list.has_key(rec_ids[0]):\n citation_list[rec_ids[0]] = []\n #append unless this key already has the item\n if not thisrecid in citation_list[rec_ids[0]]:\n citation_list[rec_ids[0]].append(thisrecid)\n #and update result\n result[rec_ids[0]] += 1\n\n if not reference_list.has_key(thisrecid):\n reference_list[thisrecid] = []\n if not rec_ids[0] in reference_list[thisrecid]:\n reference_list[thisrecid].append(rec_ids[0])\n else:\n #the reference we wanted was not found among our records.\n #put the reference in the \"missing\".. however, it will look\n #bad.. 
gfhgf/1254312, so get the corresponding 999C5s (full ref) too\n #This should really be done in the next loop d_references_s\n #but the 999C5s fields are not yet normalized\n\n #rectext = print_record(thisrecid, format='hm', ot=pubreftag[:-1])\n rectext = \"\" # print_record() call disabled to speed things up\n lines = rectext.split(\"\\n\")\n rpart = p #to be used..\n for l in lines:\n if (l.find(p) > 0): #the gfhgf/1254312 was found.. get the s-part of it\n st = l.find('$s')\n if (st > 0):\n end = l.find('$', st)\n if (end == st):\n end = len(l)\n rpart = l[st+2:end]\n insert_into_missing(thisrecid, rpart)\n\n mesg = \"d_references_report_numbers done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n t2 = os.times()[4]\n\n #try to find references based on 999C5s, like Phys.Rev.Lett. 53 (1986) 2285\n write_message(\"Phase 2: d_references_s\")\n done = 0\n numrecs = len(d_references_s)\n for thisrecid, refss in d_references_s.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_references_s done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n task_sleep_now_if_required()\n\n done = done+1\n\n for refs in refss:\n if refs:\n p = refs\n #remove the latter page number if it is like 67-74\n matches = re.compile(\"(.*)(-\\d+$)\").findall(p)\n if matches and matches[0]:\n p = matches[0][0]\n\n # check reference value to see whether it is well formed:\n if not re_CFG_JOURNAL_PUBINFO_STANDARD_FORM_REGEXP_CHECK.match(p):\n msg = \"Whoops, record '%d' reference value '%s' \" \\\n \"is not well formed; skipping it.\" % (thisrecid, p)\n write_message(msg, stream=sys.stderr)\n try:\n raise ValueError(msg)\n except ValueError:\n register_exception(alert_admin=True)\n continue # skip this ill-formed value\n\n # look for reference value:\n rec_id = None\n try:\n rec_ids = list(search_unit(p, 'journal') - INTBITSET_OF_DELETED_RECORDS)\n except:\n rec_ids = None\n if len(rec_ids) > 1:\n msg = \"Whoops, record '%d' reference value '%s' \" \\\n \"matches many records; taking only the first one. 
%s\" % \\\n (thisrecid, p, repr(rec_ids))\n write_message(msg, stream=sys.stderr)\n try:\n raise ValueError(msg)\n except ValueError:\n register_exception(alert_admin=True)\n\n if rec_ids and rec_ids[0]:\n write_message(\"These match searching \"+p+\" in journal: \"+repr(rec_ids), verbose=9)\n #the refered publication is in our collection, remove\n #from missing\n remove_from_missing(p)\n else:\n #it was not found so add in missing\n insert_into_missing(thisrecid, p)\n #check citation and reference for this..\n if rec_ids and rec_ids[0]:\n #the above should always hold\n if not result.has_key(rec_ids[0]):\n result[rec_ids[0]] = 0\n if not citation_list.has_key(rec_ids[0]):\n citation_list[rec_ids[0]] = []\n if not thisrecid in citation_list[rec_ids[0]]:\n citation_list[rec_ids[0]].append(thisrecid) #append actual list\n result[rec_ids[0]] += 1 #add count for this..\n\n #update reference_list accordingly\n if not reference_list.has_key(thisrecid):\n reference_list[thisrecid] = []\n if not rec_ids[0] in reference_list[thisrecid]:\n reference_list[thisrecid].append(rec_ids[0])\n mesg = \"d_references_s done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n t3 = os.times()[4]\n done = 0\n numrecs = len(d_reports_numbers)\n write_message(\"Phase 3: d_reports_numbers\")\n\n #search for stuff like CERN-TH-4859/87 in list of refs\n for thisrecid, reportcodes in d_reports_numbers.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_report_numbers done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n done = done+1\n\n for reportcode in reportcodes:\n if reportcode:\n rec_ids = []\n try:\n rec_ids = get_recids_matching_query(reportcode, pubrefntag)\n except:\n rec_ids = []\n\n if rec_ids:\n for recid in rec_ids:\n #normal checks..\n if not citation_list.has_key(thisrecid):\n citation_list[thisrecid] = []\n if not reference_list.has_key(recid):\n reference_list[recid] = []\n if not result.has_key(thisrecid):\n result[thisrecid] = 0\n\n #normal updates\n if not recid in citation_list[thisrecid]:\n result[thisrecid] += 1\n citation_list[thisrecid].append(recid)\n if not thisrecid in reference_list[recid]:\n reference_list[recid].append(thisrecid)\n\n mesg = \"d_report_numbers done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n #find this record's pubinfo in other records' bibliography\n write_message(\"Phase 4: d_records_s\")\n done = 0\n numrecs = len(d_records_s)\n t4 = os.times()[4]\n for thisrecid, recs in d_records_s.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_records_s done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n done = done+1\n p = recs.replace(\"\\\"\",\"\")\n #search the publication string like Phys. 
Lett., B 482 (2000) 417 in 999C5s\n rec_ids = list(search_unit(f=pubreftag, p=p, m='a') - INTBITSET_OF_DELETED_RECORDS)\n write_message(\"These records match \"+p+\" in \"+pubreftag+\" : \"+str(rec_ids), verbose=9)\n if rec_ids:\n for rec_id in rec_ids:\n #normal checks\n if not result.has_key(thisrecid):\n result[thisrecid] = 0\n if not citation_list.has_key(thisrecid):\n citation_list[thisrecid] = []\n if not reference_list.has_key(rec_id):\n reference_list[rec_id] = []\n\n if not rec_id in citation_list[thisrecid]:\n result[thisrecid] += 1\n citation_list[thisrecid].append(rec_id)\n if not thisrecid in reference_list[rec_id]:\n reference_list[rec_id].append(thisrecid)\n\n mesg = \"d_records_s done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n write_message(\"Phase 5: reverse lists\")\n\n #remove empty lists in citation and reference\n keys = citation_list.keys()\n for k in keys:\n if not citation_list[k]:\n del citation_list[k]\n\n keys = reference_list.keys()\n for k in keys:\n if not reference_list[k]:\n del reference_list[k]\n\n write_message(\"Phase 6: self-citations\")\n selfdic = {}\n #get the initial self citation dict\n initial_self_dict = get_cit_dict(\"selfcitdict\")\n selfdic = initial_self_dict\n #add new records to selfdic\n acit = task_get_option(\"author-citations\")\n if not acit:\n write_message(\"Self cite processing disabled. Use -A option to enable it.\")\n else:\n write_message(\"self cite and author citations enabled\")\n selfdic = get_self_citations(updated_rec_list, citation_list,\n initial_self_dict, config)\n #selfdic consists of\n #key k -> list of values [v1,v2,..]\n #where k is a record with author A and k cites v1,v2.. and A appears in v1,v2..\n\n #create a reverse \"x cited by y\" self cit dict\n selfcitedbydic = {}\n for k in selfdic.keys():\n vlist = selfdic[k]\n for v in vlist:\n if selfcitedbydic.has_key(v):\n tmplist = selfcitedbydic[v]\n if not k in tmplist:\n tmplist.append(k)\n else:\n tmplist = [k]\n selfcitedbydic[v] = tmplist\n\n write_message(\"Getting author citations\")\n\n #get author citations for records in updated_rec_list\n initial_author_dict = get_initial_author_dict()\n authorcitdic = initial_author_dict\n acit = task_get_option(\"author-citations\")\n if not acit:\n print \"Author cites disabled. 
Use -A option to enable it.\"\n else:\n write_message(\"author citations enabled\")\n authorcitdic = get_author_citations(updated_rec_list, citation_list,\n initial_author_dict, config)\n\n\n if task_get_task_param('verbose') >= 3:\n #print only X first to prevent flood\n tmpdict = {}\n tmp = citation_list.keys()[0:10]\n for t in tmp:\n tmpdict[t] = citation_list[t]\n write_message(\"citation_list (x is cited by y): \"+str(tmpdict))\n write_message(\"size: \"+str(len(citation_list.keys())))\n tmp = reference_list.keys()[0:10]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = reference_list[t]\n write_message(\"reference_list (x cites y): \"+str(tmpdict))\n write_message(\"size: \"+str(len(reference_list.keys())))\n tmp = selfcitedbydic.keys()[0:10]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = selfcitedbydic[t]\n mesg = \"selfcitedbydic (x is cited by y and one of the authors of x same as y's):\"\n mesg += str(tmpdict)\n write_message(mesg)\n write_message(\"size: \"+str(len(selfcitedbydic.keys())))\n tmp = selfdic.keys()[0:100]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = selfdic[t]\n mesg = \"selfdic (x cites y and one of the authors of x same as y's): \"+str(tmpdict)\n write_message(mesg)\n write_message(\"size: \"+str(len(selfdic.keys())))\n tmp = authorcitdic.keys()[0:10]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = authorcitdic[t]\n write_message(\"authorcitdic (author is cited in recs): \"+str(tmpdict))\n write_message(\"size: \"+str(len(authorcitdic.keys())))\n insert_cit_ref_list_intodb(citation_list, reference_list,\n selfcitedbydic, selfdic, authorcitdic)\n\n t5 = os.times()[4]\n\n write_message(\"Execution time for analyzing the citation information generating the dictionary:\")\n write_message(\"... checking ref number: %.2f sec\" % (t2-t1))\n write_message(\"... checking ref ypvt: %.2f sec\" % (t3-t2))\n write_message(\"... checking rec number: %.2f sec\" % (t4-t3))\n write_message(\"... checking rec ypvt: %.2f sec\" % (t5-t4))\n write_message(\"... total time of ref_analyze: %.2f sec\" % (t5-t1))\n\n return result", "def reference_nodes_idx(self) -> Dict[str, torch.Tensor]:\n return self.node_idx_references", "def resolve_references(self, **kwargs):\n # deal with reference variables\n self.references = {}\n # record all reference variables in the references dictionary\n for name, value in kwargs.iteritems():\n if isinstance(value, RefVar):\n self.references[name] = value\n # then, resolve the reference value on the CPU only, so that\n # it can be used in initialize(). The reference variable will\n # still be registered in the visual dictionary, for later use\n # by the GL renderer.\n kwargs[name] = self.resolve_reference(value)['data']\n return kwargs", "def Get_Reference_Value(self):\r\n return self.__readFromRegister(self.__REG_RW_REFERENCE, 0xff)", "def fetch_citylink_refs(self):\n tree = html.fromstring(self.fetch_manifest())\n self_refs = tree.xpath('//table/tr/td/table/tr[position()>4]/td[1]/text()')\n return [x.strip() for x in self_refs[:-1]]", "def add_univ_city(dfsrc, uniquedf):\n\n def get_Univ_Loc_match(rowvals):\n \"\"\" Have to use sub-function to run function on\n each row of a dataframe using apply. 
\"\"\"\n\n # If encounters a number, go ahead and return nothing\n if isinstance(rowvals, float) or isinstance(rowvals, int):\n return \"\"\n #Inherit the lookup list from parent function\n tester = uniquedf.copy() \n #Create boolean column to check if each lookup value exists \n # in the column being checked, ensure all texts are stripped\n # and all text is lowercase to ensure matches work.\n tester['boole'] = tester['LookupVal'].apply(lambda x: x.strip().lower() in rowvals.strip().lower())\n tester = tester[tester['boole'] == True] #return only rows with match\n # Convert matched rows to a list which can be saved in the column\n tester = tester[['University', 'City', 'Country', 'CODE']].values.tolist()\n \n # If nothing results from lookups, return nothing\n if not tester:\n return \"\"\n else:\n # Otherwise return a unique list of Countries. Multiple\n # Lookups per value means could be multiple hits for 1 match\n tester = [list(x) for x in set(tuple(x) for x in tester)]\n return tester\n\n dfsrc['source_institution_places'] = dfsrc['Source'].apply(get_Univ_Loc_match)\n\n return dfsrc", "def analogies(self, queries):\n pass", "def test_regref(self):\n dset1 = self.f.create_dataset('x', (10,10))\n regref = dset1.regionref[...]\n dset2 = self.f.create_dataset('y', (1,), dtype=h5py.regionref_dtype)\n dset2[0] = regref\n self.assertEqual(type(dset2[0]), h5py.RegionReference)", "def cross_reference(self, model: BDF) -> None:\n msg = ', which is required by PLOAD4 sid=%s' % self.sid\n if self.cid is not None:\n self.cid_ref = model.Coord(self.cid, msg=msg)\n if self.g1 is not None:\n self.g1_ref = model.Node(self.g1, msg=msg + '; g1')\n if self.g34 is not None:\n self.g34_ref = model.Node(self.g34, msg=msg + '; g34')\n if self.eids:\n self.eids_ref = model.Elements(self.eids, msg=msg)", "def search_reference(dic_values, dic_mask, row, col, band_name):\n key_images = [key for key, value in dic_mask.items()]\n\n value_pixel = [value[row, col] for key, value in dic_values[band_name].items() if key in key_images]\n mask_pixel = [value[row, col] for key, value in dic_mask.items() if key in key_images]\n\n indices_not_cloud = [index for index, value in enumerate(mask_pixel) if value == 1][-1]\n\n reference_date = key_images[indices_not_cloud]\n reference_value = value_pixel[indices_not_cloud]\n\n return reference_date, reference_value" ]
[ "0.5812446", "0.57540804", "0.5708854", "0.56447667", "0.56308633", "0.5510093", "0.5462862", "0.54256", "0.53346974", "0.5308915", "0.52702427", "0.52666277", "0.52629995", "0.52512944", "0.52163595", "0.52109677", "0.5196691", "0.519309", "0.51905435", "0.51647764", "0.5161657", "0.51602453", "0.51267844", "0.5122003", "0.5118105", "0.51177293", "0.5114811", "0.5114811", "0.51054215", "0.50906634", "0.50364214", "0.50326693", "0.5031544", "0.502921", "0.50240177", "0.50207806", "0.5015938", "0.50141895", "0.50102574", "0.5009735", "0.50057656", "0.5000107", "0.4994865", "0.49771506", "0.49592242", "0.49565914", "0.49489754", "0.49316123", "0.4924572", "0.49113098", "0.49106747", "0.49104863", "0.49065468", "0.49021152", "0.49012133", "0.48937318", "0.4893261", "0.48807564", "0.48795667", "0.48761418", "0.48684484", "0.48675117", "0.48651272", "0.48597467", "0.48536637", "0.48450983", "0.4844363", "0.48431134", "0.48403227", "0.4838385", "0.48380566", "0.48345715", "0.48333365", "0.48332056", "0.48305222", "0.4828454", "0.4827", "0.48124868", "0.48115012", "0.48094404", "0.48074606", "0.48074606", "0.48063293", "0.48054036", "0.4789939", "0.4788862", "0.47850874", "0.47675157", "0.47671002", "0.47626263", "0.476079", "0.47602713", "0.47579974", "0.4750851", "0.4749099", "0.4746025", "0.47438508", "0.47358915", "0.47356057", "0.47343138", "0.47332516" ]
0.0
-1
This function helps to build the data that we need to train the CharCNN
def build_dataset(self, data_path, test_size): dataset, label_dataset = self.load_dataset(data_path) # shuffle dataset, label_dataset = shuffle(dataset, label_dataset, random_state = 2111) # split data size = int(len(dataset) * (1 - test_size)) self.x_train = dataset[:size] self.x_val = dataset[size:] self.y_train = np.array(label_dataset[:size]) self.y_val = np.array(label_dataset[size:]) self.vocab_size = len(self.x_train) # build tokenizer self.tokenizer = self.build_tokenizer(self.x_train, self.vocab_size) # Saving Tokenizer print('=============Saving Tokenizer================') print('Begin...') if not os.path.exists(self.vocab_folder): try: os.makedirs(self.vocab_folder) except OSError as e: raise IOError("Failed to create folders") tokenizer_json = self.tokenizer.to_json() with io.open(self.save_tokenizer_path, 'w', encoding= 'utf-8') as f: f.write(json.dumps(tokenizer_json, ensure_ascii= False)) print('Done!!!') # Saving label dict with open('label.json', 'w') as f: json.dump(self.label_dict, f) # get max_len self.max_len = self.get_max_len(self.x_train) # tokenizing self.x_train = np.array(self.tokenize(self.tokenizer, self.x_train, self.max_len)) self.x_val = np.array(self.tokenize(self.tokenizer,self.x_val, self.max_len)) return self.x_train, self.x_val, self.y_train, self.y_val
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_dataset(self):\n print(\"reading data of images currently , please wait......\")\n x_train, y_train, _ = get_images(self.train_directory)\n x_test, y_test, _ = get_images(self.test_directory)\n x_train, y_train = image_subset(self.num_classes, x_train, y_train)\n x_test, y_test = image_subset(self.num_classes, x_test, y_test)\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n self.x_train = x_train / 255\n self.x_test = x_test / 255\n self.y_train = utils.to_categorical(y_train, self.num_classes)\n self.y_test = utils.to_categorical(y_test, self.num_classes)", "def data():\n\n run_type = 'standardised'\n sr = 48000\n train_perc = 0.9\n\n if sr == 48000:\n time_dimension = 282\n if sr == 44100:\n time_dimension = 259\n\n x_train, y_train, x_test, y_test = essential.compile_dataset(run_type, sr)\n\n # reshape for CNN input\n x_train = np.array([x.reshape((128, time_dimension, 1)) for x in x_train])\n x_test = np.array([x.reshape((128, time_dimension, 1)) for x in x_test])\n\n # encoded \n encoder = LabelEncoder()\n encoder.fit(y_train)\n encoder.fit(y_test)\n y_train = encoder.transform(y_train)\n y_test = encoder.transform(y_test)\n\n return x_train, y_train, x_test, y_test", "def prepare_data(self):\n data = self._get_dataset(self.hparams.dataset_path)\n label_encoder = data[\"label_encoder\"]\n del data[\"label_encoder\"]\n\n click.secho(\"Building inputs and labels.\", fg=\"yellow\")\n datasets = {\n \"train\": defaultdict(list),\n \"valid\": defaultdict(list),\n \"test\": defaultdict(list),\n }\n for dataset_name, dataset in data.items():\n for sample in dataset:\n instance = self.build_input(\n self.tokenizer, sample[\"text\"], label_encoder, sample[\"label\"]\n )\n for input_name, input_array in instance.items():\n datasets[dataset_name][input_name].append(input_array)\n\n click.secho(\"Padding inputs and building tensors.\", fg=\"yellow\")\n tensor_datasets = {\"train\": [], \"valid\": [], \"test\": []}\n for dataset_name, dataset in datasets.items():\n dataset = self.pad_dataset(dataset, padding=self.tokenizer.pad_index)\n for input_name in MODEL_INPUTS:\n if input_name == \"labels\":\n tensor = torch.tensor(dataset[input_name], dtype=torch.float32)\n else:\n tensor = torch.tensor(dataset[input_name])\n tensor_datasets[dataset_name].append(tensor)\n\n self.train_dataset = TensorDataset(*tensor_datasets[\"train\"])\n self.valid_dataset = TensorDataset(*tensor_datasets[\"valid\"])\n self.test_dataset = TensorDataset(*tensor_datasets[\"test\"])\n click.secho(\n \"Train dataset (Batch, Candidates, Seq length): {}\".format(\n self.train_dataset.tensors[0].shape\n ),\n fg=\"yellow\",\n )\n click.secho(\n \"Valid dataset (Batch, Candidates, Seq length): {}\".format(\n self.valid_dataset.tensors[0].shape\n ),\n fg=\"yellow\",\n )\n click.secho(\n \"Test dataset (Batch, Candidates, Seq length): {}\".format(\n self.test_dataset.tensors[0].shape\n ),\n fg=\"yellow\",\n )", "def prep_data(self):\n\n self.fit_tokenizer(texts=self.texts)\n sequences = self.get_sequences(self.texts)\n self.text_data = pad_sequences(sequences, maxlen=self.MAX_SEQUENCE_LENGTH)\n\n self.labels = to_categorical(np.asarray(self.labels))\n print('Shape of data tensor:', self.text_data.shape)\n print('Shape of label tensor:', self.labels.shape)\n\n # split the data into a training set and a validation set\n indices = np.arange(self.text_data.shape[0])\n np.random.shuffle(indices)\n self.text_data = self.text_data[indices]\n self.labels = self.labels[indices]\n nb_validation_samples = 
int(self.VALIDATION_SPLIT * self.text_data.shape[0])\n\n x_train = self.text_data[:-nb_validation_samples]\n y_train = self.labels[:-nb_validation_samples]\n x_val = self.text_data[-nb_validation_samples:]\n y_val = self.labels[-nb_validation_samples:]\n\n return x_train,y_train, x_val, y_val", "def prepareData(self):\n\t\tprint ('')\n\t\tfrom keras.preprocessing.sequence import pad_sequences\n\t\tfrom sklearn.model_selection import train_test_split\n\t\tfrom keras.utils import to_categorical\n\t\timport numpy as np\n\n\t\tfrom sklearn.preprocessing import LabelBinarizer, LabelEncoder\n\n\t\tX_snt = [[self.word2idx[w] if w in self.word2idx else self.word2idx[self.word_unk_token] for w in s] for s in self.x_document]\n\t\ty_tag = [[self.tag2idx[t]] for t in self.y_document]\n\n\t\tX_snt = pad_sequences(maxlen=self.parameters['max_doc_len'], sequences=X_snt, padding='post', value=self.word2idx[self.word_pad_token])\n\t\ty_tag = to_categorical(y_tag, self.tags_len)\n\n\t\tprint (\"\\tRandom:\\t\", self.random)\n\t\tprint (\"\\tTest size:\\t\", self.split_train_test)\n\n\t\tself.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X_snt, y_tag, test_size=self.split_train_test, random_state=self.random)\n\n\t\tself.X_train = np.array(self.X_train)\n\t\tself.X_test = np.array(self.X_test)\n\t\tself.y_train = np.array(self.y_train)\n\t\tself.y_test = np.array(self.y_test)\n\n\t\tprint ('\\n\\tWords: {}\\t{}'.format(self.X_train.shape, self.X_test.shape) )\n\t\tprint ('\\tTags: {}\\t{}\\n'.format(self.y_train.shape, self.y_test.shape))", "def build_train_data(self,data_folder, cv=10, clean_string=False):\n revs = []\n\n vocab = defaultdict(float)\n print data_folder\n with codecs.open( data_folder, 'rb') as fi:\n for line in fi.readlines():\n line = line.decode('utf-8')\n parts = line.split(\"\\n\")[0].split(\"\\t\")\n if len(parts) > 1:\n sent = parts[1]\n rev = []\n rev.append(sent.strip())\n\n if clean_string:\n orig_rev = self.dc.clean_str(\" \".join(rev))\n else:\n orig_rev = \" \".join(rev).lower()\n #print orig_rev\n words = set(orig_rev.split())\n for word in words:\n vocab[word.lower()] += 1\n if len(orig_rev.split()) < 50 :\n\n datum = {\"y\":int(parts[0]),\n \"text\": orig_rev,\n \"num_words\": len(orig_rev.split()),\n \"split\": np.random.randint(0,cv)}\n revs.append(datum)\n # else:\n # print orig_rev\n\n\n return revs, vocab", "def prep_data_CNN(documents):\n t = Tokenizer()\n docs = list(filter(None, documents))\n print(\"Size of the documents in prep_data {}\".format(len(documents)))\n t.fit_on_texts(docs)\n\n vocab_size = len(t.word_counts)\n print(\"Vocab size {}\".format(vocab_size))\n encoded_docs = t.texts_to_sequences(docs)\n print(\"Size of the encoded documents {}\".format(len(encoded_docs)))\n e_lens = []\n for i in range(len(encoded_docs)):\n e_lens.append(len(encoded_docs[i]))\n lens_edocs = list(map(size, encoded_docs))\n max_length = np.average(lens_edocs)\n sequence_length = 1500 # Can use this instead of the above average max_length value\n max_length = sequence_length\n padded_docs = pad_sequences(\n encoded_docs, maxlen=int(max_length), padding='post')\n print(\"Length of a padded row {}\".format(padded_docs.shape))\n print(\"max_length {} and min_length {} and average {}\".format(\n max_length, min(lens_edocs), np.average(lens_edocs)))\n return padded_docs, max_length, vocab_size, t.word_index", "def build_training_data():\r\n for i in range(len(FILE_NAMES)):\r\n input_text = read_file(FILE_NAMES[i])\r\n list_of_word_lines = 
limiting_sentence_length(input_text)\r\n data = create_training_data_file(list_of_word_lines, LANGUAGE[i])\r\n write_training_data(data, LANGUAGE[i])\r\n merge_training_data()", "def build_data_set(self):\n if not self.assert_data_correct():\n self.download_all_data()\n self.unpack_rename_data()\n self.split_data_characters()\n self.clean_data_fragments()\n self.create_font_data()\n if not self.assert_train_augmented():\n self.augment_train_data()\n if not self.assert_style_data_correct():\n self.download_style_data()\n self.unpack_rename_data()", "def build(self):\n self.build_inputs()\n self.build_image_embeddings()\n self.build_seq_embeddings()\n self.build_encoder()\n self.build_prediction_model()\n self.setup_encoder_initializer()\n self.setup_global_step()\n self.list_trainable_variables()", "def prepare_data():\n gennet.prepare_data('Resnet50')", "def prepare_train_coco_data(args):\n image_dir, annotation_file, data_dir = args.train_coco_image_dir, args.train_coco_annotation_file, args.train_coco_data_dir\n batch_size = args.batch_size\n basic_model = args.basic_model\n num_roi = args.num_roi\n\n coco = COCO(annotation_file)\n\n img_ids = list(coco.imgToAnns.keys())\n img_files = []\n img_heights = []\n img_widths = []\n anchor_files = []\n gt_classes = []\n gt_bboxes = []\n\n for img_id in img_ids:\n img_files.append(os.path.join(image_dir, coco.imgs[img_id]['file_name'])) \n img_heights.append(coco.imgs[img_id]['height']) \n img_widths.append(coco.imgs[img_id]['width']) \n anchor_files.append(os.path.join(data_dir, os.path.splitext(coco.imgs[img_id]['file_name'])[0]+'_'+basic_model+'_anchor.npz')) \n\n classes = [] \n bboxes = [] \n for ann in coco.imgToAnns[img_id]: \n classes.append(coco_category_to_class[ann['category_id']]) \n bboxes.append([ann['bbox'][1], ann['bbox'][0], ann['bbox'][3]+1, ann['bbox'][2]+1]) \n\n gt_classes.append(classes) \n gt_bboxes.append(bboxes) \n \n print(\"Building the training dataset...\")\n dataset = DataSet(img_ids, img_files, img_heights, img_widths, batch_size, anchor_files, gt_classes, gt_bboxes, True, True)\n print(\"Dataset built.\")\n return coco, dataset", "def build(self):\n # open json, len 161,260\n at_json = open_json(self.json_names[0])\n link_json = open_json(self.json_names[1])\n # if need preprocessing, do it\n if self.args.img_preprocessing:\n print(\"resize imgs\")\n for i in tqdm(range(len(link_json))):\n image_url = \"image/\" + link_json[i][\"image_url_4x\"].split('/')[-1]\n img = Image.open(image_url)\n img = img.resize((224, 224))\n img.save(image_url)\n\n # create dataset\n itemlen = 0\n previd = 0\n for i in tqdm(range(len(link_json))):\n image_url = link_json[i][\"image_url_4x\"].split('/')[-1]\n uid = image_url.split('-')[0]\n if previd != uid:\n self.label.append(list(at_json[i].values())[2:])\n if i != 0:\n self.itemlen.append(itemlen)\n itemlen = 0\n self.input.append(f\"{self.frontpath}dataset/image/\" + image_url)\n previd = uid\n itemlen += 1\n self.itemlen.append(itemlen)\n self.separate()\n self.dataset = {\n 'train': self.train,\n 'validation': self.val,\n 'test': self.test\n }\n\n print('finished dataset')", "def _make_data(self):\n pdf_datasets_all = make_pdf_datasets(self.pdf_list, self.xlims, self.ylims, self.tlims, self.dims, 9)\n self.pdf_dataset = np.concatenate(pdf_datasets_all, axis = 0)\n self.PDE_dataset = make_PDE_dataset(self.num_collocation, self.xlims, self.ylims, self.tlims, self.dims)\n self.BC_dataset = make_BC_dataset(self.num_BC, self.xlims, self.ylims, self.tlims, self.dims)", "def 
buildDataSet():\n (x_train_origin, y_train_origin), (x_test_origin, y_test_origin) = mnist.load_data()\n\n assert K.image_data_format() == 'channels_last'\n x_train_origin = x_train_origin.reshape(x_train_origin.shape[0], h, w, 1)\n x_test_origin = x_test_origin.reshape(x_test_origin.shape[0], h, w, 1)\n\n dataset_train = []\n dataset_test = []\n\n #Sorting images by classes and normalize values 0=>1\n for n in range(nb_classes):\n images_class_n = np.asarray([row for idx,row in enumerate(x_train_origin) if y_train_origin[idx]==n])\n dataset_train.append(images_class_n/255)\n\n images_class_n = np.asarray([row for idx,row in enumerate(x_test_origin) if y_test_origin[idx]==n])\n dataset_test.append(images_class_n/255)\n\n return dataset_train,dataset_test,x_train_origin,y_train_origin,x_test_origin,y_test_origin", "def build_dataset(self): \n start_time = datetime.datetime.now()\n self.func_log(\"\\n\\tIn build_dataset()\")\n \n self.dict_feature = {}\n for key,value in self.key_points.items():\n category = []\n buff_time = datetime.datetime.now()\n for img in value:\n histogram = np.zeros(len(self.visual_words))\n for each_feature in img:\n ind = self.find_index(each_feature, self.visual_words)\n histogram[ind] += 1\n category.append(histogram)\n self.dict_feature[key] = category\n \n buff_time = datetime.datetime.now() - buff_time\n self.func_log(\"\\t\\tKEY: {} finish, Time cose:{}\".format(key, buff_time))\n end_time = datetime.datetime.now() \n self.func_log(\"\\n\\t\\tTime Cost: {}\\n\".format(end_time-start_time))", "def generate_data(groups):\n # get path list for the intended classification problem\n input_paths = generate_input_list(groups) \n X_lst = []\n y = []\n for p in input_paths:\n dp = pd.read_csv(p, sep = '\\t') #datapoint\n # Normalization \n # norm = lambda x: (x - x.mean()) / x.std()\n # dp = dp.apply(norm)\n # Min-Max scaling \n #dp_norm = (dp - dp.min()) / (dp.max() - dp.min())\n #dp = dp_norm.values\n if dp.isnull().sum().sum()>0:\n# print(p, dp.isnull().sum().sum())\n continue\n dp = dp.drop(['time'], axis = 1) \n dp = dp.iloc[:1600:4]\n\n if dp.isnull().sum().sum()>0:\n# print('after norm',p, dp.isnull().sum().sum())\n continue\n dp = dp.values\n\n X_lst.append(dp)\n sample_y = get_target(p, text= True)\n y.append(sample_y)\n X = np.stack(X_lst, axis=0)\n \n # convert y into int 0 and 1\n encoder = LabelEncoder()\n encoder.fit(y)\n y = encoder.transform(y)\n y_dummy = y\n # convert y into one-hot encoding\n if len(groups)>2:\n y_dummy = pd.get_dummies(y)\n y_dummy = y_dummy.values\n return X, y , y_dummy", "def train_build(df):\n print(\"Constructing training set...\")\n recent_labels = pr.labels.get_last_keypresses() #List of strings\n labeled_df = pr.labels.apply_labels_all(df, recent_labels)\n X, y = pr.build_model.make_training_set(labeled_df)\n\n return X, y", "def load_data(self) -> tuple:\n self.read_path = Path(os.environ[\"DATA_PATH\"]) / \"characters\"\n self.pretrain_path = Path(os.environ[\"FONT_DATA\"]) / \"training\"\n self.dataset_builder.build_data_set()\n X_pretrain, y_pretrain, X_train, y_train, X_dev, y_dev, X_test, y_test = tuple(\n [] for l in range(8)\n )\n\n for letter in self.hebrew.letter_li:\n pretrain_images = glob(f\"{Path(self.pretrain_path/letter)}/*.jpeg\")\n train_images = glob(f'{Path(self.read_path/\"train\"/letter)}/*.jpg')\n dev_images = glob(f'{Path(self.read_path/\"dev\"/letter)}/*.jpg')\n test_images = glob(f'{Path(self.read_path/\"test\"/letter)}/*.jpg')\n\n # pretrain data\n for img in pretrain_images:\n image = 
cv2.imread(img)\n image = cv2.resize(image, self.img_size)\n X_pretrain.append(image)\n y_pretrain.append(self.hebrew.letter_li.index(letter))\n\n # training data\n for img in train_images:\n image = cv2.imread(img)\n image = cv2.resize(image, self.img_size)\n X_train.append(image)\n y_train.append(self.hebrew.letter_li.index(letter))\n\n # dev data\n for img in dev_images:\n image = cv2.imread(img)\n image = cv2.resize(image, self.img_size)\n X_dev.append(image)\n y_dev.append(self.hebrew.letter_li.index(letter))\n\n # test data\n for img in test_images:\n image = cv2.imread(img)\n image = cv2.resize(image, self.img_size)\n X_test.append(image)\n y_test.append(self.hebrew.letter_li.index(letter))\n\n return (\n np.array(X_pretrain),\n np.array(y_pretrain),\n np.array(X_train),\n np.array(y_train),\n np.array(X_dev),\n np.array(y_dev),\n np.array(X_test),\n np.array(y_test),\n )", "def build(self):\n self.build_inputs()\n self.build_word_embeddings()\n self.build_encoder()\n self.build_fc()\n self.build_loss()\n self.build_global_step()", "def get_train_data(self, train_data):\n X = []\n Y = []\n\n # word 2 indices and tag 2 indices\n w2i = {} # word to index\n c2i = {} # char to index\n tag2idx = {} # tag2idx\n\n w2i[\"_UNK\"] = 0 # unk word / OOV\n c2i[\"_UNK\"] = 0 # unk char\n c2i[\"<w>\"] = 1 # word start\n c2i[\"</w>\"] = 2 # word end index\n \n \n num_sentences=0\n num_tokens=0\n for instance_idx, (words, tags) in enumerate(read_conll_file(train_data)):\n instance_word_indices = [] #sequence of word indices\n instance_char_indices = [] #sequence of char indices\n instance_tags_indices = [] #sequence of tag indices\n\n for i, (word, tag) in enumerate(zip(words, tags)):\n\n # map words and tags to indices\n if word not in w2i:\n w2i[word] = len(w2i)\n instance_word_indices.append(w2i[word])\n\n if self.c_in_dim > 0:\n chars_of_word = [c2i[\"<w>\"]]\n for char in word:\n if char not in c2i:\n c2i[char] = len(c2i)\n chars_of_word.append(c2i[char])\n chars_of_word.append(c2i[\"</w>\"])\n instance_char_indices.append(chars_of_word)\n\n if tag not in tag2idx:\n tag2idx[tag]=len(tag2idx)\n\n instance_tags_indices.append(tag2idx.get(tag))\n\n num_tokens+=1\n\n num_sentences+=1\n\n X.append((instance_word_indices, instance_char_indices)) # list of word indices, for every word list of char indices\n Y.append(instance_tags_indices)\n\n\n print(\"%s sentences %s tokens\" % (num_sentences, num_tokens), file=sys.stderr)\n print(\"%s w features, %s c features \" % (len(w2i),len(c2i)), file=sys.stderr)\n if self.c_in_dim == 0:\n print(\"char features disabled\", file=sys.stderr)\n\n assert(len(X)==len(Y))\n\n # store mappings of words and tags to indices\n self.set_indices(w2i, c2i, tag2idx)\n\n return X, Y", "def coco_raw_data(data_path=None):\n train= _read_chars(os.path.join(data_path, \"train_caps.txt\"))\n val = _read_chars(os.path.join(data_path, \"dev_caps.txt\"))\n test = _read_chars(os.path.join(data_path, \"test_caps.txt\"))\n chars = set(train)\n id_2_word = dict(enumerate(chars))\n word_to_id = {i: w for w, i in id_2_word.items()}\n train_data = _file_to_word_ids(train, word_to_id)\n valid_data = _file_to_word_ids(val, word_to_id)\n test_data = _file_to_word_ids(test, word_to_id)\n return train_data, valid_data, test_data, word_to_id, id_2_word", "def load_data():\n # Load and preprocess data\n x_text_train1, x_text_train2, x_text_dev1, x_text_dev2, y_train, y_dev = load_data_and_labels_without_shuffled()\n\n x_text_train1 = split_sentence(x_text_train1)\n x_text_train2 = 
split_sentence(x_text_train2)\n x_text_dev1 = split_sentence(x_text_dev1)\n x_text_dev2 = split_sentence(x_text_dev2)\n\n x_text_train1 = pad_sentences(x_text_train1)\n x_text_train2 = pad_sentences(x_text_train2)\n x_text_dev1 = pad_sentences(x_text_dev1)\n x_text_dev2 = pad_sentences(x_text_dev2)\n\n # sentences = x_text_train1 + x_text_train2 + x_text_dev1 + x_text_dev2\n # vocabulary, vocabulary_inv = build_vocab(sentences)\n # x_text_train1 = build_input_data(x_text_train1, vocabulary)\n # x_text_train2 = build_input_data(x_text_train2, vocabulary)\n # x_text_dev1 = build_input_data(x_text_dev1, vocabulary)\n # x_text_dev2 = build_input_data(x_text_dev2, vocabulary)\n\n x_train1 = sentence_word2vec(x_text_train1)\n x_train2 = sentence_word2vec(x_text_train2)\n x_dev1 = sentence_word2vec(x_text_dev1)\n x_dev2 = sentence_word2vec(x_text_dev2)\n\n y_train = np.array(y_train)\n y_dev = np.array(y_dev)\n # return [x_text_train1, x_text_train2, x_text_dev1, x_text_dev2, y_train, y_dev, vocabulary, vocabulary_inv]\n\n return [x_train1, x_train2, x_dev1, x_dev2, y_train, y_dev]", "def build_eval_data(self,data, num_classes, clean_string=False):\n revs = []\n num = [-1, 1]\n for line in data:\n line = line.decode('utf-8')\n sent = line\n rev = []\n rev.append(sent.strip())\n\n if clean_string:\n orig_rev = self.dc.clean_str(\" \".join(rev))\n else:\n orig_rev = \" \".join(rev).lower()\n #print orig_rev\n\n datum = {\"y\":num[np.random.randint(0, num_classes)],\n \"text\": orig_rev,\n \"num_words\": len(orig_rev.split()),\n }\n revs.append(datum)\n\n\n return revs", "def build_data(samples, labels):\n num_samples = len(samples)\n indexes = list(range(num_samples))\n np.random.shuffle(indexes)\n num_train = int(train_ratio * num_samples)\n # Get the indexes of train data and test data.\n train_indexes = indexes[0:num_train]\n test_indexes = indexes[num_train:num_samples]\n\n # Build the train data and test data.\n train_data = samples[train_indexes]\n train_labels = labels[train_indexes]\n test_data = samples[test_indexes]\n test_labels = labels[test_indexes]\n\n return train_data, test_data, \\\n train_labels, test_labels, \\\n train_indexes, test_indexes", "def __data_generation(self, list_ids_temp):\r\n # 'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)\r\n # Initialization\r\n X = np.empty((self.batch_size, *self.dim, self.n_channels))\r\n y = np.empty(self.batch_size, dtype = int)\r\n\r\n # Get data from .mat files\r\n for i, ID in enumerate(list_ids_temp):\r\n with h5py.File(ID) as file:\r\n _data = list(file['sequences'])\r\n\r\n # Convert to .mat structure to numpy array\r\n _npData = np.array(_data)\r\n _allSequences = np.transpose(_npData)\r\n\r\n # Reshape the numpy array to size : ( # of sequences, # of rows, # of columns, # of channels)\r\n X[i,] = np.reshape(_allSequences, (15, 16, 3200, 1)) # sequences\r\n\r\n # Store class\r\n y[i] = self.labels[ID]\r\n\r\n return X, tf.keras.utils.to_categorical(y, num_classes = self.n_classes)", "def build_enru_custom(self):\n train_data_file = self.data_dir + '/' + enru_paracrawl\n eval_data_file = self.data_dir + '/' + enru_newscomm\n train_data = tf.data.experimental.CsvDataset(\n [train_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n field_delim='\\t',\n use_quote_delim=False)\n train_data = train_data.cache() # only read once\n eval_data = tf.data.experimental.CsvDataset(\n [eval_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n 
field_delim='\\t',\n use_quote_delim=False)\n\n eval_data = eval_data.take(3000)\n eval_data = eval_data.cache()\n def to_features_dict(eng, rus):\n return {'inputs': eng, 'targets': rus}\n\n train_data = train_data.map(to_features_dict)\n eval_data = eval_data.map(to_features_dict)\n\n self.default_builder_obj = None\n\n return train_data, eval_data", "def generate_train_test_data(data_dir = '../../att_faces'):\n\n train_data = [ [ read_image('%s/s%d/%d.pgm'%( data_dir, i, j)) for j in range(1,11)] for i in range(1, 36)]\n test_data = [ [ read_image('%s/s%d/%d.pgm'%( data_dir, i, j)) for j in range(1,11)] for i in range(36, 41)]\n \n true_combinations_train = generate_true_combinations(train_data)\n false_combinations_train = generate_false_combination(train_data, int(len(true_combinations_train) / len(train_data)), 10)\n \n true_combinations_test = generate_true_combinations(test_data)\n false_combinations_test = generate_false_combination(test_data, int(len(true_combinations_test) / len(test_data)), 10)\n \n return prepare_to_classifier(true_combinations_train, false_combinations_train, true_combinations_test, false_combinations_test)", "def easydatagen():\n\n # Reading in the training file\n data = pd.read_json('train.json')\n\n # The set of different cuisines\n cuisines = data.cuisine.unique()\n\n # To find the different ingredients, we need to clean them up a little.\n def clean(string) :\n s = string.replace('-',' ') # read low-fat the same as low fat\n s = string.replace('&', 'and') # read & and and as the same\n s = re.sub('\\((.*?)\\)', '', s) # remove everythin g in brackets\n s = re.sub('\\d{1,2}\\%', '', s) # remove things of the form d% or dd%, where d is a digit\n s = ' '.join(s.split()) # remove extra white spaces\n\n return s\n\n ing_list = data.ingredients.values.tolist()\n raw_ingredients = [clean(x) for ing in ing_list for x in ing]\n\n ingredients = sorted(set(raw_ingredients))\n\n # build a dictionary that to each ingredient assigns its index\n ingredient_index = {}\n for i in range(0,len(ingredients)) :\n ingredient_index[ingredients[i]] = i\n\n # the same for cuisines\n cuisine_index = {}\n for i in range(0, len(cuisines)) :\n cuisine_index[cuisines[i]] = i\n\n def ingredients_to_vector(ings) :\n vect = np.zeros(len(ingredients))\n for ing in ings :\n vect[ingredient_index[clean(ing)]] = 1\n\n return vect\n\n def cuisine_to_vector(cus) :\n vect = np.zeros(20)\n vect[cuisine_index[cus]] = 1\n return vect\n\n vect_list = [ingredients_to_vector(ing) for ing in ing_list]\n target_list = [cuisine_to_vector(cus) for cus in data.cuisine.values.tolist()]\n\n # Define training data\n X = np.c_[vect_list]\n Y = np.c_[target_list]\n\n Y_num = np.zeros((Y.shape[0]))\n for i in range(Y.shape[0]):\n Y_num[i] = np.argmax(Y[i])\n\n x_train, x_test, y_train, y_test = train_test_split(X, Y_num, test_size = 0.2)\n\n return x_train, x_test, y_train, y_test", "def _create_data():\n tf.logging.info(\"Create records..\")\n train, val, test = util.load_data(data_dir, FLAGS[\"is_aug\"])\n tf.logging.info(\"Dataset size: Train-{} Test-{} Val-{}\".format(len(train), len(test), len(val)))\n return train, val, test", "def build_character_cnn(model_hyperparameters=None, verbose=None):\r\n if model_hyperparameters is None:\r\n model_hyperparameters = _dutils.load_dictionary('model_hyperparameters.json')\r\n '''\r\n Load hyperparameter-specific values from JSON file.\r\n '''\r\n #The size of the characater vocabulary\r\n vocabulary_size = model_hyperparameters.get(\"vocabulary_size\")\r\n #The max 
length of the text. Set as 1014 in the original.\r\n text_length = model_hyperparameters.get(\"text_length\")\r\n #Number of filters for each convolutional layer\r\n num_filters = model_hyperparameters.get(\"num_filters\")\r\n #The threshold for the ReLU activation layers\r\n threshold = model_hyperparameters.get(\"relu_threshold\")\r\n #Dropout probability for Dropout layers\r\n dropout_p = model_hyperparameters.get(\"dropout_percent\")\r\n #Embedding output dimension. Implementation sets it equal to vocabulary_size\r\n embed_dim = model_hyperparameters.get(\"embedding_dimension\")\r\n '''\r\n Values below specify the architecture.\r\n These aren't stored in the JSON file due to\r\n architectutre constraints with layers and\r\n kernel sizes.\r\n '''\r\n #The number of units for each dense layer minus output layer\r\n fully_connected_layers = [128,64]\r\n '''\r\n conv_layers is a list of pairs.\r\n First component refers to kernel size.\r\n Second component refers to the size of\r\n the MaxPooling1D layer (-1 indicates said layer is not present).\r\n '''\r\n conv_layers = [[7, 3], [3,-1], [3,-1], [3,-1], [3, 3]]\r\n #Input layer\r\n inputs = Input(shape=(text_length,), name='sent_input', dtype='int32')\r\n #Embedding layers\r\n x = Embedding(vocabulary_size + 1, embed_dim, input_length=text_length, mask_zero=True)(inputs)\r\n #Convolution layers\r\n '''\r\n First Conv1D layer + MaxPooling is separate in case\r\n changes are made upstream. Also it was used to test out\r\n TimeDistributed functionality.\r\n '''\r\n x = (Convolution1D(num_filters, 7))(x)\r\n x = (MaxPooling1D(3))(x)\r\n for cl in conv_layers:\r\n x = (Convolution1D(num_filters, cl[0]))(x)\r\n x = ThresholdedReLU(threshold)(x)\r\n if cl[1] != -1:\r\n x = (MaxPooling1D(cl[1]))(x)\r\n\r\n x = Flatten()(x)\r\n # #Fully connected layers\r\n for fl in fully_connected_layers:\r\n '''\r\n Original architecture did not use L2 regularization.\r\n However, empirical results show that, for my dataset\r\n it works well in handling overfitting.\r\n '''\r\n x = Dense(fl, kernel_regularizer=regularizers.l2(0.0001))(x)\r\n x = ThresholdedReLU(threshold)(x)\r\n '''\r\n Original architecture had dropout at 50%.\r\n This seemed to be too high for my dataset, and\r\n it resulted in underfitting.\r\n '''\r\n x = Dropout(dropout_p)(x)\r\n # #Output layer\r\n predictions = Dense(vocabulary_size, activation='softmax')(x)\r\n # Build and compile model\r\n model = Model(inputs=inputs, outputs=predictions) \r\n if verbose:\r\n model.summary()\r\n return model", "def _build_datasets(self):\n self._build_datasets_sis3302()\n self._build_datasets_sis3305()", "def prepare_data(train_x, train_y, dev_x, dev_y, test_x, testy):\n train_x = torch.FloatTensor(train_x).cuda()\n train_y = torch.FloatTensor(train_y).cuda()\n dev_x = torch.FloatTensor(dev_x).cuda()\n dev_y = torch.FloatTensor(dev_y).cuda()\n test_x = torch.FloatTensor(test_x).cuda()\n test_y = torch.FloatTensor(testy).cuda()\n return train_x, train_y, dev_x, dev_y, test_x, test_y", "def prepare_data_for_training(args):\n # Form the train/test splits and write them to disk\n dataset = data.Dataset(args)\n # get image classes and image counts in each class\n label_map = dataset.get_class_info()\n class_count = len(list(label_map.values()))\n # split the data and store it in log dir\n df_train, df_test = dataset.split_dataset()\n\n # perform dataset augmentations\n image_data = augment.Augmentation(args)\n # get the data gens for training and test images\n train_data_gen, _ = 
image_data.map_fn_train(df_train)\n test_data_gen, _ = image_data.map_fn_test(df_test)\n\n return train_data_gen, test_data_gen, df_train, df_test, class_count", "def __data_generation(self, list_IDs_temp):\n X = np.empty((self.batch_size, self.dim))\n Y = np.empty((self.batch_size, self.word_length, self.hot_enc_len))\n\n # Generate data\n for i, ID in enumerate(list_IDs_temp): # The enumerate() function adds a counter to an iterable.\n word = self.labels.index2word[ID]\n # Store sample\n X[i, ] = self.labels[word]\n # Store class\n char_hot_enc_pad = self.word_2_seq_hot_enc_sample(word)\n Y[i] = char_hot_enc_pad\n return X.reshape(self.batch_size, self.dim), Y", "def build_dataset(words):\n count = []\n # count.extend(collections.Counter(words).most_common(n_words - 1))\n count.extend(collections.Counter(words).most_common())\n dictionary = dict()\n for word, _ in count:\n dictionary[word] = len(dictionary)\n data = list()\n # unk_count = 0\n for word in words:\n index = dictionary.get(word, 0)\n # if index == 0: # dictionary['UNK']\n # unk_count += 1\n data.append(index)\n # count[0][1] = unk_count\n reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n data = [data[::2],data[1::2]]\n new_data = list()\n for i in range(len(data[0])):\n new_data.append([data[0][i],data[1][i]])\n data = new_data\n vocabulary_size = len(dictionary)\n print(\"\\n\\ndictionary size = \")\n print(len(dictionary))\n return data, count, dictionary, reversed_dictionary, vocabulary_size", "def build_enru_custom_ft(self):\n eval_data_file = self.data_dir + '/' + enru_newscomm\n eval_data = tf.data.experimental.CsvDataset(\n [eval_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n field_delim='\\t',\n use_quote_delim=False)\n train_data = eval_data.skip(3000).take(6000)\n eval_data = eval_data.take(3000)\n\n eval_data = eval_data.cache()\n train_data = train_data.cache()\n def to_features_dict(eng, rus):\n return {'inputs': eng, 'targets': rus}\n\n train_data = train_data.map(to_features_dict)\n eval_data = eval_data.map(to_features_dict)\n\n self.default_builder_obj = None\n\n return train_data, eval_data", "def generate_train_data(comps: List[pd.DataFrame], concen_upper_bound=1000, num_per_combination=1000):\n cps = [to_int_index(c) for c in comps]\n cps = [zero_end_interpolation(c) for c in comps]\n cps = alignment(cps)\n cps = [scale_dataframe(c) for c in cps]\n\n samples = []\n for n_class in range(1, len(cps) + 1):\n comps_roller = ComponentRoller(cps, n_class)\n concen_roller = ConcentrationRoller(1, concen_upper_bound, n_class)\n for i in range(num_per_combination):\n picked_comps, label = comps_roller.roll()\n concen_vector = concen_roller.roll_unique(label)\n the_sample = pd.Series(name=label, data=np.sum(picked_comps * concen_vector, axis=1))\n samples.append(the_sample)\n if i % 100 == 0:\n print('组合数{}: 第{}个样本 --- 标签{},浓度比{}'.format(n_class, i, label, concen_vector))\n df = pd.concat(samples, axis=1)\n return df.values.T, np.array(_to_vectors(df.columns.tolist()))", "def __data_generation(self, list_ids_temp):\n # X : (n_samples, *dim, n_channels)\n # Initialization\n x = np.empty((self.batch_size, *self.dim, self.n_channels))\n y = np.empty(self.batch_size, dtype=int)\n\n # Generate data\n for i, image_path in enumerate(list_ids_temp):\n # Store sample\n x[i,] = self.__preprocess_inputs(image_path)\n\n # Store class\n if self.tar is None:\n y[i] = self.labels[Path(image_path)._parts[-2]]\n else:\n y[i] = 
self.labels[Path(image_path.name)._parts[-2]]\n\n return x, tf.keras.utils.to_categorical(y, num_classes=self.n_classes)", "def features_from_CNN(self):\n\n dataloader = self.datasetManager.get_dataloader()\n print(\"\\nFeatures obtention with CNN\")\n print(\"-\"*15)\n for i, batch in tqdm.tqdm(enumerate(dataloader)):\n img = self.to_device(batch[0])\n img_name = batch[2][0]\n \n temp = re.findall(r'\\d+', img_name)\n res = list(map(int, temp))\n X = res[-2]\n Y = res[-1]\n \n savepath = os.path.join(self.output_dir, 'data%i'%X)\n create_folder(savepath)\n \n out_CNN = self.network(img) \n \n torch.save(out_CNN, os.path.join(savepath,'features_tensor%i.pt'%Y))", "def prepare_val_coco_data(args):\n image_dir, annotation_file = args.val_coco_image_dir, args.val_coco_annotation_file\n\n coco = COCO(annotation_file)\n\n img_ids = list(coco.imgToAnns.keys())\n img_files = []\n img_heights = []\n img_widths = []\n\n for img_id in img_ids:\n img_files.append(os.path.join(image_dir, coco.imgs[img_id]['file_name']))\n img_heights.append(coco.imgs[img_id]['height']) \n img_widths.append(coco.imgs[img_id]['width']) \n\n print(\"Building the validation dataset...\")\n dataset = DataSet(img_ids, img_files, img_heights, img_widths)\n print(\"Dataset built.\")\n return coco, dataset", "def prepare(self):\n # get data from file\n train_data, test_data = return_speechacts()\n # y are the speechacts or 'labels'\n y_train = [t.split(' ')[0] for t in train_data]\n y_test = [t.split(' ')[0] for t in test_data]\n # x are the sentences\n x_train = [\" \".join(t.split(' ')[1:]) for t in train_data]\n x_test = [\" \".join(t.split(' ')[1:]) for t in test_data]\n # use the tokenizer and padding from keras to assign arrays of integers\n # to sentences, out of vocabulary token is 1\n self.tokenizer_x = Tokenizer(oov_token=1)\n self.tokenizer_x.fit_on_texts(x_train + x_test)\n xt_train = self.tokenizer_x.texts_to_sequences(x_train)\n xt_train = pad_sequences(xt_train, maxlen=self.sentence_size,\n dtype='int32')\n xt_test = self.tokenizer_x.texts_to_sequences(x_test)\n xt_test = pad_sequences(xt_test, maxlen=self.sentence_size,\n dtype='int32')\n # vocab is the number of words in our vocabulary\n self.vocab = len(self.tokenizer_x.word_index) + 1\n # do the same for labels\n self.tokenizer_y = Tokenizer()\n self.tokenizer_y.fit_on_texts(y_train + y_test)\n yt_train = self.tokenizer_y.texts_to_sequences(y_train)\n yt_train = [t[0] for t in yt_train]\n yt_train = to_categorical(yt_train)\n yt_test = self.tokenizer_y.texts_to_sequences(y_test)\n yt_test = [t[0] for t in yt_test]\n yt_test = to_categorical(yt_test)\n self.x_train = x_train\n self.y_train = y_train\n self.x_test = x_test\n self.y_test = y_test\n self.xt_train = xt_train\n self.yt_train = yt_train\n self.xt_test = xt_test\n self.yt_test = yt_test", "def build_data(seed):\n rs = np.random.RandomState(seed)\n\n def y(x):\n \"\"\" y(x) = 1 + 0.3 * x_1 - 0.6 * x_2^2 - 0.2 * x_3^3 + 0.5 x_4^4 \"\"\"\n x1, x2, x3, x4 = x[:, 0], x[:, 1], x[:, 2], x[:, 3]\n return 1 + 0.3 * x1 - 0.6 * x2 ** 2 - 0.2 * x3 ** 3 + 0.5 * x4 ** 4\n\n xtrain = rs.rand(10000, 4)\n xtest = rs.rand(1000, 4)\n ytrain = y(xtrain) + rs.rand(10000) / 10\n ytest = y(xtest) + rs.rand(1000) / 10\n return xtrain, xtest, ytrain, ytest", "def prepare_data(self, context_size, model_name):\n self.context_size = context_size\n data_x = []\n data_y = []\n oob = self.word2idx['OOB']\n\n for item in self.docs:\n data = [oob] * context_size + self.doc2token(item) + [oob] * context_size #padding\n for i in 
range(context_size, len(data) - context_size):\n data_x.append(data[i - context_size: i] + data[i + 1: i + context_size + 1])\n data_y.append(data[i])\n \n if model_name.lower() == 'skipgram':\n data_x, data_y = data_y, data_x\n self.data_x = Variable(torch.LongTensor(data_x))\n self.data_y = Variable(torch.LongTensor(data_y))\n logging.info(f'data preprocessed, data shape: {self.data_x.shape}, {self.data_y.shape}')", "def make_sub_data_train(data, config):\n sub_input_sequence = []\n sub_label_sequence = []\n\n\tfor scale in range(2,5):\t \n\n\t for i in range(len(data)):\n\n\t\t#input_, label_, = preprocess(data[i], config.scale) # do bicbuic only one scale\n\t\tinput_, label_, = preprocess(data[i], scale) # do bicbuic turn around all scale\n\t\n\t\tif len(input_.shape) == 3: # is color\n\t\t h, w, c = input_.shape\n\t\telse:\n\t\t h, w = input_.shape # is grayscale\n\t\n\t\t#checkimage(input_)\t\t\n\n\t\tnx, ny = 0, 0\n\t\tfor x in range(0, h - config.image_size + 1, config.stride):\n\t\t nx += 1; ny = 0\n\t\t for y in range(0, w - config.image_size + 1, config.stride):\n\t\t\tny += 1\n\n\t\t\tsub_input = input_[x: x + config.image_size, y: y + config.image_size] # 41 * 41\n\t\t\tsub_label = label_[x: x + config.label_size, y: y + config.label_size] # 41 * 41\n\n\n\t\t\t# Reshape the subinput and sublabel\n\t\t\tsub_input = sub_input.reshape([config.image_size, config.image_size, config.c_dim])\n\t\t\tsub_label = sub_label.reshape([config.label_size, config.label_size, config.c_dim])\n\n\t\t\t# Normialize\n\t\t\tsub_input = sub_input / 255.0\n\t\t\tsub_label = sub_label / 255.0\n\t\t\t\n\t\t\t#cv2.imshow(\"im1\",sub_input)\n\t\t\t#cv2.imshow(\"im2\",sub_label)\n\t\t\t#cv2.imshow(\"residual\",sub_input - sub_label)\n\t\t\t#cv2.waitKey(0)\n\n\t\t\t# Rotate 90,180,270\n\t\t\tfor angle in range(0,360,90):\t\n\t\t\t\tsub_input = rotate(sub_input,angle)\t\n\t\t\t\tsub_label = rotate(sub_label,angle)\t\n\t\t\n\t\t\t\t# Add to sequence\n\t\t\t\tsub_input_sequence.append(sub_input)\n\t\t\t\tsub_label_sequence.append(sub_label)\n\n\t\t\t\tcv2.imshow(\"im1\",sub_input)\n\t\t\t\tcv2.imshow(\"im2\",sub_label)\n\t\t\t\tcv2.imshow(\"residual\",sub_input - sub_label)\n\t\t\t\tcv2.waitKey(1)\n\t\t\t\t\n\n \n # NOTE: The nx, ny can be ignore in train\n return sub_input_sequence, sub_label_sequence, nx, ny", "def prepare_data(train_csv, test_csv):\n\n train = pd.read_csv(train_csv)\n test = pd.read_csv(test_csv)\n train = train[test.shape[0]:]\n valid = train[0:test.shape[0]]\n\n x_train = train.drop(columns=\"label\") / 255\n y_train = train.label\n x_valid = valid.drop(columns=\"label\") / 255\n y_valid = valid.label\n x_test = test.drop(columns=\"label\") / 255\n y_test = test.label\n\n y_train = tf.keras.utils.to_categorical(y_train)\n y_valid = tf.keras.utils.to_categorical(y_valid)\n y_test = tf.keras.utils.to_categorical(y_test)\n x_train = x_train.values\n x_valid = x_valid.values\n x_test = x_test.values\n\n return x_train, y_train, x_valid, y_valid, x_test, y_test", "def generate_dataset():\n if not os.path.exists(\"../data/COVID-19/COVID-19.npy\"):\n print(\"Processing Training Data.\")\n training_data = get_training_data('../data/COVID-19/train')\n print(\"Processing Test Data.\")\n test_data = get_training_data('../data/COVID-19/test')\n\n x_train, y_train, x_test, y_test = [], [], [], []\n\n for feature, label in training_data:\n x_train.append(feature)\n y_train.append(label)\n\n for feature, label in test_data:\n x_test.append(feature)\n y_test.append(label)\n\n # Normalize the data\n 
x_train = np.array(x_train) / 255\n x_test = np.array(x_test) / 255\n\n # resize data for deep learning\n x_train = x_train.reshape(-1, 3, img_size, img_size)\n y_train = np.array(y_train)\n x_test = x_test.reshape(-1, 3, img_size, img_size)\n y_test = np.array(y_test)\n\n # With data augmentation to prevent overfitting and handling the imbalance in dataset\n dataset = {\"x_train\": x_train, \"y_train\": y_train, \"x_test\": x_test, \"y_test\": y_test}\n np.save(\"../data/COVID-19/COVID-19.npy\", dataset)\n else:\n dataset = np.load(\"../data/COVID-19/COVID-19.npy\", allow_pickle=True).item()\n x_train, y_train, x_test, y_test = dataset[\"x_train\"], dataset[\"y_train\"], dataset[\"x_test\"], dataset[\"y_test\"]\n\n x_train_tensor = torch.from_numpy(x_train)\n x_train_tensor = x_train_tensor.type(torch.FloatTensor)\n y_train_tensor = torch.from_numpy(y_train)\n y_train_tensor = y_train_tensor.type(torch.LongTensor)\n x_test_tensor = torch.from_numpy(x_test)\n x_test_tensor = x_test_tensor.type(torch.FloatTensor)\n y_test_tensor = torch.from_numpy(y_test)\n y_test_tensor = y_test_tensor.type(torch.LongTensor)\n\n train_dataset = TensorDataset(x_train_tensor, y_train_tensor)\n test_dataset = TensorDataset(x_test_tensor, y_test_tensor)\n\n return train_dataset, test_dataset", "def dataset_preparation():\r\n with open('../data/patterns_num.txt', 'r') as f:\r\n data = f.readlines()\r\n X, Y = [], []\r\n for line in data:\r\n x, y = line.split('\\t')\r\n if len(x) > 5 and x not in X: # better results are achieved excluding short query patterns\r\n X.append(x.replace(\"X\", \"\").replace(\"Y\", \"\").lower())\r\n Y.append(int(y.replace('\\n', '')))\r\n test_size = 0.2\r\n # print('Test size:', test_size, '\\nWrong classifications:\\n')\r\n\r\n X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size, random_state=42, stratify=Y)\r\n return X_train, y_train, X_test, y_test", "def prep_data(labels, image_root):\n labels = split_description(labels)\n labels = convert_plastics(labels)\n\n # Encoding shape and color data\n labels['Shape'] = encode_column(labels[['Shape']])\n labels['Color'] = encode_column(labels[['Color']])\n labels['isPlastic'] = encode_column(labels[['isPlastic']])\n labels = add_filenames(labels, image_root)\n labels = labels.dropna().reset_index()\n\n return labels", "def _generate_embeddings(self, config): \n tr_parts = []\n te_parts = []\n all_columns = []\n for comp in self.components:\n tr_tmp, te_tmp, cols = comp.generate(config)\n if cols != None:\n print(tr_tmp.shape,te_tmp.shape)\n tr_parts.append(tr_tmp)\n te_parts.append(te_tmp)\n all_columns += cols\n X_train = np.concatenate(tr_parts, axis=1)\n X_test = np.concatenate(te_parts, axis=1)\n print(\"Concatenated size:\", X_train.shape, X_test.shape)\n self.feature_columns = all_columns\n return X_train, X_test", "def convert_train(ndata, ndim):\r\n print ('Converting training data ... 
')\r\n x = np.zeros([ndata, ndim])\r\n y = np.zeros([ndata])\r\n \r\n for i in range(0, len(flist) - 2):\r\n batchn = filepath + flist[i]\r\n temp = read(batchn)\r\n x[i * 10000:(i + 1) * 10000] = temp['data']\r\n y[i * 10000:(i + 1) * 10000] = temp['labels']\r\n \"\"\"\r\n i=0\r\n batchn = filepath + flist[i]\r\n\r\n temp = read(batchn)\r\n\r\n x[i * 10000:(i + 1) * 10000] = temp['data']\r\n\r\n y[i * 10000:(i + 1) * 10000] = temp['labels']\r\n \"\"\"\r\n return x, y", "def build_data(self, data_folder, cv=10, clean_string=False):\n revs = []\n # pos_file = loadmodel(data_folder[0])\n # neg_file = loadmodel(data_folder[1])\n pos_texts = loadmodel(data_folder[0]).get(\"content\")\n neg_texts = loadmodel(data_folder[1]).get(\"content\")\n vocab = defaultdict(float)\n happyList = [ \":-)\", \":)\", \":D\", \":o)\", \":]\", \":3\", \":c)\", \":>\", \"=]\", \"8)\", \"=)\", \":}\", \":^)\", \":?)\", \":-)\", \": )\", \": D\", \": o)\", \":]\", \": 3\", \":c)\", \":>\", \"= ]\", \"8 )\", \"= )\", \": }\", \":^)\", \":?)\" ]\n sadList = [ \">:[\", \":-(\", \":(\", \":-c\", \":c\", \":-<\", \":?C\", \":<\", \":-[\", \":[\", \":{\",\">:[\", \":-(\", \": (\", \":-c\", \": c\", \": -<\", \": ?C\", \": <\", \": -[\", \": [\", \": {\" ]\n for line in pos_texts:\n rev = []\n rev.append(line.strip())\n\n if clean_string:\n orig_rev = self.dc.clean_str(\" \".join(rev))\n else:\n orig_rev = \" \".join(rev).lower()\n #print orig_rev\n words = set(orig_rev.split())\n for word in words:\n if word in happyList or word in sadList:\n pass\n else:\n vocab[word] += 1\n datum = {\"y\":1,\n \"text\": orig_rev,\n \"num_words\": len(orig_rev.split()),\n \"split\": np.random.randint(0,cv)}\n revs.append(datum)\n\n for line in neg_texts:\n rev = []\n rev.append(line.strip())\n if clean_string:\n orig_rev = self.dc.clean_str(\" \".join(rev))\n else:\n orig_rev = \" \".join(rev).lower()\n words = set(orig_rev.split())\n for word in words:\n if word in happyList or word in sadList:\n pass\n else:\n vocab[word] += 1\n datum = {\"y\":0,\n \"text\": orig_rev,\n \"num_words\": len(orig_rev.split()),\n \"split\": np.random.randint(0,cv)}\n revs.append(datum)\n return revs, vocab", "def build_input_data(sentences, labels, vocabulary):\n # x = np.array([[vocabulary[word] for word in sentence] for sentence in sentences])\n\n # Uncomment this if we have unprecedented tokens\n for sentence_i in range(len(sentences)):\n for word_j in range(len(sentences[sentence_i])):\n if sentences[sentence_i][word_j] in vocabulary:\n sentences[sentence_i][word_j] = vocabulary[sentences[sentence_i][word_j]]\n else:\n sentences[sentence_i][word_j] = 1\n x = np.array(sentences)\n y = np.array(labels)\n return [x, y]", "def newsgroup_featurize(data_list):\n # TODO: Implement featurization of input.\n all_text = data_list[\"train\"][\"input\"] + data_list[\"test\"][\"input\"] + data_list[\"dev\"][\"input\"]\n word_dict = word_count(all_text)\n bow_noun_features = bow_noun(word_dict) # 11,925 features\n train_input = np.array([text_to_bow_noun_vector(text, bow_noun_features) for text in data_list[\"train\"][\"input\"]])\n dev_input = np.array([text_to_bow_noun_vector(text, bow_noun_features) for text in data_list[\"dev\"][\"input\"]])\n test_input = np.array([text_to_bow_noun_vector(text, bow_noun_features) for text in data_list[\"test\"][\"input\"]])\n return train_input, dev_input, test_input", "def train_data_constructor(learning_files_list):\n\n if learning_files_list is None:\n train_data = None\n else:\n full_learning_data_frame = 
pd.concat(learning_files_list[i].raw_data for i in range(len(learning_files_list)))\n full_learning_data_frame = full_learning_data_frame.sample(frac=1)\n train_data = TrainData(selex_str_len=len(learning_files_list[0].raw_data['DNA_Id'].iloc[0]), selex_files_num=len(learning_files_list))\n train_data.set_one_hot_matrix(dna_data=full_learning_data_frame['DNA_Id'],\n primary_selex_sequence=learning_files_list[0].primary_selex_sequence)\n train_data.set_enrichment_matrix(enrichment_data=np.asarray(full_learning_data_frame['cycle_matrix']))\n return train_data", "def populate_data(self):\n training_labels = open('./digitdata/traininglabels', 'r')\n training_images = open('./digitdata/trainingimages', 'r')\n count = 0\n with training_images as ti:\n data = list(csv.reader(ti))\n data = [i for i in data if i]\n for label in training_labels:\n l = 0\n while l < 28:\n coord = count + l\n w = 0\n while w < 28:\n int_label = int(label)\n if data[coord][0][w] == \"+\":\n self.class_probabilities[int_label][l][w][0] += 1\n if data[coord][0][w] == \"#\":\n self.class_probabilities[int_label][l][w][1] += 1\n if data[coord][0][w] == \" \":\n self.class_probabilities[int_label][l][w][2] += 1 \n w += 1\n l += 1\n count += 28 \n print self.class_probabilities", "def build_newscomment_train_var(self):\n logging.info('Building news commentary only dataset')\n logging.info(self.configs[NEWS_COMMENTARY])\n builder = tfds.builder(\n WMT_BASE_DATASET_NAME,\n config=self.configs[NEWS_COMMENTARY],\n data_dir=self.data_dir)\n self.default_builder_obj = builder\n shard_spec = self.build_shard_spec(\n start=84000, percent=False, max_size=84000 + self.newscommentary_size)\n logging.info('Training on TFDS dataset %s with split %s',\n WMT_BASE_DATASET_NAME, 'train' + shard_spec)\n train_data = builder.as_dataset(\n split='train' + shard_spec, shuffle_files=False)\n\n valid_shard_spec = self.build_shard_spec(\n max_size=9000, percent=False, start=6000)\n eval_data = builder.as_dataset(\n split='train' + valid_shard_spec, shuffle_files=False)\n return train_data, eval_data", "def _init_dataset(self):\n champions = set()\n\n for name in os.listdir(self.data_root):\n label = name.split(\".\")[0]\n champions.add(label)\n self.image_paths += [(os.path.join(self.data_root, name), [[label]])]\n\n # self.encoder = self.encoder.fit(np.array(list(champions)).reshape(-1, 1))\n self.encoder = self.encoder.fit(list(champions))", "def prepare_data(self):\n # Set up the path\n self.path_target_train = os.path.join(self.data_dir, self.train_path_file_target + \".pkl\")\n self.path_target_test = os.path.join(self.data_dir, self.test_path_file_target + \".pkl\")\n\n if not os.path.exists(self.path_target_train) or not os.path.exists(self.path_target_test):\n # Create vocabularies of the appropriate sizes.\n self.create_vocabulary(self.train_path_file)\n\n # Create token ids for the training data.\n input_train_path = self.train_path_file\n target_train_path = self.train_path_file_target\n train_input, train_input_length, train_labels = self.data_to_token_ids(input_train_path, target_train_path)\n\n # Create token ids for the validation data.\n input_test_path = self.test_path_file\n target_test_path = self.test_path_file_target\n test_input, test_input_length, _ = self.data_to_token_ids(input_test_path, target_test_path, train=False)\n\n # Collect data into a list\n training_data = [train_input, train_input_length, train_labels]\n test_data = [test_input, test_input_length]\n\n # Save all the data\n with open(self.path_target_train, 'wb') 
as f:\n pickle.dump(training_data,f)\n with open(self.path_target_test, 'wb') as f:\n pickle.dump(test_data, f)\n else:\n # Load data\n with open(self.path_target_train, 'rb') as f:\n training_data = pickle.load(f)\n with open(self.path_target_test, 'rb') as f:\n test_data = pickle.load(f)\n\n # Initialize vocabulary\n self.initialize_vocabulary()\n\n # Convert list into a numpy array - train data\n train_input = pd.DataFrame(training_data[0]).fillna(value=0).astype(int).values\n train_length_input = np.array(training_data[1], dtype=int)\n train_labels = np.array(training_data[2], dtype=int)\n\n # Convert list into a numpy array - test data\n test_input = pd.DataFrame(test_data[0]).fillna(value=0).astype(int).values\n test_length_input = pd.DataFrame(test_data[1]).fillna(value=0).astype(int).values\n\n # Printing maximum length\n print(\"Shape of the input training matrix {}\".format(str(train_input.shape)))\n print(\"Shape of the input test matrix {}\".format(str(test_input.shape)))\n\n # Copy the files\n self.copy_files()\n\n # Return output\n return train_input, train_length_input, train_labels, test_input, test_length_input", "def make_data(input_filepath, output_filepath):\n\n df_train = pd.read_csv(input_filepath+'train_u6lujuX_CVtuZ9i.csv', index_col=0)\n df_test = pd.read_csv(input_filepath+'test_Y3wMUE5_7gLdaTN.csv', index_col=0)\n print('Sizes', df_train.shape, df_test.shape)\n print(\"Outcome dispersion:\\n\", df_train['Loan_Status'].value_counts())\n\n\n # recode and save outcome vector\n y = df_train['Loan_Status'].map({'N': 0, 'Y': 1})\n\n del df_train['Loan_Status']\n\n # all in one dataframe\n df = pd.concat([df_train, df_test])\n print(df.shape)\n\n from src.features.build_features import make_features\n df = make_features(df)\n\n # Divide data on train and test again and save\n data_train = df[df.index.isin(df_train.index)]\n data_test = df[df.index.isin(df_test.index)]\n print(data_train.shape, data_test.shape)\n\n data_tmp = data_train.copy()\n data_tmp['y'] = y\n\n\n data_tmp.to_csv(output_filepath + 'train_ready.csv', index=False)\n data_test.to_csv(output_filepath + 'test_ready.csv', index=False)\n id_test = pd.DataFrame(data=df_test.index, columns=['Loan_ID'])\n id_test.to_csv(output_filepath + 'id_test.csv', index=False)", "def InitDataset(self):\n train_txt = 'ImageSets/Main/train.txt'\n val_txt = 'ImageSets/Main/val.txt'\n annotations = \"Annotations\"\n jpegimages = \"JPEGImages\"\n images_path = train_txt if (self.is_train) else val_txt \n images_path = readTxt(os.path.join(self.path, images_path))\n images_path.pop(-1)\n # rawdata format: [path_2_image, path_2_xml]\n rawData = list()\n for each in images_path:\n xml = os.path.join(self.path, annotations, each + '.xml')\n jpeg = os.path.join(self.path, jpegimages, each + '.jpg')\n rawData.append([jpeg, xml])\n return rawData", "def create_dataset(data_file, char_vocab, num_ep=3, force_create=False, maxlen=500):\n ep = [INS_TOKEN]\n pad_id = char_vocab[PAD_TOKEN]\n start_id = char_vocab[START_TOKEN]\n end_id = char_vocab[END_TOKEN]\n unk_id = char_vocab[UNKNOWN_TOKEN]\n\n tfr_file = re.sub(r'\\.([^\\.]+$)', '.tfrecord', data_file)\n \n if force_create or not os.path.exists(tfr_file):\n with io.open(data_file, 'r', encoding='utf-8') as fp:\n src, target = [], []\n src_lengths, target_lengths = [], []\n maxlen_src = 0\n # maxlen_target = 0\n tf.logging.info('Processing input file..')\n \n for i, line in enumerate(fp):\n if i % 10000 == 0:\n tf.logging.info('Read %d lines', i)\n if '\\t' in line:\n s, t = 
line.strip().lower().split('\\t')\n else:\n s = line.strip().lower()\n t = ''\n\n len_s = len(s)\n \n # Insert epsilons, basically spaces\n s_ex = list(reduce(lambda x,y: x + y, zip(list(s), *[ep*len_s for i in range(num_ep)])))\n \n if len(s_ex) + 2 < maxlen:\n maxlen_src = max(maxlen_src, len(s_ex) + 2)\n \n src.append([start_id] + [char_vocab.get(c, unk_id) for c in s_ex] + [end_id])\n target.append([start_id] + [char_vocab.get(c, unk_id) for c in t] + [end_id])\n \n src_lengths.append(len(src[-1]))\n target_lengths.append(len(target[-1]))\n\n \n tf.logging.info('Total items %d', len(src))\n tf.logging.info('Max source length is %d', maxlen_src)\n\n src = [s + [pad_id]*(maxlen_src - len(s)) for s in src]\n \n tf.logging.info('Creating TFRecord file %s..', tfr_file)\n writer = tf.python_io.TFRecordWriter(tfr_file)\n \n for i, (s, t, l_s, l_t) in enumerate(zip(src, target, src_lengths, target_lengths)):\n\n features = tf.train.Features(feature={\n 'input': tf.train.Feature(int64_list=tf.train.Int64List(value=s)),\n 'input_length': tf.train.Feature(int64_list=tf.train.Int64List(value=[l_s])),\n 'target': tf.train.Feature(int64_list=tf.train.Int64List(value=t)),\n 'target_length': tf.train.Feature(int64_list=tf.train.Int64List(value=[l_t]))\n })\n example = tf.train.Example(features=features)\n writer.write(example.SerializeToString())\n if i % 10000 == 0:\n tf.logging.info('Wrote %d lines', i)\n sys.stdout.flush()\n \n writer.close()\n \n\n return tfr_file", "def build_matrix(self):\n self.lb_make = LabelEncoder()\n self.lb_make.fit(self.Y_train)\n tokenizer = Tokenizer(num_words=2000)\n x_array_train = numpy.asarray(self.train['text'])\n x_array_test = numpy.asarray(self.test['text'])\n tokenizer.fit_on_texts(x_array_train)\n x_train_matrix = tokenizer.texts_to_matrix(x_array_train, mode='count')\n x_test_matrix = tokenizer.texts_to_matrix(x_array_test, mode='count')\n y_train_numbers = self.lb_make.transform(self.Y_train)\n y_test_numbers = self.lb_make.transform(self.Y_test)\n y_train_matrix = keras.utils.to_categorical(y_train_numbers, 3)\n y_test_matrix = keras.utils.to_categorical(y_test_numbers, 3)\n self.tokenizer = tokenizer\n return x_train_matrix, x_test_matrix, y_train_matrix, y_test_matrix", "def build_newscomment_eval_train(self):\n logging.info('Building news commentary only dataset')\n logging.info(self.configs[NEWS_COMMENTARY_FT])\n builder = tfds.builder(WMT_BASE_DATASET_NAME,\n config=self.configs[NEWS_COMMENTARY_FT],\n data_dir=self.data_dir)\n self.default_builder_obj = builder\n new_train_data, _ = self.build_newscomment_var()\n return new_train_data, new_train_data", "def build_input_data(sentences, labels, vocabulary, pos1_sentences, pos2_sentences):\n x = np.array([[vocabulary[word] for word in sentence] for sentence in sentences])\n y = np.array(labels)\n a1 = np.array(pos1_sentences)\n a2 = np.array(pos2_sentences)\n return [x, y, a1, a2]", "def build_input_data(sentences, labels, vocabulary):\n x = np.array([[vocabulary[word] for word in sentence] for sentence in sentences])\n y = np.array(labels)\n return [x, y]", "def make_dataset():\n\n\tnumberOfTrials = dataset_params.num_of_samples\n\tnumberOfTrials_train = int(numberOfTrials*0.8)\n\tnumberOfTrials_test = int(numberOfTrials*0.2)\n\n\tprint(\"==================================================\")\n\tprint(\"1. 
Generating Train images ......\")\n\tprint(\"\\nTrain image per variation\", numberOfTrials_train)\n\tmakeDataset(numberOfTrials_train, \"train\")\n\n\tprint(\"==================================================\")\n\tprint(\"2. Generating Test images ......\")\n\tprint(\"\\nTest image per variation\", numberOfTrials_test)\n\tmakeDataset(numberOfTrials_test, \"test\")\n\n\tprint(\"==================================================\")\n\tprint(\"Done!!!\")", "def _data_generation(self, batch_data):\n # Initialization\n batch_x = []\n batch_y = defaultdict(list)\n\n for ind, item_data in batch_data.iterrows():\n img_path = os.path.join(self.img_dir, \"images\", \"rgb\", item_data[\"name\"])\n img = cv2.imread(img_path)\n try:\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n except Exception as error:\n print(img_path)\n print(error)\n not_valid_mask = self.read_masks_borders(item_data[\"name\"])\n img[not_valid_mask] = 0\n\n # getmasks\n targets = np.zeros((img.shape[0], img.shape[1], len(self.classes)))\n for i, c in enumerate(self.classes):\n mask_path = os.path.join(self.img_dir, \"labels\", c, item_data[\"name\"])\n mask = cv2.imread(\n mask_path.replace(\".jpg\", \".png\"), cv2.IMREAD_GRAYSCALE\n )\n mask[not_valid_mask[:, :, 0]] = 0\n mask = mask > 0\n targets[:, :, i] = mask\n\n res = self.reshape_func(image=img, mask=targets)\n img, targets = res['image'], res['mask']\n if self.do_aug:\n res = self.aug(image=img, mask=targets)\n img, targets = res['image'], res['mask']\n\n for i, c in enumerate(self.classes):\n batch_y[c].append(targets[:, :, i])\n\n batch_x.append(img)\n\n batch_x = np.array(batch_x, np.float32)\n batch_y = {k: np.array(v, np.float32) for k, v in batch_y.items()}\n batch_y = {k: np.expand_dims(v, axis=-1) for k, v in batch_y.items()}\n\n return (\n imagenet_utils.preprocess_input(batch_x, \"channels_last\", mode=\"tf\"),\n batch_y\n )", "def produce_init(filename):\n training_dataset = pd.read_csv(f'../Modified Data/{filename}')\n test_dataset = pd.read_csv(f'../Raw Data/test.csv')\n features = list(training_dataset.columns)\n features.remove('SalePrice')\n predict_feature = ['SalePrice']\n\n # Produce Test Data\n test_X = test_dataset.loc[:, features]\n ids_test = test_dataset.loc[:, 'Id']\n\n for column in features:\n if str(training_dataset.loc[:, column].dtype) == 'object':\n # Initialize encoder\n labelencoder = LabelEncoder()\n # Encode Train Data\n training_dataset.loc[:, column] = training_dataset.loc[:, column].fillna('Missing')\n training_dataset.loc[:, column] = pd.Series(labelencoder.fit_transform(training_dataset.loc[:, column]))\n # Encode Test Data\n test_X.loc[:, column] = test_X.loc[:, column].fillna('Missing')\n test_X.loc[:, column] = pd.Series(labelencoder.fit_transform(test_X.loc[:, column]))\n else:\n # Fix missing values for train data\n training_dataset.loc[:, column] = training_dataset.loc[:, column].fillna(int(training_dataset.loc[:, column].mean()))\n # Fix missing values for test data\n test_X.loc[:, column] = test_X.loc[:, column].fillna(int(test_X.loc[:, column].mean()))\n\n return training_dataset, test_X, ids_test", "def train_all_categories(dataset, hidden_size=18, kernel_size=7): \n def extract_amino_acid_alphabet(dataset):\n symbols = set()\n for sequence in dataset.select('seq'):\n letters = set(sequence)\n symbols = symbols | letters\n return sorted(list(symbols))\n\n train_sampler, val_sampler, _ = get_samplers(range(len(dataset)), \n .05, .25)\n categories = sorted(list(set(dataset.select('family'))))\n category_vocab = 
Vocab(categories)\n char_vocab = Vocab(extract_amino_acid_alphabet(dataset))\n CNN = cudaify(SimpleCNN(len(char_vocab.alphabet),\n hidden_size, \n kernel_size, \n len(category_vocab),\n char_vocab))\n train_loader = DataLoader(dataset, batch_size=32,\n sampler=train_sampler, num_workers=2)\n dev_loader = DataLoader(dataset, batch_size=5,\n sampler=val_sampler, num_workers=2)\n train_network(CNN, Tensorize(char_vocab, category_vocab, 1024), \n train_loader, dev_loader, n_epochs=12, learning_rate=0.001)\n print_patterns(CNN)\n return CNN", "def build_enru_custom_test(self):\n train_data_file = self.data_dir + '/' + enru_paracrawl\n eval_data_file = self.data_dir + '/' + enru_newscomm\n train_data = tf.data.experimental.CsvDataset(\n [train_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n field_delim='\\t',\n use_quote_delim=False)\n train_data = train_data.cache() # only read once\n eval_data = tf.data.experimental.CsvDataset(\n [eval_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n field_delim='\\t',\n use_quote_delim=False)\n\n eval_data = eval_data.skip(9000).take(10000)\n eval_data = eval_data.cache()\n def to_features_dict(eng, rus):\n return {'inputs': eng, 'targets': rus}\n\n train_data = train_data.map(to_features_dict)\n eval_data = eval_data.map(to_features_dict)\n\n self.default_builder_obj = None\n\n return train_data, eval_data", "def build_dataset(data, vocab_size=50000):\r\n\r\n # we will replace non-frequent tokens with the `unknown` token\r\n unk_token = '<UNK>'\r\n\r\n # calc frequencies of the tokens in our data\r\n tokens_counts = Counter(data)\r\n most_common_tokens = tokens_counts.most_common(vocab_size)\r\n\r\n # create a token => id mapping\r\n token2id = {unk_token: 0}\r\n for token, counts in most_common_tokens:\r\n token2id[token] = len(token2id)\r\n\r\n # create a reverse mapping from ids to tokens\r\n id2token = {i: t for t, i in token2id.items()}\r\n\r\n # convert data to tokens ids\r\n nb_unks = 0\r\n data_tokens_ids = []\r\n for token in data:\r\n if token in token2id:\r\n idx = token2id[token]\r\n else:\r\n idx = token2id[unk_token]\r\n nb_unks += 1\r\n\r\n data_tokens_ids.append(idx)\r\n\r\n print('Vocab size:', len(token2id))\r\n print('Unknown tokens:', nb_unks)\r\n\r\n return data_tokens_ids, token2id, id2token", "def build_data_set(list_of_neurons,trial_data):\n \n #creates a numpy array of all the data with requisite number of repeats and stims\n valid_movie_list = get_available_movies(list_of_neurons,trial_data)\n \n Num_neurons = len(list_of_neurons)\n \n neur_data_list = []\n \n for neuron in list_of_neurons:\n data_list = []\n current_neur_dict = trial_data[neuron]\n for movie in valid_movie_list:\n data_list.append(current_neur_dict[movie])\n if len(valid_movie_list) > 1:\n #stack the different movies by axis 1\n neur_data_list.append(np.concatenate(data_list,axis=1))\n else:\n neur_data_list.append(data_list[0])\n \n #Now we have a list of arrays, we shall stack them\n \n \n return np.stack(neur_data_list,axis=-1)", "def build_graph(self, char_embeddings):\n with vs.variable_scope(\"CharLevelCNN\"):\n batch_size = tf.shape(char_embeddings)[0]\n phrase_len = tf.shape(char_embeddings)[1]\n word_len = tf.shape(char_embeddings)[2]\n char_embedding_size = tf.shape(char_embeddings)[3]\n # b = tf.Variable(tf.constant(0.1, shape=[num_filters]), name=\"b\")\n\n # flatten\n # char_embeddings = tf.reshape(char_embeddings,[-1, word_len, char_embedding_size])\n char_embeddings = 
tf.reshape(char_embeddings, shape = [batch_size*phrase_len, word_len, self.char_embedding_size])\n\n conv = tf.layers.conv1d(inputs = char_embeddings, filters = self.filters, kernel_size = self.kernel_size, activation = tf.nn.relu, reuse = tf.AUTO_REUSE) # shape (batch_size, phrase_len, word_len, filters)\n\n # unflatten\n conv = tf.reshape(conv, [batch_size, phrase_len, -1, self.filters])\n \n # h = tf.nn.relu(tf.nn.bias_add(conv, b), name=\"relu\")\n # Max-pooling over the outputs\n # cnn_char_embeddings = tf.nn.max_pool(conv, ksize=[1, sequence_length - filter_size + 1, 1, 1], strides=[1, 1, 1, 1], padding='VALID')\n \n cnn_char_embeddings = tf.reduce_max(conv, axis = 2)\n\n # dropout\n cnn_char_embeddings = tf.nn.dropout(cnn_char_embeddings, self.keep_prob)\n return cnn_char_embeddings", "def model(data, ix_to_char, char_to_ix, n_a=50, iter_num=35000, dino_names=7, vocab_size=27):\n rnn = RNN(n_a=n_a, batch_size=1)\n # Retrieve n_x and n_y from vocab_size\n n_x, n_y = vocab_size, vocab_size\n\n # Initialize parameters\n parameters = rnn.initialize_parameters(n_a, n_x, n_y)\n\n # Initialize loss (this is required because we want to smooth our loss, don't worry about it)\n loss = get_initial_loss(vocab_size, dino_names)\n\n # Build list of all dinosaur names (training examples).\n with open(\"../dinos.txt\") as f:\n examples = f.readlines()\n examples = [x.lower().strip() for x in examples]\n\n # Shuffle list of all dinosaur names\n np.random.seed(0)\n np.random.shuffle(examples)\n\n # Initialize the hidden state of rnn\n a_prev = np.zeros((n_a, 1))\n\n # Optimization loop\n for j in range(iter_num):\n\n # Use the hint above to define one training example (X,Y) (≈ 2 lines)\n index = j % len(examples)\n x = [None] + [char_to_ix[ch] for ch in examples[index]] # 输入的名字example是名字list\n y = x[1:] + [char_to_ix[\"\\n\"]] # 对应的输出名字,x左移一位后补\\n\n X_batch = np.zeros((n_x, 1, len(x))) # x转为输入矩阵\n Y_batch = np.zeros((n_y, 1, len(x))) # y转为label\n # 字符对应位置补1\n for t in range(len(x)):\n if x[t] is not None:\n X_batch[x[t], 0, t] = 1\n Y_batch[y[t], 0, t] = 1\n\n # 每个序列输入初始化loss=0\n rnn.loss = 0\n # 送入rnn训练\n curr_loss, gradients, a_prev = rnn.optimize(X=X_batch, Y=Y_batch, a_prev=a_prev)\n\n # Use a latency trick to keep the loss smooth. 
It happens here to accelerate the training.\n loss = smooth(loss, curr_loss)\n\n # Every 2000 Iteration, generate \"n\" characters thanks to sample() to check if the model is learning properly\n if j % 2000 == 0:\n\n print('Iteration: %d, Loss: %f' % (j, loss) + '\\n')\n\n # The number of dinosaur names to print\n seed = 0\n for name in range(dino_names):\n\n # Sample indices and print them\n sampled_indices = sample(rnn.parameters, char_to_ix, seed)\n print_sample(sampled_indices, ix_to_char)\n\n seed += 1 # To get the same result for grading purposed, increment the seed by one.\n\n print('\\n')\n\n return parameters", "def prepare_data(self):\n try:\n self.train_dataset = self.datasets['train']\n self.val_dataset = self.datasets['val']\n try:\n self.test_dataset = self.datasets['test']\n except:\n pass\n except Exception as e:\n print('Data was not succesfully prepared:', e)", "def prepare_data(self, *args, **kwargs):\n # get paths to train and test splits\n _split_paths = [os.path.join(self.path_to_data, split)\n for split in os.listdir(self.path_to_data)]\n\n # for each split [train, test]\n for _path in _split_paths:\n _img_classes = os.listdir(_path) # get subfolders representing each class\n self.splits[os.path.basename(_path)] = []\n\n # get the images in pairs with its corresponding class\n for _class in _img_classes:\n _data = self.get_img_text_pair(os.path.join(_path, _class))\n\n if os.path.basename(_path) == 'train':\n self.weights[self.encode_label(_class)] = len(_data)\n self.splits[os.path.basename(_path)].extend(_data)", "def ptb_char_raw_data_cooijmans(data_path=None):\n data = np.load(data_path)\n unique_chars = data[\"vocab\"]\n word_to_id = {k: v for k, v in zip(unique_chars, range(len(unique_chars)))}\n id_2_word = {v: k for k, v in word_to_id.items()}\n train_data = data['train']\n valid_data = data['valid']\n test_data = data['test']\n return train_data, valid_data, test_data, word_to_id, id_2_word", "def _make_features(self):\n self.features = {}\n self.labels = {}\n for key in ['train', 'cv', 'test']:\n if self.radius is not None:\n feat, label = self._sliding_window(self.images[key], self.masks[key], window_radius=self.radius)\n self.features[key] = feat\n self.labels[key] = label\n else:\n self.features[key] = self.images[key].reshape(-1, 3)\n self.labels[key] = self.masks[key].ravel()", "def data_preprocessing():\n lineid_content = get_lineid_content()\n print('Read movie_lines.txt file complete...')\n convos = get_convos()\n print('Read movie_conversations.txt file complete...')\n print('Building dataset')\n get_data(lineid_content, convos)", "def generate_coco_dataset(args):\n\targs.data_root = Path(args.data_root)\n\targs.save_root = Path(args.save_root)\n\targs.save_root.mkdir()\n\n\tgenerate_coco_dataset_sub(args, 'train', 'A', args.cat1)\n\tgenerate_coco_dataset_sub(args, 'train', 'B', args.cat2)\n\tgenerate_coco_dataset_sub(args, 'val', 'A', args.cat1)\n\tgenerate_coco_dataset_sub(args, 'val', 'B', args.cat2)", "def create_data(data_size,heme, nucleotide, control, steroid,data_total,path_to_data):\n\n os.chdir(path_to_data)\n\n x_array = np.zeros(shape = (data_size,14,32,32,32))\n\n y_array = np.zeros(shape = data_size)\n\n print(\"data size = \", data_size)\n\n #training set :\n\n file_count = 0\n\n for file in data_total:\n\n y_array[file_count]= find_class(str(file), heme, nucleotide, control, steroid)\n\n x_array[file_count] = np.load(str(file+\".npy\"))\n\n file_count+=1\n\n\n return (x_array, y_array)", "def build_bert_input(data, data_path, 
tokenizer):\n\n cache_fp = f\"{data_path[:data_path.rfind('.')]}_{type(tokenizer).__name__}_{str(BERT_MAX_LEN)}_cache\"\n if os.path.isfile(cache_fp): \n logger.info(\"Loading tokenized data from cache...\")\n all_samples = torch.load(cache_fp)\n return all_samples\n\n bert_sequences = [] \n\n # modification for turn classification task \n if 'turn' in data_path:\n for instance in data:\n seq = \"[CLS] {} [SEP] {} [SEP]\".format(instance['p'], instance['r'])\n bert_sequences.append([instance['label'], seq])\n\n # regular yes-and classifier \n else: \n \n for k in data['non-yesands'].keys():\n for non_yesand in data['non-yesands'][k]: \n seq = \"[CLS] {} [SEP] {} [SEP]\".format(non_yesand['p'], non_yesand['r'])\n bert_sequences.append([0, seq])\n \n for k in data['yesands'].keys(): \n for yesand in data['yesands'][k]: \n seq = \"[CLS] {} [SEP] {} [SEP]\".format(yesand['p'], yesand['r'])\n bert_sequences.append([1, seq])\n\n sentences = [x[1] for x in bert_sequences]\n labels = [x[0] for x in bert_sequences]\n logger.info(\"Tokenizing loaded data...\")\n tokenized_texts = [tokenizer.encode(sentence) for sentence in sentences]\n\n\n # cache_fp = data_path[:data_path.rfind('.')] + \"_\" + type(tokenizer).__name__\n # if os.path.isfile(cache_fp): \n # logger.info(\"Loading tokenized data from cache...\")\n # tokenized_texts = torch.load(cache_fp)\n # else: \n # logger.info(\"Tokenizing loaded data...\")\n # # tokenize with BERT tokenizer \n # tokenized_texts = [tokenizer.encode(sentence) for sentence in sentences]\n # torch.save(tokenized_texts, cache_fp)\n\n\n\n # pad input to MAX_LEN\n input_ids = pad_sequences(tokenized_texts, maxlen=BERT_MAX_LEN, dtype=\"long\", truncating=\"post\", padding=\"post\")\n\n # get attention masks and segment ids \n attention_masks = build_attention_mask(input_ids)\n segment_ids = build_segment_ids(input_ids)\n\n all_samples = [{\"input_ids\": input_ids[i], \"token_type_ids\": segment_ids[i], \"attention_mask\": attention_masks[i], \"label\": labels[i]} for i in range(len(input_ids))]\n torch.save(all_samples, cache_fp)\n\n return all_samples", "def Train_data():\n print (\"loading train data ...\")\n time_start = time.time()\n data_root = '/media/keziwen/86AA9651AA963E1D'\n with h5py.File(join(data_root, './data/train_real2.h5')) as f:\n data_real = f['train_real'][:]\n num, nt, ny, nx = data_real.shape\n data_real = np.transpose(data_real, (0, 1, 3, 2))\n with h5py.File(join(data_root, './data/train_imag2.h5')) as f:\n data_imag = f['train_imag'][:]\n num, nt, ny, nx = data_imag.shape\n data_imag = np.transpose(data_imag, (0, 1, 3, 2))\n data = data_real+1j*data_imag\n num_train = 15000\n num_validate = 2000\n train_data = data[0:num_train]\n validate_data = data[num_train:num_train+num_validate]\n\n train_data = np.random.permutation(train_data)\n\n time_end = time.time()\n print ('dataset has been created using {}s'.format(time_end-time_start))\n return train_data, validate_data", "def _prepare_data(self):\n #TODO hardcoded values need to change\n print_info(\"Preprocessing the train data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"train\"),\n self.TRAIN_OUT_PATH)\n\n print_info(\"Preprocessing the test data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"test\"),\n self.TEST_OUT_PATH)\n\n print_info(\"Preprocessing the validation data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"val\"),\n self.VAL_OUT_PATH)", "def input_setup(config):\n print(config.is_train)\n # Load data 
path\n if config.is_train:\n data, label = prepare_data(dataset=\"Train/DoF_Images (2)\")\n else:\n data, label = prepare_data(dataset=\"Test\")\n\n sub_input_sequence = []\n sub_label_sequence = []\n padding = abs(config.image_size - config.label_size) / 2 # 6\n nx = ny = 0\n\n if config.is_train:\n for i in range(len(data)):\n input_, label_ = preprocess(data[i], label[i], config.scale)\n\n if len(input_.shape) == 3:\n h, w, c = input_.shape\n else:\n h, w = input_.shape\n\n for x in range(0, h-config.image_size+1, config.stride):\n if i == 0:\n nx += 1; ny = 0\n for y in range(0, w-config.image_size+1, config.stride):\n if i == 0:\n ny += 1\n\n sub_input = input_[x:x+config.image_size, y:y+config.image_size] # [33 x 33]\n sub_label = label_[x+int(padding):x+int(padding)+config.label_size\n , y+int(padding):y+int(padding)+config.label_size] # [21 x 21]\n # print(sub_input.shape)\n # print(sub_label.shape)\n # Make channel value\n sub_input = sub_input.reshape([config.image_size, config.image_size, 3])\n sub_label = sub_label.reshape([config.label_size, config.label_size, 3])\n\n sub_input_sequence.append(sub_input)\n sub_label_sequence.append(sub_label)\n\n else:\n nx_l = []\n ny_l = []\n for i in range(len(data)):\n input_, label_ = preprocess(data[i], label[i], config.scale)\n\n if len(input_.shape) == 3:\n h, w, c = input_.shape\n else:\n h, w = input_.shape\n\n if w >= 4000 or h > 4000:\n input_ = cv2.resize(input_, dsize=(int(w/2), int(h/2)), interpolation=cv2.INTER_AREA)\n label_ = cv2.resize(label_, dsize=(int(w/2), int(h/2)), interpolation=cv2.INTER_AREA)\n w = int(w/2)\n h = int(h/2)\n\n # Numbers of sub-images in height and width of image are needed to compute merge operation.\n\n nx = ny = 0\n for x in range(0, h-config.image_size+1, config.stride):\n nx += 1; ny = 0\n for y in range(0, w-config.image_size+1, config.stride):\n ny += 1\n sub_input = input_[x:x+config.image_size, y:y+config.image_size] # [33 x 33]\n sub_label = label_[x+int(padding):x+int(padding)+config.label_size, y+int(padding):y+int(padding)+config.label_size] # [21 x 21]\n\n sub_input = sub_input.reshape([config.image_size, config.image_size, 3])\n sub_label = sub_label.reshape([config.label_size, config.label_size, 3])\n\n sub_input_sequence.append(sub_input)\n sub_label_sequence.append(sub_label)\n #print(\"nx: %d ny: %d\" % (nx, ny))\n nx_l.append(nx)\n ny_l.append(ny)\n \"\"\"\n len(sub_input_sequence) : the number of sub_input (33 x 33 x ch) in one image\n (sub_input_sequence[0]).shape : (33, 33, 1)\n \"\"\"\n # Make list to numpy array. 
With this transform\n arrdata = np.asarray(sub_input_sequence) # [?, 33, 33, 3]\n arrlabel = np.asarray(sub_label_sequence) # [?, 21, 21, 3]\n\n make_data(config, arrdata, arrlabel)\n print(\"make_data success\")\n if config.is_train:\n return nx, ny\n else:\n return nx_l, ny_l, len(data)", "def load_data(self, debug=False):\n with open(self.config.vocab_file,'r') as f:\n self.vocab = pickle.load(f)\n self.char = Character()\n self.encoded_data = pickle.load(open(\"sp/encoded_test\", \"rb\"))\n self.data_chars = pickle.load(open(\"sp/test_chars\", \"rb\"))\n\n char_embedding = np.zeros([len(self.char), len(self.char)], dtype=np.int32)\n for i in range(len(self.char)):\n char_embedding[i][i] = 1\n self.char_embedding = tf.constant(char_embedding)", "def build(self):\n self.build_inputs()\n self.image_embeddings = self.build_image_embeddings(self.images)\n self.seq_embeddings = self.build_seq_embeddings(self.input_seqs)\n self.build_model()\n self.setup_inception_initializer()\n self.setup_global_step()", "def train():\n print(\"Building dynamic character-level ALLDATASET data...\", flush=True)\n dataset = ALLDATASET(\n train_input=FLAGS.train_input, train_output=FLAGS.train_output,\n dev_input=FLAGS.dev_input, dev_output=FLAGS.dev_output,\n predict_input_file=FLAGS.predict_input_file, \n parse_repeated=FLAGS.parse_repeated,\n shuffle=True, max_input_length=FLAGS.max_sentence_length,\n max_label_length=FLAGS.max_sentence_length)\n \n print(\"Building computational graph...\", flush=True)\n graph = tf.Graph()\n \n with graph.as_default():\n\n tf.set_random_seed(1)\n random.seed(1)\n np.random.seed(1)\n\n # During training we use beam width 1. There are lots of complications on\n # the implementation, e.g. only tiling during inference.\n m = Seq2Seq(\n num_types=dataset.num_types(),\n max_encoder_length=FLAGS.max_sentence_length,\n max_decoder_length=FLAGS.max_sentence_length,\n pad_id=dataset.type_to_ix['_PAD'],\n eos_id=dataset.type_to_ix['_EOS'],\n go_id=dataset.type_to_ix['_GO'],\n space_id=dataset.type_to_ix[(' ',)],\n ix_to_type=dataset.ix_to_type,\n batch_size=FLAGS.batch_size, embedding_size=FLAGS.embedding_size,\n hidden_size=FLAGS.hidden_size, rnn_layers=FLAGS.rnn_layers,\n bidirectional_encoder=FLAGS.bidirectional_encoder,\n bidirectional_mode=FLAGS.bidirectional_mode,\n use_lstm=FLAGS.use_lstm, attention=FLAGS.attention, \n dropout=FLAGS.dropout, max_grad_norm=FLAGS.max_grad_norm, beam_size=1,\n epsilon=FLAGS.epsilon, beta1=FLAGS.beta1, beta2=FLAGS.beta2,\n restore=FLAGS.restore, model_output_dir=FLAGS.model_output_dir)\n \n # Allow TensorFlow to resort back to CPU when we try to set an operation to\n # a GPU where there's only a CPU implementation, rather than crashing.\n sess_config = tf.ConfigProto(allow_soft_placement=True)\n \n with tf.Session(graph=graph, config=sess_config) as sess:\n print(\"Initializing or restoring model...\", flush=True)\n m.start()\n \n # If the model was not restored, initialize the variable hyperparameters.\n if sess.run(m.lr) == 0:\n sess.run(tf.assign(m.lr, FLAGS.lr))\n if sess.run(m.p_sample) == 0:\n sess.run(tf.assign(m.p_sample, FLAGS.initial_p_sample))\n \n # Get the number of epochs that have passed (easier by getting batches now)\n step = m.global_step.eval()\n batches = dataset.get_train_batches(m.batch_size)\n epoch = step // len(batches)\n \n # Scheduled sampling decay\n i = FLAGS.initial_p_sample\n f = FLAGS.final_p_sample\n # The stopping point is based on the max epochs\n total_train_steps = len(batches) * FLAGS.epochs_p_sample\n if i != 
f and not FLAGS.linear_p_sample:\n k = total_train_steps / (float(lambertw(total_train_steps / 2)) * 2)\n expk = float(exp(-total_train_steps / k))\n delta_f = (f - i) * (1 + k) * (1 + k * expk) / (k - k * expk) - f\n delta_i = (f + delta_f) / (1 + k)\n \n while not FLAGS.max_epochs or epoch <= FLAGS.max_epochs:\n print(\"=====EPOCH {}=====\".format(epoch), flush=True)\n while step < (epoch + 1) * len(batches):\n step = m.global_step.eval()\n \n # Scheduled sampling decay\n if i != f:\n # Linear decay\n if FLAGS.linear_p_sample:\n p = min(f, i + step * (f - i) / total_train_steps)\n # Inverse sigmoid decay\n else:\n expk = float(exp(-step / k))\n p = min(f, i - delta_i + (f + delta_f) / (1 + k * expk))\n \n sess.run(tf.assign(m.p_sample, p))\n \n # Gradient descent and backprop\n train_inputs, train_labels = zip(*batches[step % len(batches)])\n train_fd = {m.inputs: train_inputs, m.labels: train_labels}\n \n # Wrap into function to measure running time\n def train_step():\n sess.run(m.train_step, feed_dict=train_fd)\n \n print(\"Global step {0} ({1}s)\".format(\n step, timeit.timeit(train_step, number=1)), flush=True)\n \n if step % FLAGS.num_steps_per_eval == 0:\n valid_inputs, valid_labels = dataset.get_valid_batch(m.batch_size)\n valid_fd = {m.inputs: valid_inputs, m.labels: valid_labels}\n \n # Run training and validation perplexity and samples\n \n lr, train_ppx, train_output, p_sample, train_ppx_summ = sess.run([\n m.lr,\n m.perplexity,\n m.output,\n m.p_sample,\n m.perplexity_summary,\n ], feed_dict=train_fd)\n \n valid_ppx, valid_output, infer_output, valid_ppx_summ = sess.run([\n m.perplexity,\n m.output,\n m.generative_output,\n m.perplexity_summary,\n ], feed_dict=valid_fd)\n \n # Convert data to UTF-8 strings for evaluation and display\n valid_inputs = untokenize_batch(dataset, valid_inputs)\n valid_labels = untokenize_batch(dataset, valid_labels)\n valid_output = untokenize_batch(dataset, valid_output)\n infer_output = untokenize_batch(dataset, infer_output)\n \n # Run evaluation metrics\n lev = levenshtein(infer_output, valid_labels)\n lev_density = levenshtein(infer_output, valid_labels, normalize=True)\n \n lev_summ = sess.run(\n m.lev_summary, feed_dict={m.lev: lev})\n lev_density_summ = sess.run(\n m.lev_density_summary, feed_dict={m.lev_density: lev_density})\n \n # Write summaries to TensorBoard\n m.train_writer.add_summary(train_ppx_summ, global_step=step)\n m.valid_writer.add_summary(valid_ppx_summ, global_step=step)\n m.valid_writer.add_summary(lev_summ, global_step=step)\n m.valid_writer.add_summary(lev_density_summ, global_step=step)\n \n # Display results to stdout\n print(\" lr:\", lr)\n print(\" p_sample:\", p_sample)\n print(\" train_ppx:\", train_ppx)\n print(\" valid_ppx:\", valid_ppx)\n print(\" lev:\", lev)\n print(\" lev_density:\", lev_density)\n print(\"Input:\")\n print(valid_inputs[0])\n print(\"Target:\")\n print(valid_labels[0])\n print(\"Output with ground truth:\")\n print(valid_output[0])\n print(\"Greedily decoded output:\")\n print(infer_output[0], flush=True)\n \n # Epoch about to be done - save, reshuffle the data and get new batches\n print(\"Saving model...\")\n m.save()\n print(\"Model saved. 
Resuming training...\", flush=True)\n batches = dataset.get_train_batches(m.batch_size)\n epoch += 1", "def set_data():\r\n #if not os.path.exists(filepath):\r\n #download_data()\r\n metadata = read(filepath + flist[-1])\r\n ndata = metadata['num_cases_per_batch']\r\n ndim = metadata['num_vis']\r\n\r\n data, train, test = {}, {}, {}\r\n data['labels'] = metadata['label_names']\r\n data['ntraindata'] = metadata['num_cases_per_batch'] * (len(flist) - 2)\r\n data['ntestdata'] = metadata['num_cases_per_batch']\r\n data['ndim'] = metadata['num_vis']\r\n\r\n train['x'], train['y'] = convert_train(data['ntraindata'], data['ndim'])\r\n\r\n testdata = read(filepath + flist[-2])\r\n test['x'] = testdata['data']\r\n test['y'] = testdata['labels']\r\n\r\n data['train'], data['test'] = train, test\r\n save_pkl(data)", "def build_model_mobilenet(num_classes):", "def make_training_set(ind_list, training_data): \n \n exp = training_data[ind_list[0]] \n X_train = exp[0]\n u_train = exp[1] \n\n for i in ind_list[1:]: \n exp = training_data[i]\n X_train = np.append(X_train, exp[0], axis=0)\n u_train = np.append(u_train, exp[1], axis=0)\n\n return X_train, u_train", "def get_data():\n transform = Compose([paddle.vision.Resize(32),\n Normalize(mean=[127.5], std=[127.5], data_format='CHW'),\n paddle.vision.transforms.Transpose()])\n train_data = paddle.vision.datasets.Cifar10(mode='train', transform=transform)\n l = len(train_data)\n return paddle.io.random_split(train_data, [l // 2, l - l // 2])", "def prepare_data():\n user_name = os.environ.get('USER')\n traintest_corpus = ResumeCorpus('/Users/' + user_name + '/Documents/Data')\n random.shuffle(traintest_corpus.resumes)\n\n for resume in traintest_corpus.resumes:\n try:\n review_text = pre_processing(resume[0])\n review_text = \" \".join(review_text)\n data_dict['data'].append(review_text)\n data_dict['label'].append(resume[1])\n except:\n pass", "def generate_training_data(train_conll, feature_config=None):\n for s in read_sentences(train_conll):\n c = initialize_configuration(s)\n fvecs_and_labels = []\n while c.buffer:\n tr = oracle(c)\n\n fvecs_and_labels.append((extract_features(c, feature_config), tr.op+'_'+tr.l))\n\n if tr.op == 'sh':\n c = shift(c)\n elif tr.op == 'la':\n c = left_arc(c, tr.l)\n elif tr.op == 'ra':\n c = right_arc(c, tr.l)\n yield (s, fvecs_and_labels)", "def __data_generation(self, list_IDs_temp):\n\n # Initialization\n X = np.empty((self.batch_size, self.grid_dim, self.grid_dim, self.grid_dim, self.n_channels))\n y = np.empty(self.batch_size, dtype=int)\n\n # Generate data\n for i, ID in enumerate(list_IDs_temp):\n # Store sample\n X[i], y[i] = pro_lig_reader_sample(ID[0], ID[1], self.grid_size, self.n_channels, self.grid_resolution)\n return X, keras.utils.to_categorical(y, num_classes=self.n_classes)", "def build_model():", "def _prepare_data_for_training(self, training_data, intent_dict):\r\n\r\n X = np.stack([e.get(\"text_features\")\r\n for e in training_data.intent_examples])\r\n\r\n intents_for_X = np.array([intent_dict[e.get(\"intent\")]\r\n for e in training_data.intent_examples])\r\n\r\n Y = np.stack([self.encoded_all_intents[intent_idx]\r\n for intent_idx in intents_for_X])\r\n\r\n return X, Y, intents_for_X", "def prepare_data(src, dst):\n\n data_prefix = 'miniCelebA_'\n for split in ['train', 'val', 'test']:\n print('processing %s split' % split)\n if (not os.path.exists(os.path.join(dst, 'x_' + split + '.npy')) or not\n os.path.exists(os.path.join(dst, 'y_' + split + '.npy'))):\n labels = glob(os.path.join(src, split, 
'*'))\n no_sample = 0\n for lb in labels:\n no_sample += len(os.listdir(lb))\n\n x = np.zeros((no_sample, 224, 224, 3))\n y = np.zeros((no_sample, 20))\n count = 0\n for lb in labels:\n files = glob(os.path.join(lb, '*.png'))\n for f in files:\n print('processing file: %s, with label %s' % (f, lb.split('/')[-1]))\n y[count] = to_categorical(int(lb.split('/')[-1]), 20)\n img = misc.imresize(misc.imread(f), (224, 224), 'bicubic')\n if img.ndim == 2:\n img = np.expand_dims(img, -1)\n img = np.concatenate((img, img, img), axis=-1)\n x[count] = img\n\n count += 1\n\n assert count == no_sample, \"number of sample (%d) is different than number of read image (%d)\" % (\n no_sample, count)\n\n x = get_deep_feature(x)\n np.save(os.path.join(dst, data_prefix + 'x_' + split + '.npy'), x)\n np.save(os.path.join(dst, data_prefix + 'y_' + split + '.npy'), y)", "def prepare_data(dataset, train_ratio=0.8, input_dim=None, seed=10):\n # Retrieve main path of project\n dirname = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))\n\n # Download and store dataset at chosen location\n if dataset == 'Cora' or dataset == 'PubMed' or dataset == 'Citeseer':\n path = os.path.join(dirname, 'data')\n data = Planetoid(path, name=dataset, split='full')[0]\n data.name = dataset\n data.num_classes = (max(data.y)+1).item()\n # data.train_mask, data.val_mask, data.test_mask = split_function(data.y.numpy())\n # data = Planetoid(path, name=dataset, split='public', transform=T.NormalizeFeatures(), num_train_per_class=20, num_val=500, num_test=1000)\n\n elif dataset == 'Amazon':\n path = os.path.join(dirname, 'data', 'Amazon')\n data = Amazon(path, 'photo')[0]\n data.name = dataset\n data.num_classes = (max(data.y)+1).item()\n data.train_mask, data.val_mask, data.test_mask = split_function(\n data.y.numpy(), seed=seed)\n # Amazon: 4896 train, 1224 val, 1530 test\n \n elif dataset in ['syn1', 'syn2', 'syn4', 'syn5']: \n data = synthetic_data(\n dataset, dirname, train_ratio, input_dim)\n \n elif dataset == 'syn6':\n data = gc_data(dataset, dirname, train_ratio)\n\n elif dataset == 'Mutagenicity':\n data = gc_data(dataset, dirname, train_ratio)\n\n return data" ]
[ "0.68234545", "0.6755744", "0.6743118", "0.6716415", "0.67143404", "0.6676263", "0.66579074", "0.65451914", "0.6450987", "0.6438156", "0.639055", "0.6383469", "0.6370643", "0.63556325", "0.63493556", "0.63293546", "0.6261715", "0.62448275", "0.6217389", "0.621433", "0.6206522", "0.6190782", "0.61842823", "0.61541975", "0.6128401", "0.6104466", "0.61035377", "0.61024195", "0.6071104", "0.6058", "0.6031053", "0.60236293", "0.6011136", "0.59919685", "0.59793746", "0.5964562", "0.59616673", "0.5955641", "0.5943348", "0.5936046", "0.5919457", "0.5918209", "0.5917975", "0.59075993", "0.5902814", "0.58979475", "0.5894305", "0.5891649", "0.5872632", "0.58699006", "0.58683985", "0.5864352", "0.58576506", "0.58462596", "0.5842893", "0.58405936", "0.58346945", "0.5830587", "0.58283156", "0.5814874", "0.5812104", "0.5807948", "0.5804817", "0.5804167", "0.58010143", "0.5795935", "0.57761484", "0.57728875", "0.5771165", "0.57626295", "0.57609504", "0.57531834", "0.5745447", "0.5740702", "0.57390046", "0.5729959", "0.5721762", "0.57133037", "0.57061267", "0.57053995", "0.56989616", "0.5694844", "0.56882334", "0.5686455", "0.5686398", "0.56853586", "0.5676321", "0.5675719", "0.56740695", "0.5673851", "0.5671697", "0.5667304", "0.5663895", "0.566229", "0.566213", "0.5654294", "0.5651963", "0.56457597", "0.5643095", "0.56380206" ]
0.61662716
23
Set common fields in layer to addressing dictionary.
def set_address_values(layer):
    cursor = arcpy.SearchCursor(layer)
    for row in cursor:
        layer_fields = arcpy.ListFields(layer)
        for x in range(len(layer_fields)):
            layer_fields[x] = layer_fields[x].name
        for key in address_dict:
            if key in layer_fields and address_dict.get(key) is None:
                address_dict[key] = row.getValue(key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_attrs(self, **kwargs) -> None:\n self._obj.coords[GEO_MAP_COORD].attrs.update(**kwargs)", "def update_asop_dict(asop_dict,region,coords,color,all_settings):\n # Set unique color\n asop_dict['color'] = color\n\n # Apply any general user settings\n asop_dict['grid_desc'] = all_settings.get('grid','native')\n asop_dict['grid_type'] = all_settings.get('grid','native')\n asop_dict['region_name'] = region\n asop_dict['region_desc'] = region.replace('_',' ')\n asop_dict['region'] = coords\n\n # Edit dx for region\n mean_lat = np.mean(coords[0:2])\n asop_dict['dx'] = asop_dict['dx'] * np.cos(np.radians(mean_lat))\n all_settings.pop('infile','') # key not allowed\n for key in asop_dict:\n if key in all_settings:\n asop_dict[key] = all_settings[key]\n\n # Apply any specific file settings\n infile = os.path.basename(asop_dict['infile'])\n file_settings = settings.get(infile,{})\n file_settings.pop('infile','') # key not allowed\n file_settings.pop('region','')\n if file_settings:\n for key in file_settings:\n asop_dict[key] = file_settings[key]\n if 'legend_name' not in file_settings:\n asop_dict['legend_name'] = asop_dict['name'].replace('_',' ')\n\n print('---> Final data dictionary:')\n print(json.dumps(asop_dict, sort_keys=True, indent=2))\n\n return asop_dict", "def __setAttributes(self):\n values = {\"f\":\"json\"}\n layerInfo = self._getEsriRESTJSON(self.url,values)\n #Geometry Type\n geometryType = getGeometryType(layerInfo['geometryType'])\n self.geometryType = geometryType\n #Name\n name=arcpy.ValidateTableName(layerInfo['name'])\n self.name=name\n #Spatial Reference - both the wkid and the arcpy SpatialReference object\n #in case it's in a wkt\n try:\n wkid = layerInfo['extent']['spatialReference']['wkid']\n except:\n wkid = 4326\n sr = arcpy.SpatialReference()\n sr.factoryCode = int(wkid)\n sr.create()\n self.sr = sr\n self.wkid = wkid\n #field used to update the feature class are a subset of all the fields in a feature class\n fields = layerInfo['fields']\n updateFields = []\n for field in fields:\n if (field['type'] in ['esriFieldTypeOID','esriFieldTypeGeometry','esriFieldTypeGUID'] or 'shape' in field['name'].lower() or field['name'] in self.userFields):\n pass\n else:\n updateFields.append(field)\n updateFields.insert(0, {\"name\":'Shape@', \"type\":\"esriFieldTypeGeometry\"})\n self.updateFields = updateFields\n #Max values\n if layerInfo.has_key('maxRecordCount'):\n self.maxRecordCount = int(layerInfo['maxRecordCount'])\n else:\n self.maxRecordCount = 1000", "def set_specific_fields(self):\n raise NotImplementedError(\"Must be defined by subclass!\")", "def overwrite_field(self,cells=None,edges=None,source='depth_max',target='depth_mean'):\n if cells is not None:\n self.cells[target][cells]=self.cells[source][cells]\n if edges is not None:\n self.edges[target][edges]=self.edges[source][edges]", "def __setattr__(self, k, v):\n if k[:1] != '_' and \\\n not k in ('dimensions', 'typecode'):\n if k not in self._ncattrs:\n self._ncattrs += (k, )\n object.__setattr__(self, k, v)", "def __init__(self):\n\n for layer in self._layer_class_map:\n setattr(self, layer, self._layer_class_map[layer]())", "def _configure(self):\n from .topology import FieldBase\n\n Component._configure(self)\n\n mapBasis = {\n \"simplex\": FieldBase.SIMPLEX_BASIS,\n \"tensor\": FieldBase.TENSOR_BASIS,\n \"default\": FieldBase.DEFAULT_BASIS,\n }\n self.cellBasis = mapBasis[self.inventory.cellBasisStr]\n\n mapSpace = {\n \"polynomial\": FieldBase.POLYNOMIAL_SPACE,\n \"point\": FieldBase.POINT_SPACE,\n }\n 
self.feSpace = mapSpace[self.inventory.feSpaceStr]\n return", "def new_data(first: dict, second: dict, changeables: tuple):\n for name, field in first.items():\n if name not in changeables:\n second[name] = field", "def set_org_and_space_dicts(self, org_dict, space_dict):\n self._space = space_dict\n self._org = org_dict\n return self", "def __init__ (self, d):\n try:\n self.__dict__.update (d.__dict__)\n except:\n self.__dict__.update (d)", "def prepareMapping(self, layer, scheme):\n mapping = {}\n mapping['geometry'] = layer.geom_type.name\n for field_name, layer_field in mappingScheme.items():\n field = self.getModel(layer)._meta.get_field(field_name)\n if isinstance(layer_field, dict):\n subMapping = {}\n layer_fields = layer_field\n for rel_field_name, layer_field in layer_fields.items():\n if layer_field in layer.fields:\n subMapping[rel_field_name] = layer_field\n if subMapping:\n mapping[field_name] = subMapping\n elif layer_field in layer.fields:\n mapping[field_name] = layer_field\n if not field.null and field_name not in mapping:\n raise ValueError('%s does not exist on layer' % layer_field)\n return mapping", "def _update(self, other):\n # NOTE: detail map properties should NEVER be overridden. NEVER. EVER. kthx.\n if other.use_alpha:\n self.use_alpha = True\n if other.mipmap:\n self.mipmap = True", "def _update_all_fields(self, name, value):\n for field in self._field_map.values():\n setattr(field, name, value)", "def _set_attributes(self):", "def __extract_common_attrs(self, raw_data: Dict) -> None:\n for attr in self.COMMON_ATTRS:\n if attr not in self.ATTRS and attr in raw_data:\n setattr(self, attr, raw_data[attr])", "def _update_loose (self, dict):\n self.__dict__.update(dict)", "def set_properties(struct):", "def _set_default_init_field_attributes(self, n_dims=None):\n\n # we use the module defaults for the datasets to initialize them\n field_feature_shapes = dict(FIELD_FEATURE_SHAPES)\n field_feature_dtypes = dict(FIELD_FEATURE_DTYPES)\n\n\n # get the number of coordinates of positions. If there is a\n # main_reps then we have to set the number of atoms to that,\n # if not we count the number of atoms in the topology\n if self._main_rep_idxs is None:\n self._n_coords = json_top_atom_count(self.topology)\n self._main_rep_idxs = list(range(self._n_coords))\n else:\n self._n_coords = len(self._main_rep_idxs)\n\n # get the number of dimensions as a default\n if n_dims is None:\n self._n_dims = N_DIMS\n\n # feature shapes for positions and positions-like fields are\n # not known at the module level due to different number of\n # coordinates (number of atoms) and number of dimensions\n # (default 3 spatial). 
We set them now that we know this\n # information.\n # add the postitions shape\n field_feature_shapes[POSITIONS] = (self._n_coords, self._n_dims)\n # add the positions-like field shapes (velocities and forces) as the same\n for poslike_field in POSITIONS_LIKE_FIELDS:\n field_feature_shapes[poslike_field] = (self._n_coords, self._n_dims)\n\n # set the attributes\n self._field_feature_shapes = field_feature_shapes\n self._field_feature_dtypes = field_feature_dtypes", "def __init__(self, dict1):\n self.__dict__.update(dict1)", "def __setattr__(self, key, value):\n if isinstance(value, DotDict) and key != '_parent':\n value.__dict__['_parent'] = weakref.proxy(self)\n super(DotDictWithAcquisition, self).__setattr__(key, value)", "def update(self, other: Mapping[str, Any]) -> None:\n self._config.update(self._flatten_dict(other))", "def _set_attrs(ds, **attrs_map):\n for key in attrs_map:\n val = attrs_map[key] # Use Python 2/3 agnostic style\n ds.attrs[key] = val", "def update(self, other=[], **kwargs):\n if ismapping(other):\n other = other.items()\n\n for key, value in other:\n self[key] = value\n\n for key, value in kwargs.items():\n self[key] = value", "def join_data(self, base_data, join_data, base_field, join_fields):\n for data in base_data:\n extra = join_data[data[base_field]]\n for field in join_fields:\n data[field] = extra[field]\n \n return base_data", "def _extend_network_dict_provider(self, context, network, bindings=None):\n if 'id' not in network:\n return\n if not bindings:\n bindings = nsx_db.get_network_bindings(context.session,\n network['id'])\n\n # With NSX plugin, \"normal\" overlay networks will have no binding\n if bindings:\n # Network came in through provider networks API\n network[pnet.NETWORK_TYPE] = bindings[0].binding_type\n network[pnet.PHYSICAL_NETWORK] = bindings[0].phy_uuid\n network[pnet.SEGMENTATION_ID] = bindings[0].vlan_id", "def set_params(self, dic):\n if dic is not None:\n for key, val in zip(dic.keys(), dic.values()):\n if key in self.__dict__.keys():\n self.__dict__[key] = val\n\n if 'scale_params' in self.__dict__.keys():\n self.scale_params.set_params(dic)\n if 'atmospheric_params' in self.__dict__.keys():\n if self.atmospheric_params is not None:\n self.atmospheric_params.set_params(dic)\n\n if 'atemperature_params' in self.__dict__.keys():\n if self.atemperature_params is not None:\n self.atemperature_params.set_params(dic)\n\n if 'oceanic_params' in self.__dict__.keys():\n if self.oceanic_params is not None:\n self.oceanic_params.set_params(dic)\n\n if 'ground_params' in self.__dict__.keys():\n if self.ground_params is not None:\n self.ground_params.set_params(dic)\n\n if 'otemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n self.gotemperature_params.set_params(dic)\n\n if 'gtemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n self.gotemperature_params.set_params(dic)", "def set_standard_attrs(da):\n da.coords[\"lat\"].attrs = OrderedDict(\n [\n (\"standard_name\", \"latitude\"),\n (\"units\", \"degrees_north\"),\n (\"axis\", \"Y\"),\n (\"long_name\", \"latitude\"),\n (\"out_name\", \"lat\"),\n (\"stored_direction\", \"increasing\"),\n (\"type\", \"double\"),\n (\"valid_max\", \"90.0\"),\n (\"valid_min\", \"-90.0\"),\n ]\n )\n da.coords[\"lon\"].attrs = OrderedDict(\n [\n (\"standard_name\", \"longitude\"),\n (\"units\", \"degrees_east\"),\n (\"axis\", \"X\"),\n (\"long_name\", \"longitude\"),\n (\"out_name\", \"lon\"),\n (\"stored_direction\", \"increasing\"),\n 
(\"type\", \"double\"),\n (\"valid_max\", \"180.0\"),\n (\"valid_min\", \"-180.0\"),\n ]\n )\n da.coords[\"depth_coord\"].attrs = OrderedDict(\n [\n (\"standard_name\", \"depth\"),\n (\"units\", \"m\"),\n (\"axis\", \"Z\"),\n (\"long_name\", \"ocean depth coordinate\"),\n (\"out_name\", \"lev\"),\n (\"positive\", \"down\"),\n (\"stored_direction\", \"increasing\"),\n (\"valid_max\", \"12000.0\"),\n (\"valid_min\", \"0.0\"),\n ]\n )\n da.coords[\"time\"].attrs = OrderedDict(\n [\n (\"standard_name\", \"time\"),\n (\"axis\", \"T\"),\n (\"long_name\", \"time\"),\n (\"out_name\", \"time\"),\n (\"stored_direction\", \"increasing\"),\n ]\n )\n da.coords[\"time\"].encoding[\"units\"] = \"days since '1900-01-01'\"\n\n return da", "def update_general(info, key, val):\n\n info[\"model_params\"][key] = val", "def __config_attributes(self):\n self.__name = self.__data[self.__code][\"airportName\"]\n self.__country = Country(name=self.__data[self.__code][\"countryName\"],\n code=self.__data[self.__code][\"countryCode\"])\n try:\n self.__city = self.__data[self.__code][\"city\"]\n except Exception:\n self.__city = ''", "def visit_record(self, syrecord):\n for other_key, other_value in syrecord.items():\n try:\n getattr(self.current, other_key).update(other_value)\n except KeyError:\n setattr(self.current, other_key, other_value)", "def populate(self, **kw):\n for name, field in self:\n if name in kw:\n field.__set__(self, kw[name])", "def part(self, **kwargs):\n for key, value in kwargs.items():\n setattr(self, key, value)", "def __setstate__(self, statedict):\n for k, v in list(statedict.items()):\n setattr(self, k, v)", "def __setstate__(self, statedict):\n for k, v in list(statedict.items()):\n setattr(self, k, v)", "def __init__(self, mapping: Mapping[str, Any]) -> None:\n self.__dict__.update(mapping)", "def __init__( self, **kwargs ):\n self.__dict__.update( kwargs )", "def update(self, *args, **kwargs):\n super(ReadOnlyDict, self).update(*args, **kwargs) # pragma: no cover", "def _setup_global_base(self):\n self._setup_facet_orientations()\n\n self._init_econn()\n\n n_dof = 0\n all_dofs = {}\n remaps = {}\n for ig, ap in self.aps.iteritems():\n ii = self.region.get_cells(ig)\n nd = nm.prod(ap.econn.shape)\n\n group = self.domain.groups[ig]\n remaps[ig] = prepare_remap(ii, group.shape.n_el)\n\n aux = nm.arange(n_dof, n_dof + nd, dtype=nm.int32)\n aux.shape = ap.econn.shape\n\n ap.econn[:] = aux\n all_dofs[ig] = aux\n\n n_dof += nd\n\n self.n_nod = n_dof\n\n self.n_bubble_dof = n_dof\n self.bubble_dofs = all_dofs\n self.bubble_remaps = remaps\n\n self.n_vertex_dof = self.n_edge_dof = self.n_face_dof = 0\n\n self._setup_esurface()", "def add_attributes(self, pore_dict, throat_dict):\n\n self.add_node_attributes(self.graph, pore_dict)\n self.add_edge_attributes(self.graph, throat_dict)\n\n self.compute_geometry()", "def gather_params(self):\n for layer in self.layers:\n for name, value in layer.params.iteritems():\n self.params[name] = value", "def set_configs(self, key_to_value, all_layers):\n\n # First check that there are no keys that don't correspond to any config\n # parameter of this layer, and if so, raise an exception with an\n # informative message saying what configs are allowed.\n for key, value in key_to_value.items():\n if key != 'name':\n if key not in self.config:\n configs = ' '.join([('{0}->\"{1}\"'.format(x, y) if isinstance(y, str)\n else '{0}->{1}'.format(x, y))\n for x, y in self.config.items()])\n raise RuntimeError(\"Configuration value {0}={1} was not \"\n \"expected in layer 
of type {2}; allowed \"\n \"configs with their defaults: {3}\"\n \"\" .format(key, value, self.layer_type, configs))\n\n for key, value in key_to_value.items():\n if key != 'name':\n assert key in self.config # we checked above.\n self.config[key] = xutils.convert_value_to_type(key,\n type(self.config[key]),\n value)\n self.descriptors = dict()\n self.descriptor_dims = dict()\n # Parse Descriptors and get their dims and their 'final' string form.\n # in self.descriptors[key]\n for key in self.get_input_descriptor_names():\n if key not in self.config:\n raise RuntimeError(\"{0}: object of type {1} needs to override\"\n \" get_input_descriptor_names().\"\n \"\".format(sys.argv[0], str(type(self))))\n\n descriptor_string = self.config[key] # input string.\n assert isinstance(descriptor_string, str)\n desc = self.convert_to_descriptor(descriptor_string, all_layers)\n desc_dim = self.get_dim_for_descriptor(desc, all_layers)\n desc_norm_str = desc.str()\n\n # desc_output_str contains the \"final\" component names, those that\n # appear in the actual config file (i.e. not names like\n # 'layer.auxiliary_output'); that's how it differs from desc_norm_str.\n # Note: it's possible that the two strings might be the same in\n # many, even most, cases-- it depends whether\n # output_name(self, auxiliary_output)\n # returns self.get_name() + '.' + auxiliary_output\n # when auxiliary_output is not None.\n # That's up to the designer of the layer type.\n desc_output_str = self.get_string_for_descriptor(desc, all_layers)\n self.descriptors[key] = {'string': desc,\n 'normalized-string': desc_norm_str,\n 'final-string': desc_output_str,\n 'dim': desc_dim}\n\n # the following helps to check the code by parsing it again.\n desc2 = self.convert_to_descriptor(desc_norm_str, all_layers)\n desc_norm_str2 = desc2.str()\n # if the following ever fails we'll have to do some debugging.\n if desc_norm_str != desc_norm_str2:\n raise RuntimeError(\"Likely code error: '{0}' != '{1}'\"\n \"\".format(desc_norm_str, desc_norm_str2))", "def setup(self): \n self.suburbs_dict = dict()\n self.raw_proIds_dict = dict()\n self.propertyIds_dict = dict()\n self.valuations = dict()", "def update(self, d):\n for k in d:\n self[k] = d[k]", "def _general_set_neighs(self, key):\n if type(key) == list:\n self._set_neighs_general_list(key)\n elif type(key) == np.ndarray:\n self._set_neighs_general_array(key)\n elif type(key) in inttypes:\n self._set_neighs_number(key)\n else:\n# print key\n raise TypeError(\"Incorrect neighs input in pst.Neighs_Info\")", "def update(self, other={}, **kwargs):\n joined = dict(other, **kwargs)\n\n # Update with the new set of k:v pairs,\n # but delete existing keys which are being assigned the value of None\n for k, v in joined.items():\n if v is None:\n if k in self:\n del self[k]\n else:\n self[k] = v", "def set_additional_fields(cls, model, data):\n for k, v in data.items():\n if not hasattr(model, k):\n setattr(model, k, v)", "def add_fields(self, fields):\n for label, data in fields.items():\n self[label] = data", "def __update_request(self, request_dict, namespace, apikey):\n request_dict['namespace'] = namespace if namespace else self.namespace\n request_dict['apikey'] = apikey if apikey else self.apikey", "def _deep_update_config(config, updates):\n for key, value in updates.iteritems():\n if isinstance(value, collections.Mapping):\n config[key] = DexNet._deep_update_config(config.get(key, {}), value)\n else:\n config[key] = value\n return config", "def set_params(self, state_dicts):\n raise 
NotImplementedError", "def makeMapping(globalMap):\n \n from memops.xml.Implementation import bool2str, str2bool\n\n # Set up top level dictionaries\n loadMaps = globalMap.get('loadMaps')\n mapsByGuid = globalMap.get('mapsByGuid')\n\n abstractTypes = globalMap.get('ACCO').get('abstractTypes')\n exolinks = globalMap.get('ACCO').get('exolinks')\n\n # Class AccessControlStore\n currentMap = {}\n abstractTypes['AccessControlStore'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-09-04-17:18:10_00001'] = currentMap\n loadMaps['ACCO.AccessControlStore'] = currentMap\n currentMap['tag'] = 'ACCO.AccessControlStore'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-04-17:18:10_00001'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'accessControlStores'\n currentMap['isTop'] = True\n currentMap['objkey'] = 'name'\n currentMap['class'] = memops.api.AccessControl.AccessControlStore\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute AccessControlStore.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute AccessControlStore.createdBy\n contentMap['createdBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00002__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute AccessControlStore.guid\n contentMap['guid'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:26_00002')\n\n # Attribute AccessControlStore.isModifiable\n contentMap['isModifiable'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-17-14:16:26_00010__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute AccessControlStore.lastUnlockedBy\n contentMap['lastUnlockedBy'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:00:59_00003__www.ccpn.ac.uk_Fogh_2007-10-03-14:53:27_00001__www.ccpn.ac.uk_Fogh_2006-09-14-16:28:57_00002')\n\n # Attribute AccessControlStore.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-09-04-17:21:38_00006'] = currentMap\n loadMaps['ACCO.AccessControlStore.name'] = currentMap\n currentMap['tag'] = 'ACCO.AccessControlStore.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-04-17:21:38_00006'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Role AccessControlStore.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role AccessControlStore.accessObjects\n currentMap = {}\n contentMap['accessObjects'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00013'] = currentMap\n loadMaps['ACCO.AccessControlStore.accessObjects'] = currentMap\n currentMap['tag'] = 'ACCO.AccessControlStore.accessObjects'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00013'\n currentMap['name'] = 'accessObjects'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ACCO').get('abstractTypes')\n\n # Role AccessControlStore.userGroups\n currentMap = {}\n contentMap['userGroups'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-09-04-17:21:38_00003'] = currentMap\n loadMaps['ACCO.AccessControlStore.userGroups'] = currentMap\n 
currentMap['tag'] = 'ACCO.AccessControlStore.userGroups'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-04-17:21:38_00003'\n currentMap['name'] = 'userGroups'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ACCO').get('abstractTypes')\n\n # Role AccessControlStore.users\n currentMap = {}\n contentMap['users'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-09-04-17:21:38_00001'] = currentMap\n loadMaps['ACCO.AccessControlStore.users'] = currentMap\n currentMap['tag'] = 'ACCO.AccessControlStore.users'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-04-17:21:38_00001'\n currentMap['name'] = 'users'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['implSkip'] = True\n currentMap['content'] = globalMap.get('ACCO').get('abstractTypes')\n # End of AccessControlStore\n\n currentMap = abstractTypes.get('AccessControlStore')\n aList = ['createdBy', 'guid', 'isModifiable', 'lastUnlockedBy']\n currentMap['headerAttrs'] = aList\n aList = ['name']\n currentMap['simpleAttrs'] = aList\n aList = ['users', 'userGroups', 'accessObjects', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['accessObjects', 'userGroups', 'users']\n currentMap['children'] = aList\n\n # Class AccessObject\n currentMap = {}\n abstractTypes['AccessObject'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00014'] = currentMap\n loadMaps['ACCO.AccessObject'] = currentMap\n currentMap['tag'] = 'ACCO.AccessObject'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00014'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'accessObjects'\n currentMap['objkey'] = 'name'\n currentMap['class'] = memops.api.AccessControl.AccessObject\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute AccessObject.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute AccessObject.description\n currentMap = {}\n contentMap['description'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:27_00005'] = currentMap\n loadMaps['ACCO.AccessObject.description'] = currentMap\n currentMap['tag'] = 'ACCO.AccessObject.description'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:27_00005'\n currentMap['name'] = 'description'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['proc'] = 'direct'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00035')\n\n # Attribute AccessObject.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:27_00004'] = currentMap\n loadMaps['ACCO.AccessObject.name'] = currentMap\n currentMap['tag'] = 'ACCO.AccessObject.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:27_00004'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Role AccessObject.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role AccessObject.permissions\n currentMap = {}\n contentMap['permissions'] = currentMap\n 
mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:27_00001'] = currentMap\n loadMaps['ACCO.AccessObject.permissions'] = currentMap\n currentMap['tag'] = 'ACCO.AccessObject.permissions'\n currentMap['type'] = 'child'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:27_00001'\n currentMap['name'] = 'permissions'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['content'] = globalMap.get('ACCO').get('abstractTypes')\n # End of AccessObject\n\n currentMap = abstractTypes.get('AccessObject')\n aList = ['description', 'name']\n currentMap['simpleAttrs'] = aList\n aList = ['permissions', 'access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n aList = ['permissions']\n currentMap['children'] = aList\n\n # Class Permission\n currentMap = {}\n abstractTypes['Permission'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00018'] = currentMap\n loadMaps['ACCO.Permission'] = currentMap\n currentMap['tag'] = 'ACCO.Permission'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00018'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'permissions'\n currentMap['class'] = memops.api.AccessControl.Permission\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute Permission.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute Permission.opType\n currentMap = {}\n contentMap['opType'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00021'] = currentMap\n loadMaps['ACCO.Permission.opType'] = currentMap\n currentMap['tag'] = 'ACCO.Permission.opType'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00021'\n currentMap['name'] = 'opType'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 'any'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute Permission.permission\n currentMap = {}\n contentMap['permission'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00023'] = currentMap\n loadMaps['ACCO.Permission.permission'] = currentMap\n currentMap['tag'] = 'ACCO.Permission.permission'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00023'\n currentMap['name'] = 'permission'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = True\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute Permission.permissionClass\n currentMap = {}\n contentMap['permissionClass'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00020'] = currentMap\n loadMaps['ACCO.Permission.permissionClass'] = currentMap\n currentMap['tag'] = 'ACCO.Permission.permissionClass'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00020'\n currentMap['name'] = 'permissionClass'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 'any'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Attribute Permission.roleName\n currentMap = {}\n contentMap['roleName'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00022'] = currentMap\n loadMaps['ACCO.Permission.roleName'] = currentMap\n currentMap['tag'] = 'ACCO.Permission.roleName'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 
'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00022'\n currentMap['name'] = 'roleName'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = 'any'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037')\n\n # Role Permission.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role Permission.userGroup\n currentMap = {}\n contentMap['userGroup'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00016'] = currentMap\n loadMaps['ACCO.Permission.userGroup'] = currentMap\n currentMap['tag'] = 'ACCO.Permission.userGroup'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00016'\n currentMap['name'] = 'userGroup'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['copyOverride'] = True\n # End of Permission\n\n currentMap = abstractTypes.get('Permission')\n aList = ['opType', 'permission', 'permissionClass', 'roleName']\n currentMap['headerAttrs'] = aList\n aList = ['userGroup']\n currentMap['optLinks'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Class User\n currentMap = {}\n abstractTypes['User'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00017'] = currentMap\n loadMaps['ACCO.User'] = currentMap\n currentMap['tag'] = 'ACCO.User'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00017'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'users'\n currentMap['objkey'] = 'name'\n currentMap['class'] = memops.api.AccessControl.User\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute User.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute User.isSuperuser\n currentMap = {}\n contentMap['isSuperuser'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2010-05-06-13:30:17_00060'] = currentMap\n loadMaps['ACCO.User.isSuperuser'] = currentMap\n currentMap['tag'] = 'ACCO.User.isSuperuser'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2010-05-06-13:30:17_00060'\n currentMap['name'] = 'isSuperuser'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['default'] = False\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00028')\n\n # Attribute User.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00019'] = currentMap\n loadMaps['ACCO.User.name'] = currentMap\n currentMap['tag'] = 'ACCO.User.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00019'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Attribute User.passwordHashed\n currentMap = {}\n contentMap['passwordHashed'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2009-08-19-17:31:11_00005'] = currentMap\n loadMaps['ACCO.User.passwordHashed'] = currentMap\n currentMap['tag'] = 'ACCO.User.passwordHashed'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2009-08-19-17:31:11_00005'\n currentMap['name'] = 'passwordHashed'\n currentMap['hicard'] = 1\n currentMap['locard'] = 0\n currentMap['eType'] = 'cplx'\n currentMap['proc'] = 'direct'\n currentMap['data'] = 
mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00035')\n\n # Role User.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role User.ledGroups\n currentMap = {}\n contentMap['ledGroups'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00014'] = currentMap\n loadMaps['ACCO.User.ledGroups'] = currentMap\n currentMap['tag'] = 'ACCO.User.ledGroups'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00014'\n currentMap['name'] = 'ledGroups'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = True\n\n # Role User.userGroups\n currentMap = {}\n contentMap['userGroups'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00012'] = currentMap\n loadMaps['ACCO.User.userGroups'] = currentMap\n currentMap['tag'] = 'ACCO.User.userGroups'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00012'\n currentMap['name'] = 'userGroups'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = True\n # End of User\n\n currentMap = abstractTypes.get('User')\n aList = ['isSuperuser']\n currentMap['headerAttrs'] = aList\n aList = ['name', 'passwordHashed', 'ledGroups', 'userGroups']\n currentMap['simpleAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Class UserGroup\n currentMap = {}\n abstractTypes['UserGroup'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00016'] = currentMap\n loadMaps['ACCO.UserGroup'] = currentMap\n currentMap['tag'] = 'ACCO.UserGroup'\n currentMap['type'] = 'class'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00016'\n currentMap['eType'] = 'cplx'\n currentMap['fromParent'] = 'userGroups'\n currentMap['objkey'] = 'name'\n currentMap['class'] = memops.api.AccessControl.UserGroup\n contentMap = {}\n currentMap['content'] = contentMap\n\n # Attribute UserGroup.applicationData\n contentMap['applicationData'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-09-14-18:48:27_00007')\n\n # Attribute UserGroup.name\n currentMap = {}\n contentMap['name'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00018'] = currentMap\n loadMaps['ACCO.UserGroup.name'] = currentMap\n currentMap['tag'] = 'ACCO.UserGroup.name'\n currentMap['type'] = 'attr'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00018'\n currentMap['name'] = 'name'\n currentMap['hicard'] = 1\n currentMap['locard'] = 1\n currentMap['eType'] = 'cplx'\n currentMap['data'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033')\n\n # Role UserGroup.access\n contentMap['access'] = mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-12-31-09:03:01_00014')\n\n # Role UserGroup.leaders\n currentMap = {}\n contentMap['leaders'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00015'] = currentMap\n loadMaps['ACCO.UserGroup.leaders'] = currentMap\n currentMap['tag'] = 'ACCO.UserGroup.leaders'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00015'\n currentMap['name'] = 'leaders'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = True\n\n # Role UserGroup.members\n currentMap = {}\n contentMap['members'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00013'] = currentMap\n loadMaps['ACCO.UserGroup.members'] = currentMap\n currentMap['tag'] = 'ACCO.UserGroup.members'\n currentMap['type'] 
= 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00013'\n currentMap['name'] = 'members'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = False\n\n # Role UserGroup.permissions\n currentMap = {}\n contentMap['permissions'] = currentMap\n mapsByGuid['www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00017'] = currentMap\n loadMaps['ACCO.UserGroup.permissions'] = currentMap\n currentMap['tag'] = 'ACCO.UserGroup.permissions'\n currentMap['type'] = 'link'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-17-14:16:23_00017'\n currentMap['name'] = 'permissions'\n currentMap['hicard'] = -1\n currentMap['locard'] = 0\n currentMap['copyOverride'] = False\n # End of UserGroup\n\n currentMap = abstractTypes.get('UserGroup')\n aList = ['name', 'leaders', 'members', 'permissions']\n currentMap['simpleAttrs'] = aList\n aList = ['access', 'applicationData']\n currentMap['cplxAttrs'] = aList\n\n # Out-of-package link to AccessControlStore\n currentMap = {}\n exolinks['AccessControlStore'] = currentMap\n loadMaps['ACCO.exo-AccessControlStore'] = currentMap\n currentMap['tag'] = 'ACCO.exo-AccessControlStore'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-09-04-17:18:10_00001'\n currentMap['name'] = 'AccessControlStore'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = memops.api.AccessControl.AccessControlStore\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n\n # Out-of-package link to AccessObject\n currentMap = {}\n exolinks['AccessObject'] = currentMap\n loadMaps['ACCO.exo-AccessObject'] = currentMap\n currentMap['tag'] = 'ACCO.exo-AccessObject'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00014'\n currentMap['name'] = 'AccessObject'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = memops.api.AccessControl.AccessObject\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033'))\n\n # Out-of-package link to Permission\n currentMap = {}\n exolinks['Permission'] = currentMap\n loadMaps['ACCO.exo-Permission'] = currentMap\n currentMap['tag'] = 'ACCO.exo-Permission'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00018'\n currentMap['name'] = 'Permission'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = memops.api.AccessControl.Permission\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033'))\n aList.append(globalMap.get('ACCO').get('exolinks'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00037'))\n\n # Out-of-package link to User\n currentMap = {}\n exolinks['User'] = currentMap\n loadMaps['ACCO.exo-User'] = currentMap\n currentMap['tag'] = 'ACCO.exo-User'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00017'\n currentMap['name'] = 'User'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = memops.api.AccessControl.User\n aList = list()\n currentMap['keyMaps'] = aList\n 
aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033'))\n\n # Out-of-package link to UserGroup\n currentMap = {}\n exolinks['UserGroup'] = currentMap\n loadMaps['ACCO.exo-UserGroup'] = currentMap\n currentMap['tag'] = 'ACCO.exo-UserGroup'\n currentMap['type'] = 'exo'\n currentMap['guid'] = 'www.ccpn.ac.uk_Fogh_2006-08-16-14:22:54_00016'\n currentMap['name'] = 'UserGroup'\n currentMap['eType'] = 'cplx'\n currentMap['class'] = memops.api.AccessControl.UserGroup\n aList = list()\n currentMap['keyMaps'] = aList\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2008-06-30-16:30:50_00001'))\n aList.append(mapsByGuid.get('www.ccpn.ac.uk_Fogh_2006-08-16-14:22:53_00033'))", "def update_dict(self, ipdict, min_ip):\n temp_dict = copy.deepcopy(ipdict)\n # we thought about iterating over all entries to set all is_master-attributes to false, but\n # this is not possible due to the current implementation of util.listhandler - Entry\n # --> will not be implemented --> see Nico's comment\n # Nico:\n # As soon an entry is created without explicitly passing the parameter is_master=True it is defaulted to is_master=False.\n # As long as the creator of the record (ip, entry) does not explicitly set this parameter, it wont be set anywhere.\n temp_dict.get(min_ip).is_master = True\n return temp_dict", "def get_attribute(layername='np', sublayer='sig_vv_aft'):\n layer1 = {'gm': 'Global_Projection', 'np': 'North_Polar_Projection', 'radar': 'Sigma0_Data', 'flag': 'flag',\n 'cell_tb_v_aft': 'North_Polar_Projection'}\n # all_tb = [u'cell_tb_h_aft', u'cell_tb_qual_flag_h_aft', u'cell_tb_qual_flag_v_aft',\n # u'cell_tb_v_aft', u'site_loc', u'tb_cell_lat', u'tb_cell_lon']\n # tbh_aft = ['cell_tb_h_aft', 'tb_cell_lat', 'tb_cell_lon', 'cell_tb_qual_flag_h_aft']\n # tbv_aft = ['cell_tb_v_aft', 'tb_cell_lat', 'tb_cell_lon', 'cell_tb_qual_flag_v_aft']\n # tbv_af = ['cell_tb_v_aft', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_v_aft', 'cell_tb_error_h_aft',\n # 'cell_tb_time_utc_aft', 'cell_boresight_incidence_aft']\n # tbv_fo = ['cell_tb_v_fore', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_v_fore', 'cell_tb_error_h_fore',\n # 'cell_tb_time_utc_fore', 'cell_boresight_incidence_fore']\n # l_sig = ['cell_lat', 'cell_lon', 'cell_sigma0_qual_flag_vv', 'cell_sigma0_vv_aft']\n # h_sig = ['cell_lat', 'cell_lon', 'cell_sigma0_qual_flag_vv', 'cell_sigma0_vv_aft']\n att_dict = \\\n {'tbh_aft': ['cell_tb_h_aft', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_h_aft'],\n 'tbv_aft': ['cell_tb_v_aft', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_v_aft'],\n 'cell_tb_v_aft': ['cell_tb_v_aft', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_v_aft', 'cell_tb_error_v_aft',\n 'cell_tb_time_utc_aft', 'cell_boresight_incidence_aft'],\n 'cell_tb_h_aft': ['cell_tb_h_aft', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_h_aft', 'cell_tb_error_h_aft',\n 'cell_tb_time_utc_aft', 'cell_boresight_incidence_aft'],\n 'cell_tb_v_fore': ['cell_tb_v_fore', 'cell_lat', 'cell_lon', 'cell_tb_qual_flag_v_fore', 'cell_tb_error_h_fore',\n 'cell_tb_time_utc_fore', 'cell_boresight_incidence_fore'],\n 'sig_vv_aft': ['cell_lat', 'cell_lon', 'cell_sigma0_qual_flag_vv', 'cell_sigma0_vv_aft'],\n 'sig_hh_aft': ['cell_lat', 'cell_lon', 'cell_sigma0_qual_flag_vv', 'cell_sigma0_vv_aft'],\n\n 'smap_tb': ['cell_tb_v_aft', 'cell_tb_qual_flag_v_aft', 'cell_tb_error_v_aft',\n 'cell_tb_h_aft', 'cell_tb_qual_flag_h_aft', 'cell_tb_error_h_aft',\n 'cell_boresight_incidence_aft', 
'cell_tb_time_seconds_aft',\n 'cell_tb_v_fore', 'cell_tb_qual_flag_v_fore', 'cell_tb_error_v_fore',\n 'cell_tb_h_fore', 'cell_tb_qual_flag_h_fore', 'cell_tb_error_h_fore',\n 'cell_boresight_incidence_fore', 'cell_tb_time_seconds_fore'],\n 'smap_tb_lonlat': ['cell_lon', 'cell_lat',\n 'cell_tb_v_aft', 'cell_tb_qual_flag_v_aft', 'cell_tb_error_v_aft',\n 'cell_tb_h_aft', 'cell_tb_qual_flag_h_aft', 'cell_tb_error_h_aft',\n 'cell_boresight_incidence_aft', 'cell_tb_time_seconds_aft',\n 'cell_tb_v_fore', 'cell_tb_qual_flag_v_fore', 'cell_tb_error_v_fore',\n 'cell_tb_h_fore', 'cell_tb_qual_flag_h_fore', 'cell_tb_error_h_fore',\n 'cell_boresight_incidence_fore', 'cell_tb_time_seconds_fore'],\n 'smap_ta_lonlat_colrow': ['cell_lon', 'cell_lat',\n 'cell_tb_v_aft', 'cell_tb_qual_flag_v_aft', 'cell_tb_error_v_aft',\n 'cell_tb_h_aft', 'cell_tb_qual_flag_h_aft', 'cell_tb_error_h_aft',\n 'cell_boresight_incidence_aft', 'cell_tb_time_seconds_aft',\n 'cell_tb_v_fore', 'cell_tb_qual_flag_v_fore', 'cell_tb_error_v_fore',\n 'cell_tb_h_fore', 'cell_tb_qual_flag_h_fore', 'cell_tb_error_h_fore',\n 'cell_boresight_incidence_fore', 'cell_tb_time_seconds_fore', 'cell_row', 'cell_column']}\n att_read = [layer1[layername], att_dict[sublayer]]\n\n\n # att_dict = {'sig_vv_aft': att_sig_vv_aft, 'sig_hh_aft': att_sig_hh_aft,\n # if layername == 'sigma':\n # attributes = ['Sigma0_Data/cell_sigma0_vv_aft', 'Sigma0_Data/cell_lat', 'Sigma0_Data/cell_lon',\n # 'Sigma0_Data/cell_sigma0_qual_flag_vv']\n # elif layername == 'tb':\n # attributes = ['Global_Projection/cell_tb_v_aft', 'Global_Projection/tb_cell_lat',\n # 'Global_Projection/tb_cell_lon', '/none']\n # elif layername == 'tbn':\n # attributes = ['North_Polar_Projection/cell_tb_v_aft', 'North_Polar_Projection/tb_cell_lat',\n # 'North_Polar_Projection/tb_cell_lon', '/none']\n # else:\n # print 'there is no %s data' % layername\n return att_read", "def update(self, other):\n for name, value in other.items():\n self.__setitem__(name, value)", "def _set_general(self, neighs_info):\n ## 0. 
Format inputs\n # If int is a neighs\n if type(neighs_info) in [int, float, np.int32, np.int64, np.float]:\n self._set_neighs_number(neighs_info)\n self.set_sp_rel_pos = self._null_set_rel_pos\n self.get_sp_rel_pos = self._null_get_rel_pos\n # If slice is a neighs\n elif type(neighs_info) == slice:\n self._set_neighs_slice(neighs_info)\n self.set_sp_rel_pos = self._null_set_rel_pos\n self.get_sp_rel_pos = self._null_get_rel_pos\n # If array is a neighs\n elif type(neighs_info) == np.ndarray:\n self._set_neighs_general_array(neighs_info)\n self.set_sp_rel_pos = self._null_set_rel_pos\n self.get_sp_rel_pos = self._null_get_rel_pos\n # If int could be neighs or list of tuples\n elif type(neighs_info) == list:\n self._set_structure_list(neighs_info)\n # If tuple there are more information than neighs\n elif type(neighs_info) == tuple:\n self._set_structure_tuple(neighs_info)\n else:\n assert(type(neighs_info).__name__ == 'instance')\n ## Substitution main information\n self.idxs = neighs_info.idxs\n self.ks = neighs_info.ks\n self.iss = neighs_info.iss\n ## Copying class information\n self._constant_neighs = neighs_info._constant_neighs\n self._kret = neighs_info._kret\n self._n = neighs_info._n\n self.format_set_info = neighs_info.format_set_info\n self.format_get_info = neighs_info.format_get_info\n self._format_globalpars(neighs_info.staticneighs,\n neighs_info.ifdistance, neighs_info.level)\n self._format_setters(*neighs_info.format_set_info)\n self._format_getters(*neighs_info.format_get_info)\n self._format_joining_functions()", "def _update(self, *keys_and_val):\n if len(xxx) < 2:\n raise NotEnoughInfo\n value, *location = xxx[::-1]\n location.reverse()\n final_key = location.pop()\n ptr__target_dct = get_target_dct(location)\n ptr__target_dct[final_key] = value\n return", "def PopulateCommonFieldValues(self, field, mojom_field):\n field.name = mojom_field.decl_data.short_name\n field.kind = self.KindFromMojom(mojom_field.type)\n field.attributes = self.AttributesFromMojom(mojom_field)", "def set_params(self,**kwargs):\n for key in kwargs:\n setattr(self, key, kwargs[key])", "def __init__(self, layer):\n Layer.__init__(self, layer)\n # Special methods to help access fields that cannot be accessed normally\n self.getters = {\n 'load' : self.get_load,\n 'options' : self.get_options\n }\n self.setters = {\n 'load' : self.set_load,\n 'options' : self.set_options\n }\n # Special methods to help generate fields that cannot be generated normally\n self.generators = {\n 'load' : self.gen_load,\n 'dataofs' : self.gen_dataofs,\n 'flags' : self.gen_flags,\n 'chksum' : self.gen_chksum,\n 'options' : self.gen_options,\n 'window' : self.gen_window\n }", "def setup_known_fields(self):\n\n kfields = dict(self.known_fields)\n freg = re.compile(r\"(^.+)_\\d+$\")\n for field in self:\n if self[field].get(\"units\") is not None:\n continue\n\n if field in kfields:\n self[field][\"units\"] = kfields[field]\n continue\n\n fs = freg.search(field)\n if fs and fs.groups()[0] in kfields:\n self[field][\"units\"] = kfields[fs.groups()[0]]", "def _populate(self, fields):\n schema = self.schema\n for k, v in fields.items():\n fields[k] = schema.fields[k].iget(self, v)\n\n self.modify(fields)\n self.reset_modified()", "def _add_grid_attributes(self, ds):\n for name_int, names_ext in self._grid_attrs.items():\n ds_coord_name = set(names_ext).intersection(set(ds.coords) |\n set(ds.data_vars))\n model_attr = getattr(self.model, name_int, None)\n if ds_coord_name and (model_attr is not None):\n # Force coords to have 
desired name.\n ds = ds.rename({list(ds_coord_name)[0]: name_int})\n ds = ds.set_coords(name_int)\n if not np.array_equal(ds[name_int], model_attr):\n if np.allclose(ds[name_int], model_attr):\n msg = (\"Values for '{0}' are nearly (but not exactly) \"\n \"the same in the Run {1} and the Model {2}. \"\n \"Therefore replacing Run's values with the \"\n \"model's.\".format(name_int, self.run,\n self.model))\n logging.info(msg)\n ds[name_int].values = model_attr.values\n else:\n msg = (\"Model coordinates for '{0}' do not match those\"\n \" in Run: {1} vs. {2}\"\n \"\".format(name_int, ds[name_int], model_attr))\n logging.info(msg)\n\n else:\n # Bring in coord from model object if it exists.\n ds = ds.load()\n if model_attr is not None:\n ds[name_int] = model_attr\n ds = ds.set_coords(name_int)\n if (self.dtype_in_vert == 'pressure' and\n internal_names.PLEVEL_STR in ds.coords):\n self.pressure = ds.level\n return ds", "def UpdateLayers(self):\n pass", "def __init__(self, rename = None, drop = None, keep=None):\r\n if drop and keep:\r\n raise Exception('Configuration error in FieldMap: you cant specify both keep and drop options.')\r\n super(FieldMap, self).__init__()\r\n\r\n if rename:\r\n self.rename = rename\r\n else:\r\n self.rename = {}\r\n\r\n self.drop = drop or []\r\n self.keep = keep or []", "def updateLayerData(self, **kwargs):\n self.currentLayerData = self.layers[self.getCurrentRow()]\n self.currentLayerData.update(**kwargs)\n self.layers[self.getCurrentRow()] = self.currentLayerData\n self.updateSelectedLayer()", "def update(self, *dicts, _force_mutable=False, **d_settings):\n for d in dicts + (d_settings,):\n for k, v in d.items():\n info = self._deco_class_settings_dict.get(k)\n # skip immutable settings\n if info and not self._deco_class_settings_dict[k].mutable and not _force_mutable:\n continue\n # if not info, KeyError from __setitem__\n self.__setitem__(k, v, info=info, _force_mutable=_force_mutable)", "def set_dict(self, dic): # -> None:\n ...", "def set_nodes(self, ndict):\n self.inode_ref = ndict[self.inode]\n self.jnode_ref = ndict[self.jnode]", "def set_params(self, dic):\n if dic is not None:\n for key, val in zip(dic.keys(), dic.values()):\n if key in self.__dict__.keys():\n if isinstance(self.__dict__[key], Parameter):\n if isinstance(val, Parameter):\n self.__dict__[key] = val\n else:\n d = self.__dict__[key].__dict__\n self.__dict__[key] = Parameter(val, input_dimensional=d['_input_dimensional'],\n units=d['_units'],\n description=d['_description'],\n scale_object=d['_scale_object'],\n return_dimensional=d['_return_dimensional'])\n else:\n self.__dict__[key] = val", "def __init__(self, *args, **kwargs):\n super(AttrDict, self).__init__(*args, **kwargs)\n self.__dict__ = self", "def update(self, mapping):\n if not ismapping(mapping):\n raise TypeError(\"mapping type required\")\n field_names = getpyattr(type(self), 'field_names')\n for key, value in mapping.items():\n if key in field_names:\n setattr(self, key, value)", "def update(self, other):\n if isinstance(other, ParameterDict):\n for key, value in other.items():\n self._type_converter[key] = other._type_converter[key]\n self._dict[key] = value\n else:\n for key, value in other.items():\n self[key] = value", "def common(self, common):\n self._common = common", "def dict(self, *, exclude_none=True, by_alias=True, **kwargs):\n return super().dict(\n exclude_none=exclude_none,\n by_alias=by_alias,\n **kwargs\n )", "def updateAttrs(self, kwargs):\n for k, v in kwargs.iteritems():\n setattr(self, k, v)", "def 
attribute_dict(self, attribute_dict):\n self.__attribute_dict.update(attribute_dict)", "def _set_raw_structure(self, key):\n self.set_neighs(key)\n self.ifdistance = False", "def include(self, map):\n self.map.update(map)", "def _copy_kwargs(self, **kwargs):\n ns = self.__dict__\n for attr, kw in {'_engine': 'engine', '_format': 'format'}.items():\n assert kw not in kwargs\n if attr in ns:\n kwargs[kw] = ns[attr]\n return super()._copy_kwargs(**kwargs)", "def __initAvailableLayerTypes(self):\n from backend.caffe.path_loader import PathLoader\n caffe = PathLoader().importCaffe()\n layerNameMainParts = list(caffe.layer_type_list())\n\n res = {}\n paramsPerLayerType = {}\n\n # calculate common parameters of all layer types\n # by removing all which will be used for one specific layer type only\n # also keep in mind which ones have been removed to readd them to specific layers\n commonParams = self._availableParameterGroupDescriptors[\"LayerParameter\"].parameter() #use .parameter() on purpose\n layerSpecificParameters = set()\n for nameMainPart in layerNameMainParts:\n specificParamsName = [nameMainPart + \"Parameter\"]\n if moreLayerNameParameter.has_key(nameMainPart):\n specificParamsName.append( moreLayerNameParameter[nameMainPart])\n paramsPerLayerType[nameMainPart] = {}\n for key, value in commonParams.items():\n if value.isParameterGroup() and value.parameterName() in specificParamsName:\n paramsPerLayerType[nameMainPart][key] = value\n layerSpecificParameters.add(key)\n\n\n # special case: shared params for loss layers\n key = \"loss_param\"\n value = commonParams[key]\n del commonParams[key]\n for nameMainPart in layerNameMainParts:\n if LayerType.getCategoryByName(nameMainPart) == LayerType.CATEGORY_LOSS:\n paramsPerLayerType[nameMainPart][key] = value\n\n # TODO is there a special case for the TransformationParameter?\n\n # create each layer type after one another\n for nameMainPart in layerNameMainParts:\n\n # add common params to the specific ones\n layerTypeParam = paramsPerLayerType[nameMainPart].keys()\n paramsPerLayerType[nameMainPart].update(commonParams)\n\n irrelevant = layerSpecificParameters.difference(layerTypeParam)\n res[nameMainPart] = LayerType(nameMainPart, paramsPerLayerType[nameMainPart], layerTypeParam, irrelevant)\n\n self._commonParams = commonParams\n self._availableLayerTypes = res", "def _update(self, config_dict, allow_new_keys=True):\n if not config_dict:\n return\n\n for k, v in six.iteritems(config_dict):\n if k not in self.__dict__.keys():\n if allow_new_keys:\n self.__setattr__(k, v)\n else:\n raise KeyError('Key `{}` does not exist for overriding. 
'.format(k))\n else:\n if isinstance(v, dict):\n self.__dict__[k]._update(v, allow_new_keys)\n else:\n self.__dict__[k] = copy.deepcopy(v)", "def __setstate__(self,values):\n self.initDefault()\n setter = object.__setattr__\n for value,attr in zip(values,self.persistent):\n setter(self,attr,value)\n if self.dirty_sizeCrc == None:\n self.dirty_sizeCrc = {} #--Use empty dict instead.\n self.refreshDataSizeCrc()", "def join_us_address_fields(csv_dic, new_field):\n address_str = '%s, %s, %s %s' % (\n csv_dic['Street Address'],\n csv_dic['City'],\n csv_dic['State'],\n csv_dic['ZIP']) \n \n dic = csv_dic\n dic[new_field] = address_str\n del(dic['Street Address'])\n del(dic['City'])\n del(dic['State'])\n del(dic['ZIP'])\n \n return dic", "def __init__(self, **kwargs):\n self.__dict__.update(kwargs)\n self._calc_coords()", "def update(self, **kwargs):\n for k, v in kwargs.iteritems():\n if hasattr(self, k):\n setattr(self, k, v)", "def update_from_kwargs(self, **kwargs):\n for (key, value) in kwargs.items():\n setattr(self, key, value)", "def declare(self, **kwargs):\n # replace previous _dict with new one\n old_dict = self._dict\n self.__dict__.update(_dict=dict(**kwargs))\n # restore previously existing values\n self._dict.update(old_dict)", "def __init__(self, layer_nest):\n self._layer_nest = layer_nest\n super().__init__()", "def __init__(self, **kwargs):\n self.__dict__.update(kwargs)", "def __init__(self, **kwargs):\n self.__dict__.update(kwargs)", "def _assign_fields_to_params(cls, fields, params):\n if fields is None:\n fields = cls.get_default_read_fields()\n if fields:\n params['fields'] = ','.join(fields)", "def __init__(self, layer_list_info):\n self.layer_list_info = layer_list_info", "def _adjust(self, offset, size, *keep_refs):\n for basic_block in self._cfg.values():\n for instr in basic_block:\n instr.adjust(offset, size, instr in keep_refs)", "def update(self, other):\n fields = None\n if isinstance(other, dict):\n fields = other\n elif isinstance(other, Torrent):\n fields = other.fields\n else:\n raise ValueError('Cannot update with supplied data')\n for k, v in fields.iteritems():\n self.fields[k.replace('-', '_')] = v", "def fill(self, **kwargs):\r\n for name in kwargs.keys():\r\n setattr(self, name, kwargs[name])\r\n return self", "def _update_object(self, data_dict):\r\n pass", "def __post_init__(self):\n # Only do this if source_data already exists (not during its own initialization)\n if \"SOURCE_DATA\" in globals():\n for data_field in fields(self):\n setattr(self, data_field.name, getattr(SOURCE_DATA, data_field.name))", "def merge(self, new_store):\n if new_store.name and len(new_store.name) > 0:\n self.name = new_store.name\n if new_store.address and len(new_store.address) > 0:\n self.address = new_store.address\n if new_store.city and len(new_store.city) > 0:\n self.city = new_store.city\n if new_store.state and len(new_store.state) > 0:\n self.state = new_store.state\n if new_store.zip and new_store.zip > 0:\n self.zipcode = new_store.zip\n if new_store.phone and new_store.phone > 0:\n self.phone = new_store.phone", "def alias(cls, typemap, base, *names):\n cls.parameter_alias[base] = (typemap, base)\n for name in names:\n cls.parameter_alias[name] = (typemap, base)" ]
[ "0.57628345", "0.55964243", "0.55274314", "0.5520741", "0.5513947", "0.54532826", "0.54528964", "0.5352825", "0.53380233", "0.52978295", "0.52767116", "0.5263793", "0.5233308", "0.52296996", "0.51820993", "0.51604235", "0.51503444", "0.51213574", "0.5110379", "0.51047695", "0.50988513", "0.508823", "0.506627", "0.5055422", "0.50306296", "0.5018073", "0.50153625", "0.50129914", "0.50034714", "0.5000164", "0.4996957", "0.4996173", "0.498281", "0.49817255", "0.49817255", "0.49807975", "0.49799007", "0.4960345", "0.49599272", "0.495887", "0.49580485", "0.49561474", "0.49548766", "0.49453706", "0.49254045", "0.49213478", "0.49170837", "0.49021953", "0.4897945", "0.48938614", "0.48937863", "0.48893446", "0.48884648", "0.48844305", "0.48759463", "0.48754242", "0.4874314", "0.48617002", "0.48590136", "0.4858392", "0.4852854", "0.4847046", "0.48431808", "0.4841679", "0.48416525", "0.48400623", "0.48390383", "0.48384774", "0.48375118", "0.48284122", "0.4827328", "0.48242858", "0.48238197", "0.48232207", "0.48154745", "0.481224", "0.4811299", "0.4807417", "0.4804967", "0.48022318", "0.48001984", "0.4790127", "0.47880843", "0.4779084", "0.47785267", "0.47780317", "0.4773544", "0.47721803", "0.47688648", "0.4767178", "0.4767178", "0.47656336", "0.47638366", "0.476247", "0.47614136", "0.4750806", "0.47505605", "0.47466087", "0.47453538", "0.47440505" ]
0.6680822
0
Only function in class.
def __init__(self, selected_address, side, ST_PREFIX, axis):
    if ST_PREFIX == "W" or ST_PREFIX == "E":
        self.axis_val = int(axis[0])
        self.axis_dir = axis[2]
        self.Neighbor_GRID_Val = str(self.axis_val - 1000)
        self.axis_val_field = "AxisX_Val"
        self.axis_dir_field = "AxisX_Dir"
    else:
        self.axis_val = int(axis[1])
        self.axis_dir = axis[3]
        self.Neighbor_GRID_Val = str(self.axis_val - 1000)
        self.axis_val_field = "AxisY_Val"
        self.axis_dir_field = "AxisY_Dir"
    self.selection = (
        "{0}='{1}' AND {2}='{3}'".format(
            self.axis_val_field,
            self.Neighbor_GRID_Val,
            self.axis_dir_field,
            ST_PREFIX
        )
    )
    arcpy.SelectLayerByAttribute_management(
        "Addressing_Grid", "NEW_SELECTION", self.selection
    )
    arcpy.CopyFeatures_management(
        "Addressing_Grid", "neighbor_grid"
    )
    self.near_table = arcpy.GenerateNearTable_analysis(
        in_features=selected_address,
        near_features="neighbor_grid",
        out_table="Near_Table"
    )
    self.cursor = arcpy.SearchCursor(self.near_table)
    Address_Dist = 0
    for row in self.cursor:
        Address_Dist = int((row.getValue("NEAR_DIST") / 5280) * 1000)
    if side == "E" or side == "N":
        EorO_1 = "O"
    else:
        EorO_1 = "E"
    if (Address_Dist % 2) == 0:
        EorO_2 = "E"
    else:
        EorO_2 = "O"
    if EorO_1 == EorO_2:
        pass
    else:
        Address_Dist = (Address_Dist + 1)
    self.Address_Dist = Address_Dist
    self.HOUSENUM = self.axis_val + self.Address_Dist
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def func_in_class():\n return \"just a function hanging out in a class\"", "def someMethod (self):\n pass", "def __call__(self):\n pass", "def __call__(self):\n pass", "def independent_function():\n print(\"calling a function in exampleclass.py file that deos not belong to a class\")", "def method(self):", "def __call__(self) -> None:", "def __call__( self ):\n pass", "def another_method(self):\n pass", "def __call__(object):", "def __call__(self):\n\t\treturn", "def lassh():", "def test_class_method(self):\n self.assertEqual(self.Test.unscoped.im_self.__name__, 'Test')", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self):", "def __call__(self):", "def method():\n pass", "def ClassMethod(self, anyInt):\n pass", "def run(self):\n raise Exception('derived class should redefine this function')", "def __call__(self):\r\n raise NotImplementedError('override me')", "def opaque_class(self, classobj):\n self.restrict_class(classobj, None)", "def __def_function__():\n pass", "def regular(self):", "def f_noarg(self) :\n pass", "def __call__(obj):", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def Method_Access(function):\n \n pass", "def fun_a(self):\n pass", "def test_class_method(self):\n self.assertEqual(self.Test.scoped.im_self.__name__, 'Test')", "def annihilate(cls):\n pass", "def func(self):\n return self.__class__", "def __call__(self):\n raise NotImplementedError()", "def function(self):\n raise NotImplementedError", "def mockup(cls):\n pass", "def method(self):\n return None", "def method_a(self):", "def setup_class(cls):\n # ns.assert_true(False, \"setup_class run\")\n print('setup_class\\n')", "def __call__(self):\n raise NotImplementedError", "def a(self):\n pass", "def a(self):\n pass", "def fA(self):\n pass", "def __int__(self):\n pass", "def __init__(self):\r\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def method_b(self):", "def __init__(self) -> None:", "def __init__(self) -> None:", "def setUpClass(cls):\n cls.student_f = inspect.getmembers(Student, inspect.isfunction)", "def test_class_method(self):\n self.assertEqual(self.Test.default_scope.im_self.__name__, 'Test')", "def __nonzero__(self): # real signature unknown; restored from __doc__\r\n pass", "def __nonzero__(self): # real signature unknown; restored from __doc__\r\n pass", "def __call__(self, f):\n raise NotImplementedError()", "def test_class_method(self):\n self.assertEqual(self.Test.scope.im_self.__name__, 'Test')", "def __nonzero__(self): # real signature unknown; restored from __doc__\n pass", "def __nonzero__(self): # real signature unknown; restored from __doc__\n pass", "def __nonzero__(self): # real signature unknown; restored from __doc__\n pass", "def __nonzero__(self): # real signature unknown; restored from __doc__\n pass", "def 
dummy_fn(self):\n\t\tpass", "def dummy_method_silent(self):\n\n pass", "def _check_e(self, class_, event):\r\n\r\n if not self._check(class_):\r\n msg = 'Event \"%s.%s()\" must belong to a new style class '\r\n msg += 'and can\\'t be a static method'\r\n raise TypeError(msg % (str(class_), str(event)))", "def basic(self):\n pass", "def _check(self, class_):\r\n\r\n if isinstance(class_, (types.FunctionType, types.LambdaType,\r\n types.ClassType, types.InstanceType)):\r\n return False\r\n if not hasattr(class_, '__dict__'):\r\n if not hasattr(class_, '__slots__'):\r\n return False\r\n return True", "def test_class_method(self):\n self.assertEqual(pyperry.Base.add_processor.im_self.__name__, 'Base')", "def __init__(self, func): \n self.func = func", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass" ]
[ "0.7975919", "0.6815057", "0.6792388", "0.6792388", "0.67780167", "0.6729474", "0.6686155", "0.6603621", "0.6547703", "0.6465931", "0.6423527", "0.639462", "0.6383629", "0.6363027", "0.6363027", "0.6363027", "0.6363027", "0.6363027", "0.63594645", "0.63594645", "0.63387555", "0.63195354", "0.6279684", "0.621859", "0.621658", "0.6196642", "0.6196187", "0.61893773", "0.6184078", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6179048", "0.6158876", "0.6110445", "0.61030406", "0.6086144", "0.6084521", "0.6082019", "0.6079167", "0.6075799", "0.60741377", "0.60610646", "0.60520273", "0.6043741", "0.60380214", "0.60380214", "0.60071343", "0.5972074", "0.59676415", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.5954288", "0.594844", "0.5932322", "0.5932322", "0.5929646", "0.5923618", "0.59198964", "0.59198964", "0.59157544", "0.59120226", "0.5905462", "0.5905462", "0.5905462", "0.5905462", "0.59041786", "0.58974665", "0.58944595", "0.588875", "0.5887488", "0.5881473", "0.5875849", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615", "0.5874615" ]
0.0
-1
Get AWS ECS task information. For the purpose of getting the EC2 instance id for a given AWS ECS task name, for now only the 'containerInstanceArn' is fetched from the AWS ECS task.
def get_tasks_information(
    task: str,
    list_tasks: str,
    cluster=CLUSTER_NAME,
    client=None,
    region=REGION,
):
    if not client:
        session = boto3.session.Session()
        client = session.client("ecs", region)
    try:
        # Get all tasks in the cluster.
        cluster_tasks = client.list_tasks(cluster=cluster)["taskArns"]
        logger.debug(f"[CLUSTERTASKS]: '{cluster_tasks}'.")
        tasks = client.describe_tasks(cluster=cluster, tasks=cluster_tasks)[
            "tasks"
        ]
        logger.debug(f"[TASKS]: '{tasks}'.")
        # Filter for given task name.
        # Get instance id,
        container_instances = []
        task_name = ""
        for task_ in tasks:
            task_definition = task_.get("taskDefinitionArn", "")
            if list_tasks:
                container_instances.append(task_definition)
                continue
            container_instance_arn = task_.get("containerInstanceArn", None)
            if container_instance_arn:
                if not list_tasks:
                    if re.search(task, task_definition):
                        container_instances.append(container_instance_arn)
                        task_name = task_definition
                        break
                else:
                    container_instances.append(container_instance_arn)
        if list_tasks:
            return "\n".join(container_instances)
        instances = describe_instances_with_cluster(
            container_instances=container_instances,
            cluster=cluster,
            client=client,
            region=region,
        )
        if not instances:
            return ""
        logger.info(f"Instance '{instances[0]}' runs task '{task_name}'.")
        return instances[0]
    except (botocore.exceptions.ClientError) as e:
        # TODO: Check right error code.
        if e.response["Error"]["Code"] == "ClusterNotFoundException":
            logger.error(f"Cluster '{cluster}' not found: {str(e)}.")
        else:
            logger.error(f"Error: {str(e)}")
        sys.exit(1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(profile, cluster, tasks):\n client = boto3client.get(\"ecs\", profile)\n params = {}\n params[\"cluster\"] = cluster\n params[\"tasks\"] = tasks\n return client.describe_tasks(**params)", "def get_task_details(self) -> task.TaskMetadata:\n return task.TaskMetadata(\n name=self.name,\n description=self.task_data[\"description\"],\n keywords=self.task_data[\"keywords\"],\n max_input_length_per_query=self.task_data[\"max_input_length\"],\n max_queries=self.task_data[\"max_queries\"],\n )", "def get_task_info(self):\n\n print()\n employee_name = self.task.get_employee_name()\n task_name = self.task.get_task_name()\n mins = self.task.get_time_spent()\n notes = self.task.get_notes()\n date = self.task.get_date()\n\n task = {\n 'employee_name': employee_name,\n 'task_name': task_name,\n 'mins': mins,\n 'notes': notes,\n 'date': date\n }\n\n return task", "def task_id(self) -> str:\n return self.get_from_redis(\"task_id\")", "def createRunTaskDefinition(options):\n\n # ECS Cluster to connect to\n ecsCluster = options.cluster\n # One-off task parameters\n refTaskDefName = options.from_task\n containerCommand = options.command\n containerEntrypoint = options.entrypoint\n containerImage = options.image\n oneOffTaskName = options.task_name\n oneOffTaskLaunchType = options.launch_type\n oneOffTaskNetsId = options.networks_id\n oneOffTaskSgsId = options.security_groups_id\n # Container log group name and log stream prefix for CloudWatch\n oneOffTaskContainerLogGroup = f\"/ecs/{oneOffTaskName}\"\n oneOffTaskContainerLogStreamPrefix = \"ecs\"\n\n # Check if the network configuration is provided when the launch type is FARGATE\n if oneOffTaskLaunchType == \"FARGATE\" and (not oneOffTaskNetsId or not oneOffTaskSgsId):\n print(\"Error: for launch type 'FARGATE' the network configuration must be provided using the `--networks-id` and `--security-groups-id` flags.\")\n sys.exit(1)\n\n # Get the latest active task definition from refTaskDefName\n latestActiveTaskDef = ecs.describe_task_definition(\n taskDefinition=refTaskDefName\n )\n\n # Remove unnecessary keys from the task definition\n # See https://github.com/aws/aws-cli/issues/3064#issuecomment-504681953\n del latestActiveTaskDef['taskDefinition']['taskDefinitionArn']\n del latestActiveTaskDef['taskDefinition']['revision']\n del latestActiveTaskDef['taskDefinition']['status']\n # This key is only present when are required some attributes such as S3 environment files\n try:\n del latestActiveTaskDef['taskDefinition']['requiresAttributes']\n except KeyError:\n pass\n del latestActiveTaskDef['taskDefinition']['compatibilities']\n del latestActiveTaskDef['ResponseMetadata']\n # Added in recent versions of boto3 (1.17.64). 
For backward compatibility we use exceptions\n try:\n del latestActiveTaskDef['taskDefinition']['registeredAt']\n except KeyError:\n pass\n try:\n del latestActiveTaskDef['taskDefinition']['registeredBy']\n except KeyError:\n pass\n\n # Get the secrets, environment files and environment variables for the first container\n containerSecrets = latestActiveTaskDef['taskDefinition']['containerDefinitions'][0].get('secrets', None)\n containerEnvFiles = latestActiveTaskDef['taskDefinition']['containerDefinitions'][0].get('environmentFiles', None)\n containerEnv = latestActiveTaskDef['taskDefinition']['containerDefinitions'][0].get('environment', None)\n # Get the execution role ARN for the task\n execRoleArn = latestActiveTaskDef['taskDefinition'].get('executionRoleArn', None)\n\n if oneOffTaskLaunchType == \"EC2\":\n # Build the one-off task definition for EC2\n oneOffTaskDef = {\n \"executionRoleArn\": execRoleArn,\n \"containerDefinitions\": [\n {\n \"environmentFiles\": [],\n \"secrets\": [],\n \"environment\": [],\n \"entryPoint\": [],\n \"portMappings\": [],\n \"command\": containerCommand,\n \"cpu\": 128,\n \"memory\": 400,\n \"memoryReservation\": 300,\n \"volumesFrom\": [],\n \"image\": containerImage,\n \"name\": oneOffTaskName,\n \"logConfiguration\": {\n \"logDriver\": \"awslogs\",\n \"options\": {\n \"awslogs-group\": oneOffTaskContainerLogGroup,\n \"awslogs-region\": awsRegion,\n \"awslogs-stream-prefix\": oneOffTaskContainerLogStreamPrefix\n }\n }\n }\n ],\n \"family\": oneOffTaskName\n }\n else:\n # Build the one-off task definition for Fargate\n oneOffTaskDef = {\n \"executionRoleArn\": execRoleArn,\n \"containerDefinitions\": [\n {\n \"environmentFiles\": [],\n \"secrets\": [],\n \"environment\": [],\n \"entryPoint\": [],\n \"portMappings\": [],\n \"command\": containerCommand,\n \"cpu\": 128,\n \"memory\": 400,\n \"memoryReservation\": 300,\n \"volumesFrom\": [],\n \"image\": containerImage,\n \"name\": oneOffTaskName,\n \"logConfiguration\": {\n \"logDriver\": \"awslogs\",\n \"options\": {\n \"awslogs-group\": oneOffTaskContainerLogGroup,\n \"awslogs-region\": awsRegion,\n \"awslogs-stream-prefix\": oneOffTaskContainerLogStreamPrefix\n }\n }\n }\n ],\n \"family\": oneOffTaskName,\n \"networkMode\": \"awsvpc\",\n \"requiresCompatibilities\": [\n \"FARGATE\"\n ],\n \"cpu\": \"256\",\n \"memory\": \"512\"\n }\n\n # Update task definition with optionals keys\n if containerEntrypoint:\n oneOffTaskDef['containerDefinitions'][0].update({\"entryPoint\": containerEntrypoint.split(' ')})\n\n if containerEnvFiles:\n oneOffTaskDef['containerDefinitions'][0].update({\"environmentFiles\": containerEnvFiles})\n\n if containerSecrets:\n oneOffTaskDef['containerDefinitions'][0].update({\"secrets\": containerSecrets})\n\n if containerEnv:\n oneOffTaskDef['containerDefinitions'][0].update({\"environment\": containerEnv})\n\n # Create a new task revision for the one-off task\n response = ecs.register_task_definition(**oneOffTaskDef)\n\n # Get the one-off task definition ARN\n oneOffTaskDefArn = response['taskDefinition']['taskDefinitionArn']\n\n print(f\"==> Created the task definition: {oneOffTaskDefArn}\")\n\n # Create the one-off task container CloudWatch Log Group if does not exists\n print(\"\\n\" + createCloudWatchLogGroup(logGroupName=oneOffTaskContainerLogGroup))\n\n # Run the one-off task with the created task definition (oneOffTaskDefArn)\n if oneOffTaskLaunchType == \"EC2\":\n response = ecs.run_task(\n cluster=ecsCluster,\n taskDefinition=oneOffTaskDefArn\n )\n else:\n response = 
ecs.run_task(\n cluster=ecsCluster,\n taskDefinition=oneOffTaskDefArn,\n launchType='FARGATE',\n networkConfiguration={\n 'awsvpcConfiguration': {\n 'subnets': oneOffTaskNetsId,\n 'securityGroups': oneOffTaskSgsId,\n 'assignPublicIp': 'DISABLED'\n }\n }\n )\n\n # Get the one-off run task ARN\n oneOffTaskRunArn = response['tasks'][0]['taskArn']\n\n print(f\"\\n==> Executed task ARN: {oneOffTaskRunArn}\")\n print(\"\\nWaiting for the task to finishes...\")\n\n # Wait until the one-off task is stopped\n # The poll is every 6 seconds by default and the maximun number of attempts to be made is 100\n waiter = ecs.get_waiter('tasks_stopped')\n waiter.wait(\n cluster=ecsCluster,\n tasks=[\n oneOffTaskRunArn\n ]\n )\n\n # Get the output of the stopped task\n response = ecs.describe_tasks(\n cluster=ecsCluster,\n tasks=[\n oneOffTaskRunArn\n ]\n )\n\n # Get the container exit status code and its reason\n oneOffTaskExitCode = response['tasks'][0]['containers'][0].get('exitCode')\n oneOffTaskExitCodeReason = response['tasks'][0]['containers'][0].get('reason')\n\n # Get the one-off task stopped reason\n oneOffTaskStopeedReason = response['tasks'][0].get('stoppedReason')\n\n if oneOffTaskExitCode == 0 and not oneOffTaskExitCode:\n print(\"\\n==> The one-off task process has finished correctly!!\")\n printContainerOutput(logGroupName=oneOffTaskContainerLogGroup, taskArn=oneOffTaskRunArn)\n sys.exit()\n else:\n print(\"\\n==> The one-off task has failed!!\")\n print(f\"Container exit code: {oneOffTaskExitCode}\")\n print(f\"Container exit reason: {oneOffTaskExitCodeReason}\")\n print(f\"Stopped reason: {oneOffTaskStopeedReason}\")\n printContainerOutput(logGroupName=oneOffTaskContainerLogGroup, taskArn=oneOffTaskRunArn)\n sys.exit(1)", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._mpis.task_id", "def get_task_result(self, task_name):\n logging.info(f\"Getting task: {task_name}\")\n if task_name in self._container:\n logging.info(\"Success!\")\n return self._container[task_name].result\n logging.error(f\"Could not find task: {task_name}\")\n raise TaskNotFoundException(f\"Could not find task: {task_name}\")", "def get_instance_image_info(task):\n ctx = task.context\n node = task.node\n image_info = {}\n # NOTE(pas-ha) do not report image kernel and ramdisk for\n # local boot or whole disk images so that they are not cached\n if (node.driver_internal_info.get('is_whole_disk_image')\n or deploy_utils.get_boot_option(node) == 'local'):\n return image_info\n root_dir = get_http_boot_dir()\n i_info = node.instance_info\n labels = ('kernel', 'ramdisk')\n d_info = deploy_utils.get_image_instance_info(node)\n if not (i_info.get('kernel') and i_info.get('ramdisk')):\n glance_service = service.GlanceImageService(context=ctx)\n iproperties = glance_service.show(d_info['image_source'])['properties']\n for label in labels:\n i_info[label] = str(iproperties[label + '_id'])\n node.instance_info = i_info\n node.save()\n\n for label in labels:\n image_info[label] = (\n i_info[label],\n os.path.join(root_dir, node.uuid, label)\n )\n\n return image_info", "def get_task_metadata(self, task):\n return self._gdb_interface.get_task_metadata(task)", "def get_task_uuid(self):\n\t\treturn call_sdk_function('PrlRunningTask_GetTaskUuid', self.handle)", "def getTask():\n\tcontent = requests.get(MANAGER_URL+\"task\", params={\"apiKey\": API_KEY}).text\n\tif content == 
\"null\":\n\t\treturn None\n\telse:\n\t\treturn json.loads(content)", "def list_ecs_task_definitions():\n tasks = ECS_MANAGER.list_ecs_task_definitions()\n if tasks:\n print(str_sep)\n print(\"Listing task definitions available in {}\".format(SESSION.region_name.upper()))\n print(\"{:50}{:20}\".format('Task', 'Version'))\n print(str_sep)\n\n for task in tasks['taskDefinitionArns']:\n if len(task) > 0:\n task_name, version = task.rsplit(\"/\", 1)[1].split(\":\")\n print(\"{:50}{:20}\".format(task_name, version))", "def get_task(self, task_id: str) -> Mapping[str, Any]:\n return self.__get_one_by_id(\"tasks\", \"task_id\", task_id)", "def task(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"task\")", "async def get_task_result(task_id: TaskId):", "def getTaskName(self):\n return self._taskName", "def task(self):\n return self._task", "def task(self):\n return self._task", "def task(self):\n return self._task", "def task(self):\n return self._task", "def task(self):\n return self._task", "def _mesos_task_info(self, submissionId):\n agent_id = agent_hostname = agent_port = framework_id = container_id = None\n get_state = self.driver.getState()['get_state']\n get_tasks = get_state['get_tasks']\n\n tasks = get_tasks['tasks'] + get_tasks.get('completed_tasks', [])\n tasks_list = list(filter(lambda x: x['task_id']['value'] == submissionId, tasks))\n if len(tasks_list) > 0:\n task = tasks_list[0]\n agent_id = task['agent_id']['value']\n framework_id = task['framework_id']['value']\n\n if agent_id is not None:\n get_agents = get_state['get_agents']\n agents = get_agents['agents']\n agents_list = list(filter(lambda x: x['agent_info']['id']['value'] == agent_id, agents))\n if len(agents_list) > 0:\n agent = agents_list[0]\n agent_hostname = agent['agent_info']['hostname']\n agent_port = agent['agent_info']['port']\n agent_driver = MesosOperatorAgentDriver('{}:{}'.format(agent_hostname, agent_port))\n containers = agent_driver.getContainers()['get_containers']['containers']\n containers_list = list(filter(lambda x: x['executor_id']['value'] == submissionId, containers))\n if len(containers_list) > 0:\n container = containers_list[0]\n container_id = container['container_id']['value']\n\n return agent_id, agent_hostname, str(agent_port), framework_id, container_id", "def get_task_metadata(self, task):\n metadata_record = self._read_transaction(tx.get_task_metadata, task=task)\n return _reconstruct_metadata(metadata_record)", "def task_get(context, task_id, session=None, force_show_deleted=False):\n task_ref = _task_get(context, task_id, session=session,\n force_show_deleted=force_show_deleted)\n return _task_format(task_ref, task_ref.info)", "def test_get_task(self):\n resp = self.app.get('/api/2/inf/esrs',\n headers={'X-Auth': self.token})\n\n task_id = resp.json['content']['task-id']\n expected = 'asdf-asdf-asdf'\n\n self.assertEqual(task_id, expected)", "def get_task_id(self, position):\n task_id = self.stn.get_task_id(position)\n if task_id:\n return task_id\n else:\n raise TaskNotFound", "def task_definition(self):\n return self._task_definition", "def get_task_details_by_id(self,params=['ng2157']):\n result =[]\n query_params = {'attid':params[0]}\n query = \"\"\"select a.Attuid,a.Status,a.Severity,a.TaskDetails,a.Remarks,a.StartDate,a.EndDate,a.TaskFinishDate,a.InsertDate,a.InsertedBy,a.UpdateDate,a.UpdatedBy\n from s08_DB.Alltasks a\n where a.Attuid =:attid\"\"\".replace('\\n',' ')\n with vertica_python.connect(**conn_info) as connection:\n logging.debug(\"Connected to {} on 
host{} \".format(conn_info['database'],conn_info['host']))\n logging.info(\"The read SQL -> {} \".format(query))\n cur = connection.cursor()\n cur.execute(query,query_params)\n for row in cur.iterate():\n result.append(row)\n return(result)", "def task(self) -> str:\n return self._task", "def task_name(self):\n pass", "def get_info(self, key: str) -> TaskInfo:\n return self.task_graph.nodes[key][\"info\"]", "def taskdetail_get(td_id):\n return IMPL.taskdetail_get(td_id)", "def get_task(self):\n\n url='{url}/task'.format(url=config.SERVER_URL)\n\n try:\n res=request.urlopen(url,timeout=10).read()\n res=str(res,encoding='utf8')\n except Exception as e:\n check_server() # sleep until server is available\n try:\n res=request.urlopen(url,timeout=10).read()\n res=str(res,encoding='utf8')\n except:\n err_str='error: client -> get_task : ' \\\n 'unable to connect to server, exit process'\n info_manager(err_str,type='KEY')\n os._exit(0)\n\n if 'no task' in res: # if server have no task uid ,return 'no task uid'\n err_str= 'error: client -> get_task : ' \\\n 'unable to get task, exit process'\n info_manager(err_str,type='KEY')\n os._exit(0)\n\n try: # try to parse task str\n res=res.split(',')\n self.task_uid=res[0]\n self.task_type=res[1]\n except:\n err_str='error: client -> get_task : ' \\\n 'unable to split task str,exit process'\n info_manager(err_str,type='KEY')\n os._exit(0)", "def _task_info_get(context, task_id, session=None):\n session = session or get_session()\n query = session.query(models.TaskInfo)\n query = query.filter_by(task_id=task_id)\n try:\n task_info_ref = query.one()\n except sa_orm.exc.NoResultFound:\n LOG.debug(\"TaskInfo was not found for task with id %(task_id)s\",\n {'task_id': task_id})\n task_info_ref = None\n\n return task_info_ref", "def get_target(self):\n task = self.task.get_task(self.task_id)\n if 'name' in task:\n return str(task['name'])\n return str(task)", "def __getitem__(self, txid: int) -> asyncio.Task:\n return self._tasks[txid]", "def get_task_by_name(self, task_name):\n task_table = Table('task', self.metadata, autoload=True)\n try:\n parent_task = self.session.query(task_table).filter(task_table.c.name==str(task_name)).one()\n task = parent_task._asdict()\n return task\n except Exception as e:\n logger.info(f\"Error retrieving task {task_name}: {e}\")\n return False", "def task_definition_arn(self) -> str:\n return pulumi.get(self, \"task_definition_arn\")", "def getNodeTaskByUPID(self,node,upid):\n data = self.connect('get','nodes/%s/tasks/%s' % (node,upid),None)\n return data", "def ec2_image_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ec2_image_id\")", "def task_name(self) -> str:\n return self._task_name", "def get_task(self, task_id):\n res = self.conn.cursor().execute(\"SELECT * FROM tasks WHERE id=?\", (task_id,))\n return res.fetchone()", "def get_info(self, key: str) -> TaskInfo:\n raise NotImplementedError", "def get_module_task_instance_id(task_instances):\n for id in task_instances:\n if task_instances[id] == 'module_node':\n return id\n return None", "def instanceid_lookup(session, hostname):\n if session is None:\n return None\n\n client = session.client('ec2')\n response = client.describe_instances(\n Filters=[{\"Name\": \"tag:Name\", \"Values\": [hostname]}])\n\n item = response['Reservations']\n if len(item) == 0:\n return None\n else:\n item = item[0]['Instances']\n if len(item) == 0:\n return None\n else:\n item = item[0]\n if 'InstanceId' in item:\n return item['InstanceId']\n return None", "def get_task_id(self):\n 
if self.task_id:\n return self.task_id\n return (f'{self.task_type}_{self.get_source_system().lower()}'\n f'.{self.get_source_subsystem().lower()}.{self.get_name().upper()}')", "def _get_task_id_from_xmodule_args(xmodule_instance_args):\r\n return xmodule_instance_args.get('task_id', UNKNOWN_TASK_ID) if xmodule_instance_args is not None else UNKNOWN_TASK_ID", "def test_get_image_task(self):\n resp = self.app.get('/api/2/inf/esrs/image',\n headers={'X-Auth': self.token})\n\n task_id = resp.json['content']['task-id']\n expected = 'asdf-asdf-asdf'\n\n self.assertEqual(task_id, expected)", "def get_task_index(self):\n return self.task_index", "def get(self, name, task):\n assert name, \"Must input a valid dataset name.\"\n assert task, \"Must input a valid task name.\"\n self._assert_dataset_exists_in_cache(name)\n self._assert_task_exists_in_dataset_in_cache(name, task)\n return self.manager.data[\"dataset\"][name][\"tasks\"][task]", "def get_tasks(self, task_id=None):\n # Recover all config from OpenVAS\n if task_id:\n return self.make_xml_request('<get_tasks id=\"%s\"/>' % name, xml_result=True)\n else:\n return self.make_xml_request(\"<get_tasks />\", xml_result=True)", "def taskid(self):\n raise NotImplementedError('Must be implemented by subclass.')", "def get(self, guid):\n results = j.sal.fs.find(self._root, '*_%s' % guid)\n if len(results) <= 0:\n raise TaskNotFoundError(\"task %s not found\" % guid)\n if len(results) > 1:\n raise RuntimeError(\"found 2 tasks with same guid, this should not happen\")\n return self._deserialize_task(j.sal.fs.readFile(results[0]))", "def _get_current_task():\r\n return current_task", "def _get_current_task():\r\n return current_task", "def get_task_input(self, task, input_id):\n input_record = self._read_transaction(tx.get_task_input, task=task, input_id=input_id)\n return _reconstruct_task_input(input_record[\"i\"])", "def gettaskname(self): # 3\n sizetaskname_ = (1 + self.gettasknamelen())\n arr_taskname = array.array(\"b\",[0]*((sizetaskname_)))\n memview_arr_taskname = memoryview(arr_taskname)\n res,resargs = self.__obj.gettaskname(sizetaskname_,memview_arr_taskname)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n retarg_taskname = resargs\n retarg_taskname = arr_taskname.tobytes()[:-1].decode(\"utf-8\",errors=\"ignore\")\n return retarg_taskname", "def task(self, name):\n with self.db_lock:\n return self.rcon.hget(self.task_key, name)", "async def task_detail(request, job_id=None, task_name=None):\n jobs = dagobah._serialize().get('jobs', {})\n job = [job for job in jobs if str(job['job_id']) == job_id][0]\n return template('task_detail.html',\n job=job,\n task_name=task_name,\n task=[task for task in job['tasks']\n if task['name'] == task_name][0])", "def get(self, id):\n\n return self.client.get(\"external-task/{0}\".format(id))", "def get_task(self,\n task_label=None,\n notebook_cell_text=None,\n print_return=True):\n\n self._print('Getting task {} ...'.format(task_label))\n\n if task_label:\n task = {task_label: self._tasks[task_label]}\n\n elif notebook_cell_text:\n task = self._load_task_from_notebook_cell(notebook_cell_text)\n\n else:\n raise ValueError(\n 'Get an existing task by querying for its ID or register a '\n 'task from a notebook cell.')\n\n if print_return: # For communicating with JavaScript\n print(dumps(task))\n return task", "def get_input_task(self, name='0'):\n port = self.get_input(name).other\n if port is None:\n return None\n return port.task", "def _get_task(self, task):\n try:\n 
return TASKS[task]\n except KeyError:\n raise ValueError(\"task %s \"\n \"is not supported. \" % task)", "def get_task_by_tid(self, tid):\n return self.task_controller.get_task(tid)", "def get_task_host(self):\n comp = self.get_task_role()\n host = (comp.host_ref\n if isinstance(comp.host_ref, basestring)\n else comp.host_ref.value())\n if isinstance(host, IPAddressable):\n host.fix_arguments()\n host = host.get_ip()\n return host", "def get_task_host(self):\n comp = self.get_task_role()\n host = (comp.host_ref\n if isinstance(comp.host_ref, basestring)\n else comp.host_ref.value())\n if isinstance(host, IPAddressable):\n host.fix_arguments()\n host = host.get_ip()\n return host", "def name(self):\n return self._client.project_name + '/instances/' + self.instance_id", "def get_task(self, name):\n res = Task()\n self.GetTask(name, res)\n return res", "def get_task(self, id):\n raise NotImplementedError()", "def container_status(self):\n if self.status == 'complete':\n return 'complete'\n try:\n task_status = self._ecs.describe_tasks(tasks=[self.name])['tasks'][0]['lastStatus']\n return task_status\n except (IndexError, ClientError):\n return 'STOPPED'", "def __get_task(self, task_id):\r\n if task_id not in self.__tasks:\r\n self.__tasks[task_id] = Task(task_id)\r\n return self.__tasks[task_id]", "def __get_task(self, task_id):\r\n if task_id not in self.__tasks:\r\n self.__tasks[task_id] = Task(task_id)\r\n return self.__tasks[task_id]", "def get_arns(profile, cluster, started_by=None):\n result = None\n client = boto3client.get(\"ecs\", profile)\n params = {}\n params[\"cluster\"] = cluster\n if started_by:\n params[\"startedBy\"] = started_by\n return client.list_tasks(**params)", "def get_status(ec2,spot_request_id):\n current = ec2.describe_spot_instance_requests(SpotInstanceRequestIds=[spot_request_id,])\n instance_id = current[u'SpotInstanceRequests'][0][u'InstanceId'] if u'InstanceId' in current[u'SpotInstanceRequests'][0] else None\n return instance_id", "def get_execution_info(context):\n reservation_info = {}\n hostname = socket.gethostname()\n reservation_info[\"Python version\"] = platform.python_version()\n reservation_info[\"Operating System\"] = platform.platform()\n reservation_info[\"Platform\"] = platform.system()\n reservation_info[\"Hostname\"] = hostname\n\n try:\n reservation_info[\"IP\"] = socket.gethostbyname(hostname)\n except Exception:\n reservation_info[\"IP\"] = \"n/a\"\n\n try:\n reservation_info[\"ReservationID\"] = get_reservation_context_attribute(\n \"reservation_id\", context\n )\n reservation_info[\"Description\"] = get_reservation_context_attribute(\n \"description\", context\n )\n reservation_info[\"EnviromentName\"] = get_reservation_context_attribute(\n \"environment_name\", context\n )\n reservation_info[\"Username\"] = get_reservation_context_attribute(\n \"owner_user\", context\n )\n except Exception:\n pass\n\n return reservation_info", "def _get_task(self, task_id):\n if not task_id:\n return None\n task = objects.Transaction.get_by_uid(task_id, fail_if_not_found=False)\n if task and task.cluster_id == self.cluster.id:\n return task\n return False", "def describe_instance(instance_id):\n # Instantiate the service resource object\n ec2_resource = session.resource('ec2', region_name=region)\n try:\n # Describe an instance\n instance = ec2_resource.Instance(instance_id)\n print('\\nInstance Id: ' + instance_id)\n print('Instance Id: ' + instance.id)\n print('Image Id: ' + instance.image_id)\n print('Instance Type: ' + instance.instance_type)\n 
print('State: ' + instance.state['Name'])\n if instance.state['Name'] == 'running':\n print('Private DNS Name: ' + instance.private_dns_name)\n print('Private IP: ' + instance.private_ip_address)\n print('Public DNS Name: ' + instance.public_dns_name)\n print('Public IP: ' + instance.public_ip_address)\n except botocore.exceptions.ClientError as e:\n if e.response['Error']['Code'] == \"MissingParameter\":\n print(\"Error: Missing instance id!!\")\n else:\n raise\n return", "def get(self, task_id):\n try:\n return self.dal.task.get_by_id(task_id)\n except EntityNotFound:\n raise DoesNotExist()", "def get_task(self, id=None, name=None):\n query = \"SELECT * FROM tangerine WHERE \"\n if id: query += \"id='\"+str(id)+\"'\"\n elif name: query += \"name='\"+name+\"' AND parent_job IS NULL\"\n else: return None\n \n cur = self.conn.cursor()\n cur.execute(query + \";\")\n self.conn.commit()\n task = cur.fetchone()\n \n if task:\n return Task(self.columns, task);\n else:\n return None", "def _get_task_meta_for(self, task_id):\n session = Session()\n try:\n task = None\n for task in session.query(Task).filter(Task.task_id == task_id):\n break\n if not task:\n task = Task(task_id)\n session.add(task)\n session.commit()\n if task:\n return task.to_dict()\n finally:\n session.close()", "def url(self):\n endpoint = 'taskinfo?taskID=%d' % self.id\n return posixpath.join(self.connection.weburl, endpoint)", "def get_output_task(self, name='0'):\n port = self.get_output(name).other\n if port is None:\n return None\n return port.task", "def get(self, controller, data, *args, **kwargs): \n task_manager = controller.get_task_manager()\n res = task_manager.get_all_tasks(details=True)\n resp = {\n u'task-instances':res,\n u'count':len(res)\n } \n return resp", "def ec2_image_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"ec2_image_id\")", "async def get_task_status(task_id: TaskId):", "def get_self_instance_id():\n\n logging.debug('get_self_instance_id()')\n response = urllib2.urlopen('http://169.254.169.254/1.0/meta-data/instance-id')\n instance_id = response.read()\n return instance_id", "def instance_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"instance_id\")", "def instance_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"instance_id\")", "def instance_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"instance_id\")", "def instance_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"instance_id\")", "def instance_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"instance_id\")", "def instance_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"instance_id\")", "def get_task(self, u_name):\n raise NotImplementedError()", "def get_instance_id(event):\n try:\n return event['detail']['instance-id']\n except KeyError as err:\n LOGGER.error(err)\n return False", "def get(self, id):\n task = get_task(get_db(), id)\n if not task:\n api.abort(404, f\"Invalid task with id: {id}\")\n return task_to_dict(task)", "def get_tasks_ids(self, name=None):\n\n m_return = {}\n\n for x in self.get_tasks().findall(\"task\"):\n m_return[x.find(\"name\").text] = x.get(\"id\")\n\n if name:\n return {name : m_return[name]}\n else:\n return m_return" ]
[ "0.6348864", "0.598766", "0.5797471", "0.5780299", "0.57383114", "0.56886894", "0.56886894", "0.56886894", "0.56886894", "0.5631942", "0.55174756", "0.5450972", "0.5419298", "0.54183954", "0.54114175", "0.5400899", "0.53903484", "0.53873485", "0.5380392", "0.53680474", "0.53631324", "0.53631324", "0.53631324", "0.53631324", "0.53631324", "0.5350871", "0.5314721", "0.52899784", "0.52896404", "0.5289018", "0.52837414", "0.5274506", "0.52737075", "0.5271599", "0.5271103", "0.5258413", "0.52570504", "0.5255385", "0.52546304", "0.52509356", "0.5249153", "0.5240594", "0.5237209", "0.5228827", "0.5223669", "0.521792", "0.521631", "0.52134407", "0.5194331", "0.51828766", "0.51767755", "0.51696855", "0.5169201", "0.51582026", "0.5128819", "0.5112906", "0.5108146", "0.5096957", "0.5096957", "0.5096466", "0.50908726", "0.5080434", "0.5080346", "0.506722", "0.5057286", "0.50534683", "0.5052055", "0.5051503", "0.5037815", "0.5037815", "0.5037614", "0.5020254", "0.50195223", "0.50111234", "0.49823758", "0.49823758", "0.49763083", "0.49666375", "0.49648607", "0.49593002", "0.49581087", "0.4956298", "0.49478713", "0.49472812", "0.49309513", "0.49283057", "0.4923464", "0.4898853", "0.48978576", "0.48961028", "0.48934093", "0.48934093", "0.48934093", "0.48934093", "0.48934093", "0.48934093", "0.4885462", "0.48841095", "0.4882184", "0.48740464" ]
0.68460375
0
Provide CLI arguments when used as an executable script with command line options.
def main():
    # import aws_ecs_services.arguments as arguments
    from .arguments import get_cli_arguments

    # args = arguments.get_cli_arguments()
    args = get_cli_arguments()
    by_service_dns = False
    by_service_name = False
    by_task_name = False
    list_clusters = False
    only_cluster_instances = False
    only_ec2_instances = False
    list_running_services = False
    list_running_tasks = False
    list_services = False
    list_projects = False
    use_config = False
    debug = args.debug
    if debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("Show DEBUG information.")
        stream_handler = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter(f"%(lineno)s: {logging.BASIC_FORMAT}")
        stream_handler.setFormatter(formatter)
        logger.addHandler(stream_handler)
        logger.propagate = False
    else:
        logger.setLevel(logging.INFO)

    # If a configuration file and a project are given,the configruation file is used.
    # Otherwise the cli ooptions are considerd.
    project = args.project
    # Variable replacement in config file uses '{service}'.
    service = args.service
    config = args.config
    if (
        os.path.exists(config) and project
        or args.subcommand
        in ("list-configured-projects", "list-configured-services")
    ):
        logger.info(f"Loading config from: '{config}'.")
        if not os.path.exists(config):
            logger.error(f"No config file: '{config}'.")
            return 1
        use_config = True
    if use_config:
        data = None
        try:
            with open(config, "r") as config_file:
                data = json.load(config_file)
        except (ValueError) as e:
            logger.error(
                f"Check the JSON sytanx in the config file '{config}': '{str(e)}'"
            )
            return 1
        logger.debug(f"Data: {data}")
        if not data or not isinstance(data, dict):
            logger.error(f"Could not load configuration: '{data}'.")
            return 1
    if use_config:
        region = data.get("region", args.region)
    else:
        region = args.region
    if use_config:
        projects = data.get("projects", {})
        if args.subcommand not in ("list-configured-projects"):
            if project not in projects:
                logger.error(
                    f"Missing configuration for project: '{project}'. Choose from {list(projects.keys())}."
                )
                return 1
            project_config = projects.get(project, None)
            if not project_config:
                logger.error(
                    f"Missing configuration for project: '{project}'. Choose from {list(projects.keys())}."
                )
                return 1
            region = project_config.get("region", region)
            cluster_name = project_config.get("cluster", "")
            # Variable replacement in config file uses '{cluster}'.
            cluster = cluster_name
            cluster_ = cluster
            # Get service-specific configuration.
            services = project_config.get("services", {})
            service_config = None
            if services:
                service_config = services.get(service, None)
                logger.debug(f"Service config: {service_config}")
                if service_config:
                    cluster_ = service_config.get("cluster", cluster_name)
            cluster_name = replace_config(cluster_, "cluster", locals())
    else:
        cluster_name = args.cluster
    logger.info(f"Working in: {region}")
    session = boto3.session.Session()
    ecs_client = session.client("ecs", region)
    ec2_client = session.client("ec2", region)
    ssm_client = session.client("ssm", region)
    if args.subcommand == "by-service-dns":
        by_service_dns = True
        if use_config:
            service_dns = project_config.get("dns", "")
            service_dns_ = service_dns
            if service_config:
                service_dns_ = service_config.get("dns", service_dns)
            service_dns = replace_config(service_dns_, "service_dns", locals())
        else:
            service_dns = args.dns
        if not service_dns:
            logger.error(f"DNS name missing.")
            return 1
        output_info = args.output
    elif args.subcommand == "by-service-name":
        by_service_name = True
        if use_config:
            service_name = project_config.get("name", "")
            service_name_ = service_name
            if service_config:
                service_name_ = service_config.get("name", service_name)
            service_name = replace_config(
                service_name_, "service_name", locals()
            )
            service_name = service_name if service_name else service
        else:
            service_name = args.name
    elif args.subcommand == "by-task-name":
        by_task_name = True
        if use_config:
            task_name = project_config.get("name", "")
            task_name_ = task_name
            if service_config:
                task_name_ = service_config.get("name", task_name)
            task_name = replace_config(task_name_, "task_name", locals())
            task_name = task_name if task_name else service
        else:
            task_name = args.name
    elif args.subcommand == "list-ec2-instances":
        only_ec2_instances = True
    elif args.subcommand == "list-clusters":
        list_clusters = True
    elif args.subcommand == "list-instances":
        only_cluster_instances = True
    elif args.subcommand == "list-services":
        list_running_services = True
        service_name = None
    elif args.subcommand == "list-tasks":
        list_running_tasks = True
        task_name = None
    elif args.subcommand == "list-configured-services":
        list_services = True
        service_name = None
    elif args.subcommand == "list-configured-projects":
        list_projects = True
        service_name = None
    if list_projects:
        if not use_config:
            logger.error("Only available when using a configuration file.")
            return 1
        if not projects:
            logger.error(
                "Could not load projects from configuration file: '{config}'."
            )
            return 1
        print(f"Found in {config}.")
        print(*list(projects.keys()), sep="\n")
        return
    # No 'cluster' necessary for 'list-clusters'.
    if not list_clusters and not only_ec2_instances and not cluster_name:
        logger.error(f"Cluster name missing.")
        return 1
    if list_services:
        if not use_config:
            logger.error("Only available when using a configuration file.")
            return 1
        if not services:
            logger.error(
                "Could not load services from configuration file: '{config}'."
            )
            return 1
        print(f"Found in {config}.")
        print(*services, sep="\n")
        return
    elif only_ec2_instances:
        instances = get_instances_form_ec2(client=ec2_client)
        print(json.dumps(instances))
        return
    elif list_clusters:
        clusters = get_clusters(client=ecs_client)
        print("\n".join(clusters))
        return
    elif only_cluster_instances:
        logger.info(f"Checking cluster: {cluster_name}")
        instance_ids = get_instance_ids_from_cluster(
            cluster=cluster_name, client=ecs_client
        )
        print(" ".join(instance_ids))
        return
    elif by_service_name or list_running_services:
        logger.info(f"Checking cluster: {cluster_name}")
        instance_ids = get_instance_ids_from_cluster(
            cluster=cluster_name, client=ecs_client
        )
        instance_id = get_instance_id_by_service_name(
            instance_ids=instance_ids,
            service=service_name,
            list_services=list_running_services,
            client=ssm_client,
            region=region,
        )
        return
    elif by_task_name or list_running_tasks:
        logger.info(f"Checking cluster: {cluster_name}")
        instance_ids = get_tasks_information(
            task=task_name,
            list_tasks=list_running_tasks,
            cluster=cluster_name,
            client=ecs_client,
        )
        print(instance_ids)
        return
    elif by_service_dns:
        logger.info(f"Checking cluster: {cluster_name}")
        service_ip = get_host_ip(host_name=service_dns)
        logger.info(f"IP of {service_dns} is {service_ip}")
        logger.debug(f"Output: {output_info}.")
        if output_info == "service":
            print(service_ip)
            return
        else:
            logger.debug(f"Get instance IDs for cluster:' {cluster_name}'.")
            instance_ids = get_instance_ids_from_cluster(
                cluster=cluster_name, client=ecs_client
            )
            logger.debug(instance_ids)
            logger.debug("Get instance details.")
            (
                instance_private_ip,
                instance_private_dns,
                instance_id,
            ) = get_instance_info_by_service_dns(
                instance_ids=instance_ids,
                service_ip=service_ip,
                client=ec2_client,
            )
            if output_info == "ip":
                print(instance_private_ip)
                return
            elif output_info == "id":
                print(instance_id)
                return
            elif output_info == "all":
                print(instance_private_ip, instance_id, instance_private_dns)
                return
    logger.error(f"Not the expected result - nothing accomplished.")
    return 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_cli_arguments(self):\n pass", "def command_line_arguments():\n\n try:\n parser = argparse.ArgumentParser(description='Log Handler/Cleaner/Copier for Idemia DocAuth')\n\n # Add required arguments.\n parser.add_argument('action', choices=['clean', 'download'], type=str, help='clean or download')\n\n # Parse the arguments\n args = parser.parse_args()\n\n return args\n\n except Exception as err:\n print(err)\n return", "def cli(*args, **kwargs):\n logger.debug('Global options: %s %s', args, kwargs)", "def main(args):\n cli = CLI()\n # Check arguments\n cli.parse_arguments(args)", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def cli(**_) -> None:\n pass", "def add_cli_args(parser):\n parser.add_argument(\n '--raw_path',\n help='Source path where audio data files are stored',\n default=RAW_DATA_PATH\n )\n parser.add_argument(\n '--features_path',\n help='Output path where exported data will be placed',\n default=FEATURES_DATA_PATH\n )\n parser.add_argument(\n '--feature',\n help='name of the feature to be extracted (options: mfsc, leglaive)',\n default=VoiceActivationFrameSelectionFeatureExtractor.feature_name\n )", "def get_args():\n parser = argparse.ArgumentParser(\n description=\"Sets up package within the pheeno's directory.\"\n )\n\n # Required arguments\n parser.add_argument(\"-x\", \"--execute\", action=\"execute\", required=True,\n help=\"something\", default=False)\n\n # Optional arguments\n parser.add_argument(\"-s\", \"--save\", action=\"store\", required=False,\n help=\"something\", default=False)", "def cli(args): # noqa; pylint: disable=unused-argument", "def handle_cmdline_args():\n\n parser = argparse.ArgumentParser(\n description='Generate synthetic data from a specification in a json '\n 'file using the \"synth-method\" described in the json file. ')\n\n parser.add_argument(\n '-i', dest='infile', required=True,\n help='The input json file. 
Must contain a \"synth-method\" property')\n\n parser.add_argument(\n '-o', dest='outfile_prefix', required=True, help='The prefix of the output paths (data json and csv), relative to the QUIPP-pipeline root directory')\n\n args = parser.parse_args()\n return args", "def commandline_opts():\n\n parser = argparse.ArgumentParser(prog='makebom %s'%(Version))\n\n parser.add_argument('-i','--input',action='store',\\\n help='Input BOM file name')\n parser.add_argument('-r','--ref',action='store',\\\n help='Exosite\\'s PartNumber file')\n parser.add_argument('-o','--output',action='store',\\\n help='Output BOM file name')\n parser.add_argument('-V','--version',action='version',\\\n version='%(prog)s')\n\n return parser.parse_args()", "def __get_cli_args():\r\n parser = argparse.ArgumentParser()\r\n o = parser.add_mutually_exclusive_group()\r\n o.add_argument('-a', action='store_true')\r\n o.add_argument('-b', action='store_true')\r\n parser.add_argument('-suite', help='suite file name for execution')\r\n parser.add_argument('-log', help='LOG level for the execution', default='INFO',\r\n choices=['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL'])\r\n args = parser.parse_args()\r\n return args", "def command_line_args(parser):\n AbyssAssembler.command_line_args(parser)\n SpadesAssembler.command_line_args(parser)\n TrinityAssembler.command_line_args(parser)\n VelvetAssembler.command_line_args(parser)", "def commandline():\n command_parser = argparse.ArgumentParser(description=__doc__, epilog=epilog, formatter_class=argparse.RawDescriptionHelpFormatter)\n command_parser.add_argument('-i','--input_file', type=str, required=True, help='input file.')\n command_parser.add_argument('-o','--output_file', type=str, required=True, help='output file.')\n args = command_parser.parse_args()\n return args", "def cli_arguments():\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n usage=f\"\\n{Color.DETAIL}pdforce.py [-p <pdf>] [-w <wordlist>] [-e <encoding>] [-o <output>] [-c] [-h/--help]{Color.END}\",\n description=f\"{Color.EMPHASIS}{TITLE}\\nLightweight PDF password cracker. USE FOR LEGAL INTENTS ONLY.{Color.END}\",\n epilog=f\"{Color.EMPHASIS}Made by @poponealex - https://github.com/poponealex{Color.END}\",\n )\n\n parser.add_argument(\n \"-p\",\n \"--pdf\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the pdf file.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--wordlist\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the wordlist.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-e\",\n \"--encoding\",\n type=str,\n help=f\"{Color.INFORMATION}Specify an encoding for the wordlist (https://docs.python.org/3/library/codecs.html#standard-encodings). The default encoding is platform dependent. Use 'iso8859_1' for rockyou. {Color.END}\",\n action=\"store\",\n default=None,\n )\n\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=f\"{Color.INFORMATION}Output the cracked password to a new file.{Color.END}\",\n action=\"store\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--copy\",\n help=f\"{Color.INFORMATION}Copy the password to the clipboard.{Color.END}\",\n action=\"store_true\",\n )\n\n return parser.parse_args()", "def parse_cli_arguments():\n parser = argparse.ArgumentParser('Generates a MANIFEST file used by the '\n 'HMP2 AnADAMA2 workflows.')\n parser.add_argument('-b', '--broad-data-sheet', required=True,\n help='Broad data product status spreadsheet. 
'\n 'Contains entries indicating new files to be '\n 'processed.')\n parser.add_argument('-o', '--output-manifest', required=True,\n help='Path to desired output manifest file.')\n parser.add_argument('-oi', '--origin-institute', required=True,\n help='Name of institute submitting new files '\n 'to be processed.')\n parser.add_argument('-oc', '--origin-contact', required=True,\n help='Contact person for corresponding origin '\n 'institute.')\n parser.add_argument('-oe', '--origin-contact-email', required=True,\n help='Contact email for contact person.')\n parser.add_argument('-p', '--project-name', dest='project', \n required=True,\n help='Project that sequence files belong too.')\n\n return parser.parse_args()", "def get_args_from_command_line():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--country_code\", type=str, default='US')\n args = parser.parse_args()\n return args", "def configure_commandline(cmdline_arguments: argparse.Namespace) -> Optional[Text]:", "def init_args():\n parser = argparse.ArgumentParser(\n description=\"DeltaSherlock Client software.\")\n parser.add_argument('-v', '--version', action='version', version=VERSION)\n parser.add_argument('-c', '--config', action='store', dest='config_file',\n default='./config.ini', help=\"Path to config file. [default: \\\n %(default)s]\")\n parser.add_argument('-d', '--daemon', action='store_true', dest='daemon',\n default=False, help=\"Run in daemon mode. [default: \\\n %(default)s]\")\n return parser.parse_args()", "def cmd_arguments():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-i\", \"--image\",\n help=\"Full image path can be optionally supplied.\")\n args = parser.parse_args()\n return args", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def add_command_line_arguments(self, parser):\n # parser.add_option(...)\n pass", "def command_line_arguments():\n _parser.add_argument('-l', '--list', nargs='+',\n help='<Required> Set flag', required=True)\n _parser.add_argument(\"-A\", \"--access\", required=True,\n help=\"access to host => grant/revoke\")", "def cli():\n\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def parse_cli():\n parser = OptionParser()\n return parser.parse_args()", "def cli():\r\n pass", "def get_args_from_command_line():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--country_code\", type=str,\n help=\"Country code\",\n default=\"US\")\n parser.add_argument(\"--n_workers\", type=int, help=\"number 
of workers\",\n default=20)\n parser.add_argument(\"--survey_link\", type=str)\n parser.add_argument(\"--block_size\", help='number of tweets per worker', type=int)\n parser.add_argument(\"--version_number\", type=str)\n parser.add_argument(\"--mode\", type=str, help='Whether to create HIT in sandbox or in production')\n parser.add_argument(\"--language_qualification\", type=int, help='')\n\n args = parser.parse_args()\n return args", "def cli():\n pass", "def commandline_options():\n parser = argparse.ArgumentParser(\n description='ocn_diags_generator: CESM wrapper python program for Ocean Diagnostics packages.')\n\n parser.add_argument('--backtrace', action='store_true',\n help='show exception backtraces as extra debugging '\n 'output')\n\n parser.add_argument('--debug', action='store_true',\n help='extra debugging output')\n\n #parser.add_argument('--config', nargs=1, required=True, help='path to config file')\n\n options = parser.parse_args()\n return options", "def cli():\n ...", "def launch_cli() -> None:\n app.run(main, flags_parser=_parse_flags)", "def parse_cli_args():\r\n parser = argparse.ArgumentParser(\r\n description=\"list all installed packages\")\r\n\r\n parser.add_argument(\"-v\", \"--verbose\",\r\n help=\"increase output verbosity\",\r\n action=\"store_true\")\r\n\r\n parser.add_argument(\"-d\", \"--debug\",\r\n help=\"enable debug output\",\r\n action=\"store_true\")\r\n\r\n parser.add_argument(\"-N\", \"--dry-run\",\r\n help=\"Do not perform any actions, only simulate them.\",\r\n action=\"store_true\")\r\n\r\n args = parser.parse_args()\r\n\r\n # set debug log state\r\n DebugLog.enabled = args.debug\r\n\r\n with DebugLogScopedPush(\"cli arguments:\"):\r\n DebugLog.print(str(args))\r\n\r\n return args", "def parse_cli_args(self):\n parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Linode')\n parser.add_argument('--list', action='store_true', default=True,\n help='List nodes (default: True)')\n parser.add_argument('--host', action='store',\n help='Get all the variables about a specific node')\n parser.add_argument('--refresh-cache', action='store_true', default=False,\n help='Force refresh of cache by making API requests to Linode (default: False - use cache files)')\n self.args = parser.parse_args()", "def cli_main(*cli_args):\n return runner.invoke(main, cli_args)", "def main():\n sys.argv.pop(0)\n (cmd, var, args) = process_options(sys.argv[:])\n execute(cmd, var, args)", "def cli():\n parser = argparse.ArgumentParser()\n # add arguments\n parser.add_argument('image_path', type = str, default = './flowers/test/1/image_06743.jpg', help ='Directory of Image of testing')\n parser.add_argument('checkpoint', type = str, default = 'checkpoint.pth', help ='Directory to save checkpoints')\n parser.add_argument('--top_k', action = 'store', dest = 'top_k', type = int, default = 5)\n parser.add_argument('--category_names', action='store', dest='category_names', type=str, default='cat_to_name.json')\n parser.add_argument('--gpu', action = 'store', default = False, help = 'GPU mode')\n return parser.parse_args()", "def cli() -> None:" ]
[ "0.7795223", "0.7400118", "0.7389782", "0.73847723", "0.7258737", "0.7255228", "0.7248011", "0.724467", "0.7233422", "0.717214", "0.710672", "0.7086328", "0.7038673", "0.70361656", "0.7034294", "0.70218986", "0.68577576", "0.6856456", "0.6839717", "0.6820217", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68195426", "0.68070704", "0.679604", "0.6792743", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.6789109", "0.67730594", "0.6756378", "0.6755698", "0.67502004", "0.6727378", "0.6712597", "0.67027086", "0.6697839", "0.66769797", "0.66606236", "0.66510594", "0.6647765", "0.6646002" ]
0.0
-1
Perform all graphical tasks for this frame.
def draw(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render(self):\n self.delete()\n self.__create_background(self._imfname)\n # XXX must be last after successor implementation, but works without this line\n #self.c.event_generate(\"<Configure>\")\n #self.c.update_idletasks()", "def __display(self):\n self.__rotate_model()\n self.__generate_shadows()\n self.__generate_model()\n\n glutSwapBuffers()\n if self.__callback is not None:\n self.__callback()", "def draw(self):\n self.figure.canvas.draw_idle()", "def run(self):\n self.root.title(\"Etymology relations\")\n self.root.geometry(\"1080x600\")\n self.root.deiconify()\n self.root.mainloop()", "def draw(self):\n self.screen.fill(WHITE)\n self.color_invalid()\n self.draw_selected()\n self.shade_locked_cells()\n self.draw_grid()\n self.draw_buttons()\n self.draw_numbers()", "def display(self):\n self.main_window = tk.Tk()\n self.main_window.title('ACO Simulator')\n self.status_text = tk.StringVar()\n self.status_text.set(\"Click start to run the simulation.\")\n\n self.start_btn_text = tk.StringVar()\n self.start_btn_text.set(\"Start\")\n self.pause_btn_text = tk.StringVar()\n self.pause_btn_text.set(\"End Simulation\")\n\n self.grid_frame = tk.Frame(master=self.main_window, relief=tk.RAISED, borderwidth=1)\n self.grid_frame.grid(padx=10, pady=10)\n\n for y in range(self.rows):\n row = []\n for x in range(self.columns):\n frame = tk.Frame(master=self.grid_frame, width=10, height=10, bg='blue')\n frame.grid(row=y, column=x, padx=1, pady=1)\n row.append(frame)\n self.grid.append(row)\n\n frame = tk.Frame(master=self.main_window)\n frame.grid(padx=10, pady=5, columnspan = self.columns)\n self.status = tk.Label(master=frame, textvariable=self.status_text)\n self.status.pack()\n\n frame = tk.Frame(master=self.main_window)\n frame.grid(padx=10, pady=5, columnspan = self.columns)\n self.submit_btn = tk.Button(master=frame, textvariable=self.start_btn_text, width=15)\n self.submit_btn.pack()\n self.submit_btn.bind(\"<Button-1>\", self.start_aco)\n\n frame = tk.Frame(master=self.main_window)\n frame.grid(padx=10, pady=5, columnspan = self.columns)\n self.pause_btn = tk.Button(master=frame, textvariable=self.pause_btn_text, width=15)\n self.pause_btn.pack()\n self.pause_btn.bind(\"<Button-1>\", self.end_aco)\n\n frame = tk.Frame(master=self.main_window, width=10, height=15)\n frame.grid(columnspan = self.columns)\n\n self.main_window.mainloop()", "def display(self):\n self.figure, self.axes = self.createFigure()\n\n self.setupLayout()\n self.quitFlag = False\n self.animation = animation.FuncAnimation(self.figure, self.animate, interval=100)\n plt.show()", "def start_ui(self):\n\t\tself.start_animation()\n\t\tself.app.exec()", "def draw(self):\n\n # Use update instead of update_idletasks because it works better\n # on some Windows machines.\n self.root.update()", "def run(self):\n self.window.mainloop()", "def execute(self):\n while(self._running):\n for event in pygame.event.get():\n self.event_handler(event)\n \"\"\"Drawing\"\"\"\n if self.state == self.states['Game']:\n self.update_state()\n self.move()\n\n self.render()\n self.cleanup()", "def run(self):\n self.centre.findChild(QPushButton, \"confirmBtn\").hide()\n self.centre.findChild(QPushButton, \"cancelBtn\").hide()\n self.centre.findChild(QLabel, \"loadingLabel\").show()\n self.centre.findChild(QLabel, \"title\").setText(\"Optimisation & Visualisation Processing\")\n self.finished.emit()", "def runFrame(self):\n self._drawFrame(self._advanceTime())", "def updateGraphics():\n _root.update_idletasks()", "def execute(self):\n 
self.init()\n\n while self.running:\n self.render()\n self.events()\n\n pygame.quit()", "def repaint(self):\n pass", "def draw(self) -> None:\n assert self.screen is not None\n self.screen.border()\n self.screen.addstr(2, 2, self.title, curses.A_STANDOUT)\n self.screen.addstr(4, 2, self.subtitle, curses.A_BOLD)\n\n for index, item in enumerate(self.all_items):\n self.draw_item(index, item)\n\n self.refresh_screen()\n if self._debug_screens: # pragma: no cover all\n with _SCREENDUMP_DIR.joinpath(f\"{self.title}-{time.time()}\").open(\n \"wb\",\n ) as f:\n self.screen.putwin(f)\n with _SCREENDUMP_DIR.joinpath(\n f\"stdscr-{self.title}-{time.time()}\",\n ).open(\"wb\") as f:\n self.screen.putwin(f)", "def refresh_self(self) -> None:\n self._logger.debug(\"running\")\n try:\n self.figure.canvas.draw()\n except Exception as e:\n self._logger.exception(\"issue with drawing canvas.\")\n self._logger.debug(\"done\")", "def render(self):\n self.screen.fill(prepare.BACKGROUND_COLOR)\n self.health_bar()\n # self.enemy_health()\n self.energy_bar()\n self.level.draw(self.screen)\n pg.display.update()", "def loop(self, frame):\n self.root = frame\n self.drawUI()\n cv2.imshow('Fotopasca', self.root)", "def visualize(self):\n app = QtGui.QApplication([''])\n SceneGUI(self)\n app.exec_()", "def draw(self):\n self.figure.show()\n self.figure.canvas.draw()", "def visualise(self):\n self.w = VisualizeSetupBox(self.master, self._df)\n self.master.wait_window(self.w.top)", "def controls(self):\n\n framecrtl = tk.Frame(self)\n framecrtl.pack()\n\n run = ModernButton(framecrtl, text='Run', command=self.start_threading, width=5)\n run.pack(side='left', padx=5, pady=5)\n\n cancel = ModernButton(framecrtl, text='Cancel', command=self.destroy, width=5)\n cancel.pack(side='left', padx=5, pady=5)\n\n abort = ModernButton(framecrtl, text='Abort', command=self.root.destroy, width=5)\n abort.pack(side='left', padx=5, pady=5)", "def renderall(self):\n\n if not self.isinitialized:\n return\n # clear display\n self.screen.fill(BGCOLOR)\n # draw the board\n self.drawBoard()\n # flip the display to show whatever we drew\n pygame.display.flip()", "def __call__(self):\n self.show()", "def draw(self):\n ui.clear()\n ui.draw_board(self)\n ui.output_buffer()", "def draw(self):\n\n for item in self.vis:\n item.undraw()\n self.render()\n for item in self.vis:\n item.draw(self.win)\n self.drawn = True", "def process_widgets(self):\r\n\r\n self.runmode_menu.add_radiobutton(label=\"Graphical User Interface\", value=0, variable=self.gui_menu_var,\r\n command=self.disable_debugging_mode)\r\n self.runmode_menu.add_radiobutton(label=\"Command Line Interface\", value=1, variable=self.gui_menu_var,\r\n command=lambda gui=self: load_cli(self))\r\n self.runmode_menu.add_radiobutton(label=\"Debugging Mode (GUI + CLI)\", value=2, variable=self.gui_menu_var,\r\n command=self.enable_debugging_mode)\r\n\r\n # Placing all the submenus\r\n self.filemenu.add_cascade(label=\"Run Mode\", menu=self.runmode_menu)\r\n self.menubar.add_cascade(label=\"File\", menu=self.filemenu)\r\n\r\n self.config(menu=self.menubar) # Indicating that the \"menubar\" variable is the filemenu of the application\r\n\r\n self.folder_frame.pack()\r\n\r\n # self.folder_locator.pack(side=LEFT, padx=10, pady=10)\r\n\r\n self.media_folder_label.pack(side=LEFT, padx=10, pady=10)\r\n\r\n self.folder_button.pack(side=LEFT)\r\n\r\n self.path_frame_parent.pack(side=LEFT)\r\n\r\n self.search_frame.pack()\r\n\r\n self.search_frame.pack()\r\n self.search_entry.grid(row=0, column=0, padx=10, 
pady=20)\r\n self.search_button.grid(row=0, column=1, padx=5)\r\n # self.advanced_search_button.grid(row=0, column=2, padx=5)\r\n\r\n self.media_frame.pack()\r\n\r\n self.button_frame.pack()", "def tick (self):\n\t\n\t\tself.display.clear ()\n\t\tself.draw ()\n\t\tfor sprite in self.sprites:\n\t\t\tsprite.drawToDisplay (self.display)\n\t\tself.display.showFrame ()", "def render(self, screen) -> None:\n screen.fill(self.background_color)\n self.draw_center_circle(screen)\n self.draw_rectangle_field(screen)\n self.draw_defense_areas(screen)\n self.draw_field_divider(screen)\n self.draw_goals(screen)", "def execute(self):\n while(self._running):\n # get all pygame events from queue\n for event in pygame.event.get():\n self.event_handler(event)\n self.update()\n self.render()\n self.cleanup()", "def play(self):\n self.populateGrid()\n self.displayGrid()", "def show(self):\n # * displays the window, after using either the iconify or the withdraw methods\n self.wm_deiconify()\n # * this method can be called after the event which needs to happen before the window event\n self.wait_window()", "def setUpGUI(self):\n WHITE = '#ffffff'\n # Set up the GUI so that we can paint the fractal image on the screen\n canvas = Canvas(self.window, width=self.width, height=self.height, bg=WHITE)\n canvas.pack()\n canvas.create_image((self.width/2, self.height/2), image=self.img, state=\"normal\")", "def done_paint(self):\r\n curses.panel.update_panels()\r\n curses.doupdate()", "def OnInit(self):\n frame = App_Frame(title='PyDraw')\n frame.Show()\n return True", "def run(self):\n\n self._keep_running.set()\n\n # Create a command window to take user inputs\n # gui_handler = threading.Thread(name=\"CommandWindow\", daemon=True, \\\n # target=self._command_window.mainloop)\n\n # Launch a thread for fetching position data constantly\n # TODO: Making these threads stoppable is too much of a pain!\n position_fetcher = threading.Thread(name=\"PositionFetcher\", daemon=True, \\\n target=self.fetch_position_and_update_frames)\n spike_fetcher = threading.Thread(name=\"SpikeFetcher\", daemon=True, \\\n target=self.fetch_spikes_and_update_frames)\n place_field_fetcher = threading.Thread(name=\"PlaceFieldFetched\", daemon=True, \\\n target=self.fetch_place_fields)\n ripple_frame_fetcher = threading.Thread(name=\"RippleFrameFetcher\", daemon=True, \\\n target=self.fetch_incident_ripple)\n\n position_fetcher.start()\n spike_fetcher.start()\n place_field_fetcher.start()\n ripple_frame_fetcher.start()\n\n # Start the animation for Spike-Position figure, place field figure\n self.initialize_ripple_detection_fig()\n self.initialize_spike_pos_fig()\n self.initialize_place_field_fig()\n plt.show()\n\n # This is a blocking command... 
After you exit this, everything will end.\n self._command_window.mainloop()\n position_fetcher.join()\n spike_fetcher.join()\n place_field_fetcher.join()\n ripple_frame_fetcher.join()\n logging.info(MODULE_IDENTIFIER + \"Closed GUI and display pipes\")", "def paint(self):\n self.paint_snake()\n self.paint_apple()\n root.mainloop()", "def run(self):\r\n\r\n # If any of the test constructors update the settings, reflect\r\n # those changes on the GUI before running\r\n if GUIEnabled:\r\n self.gui_table.updateGUI(self.settings)\r\n self.clock = pygame.time.Clock()\r\n self.screen.fill((0, 0, 0))\r\n\r\n # Run the simulation loop\r\n self.SimulationLoop([0, 0, 0])\r\n\r\n if GUIEnabled and self.settings.drawMenu:\r\n self.gui_app.paint(self.screen)\r\n\r\n pygame.display.flip()\r\n self.clock.tick(self.settings.hz)\r\n self.fps = self.clock.get_fps()", "def run(self):\n\n self._check_hardware_control()\n\n if self._is_stabilizing:\n #If we are locking the power, then need to update teh feedback loop and change the output label\n self._update_feedback()\n self._update_output_voltage_label()\n\n #We always need to update the plots as well and power label\n\n self._update_plots()\n self._update_power_label()\n\n self.gui.force_update()", "def UpdateUI(self):\n # colors & font\n self.SetBackgroundColour(wx.GetApp().settings.bg_color)\n self.SetForegroundColour(wx.GetApp().settings.fg_color)\n self.SetFont(wx.GetApp().settings.text_font)\n\n # apply new (or not) 'wx' values to content.\n p = self.FindWindow(\"content\")\n if p is not None:\n p.SetBackgroundColour(wx.GetApp().settings.bg_color)\n p.SetForegroundColour(wx.GetApp().settings.fg_color)\n p.SetFont(wx.GetApp().settings.text_font)\n\n # apply new (or not) 'wx' values to header.\n p = self.FindWindow(\"header\")\n if p is not None:\n p.SetBackgroundColour(wx.GetApp().settings.header_bg_color)\n p.SetForegroundColour(wx.GetApp().settings.header_fg_color)\n p.SetFont(wx.GetApp().settings.header_text_font)\n\n # apply new (or not) 'wx' values to actions.\n p = self.FindWindow(\"actions\")\n if p is not None:\n p.SetBackgroundColour(wx.GetApp().settings.action_bg_color)\n p.SetForegroundColour(wx.GetApp().settings.action_fg_color)\n p.SetFont(wx.GetApp().settings.action_text_font)\n\n self.Refresh()", "def on_draw(self):\n # draw everything", "def set_GUI(\r\n self\r\n ):\r\n self.top = tk.Tk()\r\n self.top.title(\"Data Collection Interface\")\r\n \r\n self.get_label(\r\n self.top,\r\n text = \"Folder name\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 0,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_path = self.get_entry(\r\n self.top,\r\n default_txt = \"Collected_data\",\r\n enable = True,\r\n width = 30,\r\n row = 0,\r\n column = 1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Number of Images\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 1,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_num_images = self.get_entry(\r\n self.top,\r\n default_txt = \"1800\",\r\n enable = True,\r\n width = 30,\r\n row = 1,\r\n column = 1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Total Time\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 2,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_tot_time = self.get_entry(\r\n self.top,\r\n default_txt = \"15\",\r\n enable = True,\r\n width = 30,\r\n row = 2,\r\n column = 
1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Minutes\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 2,\r\n column = 2,\r\n return_lbl = False\r\n )\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Time between photos\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 3,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_interval = self.get_entry(\r\n self.top,\r\n default_txt = \"0.001\",\r\n enable = False,\r\n width = 30,\r\n row = 3,\r\n column = 1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Seconds\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 3,\r\n column = 2,\r\n return_lbl = False\r\n )\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Images Per Folder\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 4,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_images_per_folder = self.get_entry(\r\n self.top,\r\n default_txt = \"500\",\r\n enable = True,\r\n width = 30,\r\n row = 4,\r\n column = 1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Progress Display Frequency\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 5,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_prog_display_freq = self.get_entry(\r\n self.top,\r\n default_txt = \"5\",\r\n enable = True,\r\n width = 30,\r\n row = 5,\r\n column = 1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Preview Display Frequency\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 6,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.e_prew_display_freq = self.get_entry(\r\n self.top,\r\n default_txt = \"10\",\r\n enable = True,\r\n width = 30,\r\n row = 6,\r\n column = 1)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = None, # in characters\r\n height = 2, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 7,\r\n column = 2,\r\n return_lbl = False\r\n )\r\n \"\"\"\r\n self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = None, # in characters\r\n height = 2, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 8,\r\n column = 2,\r\n return_lbl = False\r\n )\"\"\"\r\n\r\n self.r_radio_button_variable = tk.IntVar(self.top,1)\r\n\r\n self.r_images_time = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_radio_button_variable ,\r\n returned_value = 1,\r\n text = \"Images + Total time\",\r\n enable = True,\r\n default_state = True,\r\n #width = 30,\r\n row = 9,\r\n column = 0,\r\n align = tk.W,\r\n command = self.block_entry)\r\n\r\n self.r_images_interval = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_radio_button_variable ,\r\n returned_value = 2,\r\n text = \"Images + Time interval\",\r\n enable = True,\r\n default_state = False,\r\n #width = 30,\r\n row = 10,\r\n column = 0,\r\n align = tk.W,\r\n command = self.block_entry)\r\n\r\n self.r_time_interval = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_radio_button_variable ,\r\n returned_value = 3,\r\n text = \"Total time + Time interval\",\r\n enable = True,\r\n default_state = False,\r\n #width = 30,\r\n row = 11,\r\n column = 0,\r\n align = tk.W,\r\n command = self.block_entry)\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = None, # in characters\r\n height = 2, # in 
lines\r\n font = None,\r\n stick = tk.W,\r\n row = 12,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 14,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.r_quality_variable = tk.StringVar(self.top,\"Low\")\r\n\r\n self.r_HQuality = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_quality_variable ,\r\n returned_value = \"High\",\r\n text = \"High Quality\",\r\n enable = True,\r\n default_state = False,\r\n #width = 30,\r\n row = 16,\r\n column = 0,\r\n align = tk.W,\r\n command = self.quality_change)\r\n\r\n self.r_LQuality = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_quality_variable ,\r\n returned_value = \"Low\",\r\n text = \"Low Quality\",\r\n enable = True,\r\n default_state = True,\r\n #width = 30,\r\n row = 15,\r\n column = 0,\r\n align = tk.W,\r\n command = self.quality_change)\r\n\r\n self.r_Day_Night_variable = tk.StringVar(self.top,\"Day\")\r\n\r\n self.r_Day = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_Day_Night_variable ,\r\n returned_value = \"Day\",\r\n text = \"Day\",\r\n enable = True,\r\n default_state = True,\r\n #width = 30,\r\n row = 15,\r\n column = 1,\r\n align = tk.W,\r\n command = self.day_change)\r\n\r\n self.r_Night = self.get_radio_button(\r\n self.top,\r\n control_variable =self.r_Day_Night_variable ,\r\n returned_value = \"Night\",\r\n text = \"Night\",\r\n enable = True,\r\n default_state = False,\r\n #width = 30,\r\n row = 16,\r\n column = 1,\r\n align = tk.W,\r\n command = self.day_change)\r\n\r\n self.c_auto_zip_variable = tk.IntVar(self.top,0)\r\n\r\n self.c_auto_zip = tk.Checkbutton(\r\n self.top,\r\n text = \"Auto Zip\",\r\n variable = self.c_auto_zip_variable)\r\n self.c_auto_zip.grid(row = 17,column = 0, sticky = tk.W)\r\n self.c_auto_zip.deselect()\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 18,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n\r\n self.l_image = self.get_label(\r\n self.top,\r\n text = None,\r\n width = None, # in characters\r\n height = None, # in lines\r\n font = None,\r\n stick = None,\r\n row = 114,\r\n column = 3,#0,\r\n return_lbl = True,\r\n ctr_var = None\r\n )\r\n\r\n self.get_label(\r\n self.top,\r\n text = \"Progress :\",\r\n width = None, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 113,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n\r\n self.progress_var = tk.StringVar(self.top)\r\n \r\n self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = 40, # in characters\r\n height = 2, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 113,\r\n column = 1,\r\n return_lbl = False,\r\n ctr_var = self.progress_var\r\n )\r\n \"\"\"\r\n self.lab = self.get_label(\r\n self.top,\r\n text = \"\",\r\n width = 40, # in characters\r\n height = 1, # in lines\r\n font = None,\r\n stick = tk.W,\r\n row = 10,\r\n column = 1,\r\n return_lbl = True,\r\n #ctr_var = self.progress_var\r\n )\"\"\"\r\n\r\n self.b_start = self.get_button(\r\n root = self.top,\r\n button_text = \"Start\",\r\n row = 5,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.start_collecting\r\n )\r\n\r\n self.b_pause = self.get_button(\r\n root = self.top,\r\n button_text = \"Zip Folder\",\r\n row = 6,\r\n column = 2,\r\n enable = True,\r\n 
width = 10,\r\n height =1,\r\n command = self.zip_folder\r\n )\r\n\r\n self.b_stop = self.get_button(\r\n root = self.top,\r\n button_text = \"Stop\",\r\n row = 7,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.stop_collecting\r\n )\r\n\r\n self.b_red = self.get_button(\r\n root = self.top,\r\n button_text = \"Red\",\r\n row = 10,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.red\r\n )\r\n self.b_red.config(bg='red',activebackground = 'red')\r\n\r\n self.b_yellow = self.get_button(\r\n root = self.top,\r\n button_text = \"Yellow\",\r\n row = 11,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.yellow\r\n )\r\n self.b_yellow.config(bg='yellow', activebackground = 'yellow')\r\n\r\n self.b_green = self.get_button(\r\n root = self.top,\r\n button_text = \"Green\",\r\n row = 12,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.green\r\n )\r\n self.b_green.config(bg='green', activebackground = 'green')\r\n\r\n self.b_normal = self.get_button(\r\n root = self.top,\r\n button_text = \"No light\",\r\n row = 13,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.normal\r\n )\r\n\r\n self.b_load_orientation = self.get_button(\r\n root = self.top,\r\n button_text = \"Load Orientation\",\r\n row = 15,\r\n column = 2,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.load_orientation\r\n )\r\n\r\n \r\n\r\n \"\"\"\r\n self.get_label(\r\n self.top,\r\n text = \"tesing\",\r\n width = 10, # in characters\r\n height = 2, # in lines\r\n font = ('Times', '12', 'normal'),\r\n row = 0,\r\n column = 0,\r\n return_lbl = False\r\n )\r\n \r\n self.button1 = self.get_button(\r\n root = self.top,\r\n button_text = \"test\",\r\n row = 1,\r\n column = 3,\r\n enable = True,\r\n width = 10,\r\n height =1,\r\n command = self.pt\r\n )\r\n\r\n self.entry = self.get_entry(\r\n self.top,\r\n default_txt = \"Test\",\r\n enable = True,\r\n width = 30,\r\n row = 3,\r\n column = 0)\r\n\r\n self.contrl = tk.IntVar(self.top)\r\n self.radio = self.get_radio_button(\r\n self.top,\r\n control_variable =self.contrl ,\r\n returned_value = 5,\r\n text = \"radio\",\r\n enable = True,\r\n default_state = False,\r\n #width = 30,\r\n row = 0,\r\n column = 0,\r\n align = tk.W,\r\n command = self.pt)\r\n\r\n self.radio2 = self.get_radio_button(\r\n self.top,\r\n control_variable =self.contrl ,\r\n returned_value = 6,\r\n text = \"radio2\",\r\n enable = True,\r\n default_state = False,\r\n width = None,\r\n row = 1,\r\n column = 0,\r\n align = tk.W,\r\n command = self.pt)\"\"\"", "def start(self):\n self.main_frame.render()\n self.main_frame.Bind(wx.EVT_CLOSE, self.close)\n self.application.MainLoop()", "def run(self):\n\n # GUI\n # initialization\n glutInit() # initialize glut\n glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_ALPHA | GLUT_DEPTH)\n glutInitWindowSize(width, height) # set window size\n glutInitWindowPosition(0, 0) # set window position\n window = glutCreateWindow(b'-- YAY ALGEO --') # create window with title\n glutDisplayFunc(draw) # set draw function callback\n glutIdleFunc(draw) # draw all the time\n glutMainLoop()", "def display(self) -> None:\n self.root.display()", "def execute(self):\n while(self._running):\n # get all pygame events from queue\n for event in pygame.event.get():\n self.event_handler(event)\n self.move()\n self.render()\n self.cleanup()", "def normal_run(self):\n super().events_buttons(back=True)\n 
self.events_delete_btns()\n self.draw()", "def paint(self):\r\n pass", "def setup_gui(self):\n # if there are more than 1 visualizer we need to assure that there\n # will not be tag conflicts\n BaseRealTimeVisualizer.setup_gui_lock.acquire()\n # look for valid tag\n dpg.create_context()\n\n self.id = 0\n while dpg.does_item_exist(f'main_window_{self.id}'):\n self.id += 1\n\n with dpg.texture_registry(show=False):\n dpg.add_dynamic_texture(\n width=self.width,\n height=self.height,\n default_value=np.zeros((self.width, self.height, 3)),\n tag=f'input_image_texture_{self.id}',\n )\n\n with dpg.window(\n tag=f'main_window_{self.id}',\n no_title_bar=True,\n autosize=True\n ):\n dpg.add_image(\n texture_tag=f'input_image_texture_{self.id}',\n tag=f'image_render_{self.id}',\n pos=(_PADDING, _PADDING)\n )\n\n dpg.set_global_font_scale(_FONT_SCALE)\n\n if self.id == 0:\n dpg.set_primary_window(f'main_window_{self.id}', True)\n dpg.create_viewport(\n title=self.title,\n width=self.width + _PADDING*2,\n height=self.height + _PADDING*2,\n resizable=True\n )\n dpg.setup_dearpygui()\n dpg.show_viewport()\n elif self.id == 1:\n dpg.set_primary_window('main_window_0', False)\n\n BaseRealTimeVisualizer.setup_gui_lock.release()", "def _drawFrame(self):\n\n self._clearScreen()\n \n for object in Object.Objects:\n self._drawObject(object)\n\n for entity in Entity.Entities:\n self._drawObject(entity)\n\n self._drawObject(Game.Player)", "def exec(self):\n self._root.after(100, self.change_state, States.INITIAL) # enter the state once gui is setup\n super().exec()", "def run(self):\n # While loop to show display\n while True:\n for event in pg.event.get():\n # Quitting game\n if event.type == QUIT:\n pg.quit()\n sys.exit()\n # If game can continue\n if self.b.get_status() == \"-\":\n # Pressing mouse\n if event.type == MOUSEBUTTONDOWN:\n pos = pg.mouse.get_pos()\n for r in self.b.get_board_array():\n for square in r:\n if square.get_visual().collidepoint(pos):\n square.click()\n self.b.update_board()", "def main(self):\n counter = 1L\n timer = time.clock()\n try:\n c = True\n while c != 27: # the ESC key\n\n frame = self.camera.get_frame()\n pre_options = self.preprocessing.options\n # Apply preprocessing methods toggled in the UI\n preprocessed = self.preprocessing.run(frame, pre_options)\n frame = preprocessed['frame']\n if 'background_sub' in preprocessed:\n cv2.imshow('bg sub', preprocessed['background_sub'])\n # Find object positions\n # model_positions have their y coordinate inverted\n\n # IMPORTANT\n model_positions, regular_positions = self.vision.locate(frame)\n model_positions = self.postprocessing.analyze(model_positions)\n\n # Update planner world beliefs\n self.planner.update_world(model_positions)\n self.planner.plan()\n\n # Use 'y', 'b', 'r' to change color.\n c = waitKey(2) & 0xFF\n actions = []\n fps = float(counter) / (time.clock() - timer)\n # Draw vision content and actions\n\n self.GUI.draw(\n frame, model_positions, actions, regular_positions, fps, None,\n None, None, None, False,\n our_color=self.color, our_side=self.side, key=c, preprocess=pre_options)\n counter += 1\n\n except:\n # This exception is stupid TODO: refactor.\n print(\"TODO SOMETHING CLEVER HERE\")\n raise\n finally:\n tools.save_colors(self.pitch, self.calibration)", "def draw(self):\n if self.is_clicked:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 0)\n else:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 1)", "def _render(self):\n self._renderer.render_menu()\n 
pg.display.update()", "def draw(self):\n\n State.screen.draw()", "def draw(self):\n\n self.state_stack.peek().draw(self.screen)", "def setUpFrame(self):\n #adds labels to the Board\n self.mineLabel = tk.Label(self, text=\"Mines: \"+str(self.numMines))\n self.mineLabel.grid(row=0, column=0, sticky=\"W\", columnspan=int((self.cols-2)/2))\n self.smileButton = tk.Label(self, image=self.images[1])\n self.smileButton.grid(row=0, column=int((self.cols-2)/2), sticky=\"WE\", columnspan=2)\n self.flagLabel = tk.Label(self, text=\"Flags: \"+str(self.numFlags))\n self.flagLabel.grid(row=0, column=int((self.cols-2)/2)+2, sticky=\"E\", columnspan=int((self.cols-1)/2))\n\n #left click listeners on smileButton\n self.smileButton.bind('<ButtonPress-1>', lambda event, num=0: self.changeSmile(num))\n self.smileButton.bind('<ButtonRelease-1>', self.replay)", "def display(self):\n\t\tself.imgDisplay.set_from_pixbuf(self.getVisible())\n\t\tgc.collect()", "def initGUI(self):\r\n\r\n self.pack(fill=tk.BOTH, expand=True)\r\n\r\n # Figure out sizing.\r\n width = 200\r\n height = 200\r\n pad = 5\r\n fontWidth = 8\r\n bigWidth = int((width*3 + pad*6) / fontWidth)\r\n \r\n # Create option frames.\r\n self.frameOptions = tk.LabelFrame(self, text=\"Options:\",\r\n width=width, height=height)\r\n self.frameSegment = tk.LabelFrame(self, text=\"Segmentation Method:\",\r\n width=width, height=height)\r\n self.frameMeasure = tk.LabelFrame(self, text=\"Measurements:\",\r\n width=width, height=height)\r\n\r\n # Create text boxes and labels.\r\n self.labelStatus = tk.LabelFrame(self, text=\"Status:\", bd=0)\r\n self.labelResults = tk.LabelFrame(self, text=\"Results:\", bd=0)\r\n self.textStatus = ScrolledText(self.labelStatus, height=5,\r\n width=bigWidth)\r\n self.textResults = ScrolledText(self.labelResults, height=10,\r\n width=bigWidth)\r\n\r\n # Create buttons.\r\n self.buttonCalculate = tk.Button(self, text='Calculate',\r\n width=20, height=1, font=12, bd=3,\r\n command=lambda:self.prepare())\r\n self.buttonSaveAll = tk.Button(self, text='Save Session Summary',\r\n command=self.saveAll)\r\n self.buttonSelectOutFold = tk.Button(self, text='Set Output Folder',\r\n command=self.setOutputFolder)\r\n self.buttonAbout = tk.Button(self, text='About', command=self.about)\r\n\r\n # Arrange toplevel widgets.\r\n self.frameOptions.grid(row=0, column=2, padx=pad, pady=pad,\r\n sticky='NESW')\r\n self.frameSegment.grid(row=0, column=1, padx=pad, pady=pad,\r\n sticky='NESW')\r\n self.frameMeasure.grid(row=0, column=0, padx=pad, pady=pad,\r\n sticky='NESW')\r\n\r\n self.buttonCalculate.grid(row=1, column=1, \r\n padx=pad, pady=pad*3)\r\n self.buttonSelectOutFold.grid(row=1, column=0, \r\n padx=pad, pady=pad*3)\r\n self.buttonAbout.grid(row=6, column=2, sticky='e', padx=20, pady=10)\r\n\r\n self.labelStatus.grid(row=2, column=0, columnspan=3, sticky='w',\r\n padx=pad, pady=pad)\r\n self.textStatus.grid(row=3, column=0, columnspan=3)\r\n self.labelResults.grid(row=4, column=0, columnspan=3, sticky='w',\r\n padx=pad, pady=pad)\r\n self.textResults.grid(row=5, column=0, columnspan=3)\r\n self.buttonSaveAll.grid(row=6, column=1, padx=pad, pady=pad)\r\n\r\n # Variables\r\n self.outFold = None\r\n columns = [[\"\",\"\",\"\",\"\",\r\n \"Bright phase diameter\",\r\n \"\",\"\",\r\n \"Dark phase diameter\",\r\n \"\",\"\",\r\n \"Bright length\",\r\n \"\",\"\",\r\n \"Dark length\",\r\n \"\",\"\",\r\n \"Bright area\",\r\n \"\",\"\",\r\n \"Dark area\",\r\n \"\",\"\",\r\n \"Bright connected length\",\r\n \"\",\"\",\r\n \"Dark connected length\",\r\n 
\"\",\"\"], \r\n [\"image\",\r\n \"pixel size\",\r\n \"area frac\",\r\n \"est diam\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\",\r\n \"Average\",\r\n \"SD\",\r\n \"Measured\"]]\r\n \r\n self.saveAll = np.array(columns)\r\n\r\n # Measurement options.\r\n # Variables.\r\n self.varDiameter = tk.BooleanVar()\r\n self.varLength = tk.BooleanVar()\r\n self.varArea = tk.BooleanVar()\r\n self.varSumConnectedLength = tk.BooleanVar()\r\n self.varAreaFraction = tk.BooleanVar()\r\n # Create widgets.\r\n self.checkDiameter = tk.Checkbutton(self.frameMeasure,\r\n text=\"Diameter\", variable=self.varDiameter)\r\n self.checkLength = tk.Checkbutton(self.frameMeasure,\r\n text=\"Length\", variable=self.varLength)\r\n self.checkArea = tk.Checkbutton(self.frameMeasure,\r\n text=\"Area\", variable=self.varArea)\r\n self.checkSumConnectedLength = tk.Checkbutton(self.frameMeasure,\r\n text=\"Connected length\", variable=self.varSumConnectedLength)\r\n self.checkAreaFraction = tk.Checkbutton(self.frameMeasure,\r\n text=\"Area fraction\", variable=self.varAreaFraction)\r\n # Pack widgets.\r\n self.checkDiameter.grid(row=0, column=0, sticky='w')\r\n self.checkLength.grid(row=1, column=0, sticky='w')\r\n self.checkArea.grid(row=2, column=0, sticky='w')\r\n self.checkSumConnectedLength.grid(row=3, column=0, sticky='w')\r\n self.checkAreaFraction.grid(row=4, column=0, sticky='w')\r\n # Check appropriate boxes.\r\n self.checkDiameter.select()\r\n self.checkLength.select()\r\n self.checkArea.select()\r\n self.checkSumConnectedLength.select()\r\n self.checkAreaFraction.select()\r\n \r\n # Segment options.\r\n # Variables.\r\n self.varSegment = tk.StringVar()\r\n # Create widgets.\r\n self.radAccurate = tk.Radiobutton(self.frameSegment,\r\n text=\"Accurate\", variable=self.varSegment, value=\"accurate\",\r\n command=self.updateOptions)\r\n self.radFast = tk.Radiobutton(self.frameSegment,\r\n text=\"Fast\", variable=self.varSegment, value=\"fast\",\r\n command=self.updateOptions)\r\n self.radManual= tk.Radiobutton(self.frameSegment,\r\n text=\"Manual\", variable=self.varSegment, value=\"manual\",\r\n command=self.updateOptions)\r\n self.radFromBinary = tk.Radiobutton(self.frameSegment,\r\n text=\"From binary\", variable=self.varSegment, value=\"binary\",\r\n command=self.updateOptions)\r\n # Pack widgets.\r\n self.radAccurate.grid(row=0, column=0, sticky='w')\r\n self.radFast.grid(row=1, column=0, sticky='w')\r\n self.radManual.grid(row=2, column=0, sticky='w')\r\n self.radFromBinary.grid(row=3, column=0, sticky='w')\r\n # Check appropriate boxes.\r\n self.radAccurate.select()\r\n\r\n # Option options.\r\n # Profiles\r\n profiles = autoSelect.profiles()\r\n # Variables.\r\n self.varShowSteps = tk.BooleanVar()\r\n self.varOutputExcel = tk.BooleanVar()\r\n self.varSavePDF = tk.BooleanVar()\r\n self.varSaveMovie = tk.BooleanVar()\r\n self.varSaveBinary = tk.BooleanVar()\r\n self.varAutoParse = tk.BooleanVar()\r\n self.varProfile = tk.StringVar()\r\n self.varProfile.set(profiles[0])\r\n # Create widgets.\r\n self.checkShowSteps = tk.Checkbutton(self.frameOptions,\r\n text=\"Show steps\", variable=self.varShowSteps)\r\n self.checkOutputExcel = tk.Checkbutton(self.frameOptions,\r\n text=\"Output to Excel\", variable=self.varOutputExcel)\r\n 
self.checkSavePDF = tk.Checkbutton(self.frameOptions,\r\n text=\"Save PDF\", variable=self.varSavePDF)\r\n self.checkSaveMovie = tk.Checkbutton(self.frameOptions,\r\n text=\"Save movie\", variable=self.varSaveMovie)\r\n self.checkSaveBinary = tk.Checkbutton(self.frameOptions,\r\n text=\"Save binary\", variable=self.varSaveBinary)\r\n self.checkAutoParse = tk.Checkbutton(self.frameOptions,\r\n text=\"Auto parse raw image\", variable=self.varAutoParse,\r\n command=self.updateAuto)\r\n self.optionProfile = tk.OptionMenu(self.frameOptions, self.varProfile,\r\n *profiles)\r\n self.optionProfile.config(state=tk.DISABLED)\r\n\r\n # Pack widgets.\r\n self.checkShowSteps.grid(row=0, column=0, sticky='w')\r\n self.checkOutputExcel.grid(row=1, column=0, sticky='w')\r\n self.checkSavePDF.grid(row=2, column=0, sticky='w')\r\n #self.checkSaveMovie.grid(row=3, column=0, sticky='w')\r\n self.checkSaveBinary.grid(row=4, column=0, sticky='w')\r\n self.checkAutoParse.grid(row=5, column=0, sticky='w')\r\n self.optionProfile.grid(row=6, column=0, sticky='w', padx=15)\r\n \r\n # Check appropriate boxes.\r\n self.checkOutputExcel.select()\r\n\r\n self.createToolTips()", "def startGUI(self):\n #cria uma nova janela chamada root com titulo\n self.root = Tk()\n self.root.title(\"Kalman Graphics\")\n\n #configura um frame na janela root\n mainframe = ttk.Frame(self.root, padding=\"0 0 0 0\")\n mainframe.grid(column=0, row=0, sticky=(N, W, E, S))\n mainframe.columnconfigure(0, weight=0)\n mainframe.rowconfigure(0, weight=0)\n\n #Acrescentando um plot\n self.initPlot()\n self.canvas1 = FigureCanvasTkAgg(self.myfig1, master=mainframe)\n self.canvas1.get_tk_widget().grid(column=1,row=1)\n \n #define variaveis que estarao na janela\n self.receivedMessage = StringVar(self.root)\n\n #define um label\n messageLabel = ttk.Label(mainframe, textvariable=self.receivedMessage)\n messageLabel.grid(column=1, row=2, sticky=(W, E))\n\n #para cada uma das janelas ou filhos do mainframe eu coloco um padding ao redor\n for child in mainframe.winfo_children():\n child.grid_configure(padx=0, pady=0)\n\n #schedule de uma funcao a cada 25ms\n self.root.after(10, self.processIncoming)\n\n #loop principal\n self.root.mainloop()", "def show(self):\n self.driver.send(self.canvas)", "def main():\n LayoutsWithPanels().mainloop()", "def draw(self):\n self.screen.fill(BACKGROUND_COLOR)\n self.cannon.draw(self.screen)\n self.objects.draw(self.screen)", "def show_gui():\n pass", "def main(self):\n update = self.update\n draw = self.draw\n screen = self.screen\n flip = pg.display.update\n clock = time.time\n frame_length = (1. 
/ self.fps)\n time_since_draw = 0\n last_update = clock()\n fps_timer = 0\n frames = 0\n\n while not self.done:\n clock_tick = clock() - last_update\n last_update = clock()\n time_since_draw += clock_tick\n update(clock_tick)\n if time_since_draw >= frame_length:\n time_since_draw -= frame_length\n draw(screen)\n flip()\n frames += 1\n\n fps_timer, frames = self.handle_fps(clock_tick, fps_timer, frames)\n time.sleep(.01)", "def render(self):\n\t\tself._getSize()\n\t\tself.screen.clear()\n\t\tif self.width < 60 or self.height < 20:\n\t\t\tself.wts(1, 1, \"Windows too small to render!\" , 1)\n\t\telse:\n\t\t\t# check if resized\n\t\t\tif curses.is_term_resized(self.height, self.width):\n\t\t\t\tcurses.resizeterm(self.height, self.width)\n\t\t\t\tself._getSize()\n\t\t\t# render border\n\t\t\tif self.screenBorder:\n\t\t\t\tself.drawBorder()\n\t\t\t# render lines\n\t\t\tself.drawLines()\n\t\t\t# render status\n\t\t\tself.wts(self.height - 1, 1, self.status , 1)\n\t\t\t# render objects\n\t\t\tself.drawObjects(self.objects)\n\t\t\t# render menus\n\t\t\tself.drawObjects(self.menus)\n\t\tself.screen.refresh()", "def show(self):\n QtGui.QGuiApplication.processEvents()", "def mainloop(self):\n self.master.mainloop()", "def run(self):\n self.ui['main_window'].widgets['main'].show_all()\n gtk.main()", "def setUI(self):\n self.parent.title(\"Handwritten digits classification\")\n self.pack(fill=BOTH, expand=1)\n self.columnconfigure(6,weight=1)\n self.rowconfigure(2, weight=1)\n self.canv = Canvas(self, bg=\"white\")\n self.canv.grid(row=2, column=0, columnspan=7,\n padx=5, pady=5,\n sticky=E + W + S + N)\n self.canv.bind(\"<B1-Motion>\",\n self.draw)\n\t\t\t\n\t\t\t\n #size_lab = Label(self, text=\"Classificator: \")\n #size_lab.grid(row=0, column=0, padx=5)\n predict_btn = Button(self, text=\"Predict\", width=10, command=lambda: self.predict())\n predict_btn.grid(row=0, column=0)\n delete_btn = Button(self, text=\"Clear\", width=10, command=lambda: self.canv.delete(\"all\"))\n delete_btn.grid(row=1, column=0, sticky=W)", "def redraw(self):\n self._create()", "def draw_app(self):\n \n # Start iterations\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n\n self.display.fill([255, 255, 255])\n self.grid.draw(self.display)\n pygame.display.update()", "def draw(self):\n self._background.draw(self.view)\n if self._state == STATE_INACTIVE:\n self._message.draw(self.view)\n if self._state == STATE_COUNTDOWN:\n self._game.draw(self.view)\n self._countdownMessage.draw(self.view)\n self._soundImage.draw(self.view)\n if self._state == STATE_ACTIVE:\n self._game.draw(self.view)\n self._soundImage.draw(self.view)\n if self._state == STATE_PAUSED:\n self._game.draw(self.view)\n self._pausedMessage.draw(self.view)\n if self._state == STATE_RESET:\n self._message.draw(self.view)\n if self._state == STATE_COMPLETE:\n self._game.draw(self.view)\n self._pausedMessage.draw(self.view)", "def main(self):\n self.validate()\n self.root.mainloop()", "def display_main(self):\n self.clear_terminal()\n self.main_menu()\n self.handle_selection_main()", "def do_layout(self):\n self.define_panel_structure()\n self.layout_selection()\n self.layout_data_list()\n self.layout_batch()\n self.layout_button()", "def redraw(self):\n raise NotImplementedError()", "def draw(self):\n\t\tpass", "def display(self):\n with push_style():\n fill(255)\n circle((self.xoff + self.x, self.yoff + self.y), 6, mode=\"CENTER\")", "def redraw(self) -> None:\n self.canvas.draw_idle()\n self.Refresh()", "def 
update_figure(self):\n\n self.draw()", "def _DisplayFrames(self) :\n for f in self.frame_objects.values() : # Show all the frames.\n # XP initially stacks all the controls on a tab in the upper left corner.\n # When the size of the frame is reduced to near zero and then expanded, everything is in the correct location.\n # Can automatic minimizing and maximizing solve the problem? Possibilities: # f.Maximize(), f.Restore(), f.Iconize().\n if os.name == 'posix' : # Linux renders well.\n f.Show()\n else : \n f.Show() # Diddle with the frame to get XP to render correctly.\n f.Iconize() # Just minimizing and leaving it to the user to restore to the original size seems to work well.", "def draw(self, force=False):\n self.display.draw(force)", "def __display_controls(self):\n self.__fill_data_variables()\n self.__fill_smoothing_method()\n self.__fill_smooth_factor()\n\n left_box = VBox([self.data_vars])\n center_box = VBox([self.smoothing_methods])\n right_box = VBox([self.smooth_factor])\n #_HBox = HBox([left_box, center_box, right_box],\n _HBox = HBox([left_box, center_box, right_box],\n layout={'height': '80px',\n 'width' : '99%'}\n )\n display(_HBox)", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def frame(self):\n self.run_command('frame')", "def draw(self):\n\n self.squares.draw(self.screen)\n if not self.hide_grid:\n self.draw_grid()\n self.fleas.draw(self.screen)\n pygame.display.flip()", "def inicialUI(self):\r\n\r\n self.setGeometry(500, 500, 500, 500)\r\n self.setWindownTitle(\"Pesquisa\")\r\n self.displayWidgets()\r\n\r\n self.show()", "def loop(self):\r\n while self.__running:\r\n self.__check_events()\r\n self.__render()\r\n self.__reset_variables()", "def main(self):\n self.root.mainloop()", "def _redraw_operation(self):\n try:\n self._profile()\n self._window.operate()\n except Exception as e:\n # Catch exceptions raised when executing the Use Case\n self._result = self._complete(exception=e)\n logger.debug(traceback.format_exc())\n self._record_current_time()\n self._check_if_completed()\n # self._window.process_events()", "def _run_sim(self):\n\n self.ensemble = Ensemble(self.game_display,\n (self.disp_width, self.disp_height),\n n_atoms=self.n_atoms, exc0=self.exc0,\n chi=self.chi, f=self.f, mass=self.mass,\n g0=self.g0, g1=self.g1, rad=self.rad)\n self.window_open = True\n self.t = range(self.plot_window)\n self.T_ex = np.ones(self.plot_window)*np.nan\n self.T_ex[-1] = self.ensemble.T_ex\n self.T_kin = np.ones(self.plot_window)*np.nan\n self.T_kin[-1] = self.ensemble.T_kin\n\n self.plot_T_ex = self.ax.plot(self.t, self.T_ex, 'r',\n label='Excitation Temperature')\n self.plot_T_kin = self.ax.plot(self.t, self.T_kin, 'b',\n label='Kinetic Temperature')\n self.ax.legend(loc='upper left')\n self.ax.set_ylim(0, 2*self.ensemble.T_kin)\n self.ax.set_xlim(0, self.plot_window)\n self.ax.set_xlabel('Time (frames)')\n self.ax.set_ylabel('Temperature (arb. units)')\n self.ax.tick_params(labeltop=False, labelright=True, right=True)\n\n self.clock = pygame.time.Clock()\n while self.window_open:\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n self.window_open = False\n\n self.clock.tick(self.fps)\n self.ensemble.update(self.clock.get_time())\n self._update_plot()\n pygame.display.update()", "def run(self):\n GLib.MainLoop().run()", "def run():\n gui = GUI()\n gui.mainloop()" ]
[ "0.6894054", "0.67451495", "0.66122764", "0.6550248", "0.64933664", "0.64838165", "0.64499927", "0.64354634", "0.6388672", "0.638855", "0.6348826", "0.6347117", "0.6344852", "0.63217235", "0.6315397", "0.6307719", "0.6287468", "0.62729156", "0.62716115", "0.6268537", "0.6263936", "0.6230095", "0.6214113", "0.62124825", "0.62090105", "0.62014264", "0.6195446", "0.6194811", "0.6182808", "0.6180723", "0.61599195", "0.61592674", "0.6145552", "0.61145157", "0.6112629", "0.61121035", "0.6107036", "0.61017436", "0.61013407", "0.61013335", "0.6097917", "0.60944504", "0.6090833", "0.6089668", "0.60715425", "0.60700923", "0.60580367", "0.6046182", "0.6043865", "0.6038591", "0.6033314", "0.60307425", "0.60208696", "0.601866", "0.60184693", "0.60176766", "0.600846", "0.6004471", "0.60034335", "0.5999607", "0.5998049", "0.5994393", "0.5994229", "0.5993861", "0.59852755", "0.5984013", "0.5983965", "0.5975865", "0.5975647", "0.5974893", "0.5972849", "0.59693944", "0.5966801", "0.5966649", "0.59638625", "0.59612566", "0.59449464", "0.59438837", "0.59427667", "0.59422046", "0.59399927", "0.59215486", "0.59195405", "0.59182984", "0.59180605", "0.5916448", "0.5913458", "0.5912179", "0.59045845", "0.5898664", "0.58910495", "0.58882713", "0.5885667", "0.58785903", "0.58690363", "0.5861514", "0.58611655" ]
0.60316306
54
Perform all calculations for the amount of time that has passed.
def update(self, dt): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def compute(self, duration: Optional[Number]) -> None:\n await envs.sleep(duration)\n self.total_compute_time += duration", "def _timed_execute(self):\n tstart = time.perf_counter()\n self._func(*self._func_args, **self._func_kwargs)\n tend = time.perf_counter() \n\n tdelta = tend - tstart\n\n return tdelta", "def evaluate(self, time) -> float:\n ...", "def run_at_time(self, input_dict):\n lead_seq = util.get_lead_sequence(self.config, input_dict)\n for lead in lead_seq:\n self.clear()\n input_dict['lead_hours'] = lead\n self.config.set('config', 'CURRENT_LEAD_TIME', lead)\n os.environ['METPLUS_CURRENT_LEAD_TIME'] = str(lead)\n time_info = time_util.ti_calculate(input_dict)\n self.run_at_time_once(time_info)", "def elapseTime(self, gameState):\n\n \"*** YOUR CODE HERE ***\"\n\n allPossible = util.Counter()\n\n for oldPos in self.legalPositions:\n actions = gameState.getLegalActions(agentIndex)\n successorStates = [gameState.generateSuccessor(action) for action in actions]\n newPosDist = {}\n for state in successorStates:\n position = state.getAgentPosition(agentIndex)\n prob = 1.0/len(actions)\n newPosDist[position] = prob\n\n for newPos, prob in newPosDist.items():\n allPossible[newPos] += prob * self.beliefs[oldPos]\n\n allPossible.normalize()\n self.beliefs = allPossible", "def _run_time(func):\n start_time = datetime.datetime.now()\n func\n end_time = datetime.datetime.now()\n return end_time - start_time", "def perf_timer():\n start_time = datetime.now()\n yield\n end_time = datetime.now()\n log.info(end_time - start_time)", "def calculate(self):\n #runs = [ai\n # for ei in self.experiment_queues\n # for ai in ei.cleaned_automated_runs]\n #\n #ni = len(runs)\n #self.nruns = ni\n # for ei in self.experiment_queues:\n # dur=ei.stats.calculate_duration(ei.cleaned_automated_runs)\n # if\n\n\n tt = sum([ei.stats.calculate_duration(ei.cleaned_automated_runs)\n for ei in self.experiment_queues])\n self._total_time = tt\n offset = 0\n if self._start_time:\n offset = time.time() - self._start_time\n\n self.etf = self.format_duration(tt - offset)", "def GAME_TIME_ADVANCE(dt):", "def compute_time_step(self):\r\n # append the current time/MoI to the lists\r\n self.get_outputs()\r\n\r\n print(\"-------------------------\")\r\n print(\"Now Running Cycle {}, t: {:.3e}, Completed {:.2f}%, CFL: {:.3e}\"\r\n .format(self.ind, self.t, 100*self.t/self.end_time,\r\n self.CFL(self.dt)))\r\n\r\n try:\r\n self.solver.solve()\r\n except:\r\n print(\"DIVERGED\")\r\n self.logfile.write(\"%s: STOPPED DUE TO DIVERGENCE \\n\" %\r\n (self.convert_time(time.time()\r\n - self.start_time)))\r\n self.diverged = True\r\n return\r\n\r\n # if we want to save at steps, save all the functions\r\n if self.savesteps:\r\n self.save_funcs(self.u, self.p, self.ftides, self.gravity,\r\n self.centrifugal, self.coriolis, self.forcing)\r\n\r\n # write to log\r\n self.logfile.write(\r\n \"{}: --- Solved Cycle {}, t={:.3e}, Completed {:.2f}%,\\\r\n CFL: {:.3e} --- \\n\".format(\r\n self.convert_time(time.time()-self.start_time), self.ind,\r\n self.t, 100*self.t/self.end_time, self.CFL(self.dt)))\r\n\r\n # update the timestep, for if CFL is too large\r\n self.update_dt()\r\n\r\n # remove the mean velocity\r\n self.adjust_u()\r\n\r\n # assign the current solution to the prior solution\r\n self.u_p_.assign(self.up)\r\n\r\n # update the run index\r\n self.ind += 1", "def report_total_usage(self):\n work_time = 0\n if self.type == 'normal':\n work_time = self.fwk.fwk_global_time - self.start_exec_time\n elif self.type == 
'sandia_work':\n self.total_time += self.fwk.fwk_global_time - self.start_exec_time\n self.total_usage = self.total_time * self.nproc\n if self.state == \"running\":\n # update total work done\n self.sim.completed_work += self.fwk.fwk_global_time - self.start_exec_time\n elif self.state == \"failed\":\n # add this work to the work to be redone\n self.sim.rework_todo += self.fwk.fwk_global_time - self.start_exec_time\n self.state = \"not_ready\"\n self.num_faults += 1\n elif self.type == 'sandia_rework':\n self.total_rework_time += self.fwk.fwk_global_time - self.start_exec_time\n self.total_rework_usage = self.total_rework_time * self.nproc\n if self.state == \"running\":\n # update total work done\n self.sim.next_ckpt = self.sim.ckpt_interval - (self.fwk.fwk_global_time - self.start_exec_time)\n self.sim.rework_todo -= self.fwk.fwk_global_time - self.start_exec_time\n elif self.state == \"failed\":\n # add this work to the work to be redone\n self.state = \"not_ready\"\n self.num_faults += 1\n elif self.type == 'sandia_ckpt':\n self.total_ckpt_time += self.fwk.fwk_global_time - self.start_exec_time\n self.total_ckpt_usage = self.total_ckpt_time * self.nproc\n if self.state == \"running\":\n # update last ckpt\n self.sim.last_ckpt = self.sim.completed_work\n elif self.state == \"failed\":\n # add work to rework\n self.sim.rework_todo += self.sim.next_ckpt\n self.state = \"not_ready\"\n self.num_faults += 1\n elif self.type == 'sandia_restart':\n print(\"time spent in rework\", self.fwk.fwk_global_time - self.start_exec_time)\n self.total_restart_time += self.fwk.fwk_global_time - self.start_exec_time\n self.total_restart_usage = self.total_restart_time * self.nproc\n #if self.state == \"running\":\n # nothing to do?\n # pass\n if self.state == \"failed\":\n # gotta try again\n self.state = \"ready\"\n self.num_faults += 1\n else:\n print(\"problems updating state in report_total_usage\")\n raise\n if self.type == 'normal':\n if self.sim.state == 'rework':\n self.total_rework_time += work_time\n self.total_rework_usage = self.total_rework_time * self.nproc\n else: # sim.state == 'work'\n if self.retry:\n self.total_rework_time += work_time\n self.total_rework_usage = self.total_rework_time * self.nproc\n else:\n self.total_time += work_time\n self.total_usage = self.total_time * self.nproc", "def run_all_iterations(self):\n self.start_time = time.time()\n for _ in xrange(self.iterations):\n self.run_iteration()\n self.elapsed_time = time.time() - self.start_time\n\n self.print_statistics()", "def compute_time_step():\n\n dt = Hydro.compute_time_step()\n\n return dt", "def do_work(self) -> None:\n self._get_btc_eur_15min()\n print(\n f\"1 BTC = {self.btc_eur_15min} EUR\"\n f\"\\t\\t(15min delayed market price)\"\n )\n\n self._get_eur_gbp_last_month()\n print(\n f\"1 EUR = {self.eur_gbp_last_month} GBP\"\n f\"\\t(last month average rate)\"\n )\n\n self._get_btc_gbp_15min()\n print(\n f\"1 BTC = {self.btc_gbp_15min:.6f} GBP\"\n f\"\\t(BTC 15min delayed market price; GBP latest daily average rate)\"\n )", "def trip_duration_stats(df):\n\n print('\\nCalculating Trip Duration...\\n')\n start_time = time.time()\n\n # TO DO: display total travel time\n\n total_travel_time_in_sec = df['Trip Duration'].sum()\n total_travel_time_in_years = total_travel_time_in_sec // (60 * 60 * 24 *365)\n\n modulus1_in_sec = total_travel_time_in_sec - total_travel_time_in_years*(60 * 60 * 24 *365)\n #print(\"modulus1_in_sec:\", modulus1_in_sec)\n total_travel_time_in_months = modulus1_in_sec // (60 * 60 * 24 *31)\n\n 
modulus2_in_sec = modulus1_in_sec - total_travel_time_in_months*(60 * 60 * 24 *31)\n #print(\"modulus2_in_sec:\", modulus2_in_sec)\n total_travel_time_in_weeks = modulus2_in_sec // (60 * 60 * 24 *7)\n\n modulus3_in_sec = modulus2_in_sec - total_travel_time_in_weeks*(60 * 60 * 24 *7)\n #print(\"modulus3_in_sec:\", modulus3_in_sec)\n total_travel_time_in_days = modulus3_in_sec // (60 * 60 * 24)\n\n modulus4_in_sec = modulus3_in_sec - total_travel_time_in_days*(60 * 60 * 24)\n #print(\"modulus4_in_sec:\", modulus4_in_sec)\n total_travel_time_in_hours = modulus4_in_sec // (60 * 60)\n\n modulus5_in_sec = modulus4_in_sec - total_travel_time_in_hours*(60 * 60)\n #print(\"modulus5_in_sec:\", modulus5_in_sec)\n total_travel_time_in_minutes = modulus5_in_sec // 60\n\n modulus6_in_sec = modulus5_in_sec - total_travel_time_in_minutes*60\n #print(\"modulus6_in_sec:\", modulus6_in_sec)\n total_travel_time_in_seconds_modulus = modulus6_in_sec\n\n print(\"total travel time of all Users combined:\\n YEARS: {} \\n MONTHS: {} \\n WEEKS: {} \\n DAYS: {} \\n HOURS: {} \\n MINUTES: {} \\n SECONDS: {} \\n\".format(total_travel_time_in_years, total_travel_time_in_months, total_travel_time_in_weeks, total_travel_time_in_days, total_travel_time_in_hours, total_travel_time_in_minutes, total_travel_time_in_seconds_modulus))\n\n # TO DO: display mean travel time\n\n mean_travel_time_in_sec = df['Trip Duration'].mean()\n mean_travel_time_in_minutes = mean_travel_time_in_sec // 60\n modulus_in_sec = mean_travel_time_in_sec - mean_travel_time_in_minutes*60\n mean_travel_time_in_seconds_modulus = modulus_in_sec\n\n print(\"mean travel time:\\n MINUTES: {} \\n SECONDS: {} \\n\".format(int(mean_travel_time_in_minutes), mean_travel_time_in_seconds_modulus))\n\n#trip_duration_stats(pd.read_csv('{}.csv'.format(city)))\n\n print(\"\\nThis took %s seconds.\" % (time.time() - start_time))\n print('-'*40)", "def run(self):\n last = self.system.last_timestep\n start = last.timestep + 1 if last else 0\n del last\n end = self.system.cg_steps\n \n logging.info(\"running timesteps {} to {}\".format(start, end))\n \n for _ in range(start, end):\n self.system.begin_timestep()\n self.atomistic_step()\n self.cg_step()\n self.system.end_timestep()\n \n logging.info(\"completed all {} timesteps\".format(end-start))", "def work(self):\n while(True):\n debug_print = False\n if debug_print == True:\n start = time.time()\n\n flow = self.gauge.read_flow_from_dp()\n self.flw_q.put([time.time(), flow])\n\n if debug_print == True:\n flow_time = time.time()\n print(f\"Runtime - calc_flow: {1000 * (flow_time - start):.0f} ms\")\n\n pressure = self.gauge.read_pressure()\n self.prs_q.put([time.time(), pressure])\n\n if debug_print == True:\n pressure_time = time.time()\n print(f\"Runtime - read_pressure: {1000 * (pressure_time - flow_time):.0f} ms\")\n \n if debug_print == True:\n runtime = time.time() - start\n print(f\"Runtime - total: {1000 * runtime:.1f} ms\")\n print(f\"Frequency: {1 / runtime:.1f} Hz\")", "def __get_elapsed__(self):\n elapsed = (self.__end_time - self.__start_time)\n unit = \"seconds\"\n if elapsed >= 3600:\n unit = \"minutes\"\n hours = elapsed / 3600\n minutes = hours % 60\n hours = floor(hours)\n print(self.name, \"took\", str(hours), \"hours and\", \"{0:.2f}\".format(minutes), unit, \"to complete\")\n elif elapsed >= 60:\n minutes = floor(elapsed / 60)\n seconds = elapsed % 60\n print(self.name, \"took\", str(minutes), \"minutes and\", \"{0:.2f}\".format(seconds), unit, \"to complete\")\n else:\n print(self.name, \"took\", 
\"{0:.2f}\".format(elapsed), unit, \"to complete\")", "def main():\n assert how_many_seconds(2) == 7200\n assert how_many_seconds(10) == 36000\n assert how_many_seconds(24) == 86400\n assert how_many_seconds(36) == 129600\n print('Passed.')", "def __get_elapsed__(self):\n elapsed = (self.__end_time - self.__start_time)\n unit = \"seconds\"\n if elapsed >= 3600:\n unit = \"minutes\"\n hours = elapsed / 3600\n minutes = hours % 60\n hours = floor(hours)\n print(\"{} {} took {} hours and {:.2f} {} to complete\".format(self.__get_timestamp__(), self.name, hours, minutes, unit))\n elif elapsed >= 60:\n minutes = floor(elapsed / 60)\n seconds = elapsed % 60\n print(\"{} {} took {} minutes and {:.2f} {} to complete\".format(self.__get_timestamp__(), self.name, minutes, seconds, unit))\n else:\n print(\"{} {} took {:.2f} {} to complete\".format(self.__get_timestamp__(), self.name, elapsed, unit))", "def remaining_ms():", "def calculateWaitingTime(self, inputs):\n CollisionCounter.CollisionCounter.getInstance().waitingTimeCalculated(self.time)\n timeUntilDepature = self.getAtt('departure_time', inputs) - self.time\n remainingLoadingTime = self.calculateLoadingTime(inputs)\n # calculates first maximum possible waiting time\n sampleTime = int((timeUntilDepature - remainingLoadingTime) / self.participants)\n\n if sampleTime >= 1:\n # result is big enough for a standard treatment\n self.waitingTime = MyRandom.RandomNumber.getInstance().getRandomNumber(sampleTime + 1)\n elif sampleTime < 1:\n # reslut is too small, special treatment necessary\n upperLimit = (10 * (1 - (math.exp(sampleTime - 1)))) + 1\n self.waitingTime = MyRandom.RandomNumber.getInstance().getRandomNumber(max((min(upperLimit,\n timeUntilDepature)) + 1, 1))\n # decides whether charging is allowed during waiting time\n if not self.stayedConnected:\n self.stayConnected = True\n self.stayedConnected = True\n else:\n self.stayConnected = False\n self.stayedConnected = False", "def _simulation(self, env):\n while True:\n # calculate costs, profits and a distance\n self.total_cost += self._calculate_costs()\n self.total_profit += self._calculate_profits()\n self.total_distance_after_repair += self.distance\n yield env.timeout(self.driving_time)\n\n self.number_of_courses += 1", "def getTimes():", "def getTimes():", "def getTimes():", "def _update_timing(self, iteration_time, partial_total_time, run_initial_iteration, iteration_limit):\n self._timing_data[\"iteration_seconds\"] = iteration_time\n self._timing_data[\"average_seconds_per_iteration\"] = \\\n partial_total_time / (self._iteration - run_initial_iteration)\n estimated_timedelta_remaining = datetime.timedelta(\n seconds=self._timing_data[\"average_seconds_per_iteration\"] * (iteration_limit - self._iteration)\n )\n estimated_finish_date = datetime.datetime.now() + estimated_timedelta_remaining\n self._timing_data[\"estimated_time_remaining\"] = str(estimated_timedelta_remaining) # Putting it in dict as str\n self._timing_data[\"estimated_localtime_finish_date\"] = estimated_finish_date.strftime(\"%Y-%b-%d-%H:%M:%S\")\n total_time_in_seconds = datetime.timedelta(\n seconds=self._timing_data[\"average_seconds_per_iteration\"] * iteration_limit\n )\n self._timing_data[\"estimated_total_time\"] = str(total_time_in_seconds)\n\n # Estimate performance\n moves_iterator = self._flatten_moves_iterator()\n # Only consider \"dynamic\" moves (timestep and n_steps attributes)\n moves_times = [move.timestep.value_in_unit(unit.nanosecond) * move.n_steps for move in moves_iterator if\n hasattr(move, 
\"timestep\") and hasattr(move, \"n_steps\")]\n iteration_simulated_nanoseconds = sum(moves_times)\n seconds_in_a_day = (1 * unit.day).value_in_unit(unit.seconds)\n self._timing_data[\"ns_per_day\"] = iteration_simulated_nanoseconds / (\n self._timing_data[\"average_seconds_per_iteration\"] / seconds_in_a_day)", "def test_peformance(self):\n timedeltas = []\n for file in os.listdir(settings.ANALYSIS_REPORT_FOLDER):\n _file = open(os.path.join(settings.ANALYSIS_REPORT_FOLDER, file), \"r\")\n report = json.loads(_file.read())\n timedeltas.append(\n parse_datetime(report['finish']) - parse_datetime(report['start']))\n _file.close()\n\n # number of queue\n print('NUMBER OF QUEUE = {}'.format(len(timedeltas)))\n\n # get average time\n average_timedelta = sum(timedeltas, datetime.timedelta(0)) / len(timedeltas)\n print('AVERAGE = {}'.format(average_timedelta))\n self.assertTrue(average_timedelta < datetime.timedelta(minutes=3))\n\n # get total process time\n total = timedeltas[0]\n for delta in timedeltas[:1]:\n total += delta\n print('TOTAL = {}'.format(total))\n self.assertTrue(total < datetime.timedelta(minutes=3 * len(timedeltas)))", "def run_timer():\n \n start_time = time.time()\n print(start_time)\n stopper = input(\"Press enter to stop\")\n end_time = time.time()\n print(\"You have finished collecting the blocks!\")\n duration = int(end_time - start_time)\n if duration > 25:\n print(\"You were too slow collecting the blocks, better luck next time\")\n else: \n print(\"Good job speedy, you collected all the blocks before time ran out!\")", "def speed_test_interval(self, seconds=0, minutes=0, hours=0, days=0, timeout=60):\n print('Initializing test.')\n end = datetime.datetime.now() + datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)\n rows = []\n while end > datetime.datetime.now():\n rows.append(self.speed_test(timeout))\n delta_time = end - datetime.datetime.now()\n if delta_time.days < 0:\n delta_time = datetime.timedelta(0)\n print(f'\\r{delta_time} remaining.', end='')\n print()\n\n return self.speed_data", "def measure(self):\n # --- perform repeated runs\n for i_run in range(self.n_runs):\n if self.verbosity > 0:\n print(\"Run {0} / {1} ...\".format(i_run, self.n_runs), end = '')\n tdelta = self._timed_execute()\n self._run_times[i_run] = tdelta\n\t\t\t\n if self.verbosity == 2:\n print(tdelta)\n \n # calculate mean\n self._tmean = np.mean(self._run_times)\n # calculate standard deviation\n self._tstdev = np.std(self._run_times)\n # allow access to results\n self.__hasrun = True", "def calculate_time(start_time):\r\n return round(time() - start_time, 2)", "def calc_idle_time(self):\n tt = 0.0\n keys = self.output.keys()\n keys.sort()\n jobEndKeys = [thekey for thekey in keys if 'JobEnd' in thekey]\n jobEndKeys = jobEndKeys[0:len(jobEndKeys)-1]\n for key in jobEndKeys:\n idxstart = keys.index(key)\n stime = self.output[keys[idxstart]][1]\n idxend = idxstart + 1\n while not (keys[idxend][1] in ['Preparation','Production']):\n idxend += 1\n # Now we have the entry where the next work cycle starts\n etime = self.output[keys[idxend]][1]\n tot_time = (etime - stime).seconds/3600.\n for ii in range(idxstart+1, idxend):\n if keys[ii][1] in ['W-up', 'Maintenance']:\n tot_time -= self.output[keys[ii]][4]\n # if the time is longer than 5 min:\n if tot_time >= 5.0/60.0:\n tt += tot_time\n print 'idle time ', tt, ' hours'", "def count_time(func):\n def wrapper():\n start_time = time.time()\n res = func()\n end_time = time.time()\n print(\"The progress cost: 
{:4}\".format(end_time-start_time))\n return res\n return wrapper", "def compute_go_duration(self, units='seconds'):\n go_duration = 0\n for trial in self.trials:\n max_time = 0\n for event in trial.events:\n if self.stop > max_time:\n max_time = self.stop\n\n go_duration += max_time\n\n self.go_duration = (go_duration, units)", "def __pass_time(self):\n self.hunger += 1\n self.boredom += 1", "def elapsed():\n global start_time\n return time.time() - start_time", "def wrapper(*args, **kwargs):\n starttime = compat.perf_clock()\n method(*args, **kwargs)\n endtime = compat.perf_clock() - starttime\n endtime_proper = math.ceil(endtime * 100) / 100 # rounding\n mins, secs = divmod(endtime_proper, 60)\n hrs, mins = divmod(mins, 60)\n print(\"COMPLETED IN {0:02d}:{1:02d}:{2:02d}\".format(int(hrs), int(mins), int(secs)))", "def hourly_stats():\r\n count_total.delay()\r\n count_unique.delay()\r\n count_tags.delay()", "def _calculate_runtime(self):\n lines = self.file_dic['output'].splitlines()\n start_time = datetime.strptime(lines[0].strip(), self.timestring_format)\n fin_time = datetime.strptime(lines[-1].strip(), self.timestring_format)\n dif = fin_time - start_time\n self.date = fin_time.strftime('%d %b %Y')\n self.runtime = dif.total_seconds()", "def calculate_time(func):\n def timer(*args, **kwargs):\n start_time = time.time()\n x = func(*args, **kwargs)\n end_time = time.time()\n run_time = end_time - start_time\n print(f'Total time',run_time)\n return x\n return timer", "def timer():\n start = time.time()\n\n yield\n\n end = time.time()\n\n print('Elapsed: {:.2f}s'.format(end - start))", "def wrapper():\n start_time = time.time()\n func()\n end_time = time.time()\n run = end_time - start_time\n print(f'Total time {run}')", "def test_timestep(self):\n class Mock(object):\n def __init__(self):\n self.t = 0.0\n self.dt = None\n\n def evolve(self1, t, dt):\n if self1.dt is not None:\n self.assertAlmostEqual(self1.dt, dt)\n else:\n self1.dt = dt\n\n self.assertAlmostEqual(self1.t, t)\n\n self1.t += self1.dt\n\n t_max = 10.0\n dt = 0.2\n\n G = Mock()\n simulation.Simulation(G, dt=dt).run(t_max)\n self.assertAlmostEqual(G.dt, dt)", "def compute_time(self):\n return self.compute_time_without_gc() + self.gc_time", "def check_timer():\n end = time.time()\n time_elapsed = end - target_time[0]\n durationMSG = fg.cyan + f\"Scans Completed for {args.target} in: \" + fg.rs\n print(durationMSG, display_time(time_elapsed))", "def track_duration(self):\n # raise NotImplementedError\n self.out_schema.append(\"run_duration\")\n self._track_duration = True\n # self.runner = GridExecutor.timer(self.runner)", "def step(self):\n\n self.last_input_time = current_time()\n self.current_time = self.last_input_time\n\n for component in self.components:\n component.input(self.last_input_time)\n\n for component in self.components:\n component.fire()\n\n self.last_spent = current_time() - self.last_input_time\n last_dt = self.interval - self.last_spent\n\n self.lagged = False\n if last_dt > 0.0:\n time.sleep(last_dt)\n elif last_dt < 0.0:\n self.lagged = True\n\n self.last_output_time = current_time()\n self.current_time = self.last_output_time\n\n for component in self.components:\n component.output(self.last_output_time)\n\n self.last_output_time = current_time()\n self.current_time = self.last_output_time\n\n return self.current_time", "def time(self):\n\n self.timing = True\n self.scramble()\n\n self.disp = False", "def pass_time(self, t):\n cont = time.time() + t\n while time.time() < cont:\n time.sleep(0)", "def 
test_cpu_total_work(self):\n import time\n from supvisors.statistics import instant_cpu_statistics, cpu_total_work\n # take 2 spaced instant cpu statistics\n ref_stats = instant_cpu_statistics()\n time.sleep(1)\n last_stats = instant_cpu_statistics()\n total_work = cpu_total_work(last_stats, ref_stats)\n # total work should be quite close to sleeping time\n self.assertAlmostEqual(1, total_work, 1)", "def run_timings():\n\n running_times = []\n\n while recorded_time := input(f\"Enter your 10k time: \"):\n if not recorded_time:\n break\n running_times.append(float(recorded_time))\n average_pace = sum(running_times) / len(running_times)\n return average_pace", "def time(self) -> float:\n return self.state.game_loop / 22.4 # / (1/1.4) * (1/16)", "def time_thread(self):\n while self.time > 0:\n t.sleep(1)\n self.time -= 1\n self.end_round(\"Time is up\")", "def _get_running_time(self):\n time_sum = 0.0\n for subdir in os.listdir(self.path):\n if subdir.startswith('.'):\n continue\n try:\n line = open('{0}/{1}/{2}/out/OUTDOCK'.format(self.path, subdir, DOCKING_RUN_FILES),'r').readlines()[-1]\n if line.startswith('elapsed time'):\n time = float(line.split()[-1])\n time_sum = time_sum + time\n except:\n pass \n self.running_time = time_sum", "def _calibrate_time(self):\n time_overhead = 0\n for i in range(1000):\n start = self._adjusted_time()\n end = self._adjusted_time()\n time_overhead += end - start\n return time_overhead / 1000", "def finish(self):\r\n if self._elapsed is None:\r\n self._elapsed = self._now() - self._start", "def time(n_games, time_per_game):\n\n total_time = n_games * time_per_game / 60\n return total_time", "def finish_simulation(self, current_time):\r\n for time_freed in self.free_slots:\r\n self.idle_ms += time_freed - current_time", "def run_perft(self, depth):\n start = time()\n done = self.perft(self.game.board, self.game.current_turn, depth)\n end = time()\n print(\"Elapsed time for depth \" + str(depth) + \": \")\n print(str(end-start))\n return done", "def update_based_on_time(self):\n for counter, agent in enumerate(self.agents):\n if self.t >= agent.getFinishTime() and self.agent_current_task[counter] != -1: # task is finished\n task_num = self.agent_current_task[counter]\n self.finish_time_per_task_dict[task_num] = self.t\n self.is_task_finished[0][task_num] = 1\n agent.changebusy(False)\n self.update_agent_is_idle_based_on_class()", "def run(self):\n self.evaluate()\n self.accumulate()\n self.summarize()", "def _run_till_idle(self, probes, t0):\n probe_tasks = []\n for probe in probes:\n probe.done_cb = self.probe_done\n probe_tasks.append(probe.run())\n self.loop.run_until_complete(asyncio.gather(*probe_tasks))\n t = time.time()\n delta_t = t - t0\n print(\"Execution time %.1f\" % delta_t)\n return t", "def method_compute_timestep(self):\n\n myg = self.cc_data.grid\n\n cfl = self.rp.get_param(\"driver.cfl\")\n\n u = self.cc_data.get_var(\"x-velocity\")\n v = self.cc_data.get_var(\"y-velocity\")\n\n # the timestep is min(dx/|u|, dy|v|)\n xtmp = ytmp = 1.e33\n if not abs(u).max() == 0:\n xtmp = myg.dx/abs(u.v()).max()\n if not abs(v).max() == 0:\n ytmp = myg.dy/abs(v.v()).max()\n\n dt = cfl*min(xtmp, ytmp)\n\n # We need an alternate timestep that accounts for buoyancy, to\n # handle the case where the velocity is initially zero.\n rho = self.cc_data.get_var(\"density\")\n rho0 = self.base[\"rho0\"]\n rhoprime = self.make_prime(rho, rho0)\n\n g = self.rp.get_param(\"lm-atmosphere.grav\")\n\n F_buoy = (abs(rhoprime*g).v()/rho.v()).max()\n\n dt_buoy = 
np.sqrt(2.0*myg.dx/F_buoy)\n\n self.dt = min(dt, dt_buoy)\n if self.verbose > 0:\n print(f\"timestep is {dt}\")", "def important_time(self):\n\t\twork_s = self.work_time().seconds\n\t\tbreak_s = self.break_time().seconds\n\t\tif self.status():\n\t\t\tremaining_time_s = tomato(work_s, break_s)\n\t\telse:\n\t\t\tremaining_time_s = potato(work_s, break_s)\n\n\t\timp_time = datetime.now() + timedelta(0, remaining_time_s)\n\t\treturn imp_time", "def timer_handler():\r\n \r\n global elapsed_time\r\n elapsed_time += 1", "def _compute_duration_overtime(self):\n diff_float = 0\n for ts_line in self:\n if ts_line.x_start_date:\n st_datetime = fields.Datetime.from_string(\n ts_line.x_start_date)\n en_datetime = fields.Datetime.from_string(\n ts_line.x_end_date)\n diff = en_datetime - st_datetime\n if not ts_line.is_overtime:\n if(time(1, 00) <= st_datetime.time() <= time(5, 00)):\n if(time(6, 00) <= en_datetime.time() <= time(10, 00)):\n # del 1 hour for breaking lunch\n diff_float = round(\n diff.total_seconds() / 3600.0, 2)-1\n else:\n diff_float = round(diff.total_seconds() / 3600.0, 2)\n ts_line.x_is_per_diem = False\n ts_line.unit_amount = diff_float", "def cpu_time(self):", "def time(self, start_time):\n \n TIME_LIST.append((time.time() - start_time))\n print(\"--- %s seconds ---\" % (time.time() - start_time))", "def update(self, delta_time):\n self.total_time += delta_time", "def update_elapsed(self) -> None:\n if self.start_time:\n self.time_elapsed = datetime.now() - self.start_time\n else:\n raise ValueError(\"Timer not started\")", "def finish(self, secs=False):\n self.timeFinish = pygame.time.get_ticks()()\n elapsedTime = self.timeFinish - self.timeStart\n if secs:\n return elapsedTime / 1000\n else:\n return elapsedTime", "def step(self):\n\n for component in self.components:\n component.input(self.current_time)\n\n for component in self.components:\n component.fire()\n\n self.current_time = self.current_time + self.interval\n\n for component in self.components:\n component.output(self.current_time)\n\n return self.current_time", "def end_time(self) -> float:\r\n ...", "def trip_duration_stats(df):\n\n print('\\nCalculating Trip Duration...\\n')\n start_time = time.time()\n # display total travel time\n tot_sum = (df['End Time'] - df['Start Time']).sum()\n print('The total travel time for all trips within the selected data set is {} (hh:mm:ss).'.format(tot_sum))\n # display mean travel time\n mean_trav_time = (df['End Time'] - df['Start Time']).mean()\n print('The mean travel time for all trips within the selected data set is {} (hh:mm:ss).'.format(mean_trav_time))\n\n print(\"\\nThis took %s seconds.\" % (time.time() - start_time))\n print('-'*40)\n wait = input('Press Enter to continue. 
')", "def time(self, operation, time_in_ms):\n pass # Do nothing", "def time_method(method, *arguments):\n start = int(round(time.time() * 1000))\n result = method(*arguments)\n end = int(round(time.time() * 1000))\n total = (end - start) / 1000\n print('Time: %0.03fs' % total)\n return result", "def compute_run_duration(flasher_data, initial_delay):\n if initial_delay is None:\n tot = 0\n else:\n tot = initial_delay\n\n for pair in flasher_data:\n tot += pair[1] + 10\n\n return tot", "def check(self):\r\n boto.log.info('checking Task[%s]-now=%s, last=%s' % (self.name, self.now, self.last_executed))\r\n\r\n if self.hourly and not self.last_executed:\r\n return 0\r\n \r\n if self.daily and not self.last_executed:\r\n if int(self.hour) == self.now.hour:\r\n return 0\r\n else:\r\n return max( (int(self.hour)-self.now.hour), (self.now.hour-int(self.hour)) )*60*60\r\n\r\n delta = self.now - self.last_executed\r\n if self.hourly:\r\n if delta.seconds >= 60*60:\r\n return 0\r\n else:\r\n return 60*60 - delta.seconds\r\n else:\r\n if int(self.hour) == self.now.hour:\r\n if delta.days >= 1:\r\n return 0\r\n else:\r\n return 82800 # 23 hours, just to be safe\r\n else:\r\n return max( (int(self.hour)-self.now.hour), (self.now.hour-int(self.hour)) )*60*60", "def apply_time_leverage_fees(self):\n if len(self.active_long_positions + self.active_short_positions) == 0:\n return\n\n if self.leverage > 1:\n for pos in (self.active_long_positions + self.active_short_positions):\n pos.apply_time_leverage_fees()\n\n self.update_capital_stats()", "def time_step(self):\n\n rho_rel = np.abs(self.rho_dt / self.rho)\n rho_rel_max = np.max(rho_rel)\n e_rel = np.abs(self.e_dt / self.e)\n e_rel_max = np.max(e_rel)\n x_rel = np.abs(self.u / self.dx)\n x_rel_max = np.max(x_rel)\n y_rel = np.abs(self.w / self.dy)\n y_rel_max = np.max(y_rel)\n rel = [rho_rel_max, e_rel_max, x_rel_max, y_rel_max]\n delta = np.max(np.abs(rel))\n\n if 0.1 <= delta <= 1e3:\n self.dt = self.p / delta\n else:\n self.dt = self.p", "def run(self):\r\n\r\n # t=0 is singular point\r\n\r\n print 'Time of laboratory clock Tw =', self.tick\r\n tt = self.tmp\r\n ll = self.lst\r\n car = self.interaction(self.carr)\r\n ll.item_run(tt, self.tick, car)\r\n tt = tt.next\r\n\r\n # run of local time\r\n\r\n while not tt is None:\r\n\r\n if tt.dedicated_node:\r\n self.tick = self.tick + 1\r\n print 'Time of laboratory clock Tw =', self.tick\r\n\r\n # self.move() # It is classical motion of particle (example).\r\n\r\n self.move_reset()\r\n car = self.interaction(self.carr)\r\n\r\n ll = self.lst\r\n while not ll is None:\r\n ll.item_run(tt, self.tick, car)\r\n ll = ll.right\r\n\r\n tt = tt.next", "def take_measurements(timeout=6):\n print(\"Taking measurements for\", timeout, \"seconds.\")\n start = time.monotonic()\n while time.monotonic() - start < timeout:\n for i, (toggle, polarity) in enumerate(vectors):\n result = trackpad.measure_adc(toggle, polarity)\n print(\"meas{}: {}\".format(i, result - compensation[i]), end=\"\\t\")\n print()", "def evaluateTime(self, *args):\n return _osgAnimation.Motion_evaluateTime(self, *args)", "def estimated_time(self):\n self._update()\n if not self.running_mode:\n return 0 if self._is_finished() else float(\"nan\")\n elif self.running_mode == \"local\":\n start = self.processes[0].create_time()\n elif self.running_mode == \"grid\":\n start = self.job[\"start_time\"]\n if start == 0:\n # Queued, but not started\n return float(\"nan\")\n else:\n logger.warning(\"Invalid running_mode attribute\")\n return float(\"nan\")\n 
current = self.current_step()\n if current <= 0: # If not dumped yet or error\n return float('nan')\n else:\n elapsed = time() - start\n return elapsed * (self.total_steps / current - 1)", "def averageTime(self):\n \n pass", "def runtime(self):\n return (self.time - self.start).total_seconds()", "def do_timing(self):\n sts = datetime.datetime.now()\n _ = dep.read_env(get_path('good_env.txt'))\n ets = datetime.datetime.now()\n print(\"%.5f reads per second\" % (1. / (ets - sts).total_seconds(),))\n self.assertEquals(1, 2)", "def print_fun_facts(num_hours, num_minutes):\n\n # If the number of hours are less than 1, there are no real analytics that\n # can be given to the user, so the program exits\n if num_hours < 1:\n os._exit(1)\n\n print(\"\\nIn the time you spent on league, here's some things you\", \n \"could have done:\")\n\n # Get the total number of minutes that the user spent playing league in the\n # last week\n total_mins = num_hours * 60 + num_minutes\n\n # Number of hours it takes to fly coast to coast\n hours_to_fly_from_la_to_nyc = 5\n\n # Find how far or how many times the user could have flown coast to coast\n flying_data = time_to_perform_task(total_mins, hours_to_fly_from_la_to_nyc)\n\n # Check if the data returned is not a whole number, but a percentage\n # This will occur if hte user hasn't played enough league to complete more\n # than 1 flight from coast to coast\n if flying_data[0]:\n print(\"- Flown \", flying_data[1],\"% of the way from LA to NYC\", sep='')\n else:\n print(\"- Flown from LA to NYC\", flying_data[1], \"times\")\n\n # Repeating the same process, but with the Great Gatsby\n hours_to_read_great_gatsby = 2.62\n gatsby_data = time_to_perform_task(total_mins, hours_to_read_great_gatsby)\n if gatsby_data[0]:\n print(\"- Read \", gatsby_data[1],\"% of The Great Gatsby\", sep='')\n else:\n print(\"- Read The Great Gatsby \", gatsby_data[1], \" times\", sep='')\n \n # Again repeating the same process to print analytics about Avengers: Endgame\n hours_to_watch_endgame = 3.2\n endgame_data = time_to_perform_task(total_mins, hours_to_watch_endgame)\n if endgame_data[0]:\n print(\"- Watched \", endgame_data[1],\"% of Avengers: Endgame\", sep='')\n else:\n print(\"- Watched Avengers: Endgame \", endgame_data[1], \" times\", sep='')", "def timer_update(self):\n if self.mineboard.gamestate is not None:\n return\n time_so_far = round(time.time()-self.start_time)\n if time_so_far == 1:\n self.now.set(f\"Time so far: {time_so_far} second\")\n else:\n self.now.set(f\"Time so far: {time_so_far} seconds\")\n self.after(1000, self.timer_update) # calls this function every second", "def time_remaining(self):\n elapsed_time = time.time() - self.start_time\n self.progressbar['value'] = progressbar.current\n time_remaining = round((1 - progressbar.current) * elapsed_time)\n # Disabled for Demo due to confusion\n # if time_remaining < 60:\n # self.progress_label.config(text=f'Estimated Time Remaining: {time_remaining} seconds')\n # elif 3600 > time_remaining > 60:\n # time_remaining = round(time_remaining / 60)\n # self.progress_label.config(text=f'Estimated TIme Remaining: {time_remaining} minutes')\n # elif time_remaining > 3600:\n # time_remaining = dt.timedelta(seconds=time_remaining)\n # self.progress_label.config(text=f'Estimated Time Remaining: {time_remaining}')", "def runDelayedEntryStrat(self):\n # Choose the first day I'd like to enter randomly\n start, end = self.randomDays()\n \n # Now wait for positive momentum\n flag = False\n trueStart = start\n while not flag:\n 
aWin = self.momementum[(start + pandas.DateOffset(days=-30)):start]\n if (aWin > 0).sum() > 30*.75:\n # I'm going to invest\n #print \"Made it out!\"\n flag = True\n else:\n # I'm going to wait another five days\n start = start + pandas.DateOffset(days=5)\n if (end-start).days < self.daysHeld/2:\n # Screw it, I've waited too long I'm going to invest anyways\n flag = True\n #print \"Screw it, I've only got %s of %s days left!\" % ((end-start).days, self.daysHeld)\n \n #print \"I wasted %d days\" % (start-trueStart).days\n self.wastedTime = self.wastedTime + (start-trueStart).days \n gain = (self.df.adj_close[end] - getInfl(self.df.adj_close[start], start.year, end.year)) / \\\n getInfl(self.df.adj_close[start], start.year, end.year)\n return gain", "def compute_total_times(self):\n rval = {}\n for fgraph, node in self.apply_time:\n if node not in rval:\n self.fill_node_total_time(fgraph, node, rval)\n return rval", "def automate_time(f, f_unit, t, t_unit):\n f_unit, t_unit = clean_units([f_unit, t_unit])\n\n num_tasks = number_times_per_year(f, f_unit)\n\n time_seconds = t * converter[t_unit]\n\n total_time = calculate_automation(num_tasks, time_seconds)\n\n value, unit = usable_numbers(total_time)\n print('You are spending %d %s every 5 years on this task' % (value, unit))\n \n # print(\"%d times per %s, I spend %d %s doing the task, which is %d seconds per 5 years\" % (f, f_unit, t, t_unit, total_time))\n # print(\"num_tasks: %d, time_seconds: %d\" % (num_tasks, time_seconds))\n # print(' ')", "def runtime_cal(start,end) :\n run_time = end - start\n mm = int(run_time/60)\n ss = round(run_time%60)\n return mm, ss", "def enter_data_for_time_calc():\n print(\"Pace & Distance -> Time\")\n print(\"=\" * 50)\n\n pace = input(\"Pace[min/km]: \")\n distance = float(input(\"Distance[km]: \"))\n\n calc_time(pace, distance)", "def step(self, dt):\n self.time_elapsed += dt\n self.project()", "def calc_total_wait(self, current_time_step):\n self.total_wait = current_time_step - self.time_entered\n return self.total_wait", "def time_calculator(seconds):\n\n days = calculate_days(seconds)\n seconds = seconds - calculate_seconds_in_days(days)\n\n hours = calculate_hours(seconds)\n seconds = seconds - calculate_seconds_in_hours(hours)\n\n minutes = calculate_minutes(seconds)\n seconds = seconds - calculate_seconds_in_minutes(minutes)\n\n total_time = \"%d Day/s %d Hour/s %d Minute/s %d Second/s\" % (days, hours, minutes, seconds)\n\n print(total_time)", "def time(self):\r\n raise NotImplementedError", "def run(self):\n last_time = time.time()\n while self.running:\n now_time = time.time()\n interval = now_time - last_time\n last_time = now_time\n self.update(interval)\n time.sleep(Options['update interval'])" ]
[ "0.675693", "0.64293903", "0.6368792", "0.61246926", "0.6124316", "0.6109275", "0.6093881", "0.6072631", "0.6060954", "0.60540646", "0.60454965", "0.6036691", "0.6011635", "0.6011255", "0.59790087", "0.5965647", "0.5960139", "0.59283966", "0.5919275", "0.59172326", "0.5911589", "0.58907765", "0.5879307", "0.58646536", "0.58646536", "0.58646536", "0.58642364", "0.5853346", "0.5819165", "0.57782894", "0.5765931", "0.5765168", "0.5760979", "0.574445", "0.5742801", "0.57386434", "0.57383364", "0.5727598", "0.57241434", "0.5723302", "0.5720284", "0.5717969", "0.5717284", "0.5717106", "0.57164526", "0.570591", "0.5703923", "0.57027483", "0.56920666", "0.56883293", "0.56858444", "0.5685198", "0.56778526", "0.56767905", "0.5673219", "0.5665044", "0.56524944", "0.564774", "0.56433284", "0.5625514", "0.5625203", "0.5622267", "0.5614879", "0.561335", "0.56126016", "0.56123346", "0.56098706", "0.56069356", "0.5606673", "0.55975276", "0.55957", "0.5590993", "0.55866694", "0.5586225", "0.5583075", "0.5575141", "0.557386", "0.5569017", "0.55689573", "0.55555654", "0.5547784", "0.5546001", "0.5545604", "0.5541143", "0.55407834", "0.5532082", "0.5531011", "0.5525739", "0.5521515", "0.5516031", "0.55100113", "0.55092436", "0.55078197", "0.5505467", "0.55015445", "0.5500511", "0.5497476", "0.5492438", "0.54848474", "0.5483426", "0.547863" ]
0.0
-1
automatically release blocks when blocks dict loses shape
def free_finalizer(self, dataset: dict):  # for gc being late
        if dataset:
            if dataset['vrtx']:
                dataset['vrtx'].release()
            if dataset['indx']:
                dataset['indx'].release()
            dataset.clear()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def finalize_block_construction(self, pyomo_block):\n pass", "def blocks(self):\n pass", "def _prepare_blocks():\n\n counter = blocks[0]['freeStart']\n maxBlocks = blocks[0]['maxBlocks']\n while(counter < maxBlocks) :\n try:\n # print (mount['parent'] + '/linddata.' + str(counter))\n f = open(mount['parent'] + '/linddata.' + str(counter), 'r') \n except IOError, e:\n return STATUS['M_BD']\n else :\n fdatastring = f.next()\n fdata = deserializedata(fdatastring)\n blocks[counter] = fdata\n counter += 1\n \n return STATUS['OK']", "def destroy(self, coords):\n\n block = blocks[self.get_block(coords)]\n self.set_block(coords, block.replace)\n self.set_metadata(coords, 0)", "def clear(self):\n self.blocks.clear()", "def delete_block(self, block):\n raise NotImplementedError('delete_block')", "def removeMemoryBlock(self, block: ghidra.program.model.mem.MemoryBlock) -> None:\n ...", "def remove_block(self, block):\n raise NotImplementedError()", "def close_subblock(self, lines: Tuple[int, int]) -> None:", "def validate_blocks():\n # Validate the remesh blocks modifiers\n bpy.ops.object.apply_all_modifiers()\n \n date_1 = datetime.datetime.now()\n print(\"Start\")\n \n # Select the bottom faces\n Button_Operations.select_faces(radians(10))\n\n # Extrude the support\n bpy.ops.mesh.extrude_region_move(MESH_OT_extrude_region={\"use_normal_flip\":False, \"use_dissolve_ortho_edges\":False, \"mirror\":False}, TRANSFORM_OT_translate={\"value\":(0, 0, -20), \"orient_type\":'GLOBAL', \"orient_matrix\":((1, 0, 0), (0, 1, 0), (0, 0, 1)), \"orient_matrix_type\":'GLOBAL', \"constraint_axis\":(False, False, True), \"mirror\":False, \"use_proportional_edit\":False, \"proportional_edit_falloff\":'SMOOTH', \"proportional_size\":1, \"use_proportional_connected\":False, \"use_proportional_projected\":False, \"snap\":False, \"snap_target\":'CLOSEST', \"snap_point\":(0, 0, 0), \"snap_align\":False, \"snap_normal\":(0, 0, 0), \"gpencil_strokes\":False, \"cursor_transform\":False, \"texture_space\":False, \"remove_on_cancel\":False, \"release_confirm\":False, \"use_accurate\":False, \"use_automerge_and_split\":False})\n\n # Select all\n bpy.ops.mesh.select_all(action='SELECT')\n\n # Bissect and delete the element under the xy plane\n bpy.ops.mesh.bisect(plane_co=(0, 0, 0.01), plane_no=(0, 0, 1), use_fill=False, clear_inner=True, xstart=942, xend=1489, ystart=872, yend=874, flip=False) \n\n # Fill the hole and triangulate faces\n Button_Operations.manifold_and_triangulate()\n \n # Delete the copy\n object_to_delete = bpy.data.objects[\"temp_copy\"]\n bpy.data.objects.remove(object_to_delete, do_unlink=True) \n \n # Switch in object mode \n bpy.ops.object.mode_set(mode='OBJECT')", "def removeBlock(self, block: ghidra.program.model.mem.MemoryBlock, monitor: ghidra.util.task.TaskMonitor) -> None:\n ...", "def __del__(self):\r\n self.chunk = None", "def split(self, block: ghidra.program.model.mem.MemoryBlock, addr: ghidra.program.model.address.Address) -> None:\n ...", "def create_hard_blocks(self):\n for x in xrange(1, self.map_size[0], 2):\n for y in xrange(1, self.map_size[1], 2):\n self.create_hard_block_at(x, y)", "def _prepare_consumed_block():\n \n blkcounter = 0\n maxBlocks = blocks[0]['maxBlocks']\n \n freeStart = blocks[0]['freeStart']\n freeEnd = blocks[0]['freeEnd']\n \n while(blkcounter in xrange(0, maxBlocks, 1)) :\n # data type can be varied\n # dictionary - for superBlock, directory and file inode\n # list - for free block list, index list\n # string - data blocks\n # empty - free block\n bdata = 
blocks[blkcounter]\n # an afterthought - we really do not need to bother about\n # data blocks here, as all the data blocks should be in one\n # of index list or in location attribute of inode\n \n if (type(bdata) == list) :\n if(blkcounter >= freeStart and blkcounter <= freeEnd) :\n # free block list, so no reading the content of the list, \n # but mark the block as a consumed block \n consumedBlocks.append(blkcounter)\n else :\n # this is an index list, marking this block as consumed\n # mark all the blocks listed in this list as consumed as well\n consumedBlocks.append(blkcounter)\n for n in bdata :\n consumedBlocks.append(n)\n elif (type(bdata) == dict) :\n # mark this block as consumed block\n consumedBlocks.append(blkcounter)\n if(bdata.has_key('filename_to_inode_dict')) :\n # if directory node has connected files\n for n in bdata['filename_to_inode_dict'].values() :\n consumedBlocks.append(n)\n elif(bdata.has_key('location')): \n # if this is a file node, then check indirect attribute\n # and if inderect if 0, mark the value in location attribute\n # as consumed block\n # if indirect is 1, mark the value in location attribute\n # as consumed block, then leave it, cause the 'if' block already\n # does a check on index list \n consumedBlocks.append(bdata['location'])", "def free(self, name: str):\n # Find the block to be freed\n to_free = None\n for b in self.used_list:\n if b.name == name:\n to_free = b\n break\n # Return if the user requested to free a block that is not in the used list\n if to_free is None:\n print(\"free FAIL; no such name={}\", to_free)\n return\n # Remove the block from the used list\n to_free.used = False\n self.used_list.remove(to_free)\n \n # Check if either of the neighbors of this block are free and need to be coalesced with this block\n left_end_address = to_free.start()\n right_start_address = to_free.end()\n left_block = None\n right_block = None\n for block in self.free_list:\n if (block.end() == left_end_address):\n left_block = block\n elif (block.start() == right_start_address):\n right_block = block\n\n # Coalesce neighboring blocks if necessary, add blocks to free list\n if (left_block is not None and right_block is not None):\n # Remove these blocks so they can be coalesced with each other and with to_free\n self.free_list.remove(left_block)\n self.free_list.remove(right_block)\n start: int = left_block.start()\n end: int = right_block.end()\n new_name: str = left_block.name + name + right_block.name\n new_free = Block(start, end-start, new_name)\n self.free_list.append(new_free)\n elif (left_block is not None):\n # Remove this block so it can be coalesced with to_free\n self.free_list.remove(left_block)\n start: int = left_block.start()\n end: int = right_start_address\n new_name: str = left_block.name + name\n new_free = Block(start, end-start, new_name)\n self.free_list.append(new_free)\n elif (right_block is not None):\n # Remove this block so it can be coalesced with to_free\n self.free_list.remove(right_block)\n start: int = left_end_address\n end: int = right_block.end()\n new_name: str = name + right_block.name\n new_free = Block(start, end-start, new_name)\n self.free_list.append(new_free)\n else:\n # None of to_free's neighbors are free, so just add it alone to the free list\n self.free_list.append(to_free)", "def run(self, block_nodes):\n #make higher level nodes by aggregation\n nodes_dict = self.getNodesAllGeolevels(block_nodes)\n print(\"Num blocks: {}\".format(nodes_dict[\"Block\"].count()))\n\n block_nodes.unpersist()\n del 
block_nodes\n gc.collect()\n\n nodes_dict = self.noisyAnswers(nodes_dict)\n nodes_dict, feas_dict = self.topdown(nodes_dict)\n\n return (nodes_dict,feas_dict)", "def block_seen(self):\n self.blocklist.update(self.mapping.values())\n self.mapping = dict()", "def example_deletion_with_block_lowering(self):\n i = 0\n while i < len(self.shrink_target.blocks):\n if not self.is_shrinking_block(i):\n i += 1\n continue\n\n u, v = self.blocks[i].bounds\n\n j = 0\n while j < len(self.shrink_target.examples):\n n = int_from_bytes(self.shrink_target.buffer[u:v])\n if n == 0:\n break\n ex = self.shrink_target.examples[j]\n if ex.start < v or ex.length == 0:\n j += 1\n continue\n\n buf = bytearray(self.shrink_target.buffer)\n buf[u:v] = int_to_bytes(n - 1, v - u)\n del buf[ex.start : ex.end]\n if not self.incorporate_new_buffer(buf):\n j += 1\n\n i += 1", "def finalize():", "def finalize():", "def finalize():", "def free(self):\n pass", "def free(self):\n pass", "def build(self, block_size):", "def _settle_shape(self, shape):\n if shape:\n for block in shape.blocks:\n self.array[block.row_position][block.column_position] = block\n self.remove_completed_lines()", "def finalize(self):", "def finalize(self):", "def removeBlock(self, aBlock: gp.Block):\n \n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return", "def remesh_blocks():\n \n # Get the active object\n obj = bpy.context.active_object\n \n nameCopy = \"temp_copy\"\n\n # Switch in object mode \n bpy.ops.object.mode_set(mode='OBJECT')\n\n # Remove all modifiers from the object\n obj.modifiers.clear()\n\n # Delete the existing copy \n for o in bpy.data.objects:\n if o.type == 'MESH' and o.name == nameCopy:\n # Delete the existing copy\n object_to_delete = bpy.data.objects[nameCopy]\n bpy.data.objects.remove(object_to_delete, do_unlink=True) \n \n \n # Make a copy of the object\n new_obj = obj.copy()\n new_obj.data = obj.data.copy()\n new_obj.animation_data_clear()\n bpy.context.collection.objects.link(new_obj)\n\n # Rename the copy\n new_obj.name = nameCopy\n\n # Hide the copy\n new_obj.hide_viewport = True\n\n # Remesh the faces of the object with blocks\n bpy.ops.object.modifier_add(type='REMESH')\n bpy.context.object.modifiers[\"Remesh\"].mode = 'BLOCKS'\n bpy.context.object.modifiers[\"Remesh\"].octree_depth = bpy.context.scene.level_blocks\n bpy.context.object.modifiers[\"Remesh\"].scale = 0.99\n bpy.context.object.modifiers[\"Remesh\"].use_remove_disconnected = False\n bpy.context.object.modifiers[\"Remesh\"].threshold = 1\n bpy.context.object.modifiers[\"Remesh\"].use_smooth_shade = False\n\n # Make intersection between the remesh object and the original\n bpy.ops.object.modifier_add(type='BOOLEAN')\n bpy.context.object.modifiers[\"Boolean\"].operation = 'INTERSECT'\n bpy.context.object.modifiers[\"Boolean\"].operand_type = 'OBJECT'\n bpy.context.object.modifiers[\"Boolean\"].object = bpy.data.objects[nameCopy]\n bpy.context.object.modifiers[\"Boolean\"].solver = 'FAST'\n bpy.context.object.modifiers[\"Boolean\"].double_threshold = 0", "def populate_blocks_with_blockheights(self):\n for (height, block) in enumerate(self.blocks):\n block[\"height\"] = height", "def remove_blocks(draft):\n for symbol in draft.Blocks:\n if symbol.Name in blocks_to_delete:\n print(\"[-] %s, \\tdeleted\" % symbol.Name)\n symbol.delete()\n\n # for ball in draft.ActiveSheet.Balloons:\n if draft.Balloons:\n for ball in draft.Balloons:\n if ball.BalloonType == 
7: # type 7 filter the triangle balloons.\n print(\"[-] %s, \\tdeleted\" % ball.Name)\n ball.Delete()\n else:\n pass", "def changed_block(self, old_block, new_block):", "def init_blocks(self):\n length = self.physics.len_blocks\n rect = Rectangle(Vector(self.rpos.x, self.rpos.y),\n Vector(self.rpos.x + length, self.rpos.y + length))\n self.rects.append(rect)\n self.physics.add_block(rect, self.stype)", "def clear_blockages(self):\n debug.info(3,\"Clearing all blockages\")\n self.rg.clear_blockages()", "def all_blocks(state):\n return state.clear.keys()", "def num_blocks(self): # -> int:\n ...", "def mapDel(block, posMap):\n for (x, y) in block.coords:\n theFallener(x + block.x, y + block.y, 0, posMap)", "def build_blocks():\n block_1 = GRect(375, 80, x=20, y=330)\n block_1.filled = True\n block_1.color = 'firebrick'\n block_1.fill_color = 'firebrick'\n window.add(block_1)\n block_2 = GRect(375, 80, x=405, y=330)\n block_2.filled = True\n block_2.color = 'steelblue'\n block_2.fill_color = 'steelblue'\n window.add(block_2)\n block_3 = GRect(375, 80, x=20, y=420)\n block_3.filled = True\n block_3.color = 'goldenrod'\n block_3.fill_color = 'goldenrod'\n window.add(block_3)\n block_4 = GRect(375, 80, x=405, y=420)\n block_4.filled = True\n block_4.color = 'forestgreen'\n block_4.fill_color = 'forestgreen'\n window.add(block_4)\n block_5 = GRect(60, 40, x=720, y=120)\n block_5.filled = True\n block_5.color = 'dodgerblue'\n block_5.fill_color = 'dodgerblue'\n window.add(block_5)\n circle_1 = GOval(90, 90, x=20, y=170)\n circle_1.filled = True\n circle_1.color = 'blueviolet'\n circle_1.fill_color = 'blueviolet'\n window.add(circle_1)", "def blocks(self, blocks: int):\n\n self._blocks = blocks", "def remove_blocks(self, *vertices):\n for vertex in vertices:\n try:\n self.world[vertex] = None\n self.shown.pop(vertex)\n for vtx in self._shown[vertex]:\n vtx.delete()\n except KeyError:\n pass\n except IndexError:\n pass", "def decache_block(self, course_key, version_guid, block_key):\n bulk_write_record = self._get_bulk_ops_record(course_key)\n if bulk_write_record.active:\n try:\n del bulk_write_record.modules[version_guid][block_key]\n except KeyError:\n pass", "def gc(self):\n self._complete_grid = None", "def add_block(self, env, block_color, width, height):\n\n block_size = (0.04, 0.04, 0.04)\n block_urdf = \"stacking/block.urdf\"\n block_pose = self.get_random_pose(env, block_size)\n block_id = env.add_object(block_urdf, block_pose)\n pb.changeVisualShape(\n block_id, -1, rgbaColor=utils.COLORS[block_color] + [1])\n # (0, None): 0 means that the block is symmetric.\n # TODO(hagrawal): Not sure what None means. Update. This is kept\n # for CLIPort compatibility. 
We don't use it.\n self.blocks.append((block_id, (0, None)))\n block_pix = utils.xyz_to_pix(block_pose[0], self.bounds, self.pix_size)\n block_obj_info = {\n \"obj_id\": block_id,\n \"pose\": block_pose,\n \"size\": block_size,\n \"urdf\": block_urdf,\n \"color\": block_color,\n \"unknown_color\": block_color in utils.EVAL_COLORS,\n \"pix\": block_pix,\n \"region\": determine_region(block_pix[0], block_pix[1], width, height),\n }\n return block_obj_info", "def remove_from_block(self):\n self.enclosing_block.remove_ops([self])", "def transform_block(block):\n return {\n 'type': 'block',\n 'children': [transform_child(child) for child in block]\n }", "def deallocate(self):\n raise NotImplementedError", "def remove_blocks(self, block_ids):\n self.smd3.remove_blocks(block_ids)\n self.logic.update(self.smd3)\n self.header.update(self.smd3)", "def init_blocks(self):\n length = self.physics.len_blocks\n rect = Rectangle(self.rpos, self.rpos + Vector(length, length))\n self.rects.append(rect)\n self.physics.add_block(rect, 'bomberman')", "def release(self):\n # type: () -> None\n for part in self.parts:\n part.release()", "def __del__(self):\n if self._alloc:\n _pychidg.f90wrap_graphics_bc_t_finalise(this=self._handle)", "def __parse_blocks_pass(self):\n\n self.stack = [DocumentStackToken()]\n\n self.tokenized_document = []\n token_to_use = self.source_provider.get_next_line()\n did_start_close = False\n did_started_close = False\n requeue = []\n ignore_link_definition_start = False\n POGGER.debug(\"---$---\", token_to_use)\n POGGER.debug(\"---\")\n self.__parse_properties.pragma_lines = {}\n line_number = 1\n try:\n (\n token_to_use,\n line_number,\n requeue,\n ) = self.__process_front_matter_header_if_present(\n token_to_use, line_number, requeue\n )\n did_start_close = token_to_use is None\n keep_on_going = True\n while keep_on_going:\n POGGER.debug(\"next-line>>$\", token_to_use)\n POGGER.debug(\"stack>>$\", self.stack)\n POGGER.debug(\"current_block>>$\", self.stack[-1])\n POGGER.debug(\"line_number>>$\", line_number)\n POGGER.debug(\"---\")\n\n position_marker = PositionMarker(line_number, 0, token_to_use)\n parser_state = ParserState(\n self.stack,\n self.tokenized_document,\n TokenizedMarkdown.__close_open_blocks,\n self.__handle_blank_line,\n )\n if did_start_close:\n POGGER.debug(\"\\n\\ncleanup\")\n\n was_link_definition_started_before_close = self.stack[\n -1\n ].was_link_definition_started\n\n did_started_close = True\n (\n tokens_from_line,\n requeue_line_info,\n ) = TokenizedMarkdown.__close_open_blocks(\n parser_state,\n self.tokenized_document,\n include_block_quotes=True,\n include_lists=True,\n caller_can_handle_requeue=True,\n was_forced=True,\n )\n if tokens_from_line and not self.tokenized_document:\n self.tokenized_document.extend(tokens_from_line)\n\n if not (requeue_line_info and requeue_line_info.lines_to_requeue):\n keep_on_going = False\n else:\n assert was_link_definition_started_before_close\n assert not requeue_line_info.lines_to_requeue[0]\n\n del requeue_line_info.lines_to_requeue[0]\n line_number -= 1\n\n did_start_close = False\n tokens_from_line = None\n else:\n POGGER.debug(\">>>>$\", self.tokenized_document)\n\n if not token_to_use or not token_to_use.strip():\n POGGER.debug(\"call __parse_blocks_pass>>handle_blank_line\")\n (\n tokens_from_line,\n requeue_line_info,\n ) = self.__handle_blank_line(\n parser_state,\n token_to_use,\n from_main_transform=True,\n position_marker=position_marker,\n )\n else:\n POGGER.debug(\"\\n\\nnormal lines\")\n (\n 
tokens_from_line,\n _,\n _,\n requeue_line_info,\n _,\n ) = ContainerBlockProcessor.parse_line_for_container_blocks(\n parser_state,\n position_marker,\n ignore_link_definition_start,\n self.__parse_properties,\n None,\n )\n\n POGGER.debug(\"<<<<$\", self.tokenized_document)\n\n if keep_on_going:\n line_number, ignore_link_definition_start = TokenizedMarkdown.__xx(\n line_number, requeue_line_info, requeue\n )\n\n POGGER.debug(\n \"---\\nbefore>>$\",\n self.tokenized_document,\n )\n POGGER.debug(\"before>>$\", tokens_from_line)\n if tokens_from_line:\n self.tokenized_document.extend(tokens_from_line)\n POGGER.debug(\n \"after>>$\",\n self.tokenized_document,\n )\n if requeue:\n POGGER.debug(\"requeue>>$\", requeue)\n POGGER.debug(\"---\")\n\n (\n token_to_use,\n did_start_close,\n did_started_close,\n ) = self.__determine_next_token_process(\n requeue, did_start_close, did_started_close\n )\n except AssertionError as this_exception:\n error_message = f\"A project assertion failed on line {line_number} of the current document.\"\n raise BadTokenizationError(error_message) from this_exception\n\n if self.__parse_properties.pragma_lines:\n self.tokenized_document.append(\n PragmaToken(self.__parse_properties.pragma_lines)\n )\n return self.tokenized_document", "def dense_block(x, blocks, name):\r\n for i in range(blocks):\r\n x = conv_block(x, 20, name=name + '_block' + str(i + 1))\r\n return x", "def block(self):\n pass", "def release(self):\n # type: () -> None\n for k in self.kernels:\n k.release()", "def release(self):", "def erase_block(self):\n self.blocks[self.editor_cursor_position[1]][self.editor_cursor_position[0]] = '0'", "def get_block_dict(self) -> dict:\n return self.blocks", "def consolidate_empty_blocks(self):\n new_blocks = []\n for block in self.blocks:\n if isinstance(block, BasicBlock) and not block.statements:\n self.remove_block(block)\n else:\n new_blocks.append(block)\n self.blocks = new_blocks", "def save_block(self, block):\n # Implementing this is optional.", "def update_blocks_closure(self, ln, block, fail_bool):\n\n if ln == Line.LINE_GREEN:\n # Check that block isnt already in that state\n if self.blocks_green_arr[block - 1].open == (not fail_bool):\n if fail_bool == True:\n self.blocks_green_arr[block - 1].num_faliures += 1\n else:\n self.blocks_green_arr[block - 1].num_faliures -= 1\n else:\n if fail_bool == True:\n self.blocks_green_arr[block - 1].num_faliures += 1\n else:\n self.blocks_green_arr[block - 1].num_faliures -= 1\n\n\n # Update block if fail\n if self.blocks_green_arr[block - 1].num_faliures > 0:\n if self.blocks_green_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_green_arr[block - 1].open = False\n else:\n if not self.blocks_green_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_green_arr[block - 1].open = True\n\n elif ln == Line.LINE_RED:\n # Check that block isnt already in that state\n if self.blocks_red_arr[block - 1].open == (not fail_bool):\n if fail_bool == True:\n self.blocks_red_arr[block - 1].num_faliures += 1\n else:\n self.blocks_red_arr[block - 1].num_faliures -= 1\n else:\n if fail_bool == True:\n self.blocks_red_arr[block - 1].num_faliures += 1\n else:\n self.blocks_red_arr[block - 1].num_faliures -= 1\n\n # Update block if fail\n if self.blocks_red_arr[block - 1].num_faliures > 0:\n if self.blocks_red_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_red_arr[block - 1].open = False\n else:\n 
if not self.blocks_red_arr[block - 1].open:\n signals.ctc_update_failure_blocks_gui.emit(ln, fail_bool)\n self.blocks_red_arr[block - 1].open = True\n\n else:\n raise Exception(\"CTC : UPDATE BLOCK CLOSURES (maint. mode from SWTrack \\\n Cont. Send INVALID Line\")", "def finalize(self):\n self.total_priors = np.sum(list(self.priors.values()))\n self.total_blocks = np.sum(list(self.nblocks.values()))\n self.total_fitness = np.sum(list(self.fitness.values()))\n self.blocks = BedTool.from_dataframe(self.df)", "def die(self):\n self.pjs.bombermen.remove(self)\n for block in self.physics.blocks[self.stype]:\n if block == self.rects[0]:\n self.physics.blocks[self.stype].remove(block)", "def finalize(self):\r\n pass", "def dense_block(x, blocks, name, train_bn):\r\n for i in range(blocks):\r\n x = conv_block(x, 32, name=name + '_block' + str(i + 1), train_bn=train_bn)\r\n return x", "def reserve(self, block=False):\n mc = self.get(block)\n try:\n yield mc\n finally:\n self.put(mc)", "def finalize(self):\n pass", "def finalize(self):\n pass", "def finalize(self):\n pass", "def finalize(self):\n pass", "def finalize(self):\n pass", "def finalize(self):\n pass", "def releaseTraingingData(self):\n del(self.documents)\n #del(self.sumsOfVectors)\n self.documents = {}\n #self.sumsOfVectors = {}", "def __init__(self):\n self.block = 1000\n self._map = [] \n self.len = 0 \n self.incr()", "def getallocatedblocks(): # real signature unknown; restored from __doc__\n return 0", "def _adjustBlock(self, b):\n raise NotImplementedError", "def finalize(self):\n return", "def minimize_individual_blocks(self):\n i = len(self.blocks) - 1\n while i >= 0:\n u, v = self.blocks[i].bounds\n Lexical.shrink(\n self.shrink_target.buffer[u:v],\n lambda b: self.try_shrinking_blocks((i,), b),\n random=self.random,\n full=False,\n )\n i -= 1", "def penblock(self, block):\n self.block = block", "def finalise(self):", "def doSize(self, mode = None, postState = None):\n #try:\n _str_func = 'size'\n log.debug(cgmGEN.logString_start(_str_func))\n\n \n _str_state = getState(self)\n if _str_state not in ['define','form']:\n raise ValueError,\"|{0}| >> [{1}] is not in define state. 
state: {2}\".format(_str_func,self.mNode, _str_state)\n \n #mBlockModule = self.p_blockModule\n #log.debug(\"|{0}| >> BlockModule: {1}\".format(_str_func,mBlockModule))\n #reload(mBlockModule)\n \n l_baseNames = self.datList_get('nameList')\n l_toCreate = l_baseNames#...this needs to be 2\n \n cgmGEN.func_snapShot(vars())\n \n mRigBlock = self\n class castSizer(DRAGFACTORY.clickMesh):\n def __init__(self,rigBlock = mRigBlock,**kws):\n if kws:log.debug(\"kws: %s\"%str(kws))\n \n super(castSizer, self).__init__(**kws)\n self._mRigBlock = mRigBlock\n self.toCreate = l_toCreate\n self._rigBlockTally = 0\n log.debug(\"|{0}| >> Please place '{1}'\".format(_str_func, self.toCreate[0]))\n \n def release_post_insert(self):\n \n #cgmGEN.func_snapShot(self.__dict__)\n self._rigBlockTally +=1 \n if self._createModeBuffer:\n mc.delete(self._createModeBuffer)\n \n if self._rigBlockTally < len(l_toCreate):\n log.debug(\"|{0}| >> Please place '{1}'\".format(_str_func, self.toCreate[self._rigBlockTally]))\n else:\n log.debug(\"|{0}| >> Finalizing...\".format(_str_func,self)+ '-'*80)\n \n l_pos = self.l_returnRaw\n mVector = MATH.get_vector_of_two_points(l_pos[0],l_pos[-1],asEuclid=True)\n mVector.normalize()\n \n mRigBlock.p_position = l_pos[0]\n mRigBlock.baseAim = mVector.x, mVector.y, mVector.z\n \n mRigBlock.baseSizeZ = DIST.get_distance_between_points(l_pos[0],l_pos[-1])\n mRigBlock.baseSizeX = mRigBlock.baseSizeZ/2\n mRigBlock.baseSizeY = mRigBlock.baseSizeX\n \n #cgmGEN.func_snapShot(vars())\n \n log.debug(\" pos...\")\n for p in l_pos:\n log.debug(\" {0}\".format(p))\n log.debug(\" baseAim: {0}\".format(_str_func,mRigBlock.baseAim))\n log.debug(\" baseSize: {0}\".format(_str_func,mRigBlock.baseSize))\n \n self.finalize()\n self.dropTool()\n \n if postState:\n changeState(postState,forceNew=True)\n return True\n \n castSizer(mode = 'midPoint',toCreate = l_toCreate)\n\n \n #except Exception,err:\n #cgmGEN.cgmExceptCB(Exception,err)", "def updateBlock(self):\n self.blkno = self.blknoSpinBox.value() - 1\n self.initDataParms()\n self.updateCurveList()\n self.compute()", "def close(self):\n for m in self._mappers:\n m.close()\n self._mappers = []\n self._offsets = []\n self._sizes = []\n self._handler = None", "def gc_blocks(seq, block_size):\n\n # Make all capital\n seq = seq.upper()\n iterations = len(seq) // block_size\n\n # Iterate through finding the GC content\n gc = []\n for i in range(iterations):\n block = seq[i*block_size:(i+1)*block_size]\n gc.append((block.count('G') + block.count('C')) / block_size)\n return tuple(gc)", "def _initBlock(o,block):\n o.block = block.clone().shift(*o.board.startPosition)", "def flush_structure(self):\n ...", "def clear(self):\n super().clear()\n self.world = None\n self.regions = {}\n self.loaded_regions = set()\n self.given_center = False", "def free_intermediate_arrays(self):\n self._mgx = None\n self._mgy = None\n self._mgz = None\n self._vander = None\n self._bkg_cube = None\n self._bkg_cube_dirty = True", "def _reset(self):\n\n self._filename = None\n self._block_map = {}\n self._ast = []\n self._ast.append(None) # header\n self._ast.append([]) # options list\n self._ast.append([]) # block list", "def put_block(self):\n self.blocks[self.editor_cursor_position[1]][\n self.editor_cursor_position[0]] = self.available_block_types[self.current_block_type]", "def createInnerRepresentation(self):\n\n for idx, single_block in enumerate(self._block_list):\n del self._to_be_processed[:]\n del self._metastring_rest[:]\n 
self._metastring_rest.append(self._metastring[idx])\n self.addMetastringPointer(single_block)", "def block(self, block):\n\n self._block = block", "def block(self, block):\n\n self._block = block", "def build_block_cross(self):\n from ambry.geo.util import find_geo_containment, find_containment\n from geoid import civick \n\n lr = self.init_log_rate(3000)\n\n def gen_bound():\n \n boundaries = self.library.dep('blockgroups').partition\n\n # Note, ogc_fid is the primary key. The id column is created by the shapefile. \n for i,boundary in enumerate(boundaries.query(\n \"SELECT AsText(geometry) AS wkt, gvid FROM blockgroups\")):\n lr('Load rtree')\n \n yield i, boundary['wkt'] , boundary['gvid'] \n \n def gen_points():\n\n for row in self.partitions.find(table = 'facilities_addresses').rows:\n if row['longitude'] and row['latitude']:\n yield (row['longitude'], row['latitude']), row['facilities_id']\n\n\n p = self.partitions.find_or_new(table='facilities_geoids')\n p.clean()\n\n with p.inserter() as ins:\n for point, point_o, cntr_geo, cntr_o in find_containment(gen_bound(),gen_points()):\n\n blockgroup_gvid = civick.Blockgroup.parse(cntr_o)\n tract_gvid = blockgroup_gvid.convert(civick.Tract)\n county_gvid = blockgroup_gvid.convert(civick.County)\n \n ins.insert(dict(facilities_id = point_o, \n blockgroup_gvid = str(blockgroup_gvid),\n tract_gvid = str(tract_gvid),\n county_gvid = str(county_gvid)\n ))\n \n lr('Marking point containment')", "def add_blocks(self, block_list):\n blocks = block_list.copy() ## Fixes bug??\n for block in block_list:\n x, y = block\n self[x, y] = True", "def __init__(self, context, screen, prev_state):\n self.context = context\n self.screen = screen\n self.prev_state = prev_state\n self.background = Assets.background\n self.border = Assets.border\n self.blocks = []\n self.block_types = dict()\n for i in xrange(0, len(Assets.blocks)):\n self.block_types[str(i + 1)] = Assets.blocks[i]\n self.block_types['e'] = Assets.blockE\n self.block_types['i'] = Assets.blockI\n self.block_types['m'] = Assets.blocksM[len(Assets.blocksM) - 1]\n # self.block_types['0'] = AssetManager.editor_cursor_block\n self.available_block_types = sorted(self.block_types.keys())\n # self.available_block_types.append('0')\n for _ in xrange(0, BLOCK_NUM_HEIGHT):\n self.blocks.append(['0', ] * BLOCK_NUM_WIDTH)\n # for x in xrange(0, BLOCK_NUM_WIDTH):\n self.editor_cursor_block = Assets.editor_cursor_block\n self.editor_cursor_position = (0, 0)\n self.current_block_type = 0\n self.mode_paint = False\n self.mode_erase = False\n self.font = Assets.font\n self.label_current_block_type = self.font.render(\n \"Current block: +/-/mouse wheel to change block type, 0 to reset\",\n 1, (255, 255, 255))\n self.label_help_top = self.font.render(\n \"Esc - Back to menu, F5 - Save, F9 - Load, RMB - Remove block, MMB - Pick block\",\n 1, (255, 255, 255))\n # print sorted(self.block_types.keys())", "def blocks(self): # -> BlockView:\n ...", "def __init__(self,\n num_heads=8,\n seq_len=1024,\n block=16,\n different_layout_per_head=False,\n num_sliding_window_blocks=3,\n global_block_indices=[0],\n global_block_end_indices=None):\n super().__init__(num_heads, seq_len, block, different_layout_per_head)\n\n if (self.num_blocks < num_sliding_window_blocks):\n raise ValueError(\n f'Number of sliding window blocks, {num_sliding_window_blocks}, must be smaller than overal number of blocks in a row, {self.num_blocks}!'\n )\n self.num_sliding_window_blocks = num_sliding_window_blocks\n\n if (self.num_blocks < 
len(global_block_indices)):\n raise ValueError(\n f'Number of global blocks indices, {global_block_indices}, must be smaller than overal number of blocks in a row, {self.num_blocks}!'\n )\n for idx in global_block_indices:\n if idx >= self.num_blocks:\n raise ValueError(\n f'Global block index, {global_block_indices[idx]}, must be smaller than overal number of blocks in a row, {self.num_blocks}!'\n )\n self.global_block_indices = global_block_indices\n\n if (global_block_end_indices is not None):\n if (len(global_block_indices) != len(global_block_end_indices)):\n raise ValueError(\n f'Global block start indices length, {len(global_block_indices)}, must be same as global block end indices length, {len(global_block_end_indices)}!'\n )\n for _, (start_idx, end_idx) in enumerate(zip(global_block_indices, global_block_end_indices)):\n if end_idx > self.num_blocks:\n raise ValueError(\n f'Global block end index, {global_block_end_indices[idx]}, must be smaller (equal) than overal number of blocks in a row, {self.num_blocks}!'\n )\n if start_idx >= end_idx:\n raise ValueError(\n f'Global block start index, {start_idx}, must be smaller than global block end index, {end_idx}!'\n )\n self.global_block_end_indices = global_block_end_indices\n self.make_layout()", "def block_offsets(self):\n ...", "def breakOutBlockData(input_queue, output_queue, message_queue, config, \n db_config, start_time):\n\n try:\n temp_time = time.localtime()\n continue_run, county_fips = getCounty_fips(config, start_time)\n\n if continue_run:\n continue_run = changeTogeom(db_config, config, start_time)\n\n if continue_run:\n continue_run = makeBlockTablePickle(config, db_config, start_time)\n\n if continue_run:\n for _ in range(config['number_servers']):\n message_queue.put('parse_blockdf')\n\n continue_run, county_counter = loadBlockQueue(input_queue, county_fips, \n config, start_time)\n\n if continue_run:\n continue_run = s0f.processWork(config, input_queue, output_queue, \n county_counter, start_time)\n\n if continue_run:\n continue_run = changeToGEOMETRY(config, db_config, start_time)\n \n if continue_run:\n my_message = \"\"\"\n INFO - STEP 0 (MASTER): TASK 6 OF 13 - COMPLETED CREATING COUNTY\n LEVEL GEOJSON BLOCK FILES\n \"\"\"\n my_message = ' '.join(my_message.split())\n print(nbmf.logMessage(my_message,temp_time, time.localtime(), \n time.mktime(time.localtime()) - time.mktime(start_time))) \n gc.collect() \n return True\n \n else:\n my_message = \"\"\"\n ERROR - STEP 0 (MASTER): TASK 6 OF 13 - FAILED TO CREATE COUNTY\n LEVEL GEOJSON BLOCK FILES\n \"\"\"\n my_message = ' '.join(my_message.split()) + '\\n' + traceback.format_exc()\n print(nbmf.logMessage(my_message,temp_time, time.localtime(), \n time.mktime(time.localtime()) - time.mktime(start_time)))\n return False \n\n except:\n my_message = \"\"\"\n ERROR - STEP 0 (MASTER): TASK 6 OF 13 - FAILED TO CREATE COUNTY\n LEVEL GEOJSON BLOCK FILES\n \"\"\"\n my_message = ' '.join(my_message.split()) + '\\n' + traceback.format_exc()\n print(nbmf.logMessage(my_message,temp_time, time.localtime(), \n time.mktime(time.localtime()) - time.mktime(start_time)))\n return False" ]
[ "0.69011474", "0.6140728", "0.601313", "0.59879786", "0.59604454", "0.58850497", "0.581188", "0.58049697", "0.5709238", "0.56697476", "0.5665943", "0.5652778", "0.5647419", "0.5641385", "0.5623683", "0.5622437", "0.56117296", "0.55557007", "0.5543001", "0.55420923", "0.55420923", "0.55420923", "0.55107075", "0.55107075", "0.5506", "0.55052024", "0.5495003", "0.5495003", "0.54832375", "0.5473158", "0.5471061", "0.54602736", "0.5456974", "0.54551744", "0.54520005", "0.54482335", "0.54153466", "0.54145944", "0.54122466", "0.54120857", "0.5405932", "0.54029876", "0.5399258", "0.53991514", "0.5393541", "0.5391447", "0.5390043", "0.5378449", "0.5377147", "0.5373209", "0.53676605", "0.53572506", "0.5342807", "0.5320556", "0.5301219", "0.5283379", "0.5271424", "0.5261426", "0.52572924", "0.5252053", "0.5250837", "0.52170205", "0.52169305", "0.521536", "0.5209298", "0.51979953", "0.5193904", "0.5193904", "0.5193904", "0.5193904", "0.5193904", "0.5193904", "0.5193134", "0.518788", "0.5185988", "0.51822317", "0.51796466", "0.51783204", "0.5173804", "0.5167092", "0.516221", "0.51398796", "0.51390135", "0.5136748", "0.51218593", "0.5119857", "0.51179093", "0.5117829", "0.511617", "0.5098617", "0.5096945", "0.50957584", "0.50957584", "0.5094023", "0.5087295", "0.50856435", "0.5080638", "0.507915", "0.5078029", "0.5065125" ]
0.50894725
94
Returns the details for a given plugin.
def load_details(self): response = self._server._api_request("GET", "/plugins/plugin/%d" % self.id, "") if response is not None: self.id = response["id"] self.name = response["name"] self.family_name = response["family_name"] self.attributes = response["attributes"] return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getPluginInfo(pluginId):\n url = f\"https://develop.roblox.com/v1/plugins?pluginIds={pluginId}\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j['data']", "def get_details(self):\n return PluginDetails(\n plugin_name=\"bad-string-detail-is-int\",\n plugin_id=\"MDE007\",\n plugin_enabled_by_default=True,\n plugin_description=123,\n plugin_version=\"0.0.0\",\n plugin_interface_version=1,\n )", "def metadata_for_plugin(self, plugin):\r\n if plugin in self._plugins:\r\n return self._plugins[plugin]\r\n else:\r\n fp = None\r\n metadata = None\r\n info_file = plugin + INFO_FILE_EXTENSION\r\n try:\r\n fp = open(os.path.join(self.path, info_file), 'r')\r\n metadata = json.load(fp)\r\n except Exception as e:\r\n self.log.exception('Exception caught while loading plugin metadata: %s' % e)\r\n raise e\r\n finally:\r\n if fp:\r\n fp.close()\r\n return metadata", "def get_details(self):\n return PluginDetails(\n plugin_name=\"no-hard-tabs\",\n plugin_id=\"MD010\",\n plugin_enabled_by_default=True,\n plugin_description=\"Hard tabs\",\n plugin_version=\"0.5.0\",\n plugin_interface_version=1,\n plugin_url=\"https://github.com/jackdewinter/pymarkdown/blob/main/docs/rules/rule_md010.md\",\n plugin_configuration=\"code_blocks\",\n )", "def plugin_info():\n\n return {\n 'name': 'Enviro pHAT Poll Plugin',\n 'version': '1.7.0',\n 'mode': 'poll',\n 'type': 'south',\n 'interface': '1.0',\n 'config': _DEFAULT_CONFIG\n }", "def describe(self, plugin):\n plug = plugin_source.load_plugin(plugin)\n plug.describe()", "def get_details(self) -> PluginDetails:\n return PluginDetails(\n plugin_name=\"ol-prefix\",\n plugin_id=\"MD029\",\n plugin_enabled_by_default=True,\n plugin_description=\"Ordered list item prefix\",\n plugin_version=\"0.5.0\",\n plugin_interface_version=1,\n plugin_url=\"https://github.com/jackdewinter/pymarkdown/blob/main/docs/rules/rule_md029.md\",\n plugin_configuration=\"style\",\n )", "def plugin_description(self):\n return self.__plugin_description", "def get_details(self) -> PluginDetails:\n return PluginDetails(\n plugin_name=\"no-multiple-space-atx\",\n plugin_id=\"MD019\",\n plugin_enabled_by_default=True,\n plugin_description=\"Multiple spaces are present after hash character on Atx Heading.\",\n plugin_version=\"0.5.0\",\n plugin_interface_version=1,\n plugin_url=\"https://github.com/jackdewinter/pymarkdown/blob/main/docs/rules/rule_md019.md\",\n )", "def plugin_info():\n\n return {\n 'name': 'MAX31865 Async plugin',\n 'version': '1.0',\n 'mode': 'async',\n 'type': 'south',\n 'interface': '1.0',\n 'config': _DEFAULT_CONFIG\n }", "def do_plugin_show(cc, args):\n fields = args.fields[0] if args.fields else None\n utils.check_empty_arg(args.plugin, '<id>')\n utils.check_for_invalid_fields(\n fields, res_fields.PLUGIN_DETAILED_RESOURCE.fields)\n plugin = cc.plugin.get(args.plugin, fields=fields)\n _print_plugin_show(plugin, fields=fields, json=args.json)", "def plugin_info():\n return {\n 'name': 'Playback',\n 'version': '2.1.0',\n 'mode': 'async',\n 'type': 'south',\n 'interface': '1.0',\n 'config': _DEFAULT_CONFIG\n }", "def plugins_show(what=None, name=None, version=None, details=False):\n global pluginmgr\n return pluginmgr.show(what, name, version, details)", "def get_details(self) -> PluginDetails:\n return PluginDetails(\n plugin_name=\"heading-start-left, header-start-left\",\n plugin_id=\"MD023\",\n plugin_enabled_by_default=True,\n plugin_description=\"Headings must start at the beginning of the line.\",\n plugin_version=\"0.5.0\",\n plugin_interface_version=1,\n 
plugin_url=\"https://github.com/jackdewinter/pymarkdown/blob/main/docs/rules/rule_md023.md\",\n )", "def get(plugin_id, logger, client, tenant_name, get_data):\n utils.explicit_tenant_name_message(tenant_name, logger)\n logger.info('Retrieving plugin %s...', plugin_id)\n plugin = client.plugins.get(plugin_id, _get_data=get_data)\n columns = PLUGIN_COLUMNS + GET_DATA_COLUMNS if get_data else PLUGIN_COLUMNS\n plugin['installed on'] = _format_installation_state(plugin)\n\n if get_global_json_output():\n # for json, also include installation_state because it's useful\n print_single(columns + ['installation_state'], plugin, 'Plugin:', 50)\n return\n\n states = {}\n for state in plugin.pop('installation_state', []):\n if state.get('manager'):\n label = 'Manager {0}'.format(state['manager'])\n elif state.get('agent'):\n label = 'Agent {0}'.format(state['agent'])\n states[label] = state['state']\n print_details({\n col: plugin.get(col) for col in columns\n }, 'Plugin:')\n print_details(states, 'Plugin installation state:')", "def plugin_data(self) -> global___SummaryMetadata.PluginData:", "def get_details(self):\n return PluginDetails(\n plugin_name=\"no-space-in-links\",\n plugin_id=\"MD039\",\n plugin_enabled_by_default=True,\n plugin_description=\"Spaces inside link text\",\n plugin_version=\"0.5.0\",\n plugin_interface_version=1,\n plugin_url=\"https://github.com/jackdewinter/pymarkdown/blob/main/docs/rules/rule_md039.md\",\n )", "def get_plugin(self, name):", "def get_plugin_meta(self, name: str) -> dict:\n if not self.has_plugin(name):\n raise plugins.exceptions.PluginNotRegistered(name)\n\n return self._plugins[name].get_meta_description()", "def get_plugin_info(self, handle):\n plugin_type = c_int()\n name = create_string_buffer(256)\n ver = c_uint()\n ckresult(\n _dll.FMOD_System_GetPluginInfo(\n self._ptr, handle, byref(plugin_type), byref(name), 256, byref(ver)\n )\n )\n return so(\n type=PLUGINTYPE(plugin_type.value), name=name.value, version=ver.value\n )", "def __get_plugin_details(self, plugin_instance, instance_file_name):\n\n try:\n instance_details = plugin_instance.get_details()\n (\n plugin_id,\n plugin_name,\n plugin_description,\n plugin_enabled_by_default,\n plugin_version,\n plugin_interface_version,\n plugin_url,\n plugin_configuration,\n ) = (\n instance_details.plugin_id,\n instance_details.plugin_name,\n instance_details.plugin_description,\n instance_details.plugin_enabled_by_default,\n instance_details.plugin_version,\n instance_details.plugin_interface_version,\n instance_details.plugin_url,\n instance_details.plugin_configuration,\n )\n except Exception as this_exception:\n raise BadPluginError(\n class_name=type(plugin_instance).__name__,\n ) from this_exception\n\n self.__verify_string_field(plugin_instance, \"plugin_id\", plugin_id)\n self.__verify_string_field(plugin_instance, \"plugin_name\", plugin_name)\n self.__verify_string_field(\n plugin_instance, \"plugin_description\", plugin_description\n )\n self.__verify_boolean_field(\n plugin_instance, \"plugin_enabled_by_default\", plugin_enabled_by_default\n )\n self.__verify_string_field(plugin_instance, \"plugin_version\", plugin_version)\n self.__verify_integer_field(\n plugin_instance, \"plugin_interface_version\", plugin_interface_version\n )\n if plugin_interface_version != 1:\n raise BadPluginError(\n formatted_message=f\"Plugin '{instance_file_name}' with an interface version ('{plugin_interface_version}') that is not '1'.\"\n )\n if plugin_url:\n self.__verify_string_field(plugin_instance, 
\"plugin_url\", plugin_url)\n if plugin_configuration:\n self.__verify_string_field(\n plugin_instance, \"plugin_configuration\", plugin_configuration\n )\n\n plugin_object = FoundPlugin(\n plugin_id,\n plugin_name,\n plugin_description,\n plugin_instance,\n plugin_enabled_by_default,\n plugin_version,\n plugin_interface_version,\n instance_file_name,\n plugin_url,\n plugin_configuration,\n )\n return plugin_object", "def get_plugin_description(self):\n return constants.L2_GATEWAY_SERVICE_PLUGIN", "def plugin_info():\n return {\n 'name': 'Pandas CSV Reader',\n 'version': '1.7.0',\n 'mode': 'poll',\n 'type': 'south',\n 'interface': '1.0',\n 'config': _DEFAULT_CONFIG\n }", "def get_plugin(request):\r\n res = requests.get(DaemonServer._mock_url + '/plugins/' + request.url_vars['id'])\r\n return res", "def _print_plugin_info(plugin, number, verbose=False):\n\tif len(plugin.prefix) > 0:\n\t\tprint _doWrap(\"Plugin #%s/%s: %s (%s)\" % (number, plugin.prefix, plugin.name, plugin.desc))\n\telse:\n\t\tprint _doWrap(\"Plugin #%s: %s (%s)\" % (number, plugin.name, plugin.desc))\n\n\tif verbose:\n\t\tprint\n\t\tprint \"Plugin Documentation:\"\n\t\tprint _doWrap(plugin.help)\n\t\tprint\n\t\tplugin.argsparser.print_help()", "def help(self, plugin):\n plug = plugin_source.load_plugin(plugin)\n plug.help()", "def get(self, plugin_id, _include=None, **kwargs):\n return get_storage_manager().get(\n models.Plugin,\n plugin_id,\n include=_include\n )", "def get_dsp_info_by_plugin(self, handle):\n desc = DSP_DESCRIPTION()\n self._call_fmod(\"FMOD_System_GetDSPInfoByPlugin\", handle, byref(desc))\n return desc", "def plugin_retrieve_info(stream_id):\n\n global _logger\n\n try:\n # note : _module_name is used as __name__ refers to the Sending Process\n logger_name = _MODULE_NAME + \"_\" + str(stream_id)\n\n if _log_debug_level == 0:\n _logger = logger.setup(logger_name)\n\n elif _log_debug_level == 1:\n _logger = logger.setup(logger_name, level=logging.INFO)\n\n elif _log_debug_level >= 2:\n # noinspection PyArgumentEqualDefault\n _logger = logger.setup(logger_name, level=logging.DEBUG)\n\n except Exception as ex:\n _message = plugin_common.MESSAGES_LIST[\"e000012\"].format(str(ex))\n _current_time = time.strftime(\"%Y-%m-%d %H:%M:%S:\")\n\n print (\"{0} - ERROR - {1}\".format(_current_time, _message))\n\n raise ex\n\n _logger.debug(\"{0} - \".format(\"plugin_retrieve_info\"))\n\n try:\n _retrieve_configuration(stream_id)\n\n plugin_info = {\n 'name': \"OMF Translator\",\n 'version': \"1.0.0\",\n 'type': \"translator\",\n 'interface': \"1.0\",\n 'config': _config\n }\n\n except Exception as ex:\n _message = plugin_common.MESSAGES_LIST[\"e000012\"].format(ex)\n\n _logger.error(_message)\n raise\n\n return plugin_info", "async def get_plugin_config(self, **kwargs) -> Any:\n namespace = self._get_namespace(**kwargs)\n return await self.AD.plugins.get_plugin_meta(namespace)", "def get_plugin_settings(plugin, directory=None):\n repo = require_repo(directory)\n plugins = get_value(repo, 'plugins')\n return plugins.get(plugin) if isinstance(plugins, dict) else None", "def get_details(self):\n return PluginDetails(\n plugin_name=\"list-indent\",\n plugin_id=\"MD005\",\n plugin_enabled_by_default=True,\n plugin_description=\"Inconsistent indentation for list items at the same level\",\n plugin_version=\"0.5.0\",\n plugin_interface_version=1,\n plugin_url=\"https://github.com/jackdewinter/pymarkdown/blob/main/docs/rules/rule_md005.md\",\n )", "def description(cls):\n return \"{} v{}\".format(cls.PLUGIN_NAME, 
cls.PLUGIN_VERSION)", "def load_plugin(self, plugin):\n return imp.load_module(self._main_module, *plugin[\"info\"])", "def getPlugin(self, *args):\n return _libsbml.SBase_getPlugin(self, *args)", "def get_tool_by_plugin_instance(self, plugin, package_name=None):\n\n if not package_name:\n package_name = plugin.PACKAGE if hasattr(plugins, 'PACKAGE') else None\n if not package_name:\n LOGGER.error('Impossible to retrieve data from plugin with undefined package!')\n return None\n\n if package_name not in self._plugins:\n LOGGER.error(\n 'Impossible to retrieve data from instance: package \"{}\" not registered!'.format(package_name))\n return None\n\n if hasattr(plugin, 'ID'):\n return self.get_tool_by_id(tool_id=plugin.ID, package_name=plugin.PACKAGE)\n\n return None", "def get_plugin_description(self):\n return (\"L3 Router Service Plugin for basic L3 forwarding\"\n \" using OVN\")", "def get_plugin_description(self):\n return (\"L3 Router Service Plugin for basic L3 forwarding\"\n \" using OVN\")", "def get_plugin_interface(self):", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def plugin_name(self):", "def format(self):\n if self._plugin:\n return self._plugin.name", "def get_plugin_name(self):\n return self.plugin_name", "def info(self):\n if self.integration is None:\n return None\n return self.integration.info", "def get_plug(self, name):\n return self.plug_dict[name]", "def fusion_api_get_interconnect_pluggable_module_info(self, uri=None, api=None, param='', headers=None):\n param = '/pluggableModuleInformation/%s' % param\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def uctt_plugin_factory_cli_info(\n environment: Environment, instance_id: str = ''):\n return InfoCliPlugin(environment, instance_id)", "def get_plugin(group, name):\n return _get_plugins(group, name)[name]", "def get_plugin(version):\n build_version = get_build_version(current_app, version)\n if build_version:\n pid = request.args.get('id')\n return _get_plugin(current_app, build_version, pid)\n else:\n return invalid_api_version(404)", "def _get_name(self, plugin):\n return plugin.name if isinstance(plugin, BasePlugin) else plugin", "def get_plugin_data(self, request=None, json_format=True):\n if self.plugin_data_fields:\n return self._get_plugin_data(self.plugin_data_fields,\n request=request,\n json_format=json_format)", "def get_details(self):", "def _section_name(self):\n return 'plugin:' + self.name", "def getPlugin(self, plugin_name):\n\t\tif plugin_name in self.plugins:\n\t\t\treturn self.plugins[plugin_name][\"module\"].getPluginInstance()\n\t\telse:\n\t\t\treturn None", "def getInfo():", "def detail(self):\n info = self.info()\n return info", "def getElementFromPluginsByMetaId(self, *args):\n return _libsbml.SBase_getElementFromPluginsByMetaId(self, *args)", "def plugin_instance(self):\n return self.__plugin_instance", "def get_info(self):\n pass", "def get_info(self):\n pass", "def getInfo(self):\n return self.info", "def vars(self) -> dict[str, str]:\n return {'plugin_name': self.name, 'version': self.version}", "def test_plugin_retrieval(self):\n plugin = ProjectAppPluginPoint.get_plugin(PLUGIN_NAME)\n self.assertIsNotNone(plugin)\n self.assertEqual(plugin.get_model().name, PLUGIN_NAME)\n self.assertEqual(plugin.name, PLUGIN_NAME)\n self.assertEqual(plugin.get_model().title, PLUGIN_TITLE)\n 
self.assertEqual(plugin.title, PLUGIN_TITLE)\n self.assertEqual(plugin.entry_point_url_id, PLUGIN_URL_ID)", "def fill_plugin_metadata(cls, plugin, metadata):\n metadata['plugin_id'] = plugin.id\n metadata['plugin_version'] = plugin.version\n metadata['hot_pluggable'] = plugin.is_hotpluggable", "def plugin_configuration(self):\n return self.__plugin_configuration", "def plugin_one():\n return \"one\"", "def plugin_id(self):\n return self.__plugin_id", "def driver(self):\r\n ext = self.extensions[0]\r\n return ext.obj if ext.obj else ext.plugin", "def get_plugin_description(self):\n return (\"BGP dynamic routing service for announcement of next-hops \"\n \"for project networks, floating IP's, and DVR host routes.\")", "def return_info(self):\n\t\treturn self.info", "def get_info(self) -> str:\n return self.info", "def test_get_plugin_by_id(self):\n response = self.client.get_plugin_by_id(1)\n self.assertEqual(response['id'], 1)", "def get_information(self):\n try:\n return self._get_information()\n except(AttributeError, KeyError) as e:\n self._logger.error(f\"Error scrapping the tab information: {e}\")", "def details(self, packageName):\n path = \"details?doc=%s\" % requests.utils.quote(packageName)\n message = self.executeRequestApi2(path)\n return message.payload.detailsResponse", "def plugin_url(self):\n return self.__plugin_url", "def details(self):\n print \"ABC - Deployer.details()\"", "def get_plugin_description(self):\n return (\"BGP dynamic routing service for announcement of next-hops \"\n \"for private networks and floating IP's host routes.\")", "def get_info(self):\n return \"TODO !\"", "def get_info(self) -> dict:\n with suppress_stdout():\n with youtube_dl.YoutubeDL() as ydl:\n info_dict = ydl.extract_info(self.url, download=False)\n return info_dict", "def printPluginHeader(self):\n print \"%s, version %s\" % (desc, version)", "def list_plugins(self):\n if self.info is None:\n print(\"Currently no plugin is available.\\n\")\n else:\n print(self.info)\n print('\\n')\n if self.current_analyzer:\n self.check_analyzer()", "def get_details(self):\n return self.__config_data", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))", "def GetMetadata(self):\n return self.dict['meta']", "def test_get_plugin_parameters(self):\n plugin_id = 2\n response = self.client.get_plugin_parameters(plugin_id,\n {'limit': 50, 'offset': 0})\n self.assertEqual(response['data'][0]['name'], \"dir\")", "def get_plugin_options(name):\n return get_plugin_loader(name).get_options()", "def get_plugin_setting(self, plugin, parameter):\n asserts.assert_true(\n self.fuel_web.check_plugin_exists(self.cluster_id, plugin),\n \"Plugin {0} isn't found.\".format(plugin))\n\n attributes = self.nailgun_client.get_cluster_attributes(\n self.cluster_id)\n attributes = attributes['editable'][plugin]\n\n value = None\n for item in attributes['metadata']['versions']:\n if (parameter in item and\n item['metadata']['plugin_id'] ==\n attributes['metadata']['chosen_id']):\n value = item[parameter]['value']\n break\n asserts.assert_is_not_none(\n value, \"Could not find parameter {0} for plugin {1}\".format(\n parameter, plugin))\n return value", "def get_info(self, charger):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_info\",\n \"token\": charger.token(),\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_api_secure\".format(self.BASE_URL),\n data=json.dumps(data),\n 
headers=headers)\n response_json = response.json()\n return response_json", "def get_plugin(config_cls: Type) -> str:\n cls_module = inspect.getmodule(config_cls)\n return 'rastervision.' + cls_module.__name__.split('.')[1]", "def info(self):\n return requests.get(self.info_url + self.pid).json()", "def plugin_version(self):\n return self.__plugin_version", "def details(self, uuid):\n validate_uuid(uuid)\n\n return self._phishdetect.get(API_PATH[\"reports_details\"].format(uuid=uuid))", "def get_info(self):\n url = self._url_for_op('info')\n data= None # This will be a GET request since data is None\n response = self._get_raw_response(self._get_json_headers,\n self._get_json_response, url, data)\n response = json.loads(response)\n self.api_info = response['results']\n return self.api_info", "def playerPlugin(request, playersPluginId):\n plugin = PlayerPlugin.objects.get(pk=playersPluginId)\n tempModel = serializers.serialize('json', [plugin])\n context = json.loads(tempModel[1:-1])\n del context['pk']\n del context['model']\n del context['fields']['class_module']\n del context['fields']['class_module_name']\n del context['fields']['class_archive']\n del context['fields']['class_archive_name']\n del context['fields']['version']\n readme = 'The archive must include context.json and 1 python module\\r\\ncontext.json:\\r\\nbecause data is a free-form field, you cannot use single-quotes (\\') and double-quotes must be escaped (\\\\\")\\r\\n\\tname: is unique to each plugin and if you upload a duplicate a new version will be created\\r\\n\\tpriority: is used to sort the plugin list in club settings, -1 priority plugins are sorted only by name\\r\\n\\tdata: is sent to your plugin when it is run\\r\\n\\tclass_name: is the class within the module that will be used by Abstract Base Class\\r\\n\\tdescription: helpful text for the users of the plugin'\n return createFileResponse(context, plugin.name, plugin.class_archive_name, plugin.class_module_name, os.path.join(settings.BASE_DIR, 'golf/media/'+plugin.class_module.name), readme)", "def load_plugin():\n return HostTestPluginCopyMethod_Shell()", "def get_plugin(name):\n for plugin in IPluginRegistry.plugins:\n if name in plugin.__name__:\n return plugin\n raise ValueError(\"The plugin %s cannot be found.\" % name)", "def get_info(self, card):\n\n try:\n\n info = subprocess.check_output(\n [\"youtube-dl\", \"-i\", \"-j\",\n f\"ytsearch: {card[0]} {card[1]} audiobook\"])\n\n self.details = json.loads(info)\n\n except:\n\n return" ]
[ "0.72806084", "0.71445906", "0.7018347", "0.6907199", "0.6841461", "0.683811", "0.67773193", "0.6708696", "0.6622913", "0.6553286", "0.6524269", "0.6480507", "0.6399457", "0.63910574", "0.6374219", "0.63439155", "0.63216966", "0.630637", "0.62978387", "0.62750924", "0.626016", "0.6237607", "0.6162853", "0.61612505", "0.6093619", "0.60411793", "0.6028037", "0.5951289", "0.5926487", "0.59239286", "0.5869314", "0.58414876", "0.58291334", "0.5820559", "0.58050114", "0.57835746", "0.57469845", "0.57469845", "0.5737585", "0.5708633", "0.5669322", "0.56638855", "0.56296754", "0.560372", "0.55916244", "0.5566481", "0.55628145", "0.55628145", "0.55628145", "0.55077446", "0.5484229", "0.5482281", "0.54814196", "0.5474722", "0.5447697", "0.54427856", "0.5421714", "0.5419057", "0.53986365", "0.5393546", "0.53602713", "0.53595626", "0.53595626", "0.5351613", "0.53505623", "0.5335758", "0.5334252", "0.5318214", "0.5313954", "0.5289184", "0.52560925", "0.5252579", "0.52482575", "0.52469295", "0.52356815", "0.5233468", "0.5232311", "0.52281153", "0.52273", "0.52193934", "0.5215261", "0.521321", "0.52088165", "0.51964575", "0.51957184", "0.5195653", "0.5185853", "0.51848245", "0.5181472", "0.51798123", "0.51759464", "0.51679945", "0.5157125", "0.51533943", "0.5146839", "0.51415646", "0.5137626", "0.5131625", "0.5127152", "0.512014" ]
0.6764841
7
Reports whether someone is tall enough for the attraction.
def lang_genoeg(lengte): return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def substantiate():", "def cliquer_sur_unité(self):", "def makeGerund(self):\r\n clean_s = self.cleanString(self.text)\r\n LoW = clean_s.split()\r\n for x in LoW: \r\n if 'ing' in x and x not in self.gerund: \r\n self.gerund[x] = 1\r\n elif 'ing' in x and x in self.gerund: \r\n self.gerund[x] += 1\r\n return self.gerund", "def translate_leet(phrase):", "def gk_g_checker(self, seq):\n seq = re.sub(r'гк', r'хк', seq)\n return seq", "def question_new_translate():", "def degibber(self):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def retranslate(self):\r\n pass", "def retranslate(self):\r\n pass", "def gibber(self): \n for x in self.consonants:\n if (x in self.sentence):\n \t self.sentence = self.sentence.replace(x, x+'o'+unicode(x).lower())", "def elegir_ventana(self):\r\n pass", "def gerundify(verb):\n if verb.endswith(\"e\"):\n verb = verb[:-1]\n\n if random() < 0.4:\n if (\n not verb.startswith(\"a\")\n and not verb.startswith(\"e\")\n and not verb.startswith(\"i\")\n and not verb.startswith(\"o\")\n and not verb.startswith(\"u\")\n ):\n verb = \"a-\" + verb\n\n return verb + \"ing\"", "def test_i18n28(self):\n output = self.engine.render_to_string('i18n28', {'anton': 'α', 'berta': 'β'})\n self.assertEqual(output, 'α + β')", "def test_i18n28(self):\n output = self.engine.render_to_string('i18n28', {'anton': 'α', 'berta': 'β'})\n self.assertEqual(output, 'α + β')", "def alpha(self):\r\n return self.unif[17]", "def test_i18n28(self):\n output = self.engine.render_to_string(\"i18n28\", {\"anton\": \"α\", \"berta\": \"β\"})\n self.assertEqual(output, \"α + β\")", "def nalichtingstijd(self):\n return self._nalichtingstijd.get_waarde()", "def get_translation(self):", "def test_i18n17(self):\n output = self.engine.render_to_string(\"i18n17\", {\"anton\": \"α & β\"})\n self.assertEqual(output, \"α &amp; β\")", "def test_i18n17(self):\n output = self.engine.render_to_string('i18n17', {'anton': 'α & β'})\n self.assertEqual(output, 'α &amp; β')", "def test_i18n17(self):\n output = self.engine.render_to_string('i18n17', {'anton': 'α & β'})\n self.assertEqual(output, 'α &amp; β')", "def gold():\r\n price = give_price_websites_1(\"https://www.tgju.org/profile/geram18\")\r\n\r\n if users_language[update.effective_chat.id] == \"english\":\r\n return \"gold(per gram) : \" + format(price/10000000, '.3f') + \" mTomans\"\r\n elif users_language[update.effective_chat.id] == \"persian\":\r\n return \" هزارتومان\" + format(price/10000000, '.3f') + \"طلا : \"", "def genlangs(self):\r\n raise NotImplementedError", "def translate():\n pass", "def feature_dict(sent, i):\n palabra=sent[i] #suponinedo que al menos tiene una palabra\n especiales= [\"á\",\"é\",\"í\",\"ó\",\"ú\", \"ü\"] #solo chequeo minusculas porque pregunto sobre el lower del string\n\n #sobre la anterior\n if i==0: #primera de la oracion\n alower=\"\"\n aistitle=False\n aisupper=False\n aisnumeric=False\n aisplural=False\n #aunder=False\n aislower=False\n aespecial=False\n else:\n alower = sent[i-1].lower()\n aistitle = sent[i-1].istitle()\n aisupper = sent[i-1].isupper()\n aisnumeric = sent[i-1].isnumeric()\n aisplural= (sent[i-1][-1:].lower() == 's')\n #aunder= (sent[i-1].find('_') >= 0)\n aislower = sent[i-1].islower()\n aespecial = (1 in [c in sent[i-1].lower() for c in especiales]),\n\n #sobre la proxima\n if i==len(sent)-1: #si es la ultima\n plower = \"\"\n pistitle = False\n pisupper = False\n pisnumeric = False\n pisplural= False\n 
#punder=False\n pislower = False\n pespecial = False\n else:\n plower = sent[i + 1].lower()\n pistitle = sent[i + 1].istitle()\n pisupper = sent[i + 1].isupper()\n pisnumeric = sent[i + 1].isnumeric()\n pisplural= (sent[i + 1][-1:].lower() == 's')\n #punder = (sent[i + 1].find('_') >= 0)\n pislower = sent[i + 1].islower()\n pespecial = (1 in [c in sent[i+1].lower() for c in especiales]),\n\n return {\n 'lower': palabra.lower(),\n 'istitle': palabra.istitle(),\n 'isupper': palabra.isupper(),\n 'isnumeric': palabra.isnumeric(),\n 'isplural': (palabra[-1:].lower() == 's'),\n #'under': (palabra.find('_') >= 0),\n 'islower': palabra.islower(),\n 'especial': (1 in [c in palabra.lower() for c in especiales]),\n 'alower': alower,\n 'aistitle': aistitle,\n 'aisupper': aisupper,\n 'aisnumeric': aisnumeric,\n 'aisplural': aisplural,\n #'aunder': aunder,\n 'aespecial': aespecial,\n 'aislower': aislower,\n 'plower': plower,\n 'pistitle': pistitle,\n 'pisupper': pisupper,\n 'pisnumeric': pisnumeric,\n 'pisplural': pisplural,\n #'punder': punder,\n 'pislower': pislower,\n 'pespecial': pespecial,\n }", "def makePigLatin(word): \n m = len(word)\n vowels = \"a\", \"e\", \"i\", \"o\", \"u\", \"y\" \n # short words are not converted \n if m<3 or word==\"the\":\n return word\n else:\n for i in vowels:\n if word.find(i) < m and word.find(i) != -1:\n m = word.find(i)\n if m==0:\n return word+\"way\" \n else:\n return word[m:]+word[:m]+\"ay\"", "def leerUltrasonido() -> int:\n pass", "def design_grna(seq):\n\n transcript = {'A': 'U', 'C': 'G', 'G': 'C', 'T': 'A'}\n grna = \"\".join(transcript[n] for n in seq)\n\n return grna", "def _derive_lang_en(place):\n _LOGGER.debug(\"derive_lang_en: %r\", place)\n\n en_names = [place.name.lower(), place.asciiname.lower()]\n derive_from = _dedup(en_names + [x.lower() for x in place.alternativeNames])\n derived = []\n\n #\n # Hyphenated names should always have space-separated variants\n #\n derived.append(place.name.lower().replace(\"-\", \" \"))\n derived.append(place.asciiname.lower().replace(\"-\", \" \"))\n\n #\n # Saint X names should always have St X variants\n #\n if _SAINT_REGEX.search(place.name):\n derived.extend([_SAINT_REGEX.sub(\"st\", n) for n in derive_from])\n\n #\n # Mount X names should always have Mt X variants\n #\n if _MOUNT_REGEX.search(place.name):\n derived.extend([_MOUNT_REGEX.sub(\"mt\", n) for n in derive_from])\n\n #\n # X O' Y names should always have 'of' and 'o' variants\n #\n if _XOY_REGEX.search(place.name):\n derived.extend([_XOY_REGEX.sub(\" o' \", n) for n in derive_from])\n derived.extend([_XOY_REGEX.sub(\" o’ \", n) for n in derive_from])\n derived.extend([_XOY_REGEX.sub(\" of \", n) for n in derive_from])\n derived.extend([_XOY_REGEX.sub(\" o \", n) for n in derive_from])\n\n #\n # O' XYZ names should have variants with that space removed\n #\n if _O_REGEX.search(place.name):\n derived.extend([_O_REGEX.sub(\"o'\", n) for n in derive_from])\n derived.extend([_O_REGEX.sub(\"o’\", n) for n in derive_from])\n\n return [DerivedName(text, \"en\") for text in derived]", "def _step1b(self, word):\n # this NLTK-only block extends the original algorithm, so that\n # 'spied'->'spi' but 'died'->'die' etc\n if self.mode == self.NLTK_EXTENSIONS:\n if word.endswith(\"ied\"):\n if len(word) == 4:\n return self._replace_suffix(word, \"ied\", \"ie\")\n else:\n return self._replace_suffix(word, \"ied\", \"i\")\n\n # (m>0) EED -> EE\n if word.endswith(\"eed\"):\n stem = self._replace_suffix(word, \"eed\", \"\")\n if self._measure(stem) > 0:\n return stem + 
\"ee\"\n else:\n return word\n\n rule_2_or_3_succeeded = False\n\n for suffix in [\"ed\", \"ing\"]:\n if word.endswith(suffix):\n intermediate_stem = self._replace_suffix(word, suffix, \"\")\n if self._contains_vowel(intermediate_stem):\n rule_2_or_3_succeeded = True\n break\n\n if not rule_2_or_3_succeeded:\n return word\n\n return self._apply_rule_list(\n intermediate_stem,\n [\n (\"at\", \"ate\", None), # AT -> ATE\n (\"bl\", \"ble\", None), # BL -> BLE\n (\"iz\", \"ize\", None), # IZ -> IZE\n # (*d and not (*L or *S or *Z))\n # -> single letter\n (\n \"*d\",\n intermediate_stem[-1],\n lambda stem: intermediate_stem[-1] not in (\"l\", \"s\", \"z\"),\n ),\n # (m=1 and *o) -> E\n (\n \"\",\n \"e\",\n lambda stem: (self._measure(stem) == 1 and self._ends_cvc(stem)),\n ),\n ],\n )", "def aanleg(self):\n return self._aanleg.get_waarde()", "def dis(self):\n return self.nlegomena(2)", "def test_legacyi18n04(self):\n output = self.engine.render_to_string(\"legacyi18n04\", {\"anton\": \"Å\"})\n self.assertEqual(output, \"å\")", "def pig_latin(word):\n first_letter = word[0]\n rest_of_word = word[1 : ]\n #print(\"First letter is\", first_letter)\n #print(\"rest_of_word is\", rest_of_word)\n if first_letter == 'a' or first_letter == 'e' or first_letter == 'i' or first_letter == 'o' or first_letter == 'u': \n pig_latin_word = word + 'way'\n else: \n pig_latin_word = rest_of_word + first_letter + 'ay'\n return pig_latin_word", "def geminates_checker(self, s):\n s = re.sub(r'([йцкгшщзхфвпрлджчсмтб])\\1+', r'\\1', s)\n s = re.sub(r'н{2}([йцкгшщзхфвпрлджчсмтб ])', r'н\\1', s) \n return s", "def nintl(self):", "def _translate(self):\r\n\r\n for place, pseudo_binary in self.letters.items():\r\n for letter in self.alphabet:\r\n\r\n with open(os.path.join(self.training_data_folder, letter + '.json'), 'r', encoding = 'utf-8') as js:\r\n data = json.loads(js.read())\r\n\r\n if pseudo_binary in data:\r\n self.result[place] = letter\r\n break\r\n\r\n else:\r\n self.result[place] = '-'\r\n\r\n if not self.devmode:\r\n return 'Not solved'\r\n\r\n return ''.join(self.result.values())", "def mezclar_bolsa(self):", "def pig_latin(word):\n \n first_letter = word[0]\n rest_of_word = word[1 : ]\n \n # Student should complete function on the next lines.\n \n if first_letter == 'a' or first_letter == 'e' or first_letter == 'i' or first_letter == 'o' or first_letter == 'u':\n return word + \"way\"\n else:\n return rest_of_word + first_letter + \"ay\"", "def achthoekig(self):\n return self._achthoekig.get_waarde()", "def verteileKarten(anzahlSpieler):\n pass", "def find_abecedarian_words():\n pass", "def fix_greek_in_mathml(self, xml):\r\n def gettag(expr):\r\n return re.sub('{http://[^}]+}', '', expr.tag)\r\n\r\n for k in xml:\r\n tag = gettag(k)\r\n if tag == 'mi' or tag == 'ci':\r\n usym = unicode(k.text)\r\n try:\r\n udata = unicodedata.name(usym)\r\n except Exception:\r\n udata = None\r\n # print \"usym = %s, udata=%s\" % (usym,udata)\r\n if udata:\t\t\t# eg \"GREEK SMALL LETTER BETA\"\r\n if 'GREEK' in udata:\r\n usym = udata.split(' ')[-1]\r\n if 'SMALL' in udata:\r\n usym = usym.lower()\r\n #print \"greek: \",usym\r\n k.text = usym\r\n self.fix_greek_in_mathml(k)\r\n return xml", "def verb_lemma(word):\n if word.endswith(\"ed\"):\n if word[:-2].endswith(\"v\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"at\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"it\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"et\"):\n return word[:-2].lower() + \"e\"\n elif 
word[:-2].endswith(\"ut\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"ac\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"i\"):\n return word[:-3].lower() + \"y\"\n elif word[:-2].endswith(\"ir\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"ag\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"nc\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"nu\"):\n return word[:-2].lower() + \"e\"\n else:\n return word[:-2].lower() \n elif word.endswith(\"ing\"):\n if word[:-3].endswith(\"v\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"at\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"it\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"et\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"ut\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"ac\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"i\"):\n return word[:-4].lower() + \"y\"\n elif word[:-3].endswith(\"ir\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"ag\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"nc\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"nu\"):\n return word[:-3].lower() + \"e\"\n else:\n return word[:-3].lower()\n elif re.match(r\"(does|did|done)\", word):\n return (\"do\")\n elif re.match(r\"(is|are|am|was|will|were|been)\", word):\n return (\"be\")\n elif word == (\"'s\"):\n return (\"be\")\n elif re.match(r\"(had|has|'ve)\", word):\n return (\"have\")\n else:\n return word.lower()", "def test_langid(basic_multilingual):\n english_text = \"This is an English sentence.\"\n french_text = \"C'est une phrase française.\"\n docs = [english_text, french_text]\n\n docs = [Document([], text=text) for text in docs]\n basic_multilingual(docs)\n predictions = [doc.lang for doc in docs]\n assert predictions == [\"en\", \"fr\"]", "def ta2en(text):\n return IITB_translator(\"ta\", \"en\", text)", "def alphabet_war(fight):", "def g(self):\n return 2", "def use_en(self):\n pass", "def main():\n\tnuc = \"ATGAAGACCATCATTGCTTTGAGCTACATTTTCTGTCTGGCTCTCGGCCAAGACCTTCCAGGAAATGACAACAGCACAGCAACGCTGTGCCTGGGACATCATGCGGTGCCAAACGGAACACTAGTGAAAACAATCACAGATGATCAGATTGAAGTGACTAATGCTACTGAGCTAGTTCAGAGCTCCTCAACGGGGAAAATATGCAACAATCCTCATCGAATCCTTGATGGAATAGACTGCACACTGATAGATGCTCTATTGGGGGACCCTCATTGTGATGTTTTTCAAAATGAGACATGGGACCTTTTCGTTGAACGCAGCAAAGCTTTCAGCAACTGTTACCCTTATGATGTGCCAGATTATGCCTCCCTTAGGTCACTAGTTGCCTCGTCAGGCACTCTGGAGTTTATCACTGAGGGTTTCACTTGGACTGGGGTCACTCAGAATGGGGGAAGCAATGCTTGCAAAAGGGGACCTGGTAGCGGTTTTTTCAGTAGACTGAACTGGTTGACCAAATCAGGAAGCACATATCCAGTGCTGAACGTGACTATGCCAAACAATGACAATTTTGACAAACTATACATTTGGGGGGTTCACCACCCGAGCACGAACCAAGAACAAACCAGCCTGTATGTTCAAGCATCAGGGAGAGTCACAGTCTCTACCAGAAGAAGCCAGCAAACTATAATCCCGAATATCTGGTCCAGACCCTGGGTAAGGGGTCTGTCTAGTAGAATAAGCATCTATTGGACAATAGTTAAGCCGGGAGACGTACTGGTAATTAATAGTAATGGGAACCTAATCGCTCCTCGGGGTTATTTCAAAATGCGCACTGGGAAAAGCTCAATAATGAGGTCAGATGCACCTATTGATACCTGTATTTCTGAATGCATCACTCCAAATGGAAGCATTCCCAATGACAAGCCCTTTCAAAACGTAAACAAGATCACATATGGAGCATGCCCCAAGTATGTTAAGCAAAACACC\"\n\taa = translate(nuc[48:])\n\tep = epitope_sites(aa)\n\tne = nonepitope_sites(aa)\n\trb = receptor_binding_sites(aa)\n\tprint \"nuc: \" + nuc\n\tprint \"aa: \" + aa\n\tprint \"ep: \" + ep\n\tprint \"ne: \" + ne\n\tprint \"rb: \" + rb", "def translate(self):\n pass", "def algemene_informatie_aanvragen():\n # gegevens voor algemene informatie\n gegevens = csvread(\"gestald.csv\")\n\n vrije_plekken = 1000 - 
len(gegevens)\n\n return vrije_plekken", "def test_i18n04(self):\n output = self.engine.render_to_string(\"i18n04\", {\"anton\": \"Å\"})\n self.assertEqual(output, \"å\")", "def lire():\r\n price = give_price_websites_1(\"https://www.tgju.org/profile/price_try\")\r\n\r\n if users_language[update.effective_chat.id] == \"english\":\r\n return \"lire : \" + format(price / 10000, '.2f') + '0' + ' kTomans'\r\n elif users_language[update.effective_chat.id] == \"persian\":\r\n return \" هزارتومان\" + format(price/10000000, '.3f') + '0' + \"لیر : \"", "def yotated_checker(self, seq):\n seq = re.sub(r'([йцкнгшщзхфвпрлджчсмтб])(й(а|у|э))', r'\\1ь\\2', seq)\n seq = re.sub(r'(\\A| |[ьъ])йа', r'\\1я', seq)\n seq = re.sub(r'(\\A| |[ьъ])йу', r'\\1ю', seq)\n seq = re.sub(r'(\\A| |[ьъ])йэ', r'\\1е', seq)\n return seq", "def lemma(self) -> str:", "def get_aa (tRNA):\n\tpass", "def translate(self):\n\t\tvowels = \"aeiou\"\n\n\t\tif (self.word[0] not in vowels) and (self.word[1] in vowels):\n\t\t\tnew_word = self.word[1:] + self.word[0] + \"ay\"\n\t\telif self.word[0] in vowels:\n\t\t\tnew_word = self.word + \"way\"\n\t\telse:\n\t\t\tnew_word = self.word[2:] + self.word[:2] + \"ay\"\n\n\t\tprint(new_word)", "def armlengte(self):\n return self._armlengte.get_waarde()", "def armlengte(self):\n return self._armlengte.get_waarde()", "def vigenere(phrase, clef, operation):\n sortie, i = \"\", 0\n for caract in phrase: #parcours de la chaine a traiter\n if operation == \"1\": #chiffrement\n sortie = sortie + chr((ord(caract) + ord(clef[i])) % 256)\n i = i + 1 #parcours de la cle\n if i > len(clef) - 1:\n i = 0 #fin de cle atteinte, on repart au debut\n elif operation == \"2\": #dechiffrement\n sortie = sortie + chr((ord(caract) - ord(clef[i])) % 256)\n i = i + 1\n if i > len(clef) - 1:\n i = 0\n return sortie", "def refang(self, text: str):", "def test_i18n03(self):\n output = self.engine.render_to_string(\"i18n03\", {\"anton\": \"Å\"})\n self.assertEqual(output, \"Å\")", "def _getLilyAccidental(self):\n return \"\"", "def get_translation(self, lang):\n if lang==\"it\":\n url=self.backstring+\"en/\"+self.name\n image=self.backstring+\"img/uk.png\"\n alttext='English version'\n elif lang==\"en\":\n url=self.backstring+\"it/\"+self.name\n image=self.backstring+\"img/it.png\"\n alttext='Italian version'\n img='<img src=\"%s\" height=\"15\" alt=\"%s\"><br>%s' % (image, alttext,alttext, )\n a=A(img, url, \"translation\")\n return str(a)", "def get_text(self):", "def test_legacyi18n04(self):\n output = self.engine.render_to_string('legacyi18n04', {'anton': b'\\xc3\\x85'})\n self.assertEqual(output, 'å')", "def test_legacyi18n04(self):\n output = self.engine.render_to_string('legacyi18n04', {'anton': b'\\xc3\\x85'})\n self.assertEqual(output, 'å')", "async def geys(geyed):\n if not geyed.text[0].isalpha() and geyed.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n index = random.randint(0, len(GEY_STRINGS) - 1)\n reply_text = GEY_STRINGS[index]\n await geyed.edit(reply_text)", "def back_translate(self):\n base = Bio.Alphabet._get_base_alphabet(self.alphabet)\n if not isinstance(base, Bio.Alphabet.ProteinAlphabet):\n raise ValueError(\"Nucleic acids cannot be back translated!\")\n\n # right now this just uses the most-prevalent codon for each AA\n # TODO: select codons with a weighted average using random.choice\n return Seq(\n \"\".join([CodonUsage.SynonymousCodons[seq3(AA).upper()][0] for AA in str(self)]),\n IUPAC.unambiguous_dna,\n )", "def en_word(cls):\n return cls.random_element(cls.words)", "def 
test_hasta_el_numeral(self):\n fwa = FakeWikiArchivo('abcd <a href=\"/wiki/foobar#xy\">FooBar</a> dcba')\n _, r = self.peishranc(fwa)\n self.assertEqual(r, [(u'foobar', SCORE_PEISHRANC)])", "def get_sentence(self):", "def expected_rubbish(self):", "def step2(self):\n\t\tif self.b[self.k - 1] == 'a':\n\t\t\tif self.ends(\"ational\"): self.r(\"ate\")\n\t\t\telif self.ends(\"tional\"): self.r(\"tion\")\n\t\telif self.b[self.k - 1] == 'c':\n\t\t\tif self.ends(\"enci\"):\t self.r(\"ence\")\n\t\t\telif self.ends(\"anci\"): self.r(\"ance\")\n\t\telif self.b[self.k - 1] == 'e':\n\t\t\tif self.ends(\"izer\"):\t self.r(\"ize\")\n\t\telif self.b[self.k - 1] == 'l':\n\t\t\tif self.ends(\"bli\"):\t self.r(\"ble\") # --DEPARTURE--\n\t\t\t# To match the published algorithm, replace this phrase with\n\t\t\t#\tif self.ends(\"abli\"):\t self.r(\"able\")\n\t\t\telif self.ends(\"alli\"): self.r(\"al\")\n\t\t\telif self.ends(\"entli\"): self.r(\"ent\")\n\t\t\telif self.ends(\"eli\"):\t self.r(\"e\")\n\t\t\telif self.ends(\"ousli\"): self.r(\"ous\")\n\t\telif self.b[self.k - 1] == 'o':\n\t\t\tif self.ends(\"ization\"): self.r(\"ize\")\n\t\t\telif self.ends(\"ation\"): self.r(\"ate\")\n\t\t\telif self.ends(\"ator\"): self.r(\"ate\")\n\t\telif self.b[self.k - 1] == 's':\n\t\t\tif self.ends(\"alism\"):\t self.r(\"al\")\n\t\t\telif self.ends(\"iveness\"): self.r(\"ive\")\n\t\t\telif self.ends(\"fulness\"): self.r(\"ful\")\n\t\t\telif self.ends(\"ousness\"): self.r(\"ous\")\n\t\telif self.b[self.k - 1] == 't':\n\t\t\tif self.ends(\"aliti\"):\t self.r(\"al\")\n\t\t\telif self.ends(\"iviti\"): self.r(\"ive\")\n\t\t\telif self.ends(\"biliti\"): self.r(\"ble\")\n\t\telif self.b[self.k - 1] == 'g': # --DEPARTURE--\n\t\t\tif self.ends(\"logi\"):\t self.r(\"log\")\n\t\t# To match the published algorithm, delete this phrase", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def translate(self, language=None):", "def g():", "def abw(og, fg):\n\n oe = sg2plato(og)\n re = real_extract(og, fg)\n return (oe - re) / (2.0665 - 0.010665 * oe)", "def process_bereich_to(self):\r\n return self._tokens[2]", "def travailler_enveloppes(self, enveloppes):\n elements = enveloppes[\"l\"]\n elements.apercu = \"{valeur}\"\n elements.aide_courte = \\\n \"Entrez |ent|le nom d'un rang|ff| pour l'éditer ou :\\n\" \\\n \" |ent|/a <nom de l'élément à créer> / <probabilité> / <points> \" \\\n \"|ff|\\n (Exemple : |cmd|/a bras gauche / 8 / 3|ff|)\\n\" \\\n \" |ent|/s <nom de l'élément à supprimer>|ff|\\n\\n\" \\\n \"La probabilité de toucher un élément est calculée en \" \\\n \"fonciton\\nde la probabilité totale de tous les éléments.\\n\\n\" \\\n \"Éléments actuels de la cible :{valeur}\"", "def g_ub(self):\n pass", "def intf_ENTPGRAM(E):\n # !! Need to check for some eids being TRIs. 
Filter that out.\n if ( not inc.entid_or_LST_of_entids(E.The,3) or \n not inc.point_formatted_LST(E.The,2) or\n not inc.point_formatted_LST(E.The,1) ):\n print(\"Input Error: pgram\")\n print(intf_ENTPGRAM.__doc__)\n return # Without doing much of anything.\n oB= [ xyz.val for xyz in E.The.StackPop().val ] # A list [3.5 -2 0].\n oA= [ xyz.val for xyz in E.The.StackPop().val ] # A list [3.5 -2 0].\n myeids= E.The.StackPop().val\n if type(myeids)==type(list()):\n #myeids= map(lambda x:x.val, myeids) # Should now be a list of ints.\n myeids= [x.val for x in myeids] # Should now be a list of ints.\n else:\n myeids= [ myeids ] # Also a (1 item) list of ints.\n neweidlist= []\n for myeid in myeids:\n if myeid in MMEL.El: # Check if eid exists.\n src_ent= MMEL.El[myeid]\n new_ent= src_ent.duplicate()\n new_ent.translate([ oB[0]-oA[0], oB[1]-oA[1], oB[2]-oA[2] ])\n As= mm.Entity.allplist.PLdict[ src_ent.epts[0] ]\n Ae= mm.Entity.allplist.PLdict[ src_ent.epts[1] ]\n Bs= mm.Entity.allplist.PLdict[ new_ent.epts[0] ]\n Be= mm.Entity.allplist.PLdict[ new_ent.epts[1] ]\n neweidlist.append(new_ent.eid)\n MMEL.add_ent(new_ent)\n line_entS= mm.Line_Entity( [As,Bs] )\n neweidlist.append(line_entS.eid)\n MMEL.add_ent(line_entS)\n line_entE= mm.Line_Entity( [Ae,Be] )\n neweidlist.append(line_entE.eid)\n MMEL.add_ent(line_entE)\n tri_entA= mm.Tri_Entity( [As, Ae, Bs] )\n neweidlist.append(tri_entA.eid)\n MMEL.add_ent(tri_entA)\n tri_entB= mm.Tri_Entity( [Bs, Be, Ae] )\n neweidlist.append(tri_entB.eid)\n MMEL.add_ent(tri_entB)\n else:\n print(\"WARNING: Entity ID# %d does not exist.\" % myeid)\n if neweidlist:\n neweids= objectifier.StackOB_LST( [objectifier.StackOB_VAL(x) for x in neweidlist] )\n E.The.StackPush(neweids)\n OUT.default(MMEL,E) # AUTODUMP ", "def think(s):", "def map2mw_Aug(d,k1,entry):\n L = entry.metad['L']\n if L in ['7201','7202']: # 7203 relates to 'hay'\n return 'hA'\n if k1 in map2mw_special_Aug:\n return map2mw_special_Aug[k1]\n regexes = [\n u'<ab>aug.</ab> de {%(.*?)%}',\n u'<ab>aug.</ab> {%(.*?)%}',\n u'<ab>aug.</ab> du <ab>c.</ab> de {%(.*?)%}',\n\n ]\n line = entry.datalines[0] # first line of entry in bur.txt\n for regex in regexes:\n m = re.search(regex,line)\n if m:\n root = m.group(1) # root in \n root_slp1=roman_slp1_mw(root,'verb',d)\n if root_slp1 != None:\n return root_slp1\n\n return '?'", "def transcribe(seq):\n rna = ''\n for letter in seq:\n if letter == 'A':\n rna = rna + 'U'\n elif letter == 'T':\n rna = rna + 'A'\n elif letter == 'G':\n rna = rna + 'C'\n else:\n rna = rna + 'G'\n return rna", "def test_anglicize():\n print('Testing anglicize')\n result = funcs.anglicize(1)\n introcs.assert_equals(\"one\", result)\n\n result = funcs.anglicize(19)\n introcs.assert_equals(\"nineteen\", result)\n\n result = funcs.anglicize(20)\n introcs.assert_equals(\"twenty\", result)\n\n result = funcs.anglicize(35)\n introcs.assert_equals(\"thirty five\", result)\n\n result = funcs.anglicize(50)\n introcs.assert_equals(\"fifty\", result)\n\n result = funcs.anglicize(99)\n introcs.assert_equals(\"ninety nine\", result)\n\n result = funcs.anglicize(100)\n introcs.assert_equals(\"one hundred\", result)\n\n result = funcs.anglicize(301)\n introcs.assert_equals(\"three hundred one\", result)\n\n result = funcs.anglicize(999)\n introcs.assert_equals(\"nine hundred ninety nine\", result)\n\n result = funcs.anglicize(1000)\n introcs.assert_equals(\"one thousand\", result)\n\n result = funcs.anglicize(1009)\n introcs.assert_equals(\"one thousand nine\", result)\n\n result = 
funcs.anglicize(900000)\n introcs.assert_equals(\"nine hundred thousand\", result)\n\n result = funcs.anglicize(789436)\n introcs.assert_equals(\"seven hundred eighty nine thousand four hundred thirty six\",\n result)", "def get_ig_name ( base_name ) :\n return base_name + '-GW'", "def med_in_english(word):\r\n\treturn int(med(TextBlob(word).correct(), word))", "def REC_YAHTZEE():\n return 12" ]
[ "0.6085675", "0.60812867", "0.5822387", "0.5812007", "0.56937695", "0.56634116", "0.5626631", "0.56246907", "0.56246907", "0.56246907", "0.56246907", "0.56246907", "0.5621488", "0.5621488", "0.5549839", "0.55467236", "0.5458848", "0.5457804", "0.5457804", "0.54571193", "0.54569125", "0.5455175", "0.5446671", "0.53993636", "0.5370489", "0.5370489", "0.53680474", "0.53564864", "0.5345982", "0.53434145", "0.5322816", "0.5309913", "0.5299796", "0.528748", "0.5242758", "0.52240217", "0.5219772", "0.52160096", "0.51991147", "0.51943743", "0.51871115", "0.51850635", "0.5177403", "0.51762086", "0.5174151", "0.51734257", "0.51711416", "0.516081", "0.5131642", "0.5125354", "0.51238656", "0.51202714", "0.51185155", "0.5113304", "0.5111797", "0.5107531", "0.50932777", "0.5092705", "0.5089052", "0.50777495", "0.5076698", "0.50742954", "0.5068308", "0.5065976", "0.5065976", "0.506464", "0.5061968", "0.50617623", "0.5018978", "0.50073504", "0.5006888", "0.5006296", "0.5006296", "0.49988055", "0.49912825", "0.49903157", "0.4989614", "0.4987038", "0.49810112", "0.49798405", "0.49763125", "0.49763125", "0.49763125", "0.49763125", "0.49763125", "0.49763125", "0.4975931", "0.49725595", "0.49693617", "0.4966783", "0.49646077", "0.49638405", "0.49590036", "0.49499658", "0.4944587", "0.494266", "0.4941796", "0.49354348", "0.49334767", "0.493244" ]
0.7161606
0
This method is used to postprocess the form data. By default, it returns the raw `form.data` dictionary.
def process_step(self, form): #print(form.data) #print(form.data) #print(self) institution = {} inst_list = [] if self.steps.current == '1': institution['institution'] = form.data['1-0-institution'] institution['date_from'] = form.data['1-0-date_from'] institution['date_to'] = form.data['1-0-date_to'] inst_list.append(institution) inst_keys = dict(form.data.lists()) #Create dictionary dynamically for the other institutions incase more than two institutions are entered if inst_keys.get('1-NaN-institution') and type(inst_keys.get('1-NaN-institution')) is list: inst_list2 = [] #Add institutions for i,insti in enumerate(inst_keys.get('1-NaN-institution')): inst_i = {} #print(i) date_from = inst_keys['1-NaN-date_from'][i] date_to = inst_keys['1-NaN-date_to'][i] course_duration = inst_keys['1-NaN-course_duration'][i] inst_i['institution'] = insti inst_i['date_from'] = date_from inst_i['date_to'] = date_to inst_list2.append(inst_i) #print(inst_list2) inst_list.extend(inst_list2) #Create dictionary dynamically for the other institutions incase more than two institutions are entered if inst_keys.get('1-NaN-institution') and type(inst_keys.get('1-NaN-institution')) is not list: inst_0 = {} inst_0['institution'] = form.data['1-NaN-institution'] inst_0['date_from'] = form.data['1-NaN-date_from'] inst_0['date_to'] = form.data['1-NaN-date_to'] inst_0['course_duration'] = form.data['1-NaN-course_duration'] #inst_0['achievements'] = '' inst_list.append(inst_0) #Add the entered information to a session object self.request.session['institution'] = inst_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_post_data(self):\n if not self._form_data:\n self._form_data = async_to_sync(self.request.form)()\n return self._form_data", "def get_form_data(self) -> dict:\n with logging.LogCall(__file__, \"get_form_data\", self.__class__):\n return self.serialize()", "def form_data(self) -> dict:\n return copy.deepcopy(self.data)", "def get_form_data(self, request):\n return request.session[self.id]['form_data']", "def get_formdata(self):\n formdata = request.json\n return wtforms_json.MultiDict(\n wtforms_json.flatten_json(self.__class__, formdata)\n ) if formdata else None", "def req_data(self):\n return (self.request.data if hasattr(self.request, 'data') else\n self.request.POST)", "async def read_parse_form_data(self):\n # TODO: Probably there is better solution how to handle\n # request body, at least for simple urlencoded forms - by processing\n # chunks instead of accumulating payload.\n gc.collect()\n if b'Content-Length' not in self.headers:\n return {}\n # Parse payload depending on content type\n if b'Content-Type' not in self.headers:\n # Unknown content type, return unparsed, raw data\n return {}\n size = int(self.headers[b'Content-Length'])\n if size > self.params['max_body_size'] or size < 0:\n raise HTTPException(413)\n data = await self.reader.readexactly(size)\n # Use only string before ';', e.g:\n # application/x-www-form-urlencoded; charset=UTF-8\n ct = self.headers[b'Content-Type'].split(b';', 1)[0]\n try:\n if ct == b'application/json':\n return json.loads(data)\n elif ct == b'application/x-www-form-urlencoded':\n return parse_query_string(data.decode())\n except ValueError:\n # Re-generate exception for malformed form data\n raise HTTPException(400)", "def form_data(self):\n from couchforms import XMLSyntaxError\n from .utils import convert_xform_to_json, adjust_datetimes\n from corehq.form_processor.utils.metadata import scrub_form_meta\n xml = self.get_xml()\n try:\n form_json = convert_xform_to_json(xml)\n except XMLSyntaxError:\n return {}\n # we can assume all sql domains are new timezone domains\n with force_phone_timezones_should_be_processed():\n adjust_datetimes(form_json)\n\n scrub_form_meta(self.form_id, form_json)\n return form_json", "def post_data(request):\n if is_post(request):\n return request.POST\n return None", "def get_post_data(self):\n json_data = ''\n\n # check if JSON is passed as a file or as a body of POST request\n if self.request.files:\n json_data = self.request.files['file'][0][\n 'body'] # pick up only first file (index 0)\n elif self.request.body:\n json_data = self.request.body\n\n try:\n data = json.loads(json_data)\n except ValueError:\n data = None\n return data", "def deserialize_form(data):\r\n return QueryDict(query_string=unicode(data).encode('utf-8'))", "def _prepare_multipart_form_data(data):\n output = dict()\n for key in data:\n output[key] = (None, data[key])\n return output", "def form(self):\n if self._form is None:\n if self.content_type is None:\n return None\n mime_type = self.content_type.split(';')[0]\n if mime_type != 'application/x-www-form-urlencoded':\n return None\n self._form = self._parse_urlencoded(self.body)\n return self._form", "def PostData(self):\n if self._postdata is None:\n return None\n if len(self._postdata) == 0:\n return None\n return self._postdata", "def from_form_data(self, data: dict = {}):\n with logging.LogCall(__file__, \"from_form_data\", self.__class__):\n pass", "def compress(self, data_list):\n data = {}\n if data_list:\n data = dict(\n (f.name, data_list[i]) for i, f in 
enumerate(self.form))\n\n f = self.form.__class__(data)\n f.is_valid()\n return f.cleaned_data\n return data", "def post_process(cls, data):\n return data", "def get_plugin_form_data(self):\n form = self.get_form()\n\n return self._get_plugin_form_data(form.plugin_data_fields)", "def process_form(request):\n raw_data = request.form\n data = raw_data.copy()\n data['resources'] = request.form.getlist('resources')\n if request.remote_addr == '127.0.0.1':\n data['ip'] = '100.7.27.72'\n else:\n data['ip'] = request.remote_addr\n data['user_agent'] = request.user_agent.string\n data['@timestamp'] = datetime.utcnow()\n latitude = float(data['latitude'])\n longitude = float(data['longitude'])\n data['location'] = [latitude, longitude]\n return data", "def _get_normalized_form_data(self, form, key):\n data = {} if form.data else form.initial\n prefix = 'gc{}-'.format(key)\n\n for field_name in form.data:\n normalized_field_name = field_name[len(prefix):]\n\n if field_name in form.data and field_name.startswith(prefix) and form.data[field_name]:\n data[normalized_field_name] = form.data[field_name]\n\n for field_name in data:\n if field_name == 'quantity':\n data[field_name] = str(data[field_name])\n\n return data", "def get_processed_form_data(form, form_element_entries):\n keys_to_remove = get_ignorable_form_fields(form_element_entries)\n values_to_remove = get_ignorable_form_values()\n\n field_name_to_label_map = \\\n get_field_name_to_label_map(form, keys_to_remove, values_to_remove)\n\n keys_to_remove = list(field_name_to_label_map.keys())\n\n return (\n field_name_to_label_map,\n get_cleaned_data(form, keys_to_remove, values_to_remove)\n )", "def get_raw_data_form(self, data, view, method, request):\n # See issue #2089 for refactoring this.\n serializer = getattr(data, 'serializer', None)\n if serializer and not getattr(serializer, 'many', False):\n instance = getattr(serializer, 'instance', None)\n if isinstance(instance, Page):\n instance = None\n else:\n instance = None\n\n with override_method(view, request, method) as request:\n # Check permissions\n if not self.show_form_for_method(view, method, request, instance):\n return\n\n # If possible, serialize the initial content for the generic form\n default_parser = view.parser_classes[0]\n renderer_class = getattr(default_parser, 'renderer_class', None)\n if hasattr(view, 'get_serializer') and renderer_class:\n # View has a serializer defined and parser class has a\n # corresponding renderer that can be used to render the data.\n\n if method in ('PUT', 'PATCH'):\n serializer = view.get_serializer(instance=instance)\n else:\n serializer = view.get_serializer()\n\n # Render the raw data content\n renderer = renderer_class()\n accepted = self.accepted_media_type\n context = self.renderer_context.copy()\n context['indent'] = 4\n\n # strip HiddenField from output\n data = serializer.data.copy()\n for name, field in serializer.fields.items():\n if isinstance(field, serializers.HiddenField):\n data.pop(name, None)\n content = renderer.render(data, accepted, context)\n # Renders returns bytes, but CharField expects a str.\n content = content.decode()\n else:\n content = None\n\n # Generate a generic form that includes a content type field,\n # and a content field.\n media_types = [parser.media_type for parser in view.parser_classes]\n choices = [(media_type, media_type) for media_type in media_types]\n initial = media_types[0]\n\n class GenericContentForm(forms.Form):\n _content_type = forms.ChoiceField(\n label='Media type',\n choices=choices,\n 
initial=initial,\n widget=forms.Select(attrs={'data-override': 'content-type'})\n )\n _content = forms.CharField(\n label='Content',\n widget=forms.Textarea(attrs={'data-override': 'content'}),\n initial=content,\n required=False\n )\n\n return GenericContentForm()", "def _parse_request(self):\n if len(self.request.body) > 0:\n try:\n return tornado.escape.json_decode(self.request.body)\n except Exception:\n #Not Json, Using Form data\n return self.request.arguments\n else:\n return self.request.arguments", "def _get_plugin_form_data(self, fields):\n form_data = {}\n for field, default_value in fields:\n try:\n form_data.update(\n {field: self.plugin_data.get(field, default_value)}\n )\n except Exception as err:\n logger.debug(\n \"Error in class %s. Details: %s\",\n self.__class__.__name__,\n str(err)\n )\n return form_data", "def extract_data_from_form(\n self, data: JobForm, many: bool, **kwargs\n ) -> Dict[str, Any]:\n\n def slugify(text: str) -> str:\n return text.lower().strip().replace(\" \", \"-\")\n\n return {\n \"experiment_name\": slugify(data.experiment_name.data),\n \"queue\": slugify(data.queue.data),\n \"timeout\": data.timeout.data or None,\n \"entry_point\": data.entry_point.data,\n \"entry_point_kwargs\": data.entry_point_kwargs.data or None,\n \"depends_on\": data.depends_on.data or None,\n \"workflow\": data.workflow.data,\n }", "def get_form_kwargs(self):\n kwargs = {'instance': self.object}\n if self.request.method in ('POST', 'PUT'):\n kwargs.update({\n 'data': self.request.POST,\n 'files': self.request.FILES,\n })\n return kwargs", "def ProcessFormData(self, mr, post_data):\n raise MethodNotSupportedError()", "def get_data(self):\n data = self.request.body\n\n try:\n data = json.loads(data)\n except ValueError:\n data = None\n return data", "def _form_data(self, response):\n SQFI_audit_type = response.xpath(self.filters[6]).extract_first()\n SQFI_audit_type_val = response.xpath(self.filters[7]).extract_first()\n food_sector_categories = response.xpath(self.filters[8]).extract_first()\n food_sector_categories_val = response.xpath(self.filters[9]).extract()\n audit_rating = response.xpath(self.filters[10]).extract_first()\n audit_rating_val = response.xpath(self.filters[11]).extract()\n country = response.xpath(self.filters[12]).extract_first()\n country_val = response.xpath(self.filters[13]).extract()\n form_data = {\n SQFI_audit_type: SQFI_audit_type_val,\n food_sector_categories: food_sector_categories_val,\n audit_rating: audit_rating_val,\n country: country_val,\n }\n return form_data", "def _parse_form_data():\n form_data = str(request.get_data())\n if form_data[:2] == \"b'\":\n # looks like b'A=1&B=2'\n form_data = form_data[2:-1]\n for key, values in parse_qs(form_data).items():\n yield (key, values[0])", "def decode_request_content(self, datafile):\n content_type = self.headers.get(\"Content-Type\", \"notype\").lower()\n if 'application/x-www-form-urlencoded' in content_type:\n # The data is provided in a urlencoded format. 
Unencode it into\n # cgi FieldStorage/MiniFieldStorage objects in a form container\n form = cgi.FieldStorage(\n fp=datafile,\n headers=self.headers,\n environ=dict(REQUEST_METHOD='POST',\n CONTENT_TYPE=self.headers['Content-Type'])\n )\n itemdict = {}\n for item in form.list:\n if item.name == 'data':\n itemdict['data'] = \\\n SimpleLogRequestHandler.extract_form_fields(item)\n elif item.name == 'layout':\n # http://log4javascript.org/docs/manual.html#layouts\n itemdict['layout'] = item.value\n return itemdict\n else:\n self.send_response(501,\n \"Content-Type %r not supported\" % content_type)\n self.send_header(\"Content-length\", \"0\")\n self.end_headers()\n return None", "def get_form_kwargs(self):\n kwargs = {'initial': self.get_initial()}\n if self.request.method in ('POST', 'PUT'):\n kwargs.update({\n 'data': self.request.POST,\n 'files': self.request.FILES,\n })\n kwargs.update(self.get_additional_form_args())\n return kwargs", "def get_form_kwargs(self) -> Dict[str, Any]:\n kwargs = super().get_form_kwargs()\n\n if self.request.GET:\n kwargs['data'] = deepcopy(self.request.GET)\n\n return kwargs", "def get_postprocess(self) -> Dict:\n raise NotImplementedError", "def post_dict(self):\r\n contents = self.request_content\r\n\r\n # The POST dict will contain a list of values for each key.\r\n # None of our parameters are lists, however, so we map [val] --> val\r\n # If the list contains multiple entries, we pick the first one\r\n try:\r\n post_dict = urlparse.parse_qs(contents, keep_blank_values=True)\r\n return {\r\n key: list_val[0]\r\n for key, list_val in post_dict.items()\r\n }\r\n\r\n except:\r\n return dict()", "def pop_form(env):\n if 'wsgi.input' not in env:\n return None\n post_env = env.copy()\n post_env['QUERY_STRING'] = ''\n form = cgi.FieldStorage(\n fp=env.pop('wsgi.input'),\n environ=post_env,\n keep_blank_values=True\n )\n return {k: form[k].value for k in form}", "def get_cleaned_data(self, request, step):\n return self._get_state(request).form_data.get(step.slug, None)", "def get_context_data(self):\n return {\"form\": self.get_form()}", "def parse_post(request):\n\n fp = StringIO(request.raw_body)\n\n headers = {}\n headers['content-type'] = request.message.get('content-type')\n headers['content-length'] = request.message.get('content-length')\n\n environ = {}\n environ['REQUEST_METHOD'] = request.method\n\n boundary = request.message.get('boundary')\n\n post = cgi.FieldStorage( fp = fp\n , headers = headers\n , outerboundary = boundary\n , environ = environ\n , keep_blank_values = True\n , strict_parsing = False\n )\n\n return post", "def initial_form_data(self, request, step, form):\n return None", "def get_form_kwargs(self):\n kwargs = {\n 'initial': self.get_initial(),\n 'prefix': self.get_prefix(),\n }\n\n if self.request.method in ('POST', 'PUT'):\n kwargs.update({\n 'data': self.request.POST,\n 'files': self.request.FILES,\n })\n #print('kwargs',kwargs)\n return kwargs", "def process(self, formdata=None, obj=None, data=None, **kwargs):\n formdata = self.meta.wrap_formdata(self, formdata)\n\n if data is not None:\n # XXX we want to eventually process 'data' as a new entity.\n # Temporarily, this can simply be merged with kwargs.\n kwargs = dict(data, **kwargs)\n\n for name, field, in iteritems(self._fields):\n if obj is not None and hasattr(obj, name):\n # This if statement is the only change made to the original\n # code for BaseForm.process() - Dawn\n if name == 'studies':\n field.process(formdata, list(obj.studies.values()))\n else:\n 
field.process(formdata, getattr(obj, name))\n elif name in kwargs:\n field.process(formdata, kwargs[name])\n else:\n field.process(formdata)", "def forms(self):\r\n forms = FormsDict()\r\n for name, item in self.POST.iterallitems():\r\n if not hasattr(item, 'filename'):\r\n forms[name] = item\r\n return forms", "def clean(self) -> Dict:\n # The form must be multipart\n if not self.is_multipart():\n self.add_error(\n None,\n _('CSV upload form is not multiform'),\n )\n return {}\n\n form_data = super().clean()\n\n if form_data['skip_lines_at_top'] < 0:\n self.add_error(\n 'skip_lines_at_top',\n _('This number has to be zero or positive'),\n )\n return form_data\n\n if form_data['skip_lines_at_bottom'] < 0:\n self.add_error(\n 'skip_lines_at_bottom',\n _('This number has to be zero or positive'),\n )\n return form_data\n\n # Process CSV file using pandas read_csv\n try:\n self.data_frame = services.load_df_from_csvfile(\n TextIOWrapper(\n self.files['data_file'].file,\n encoding=self.data.encoding),\n self.cleaned_data['skip_lines_at_top'],\n self.cleaned_data['skip_lines_at_bottom'])\n except Exception as exc:\n self.add_error(\n None,\n _('File could not be processed ({0})').format(str(exc)))\n return form_data\n\n # Check the validity of the data frame\n self.validate_data_frame()\n\n return form_data", "def _template_data(self):\n return {\"form\": self.form.render()}", "def json_post_to_dict(form):\n message = str(form.json_message.data)\n try:\n dict_post = json.loads(message)\n except json.decoder.JSONDecodeError as e:\n print(\"json_post_to_dict: json decoder failed to parse message\")\n print(e)\n return None\n return dict_post", "def get_form_kwargs(self):\n\n kwargs = super().get_form_kwargs()\n kwargs.update({\n 'data': self.request.GET,\n })\n\n return kwargs", "def minimal_form_data():\n\n form_data = { \n 'status': '0',\n 'title': 'Recurso de teste',\n 'description': 'Recurso para testes',\n 'abstract': 'Resumo',\n \n 'main-descriptor-content_type-object_id-TOTAL_FORMS': '0', \n 'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',\n\n 'main-keyword-content_type-object_id-TOTAL_FORMS': '0', \n 'main-keyword-content_type-object_id-INITIAL_FORMS': '0',\n\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',\n 'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0',\n }\n\n return form_data", "def get_form_kwargs(self):\n kwargs = {\n 'initial': self.get_initial(),\n 'prefix': self.get_prefix(),\n }\n\n if self.request.method in ('POST', 'PUT'):\n kwargs.update({\n 'data': self.request.POST,\n 'files': self.request.FILES,\n })\n return kwargs", "def post(self):\r\n data = request.form\r\n return create(data=data)", "def formdata_for(self, skip):\n formdata = self.formdata.copy()\n # skip=0 doesn't get put in the query\n if skip:\n formdata['skip'] = int(round(skip))\n return formdata", "def option_post_form(self):\n return six.next(six.itervalues(self.zap._request(self.zap.base + 'spider/view/optionPostForm/')))", "def post(self):\n file_ = self.verify_param('file', cgi.FieldStorage)\n data, filemask = self.build_post_data(file_)\n return data, filemask", "def form_data(self) -> Optional[dict]:\n self.data[\"id\"] = self.id\n self.data[\"week\"] = self.week\n self.data[\"time\"] = self.time\n self.data[\"name\"] = self._service.filter(\"id\", self.id).first()[0].get(\"name\")\n\n return self.data", "def prepare_data_for_requests_post(username, password, company_id_form=\"aqn.accountsight.com\"):\n raise NotImplementedError\n # data = {\"MIME Type\": 
\"application/x-www-form-urlencoded\",\n # \"companyIdForm\": company_id_form,\n # \"userName\": username,\n # \"userPwd\": password}\n #\n # # format dictionary as JSON\n # data = json.dumps(data)\n # # Convert to String\n # data = str(data)\n # # Convert string to byte\n # data = data.encode('utf-8')\n # return data", "def _parse_input(self):\r\n def _convert(item):\r\n if isinstance(item, list):\r\n return [converters.to_unicode(i.value) for i in item]\r\n if item.filename:\r\n return MultipartFile(item)\r\n return converters.to_unicode(item.value)\r\n fs = cgi.FieldStorage(fp=self._environ['wsgi.input'], environ=self._environ, keep_blank_values=True)\r\n inputs = dict()\r\n for key in fs:\r\n inputs[key] = _convert(fs[key])\r\n return inputs", "def _process(self):\n self.output[\"data\"] = get_form_data(self.kwargs[\"collect\"].ona_scan_form_pk)", "def request_data():\n if request.method in ('POST', \"PUT\"):\n return request.get_json(force=True)\n else:\n return request.values", "def clean_request_data(self):\n clean_data = self.request.registry.get(CLEAN_DATA, {})\n return clean_data", "def form_data(self, clear=[], **kwargs):\n form_data = {\n 'payer_name': 'William Williams',\n 'billing_address': '1 Main Street',\n 'billing_city': 'Anytown',\n 'country': 'USA',\n 'billing_state': 'MD',\n 'billing_zip': '20852',\n 'payment_type': 'CreditCard',\n 'project_code': '15-4FF',\n 'payment_amount': '3000',\n 'information_consent': True,\n }\n for key in clear:\n del form_data[key]\n for key, value in kwargs.items():\n form_data[key] = value\n return form_data", "def process_formdata(self, valuelist):\n if valuelist:\n self.data = self.get_tags_from_string(valuelist[0])\n else:\n self.data = []", "def _clean(self):\n return self._cleaned_data", "def test_clean_returns_cleaned_data(self):\n original_data = self.form.data\n original_fields = self.form.fields\n original_computed_fields = self.form.computed_fields\n original_errors = getattr(self.form, '_errors', None)\n original_cleaned_data = getattr(self.form, 'cleaned_data', None)\n self.form.data = original_data.copy()\n self.form.fields = original_fields.copy()\n self.form.computed_fields = original_computed_fields.copy()\n self.form._errors = ErrorDict() if original_errors is None else original_errors.copy()\n new_cleaned_data = {self.form.name_for_user: 'test_value', self.form.name_for_email: 'test_value'}\n new_cleaned_data[self.form.USERNAME_FLAG_FIELD] = False\n self.form.cleaned_data = new_cleaned_data.copy()\n expected_fields = {**original_fields, **original_computed_fields}\n\n cleaned_data = self.form.clean()\n self.assertDictEqual(new_cleaned_data, cleaned_data)\n self.assertDictEqual(expected_fields, self.form.fields)\n\n self.form.data = original_data\n self.form.fields = original_fields\n self.form.computed_fields = original_computed_fields\n self.form._errors = original_errors\n self.form.cleaned_data = original_cleaned_data\n if original_errors is None:\n del self.form._errors\n if original_cleaned_data is None:\n del self.form.cleaned_data", "def extract(environ, empty=False, err=False):\n formdata = cgi.parse(environ[\"wsgi.input\"], environ, empty, err)\n # Remove single entries from lists\n for key, value in iter(formdata.items()):\n if len(value) == 1:\n formdata[key] = value[0]\n return formdata", "def post_data_parser(post_data):\n post_data_json = {}\n for parameter in post_data.rsplit(\"&\"):\n post_data_json[parameter.rsplit(\"=\")[0]] = parameter.rsplit(\"=\")[1]\n return post_data_json", "def _doProcess(self, 
form, write, request):\n args = request.args.copy()\n kw = {}\n for field in form:\n inputType, displayName, inputName, inputValue = field[:4]\n if inputType == 'checkbox':\n if request.args.has_key('__checkboxes__'):\n if inputName in request.args['__checkboxes__']:\n formData = 1\n else:\n formData = 0\n else:\n formData = 0\n elif inputType in ['checkgroup', 'radio', 'multimenu']:\n if args.has_key(inputName):\n formData = args[inputName]\n del args[inputName]\n else:\n formData = []\n else:\n if not args.has_key(inputName):\n raise FormInputError(\"missing field %s.\" % repr(inputName))\n formData = args[inputName]\n del args[inputName]\n if not len(formData) == 1:\n raise FormInputError(\"multiple values for field %s.\" %repr(inputName))\n formData = formData[0]\n method = self.formParse.get(inputType)\n if method:\n try:\n formData = method(formData)\n except:\n raise FormInputError(\"%s: %s\" % (displayName, \"error\"))\n kw[inputName] = formData\n submitAction = args.get('submit')\n if submitAction:\n submitAction = submitAction[0]\n for field in ['submit', '__formtype__', '__checkboxes__']:\n if args.has_key(field):\n del args[field]\n if args and not self.formAcceptExtraArgs:\n raise FormInputError(\"unknown fields: %s\" % repr(args))\n return apply(self.process, (write, request, submitAction), kw)", "def process_request(self, request):\n\n # Does the request contain a JSON payload?\n content_type = request.META.get('CONTENT_TYPE', '')\n if content_type != '' and 'application/json' in content_type:\n\n # Ignore empty payloads (e.g. for deletes)\n content_length = 0\n if request.META.get('CONTENT_LENGTH', '') != '':\n content_length = int(request.META.get('CONTENT_LENGTH', 0))\n if content_length > 0:\n try:\n # Replace request.POST with flattened dictionary from JSON\n decoded_dict = simplejson.loads(request.raw_post_data)\n request.POST = request.POST.copy()\n request.POST = self._flatten_dict(decoded_dict)\n except:\n return HttpResponse('Invalid JSON', status=400)", "def process_data(self, data):\n return data", "def post(self):\n data = request.json\n return save_new_post(data=data)", "def serialize_form(self, form):\n data = []\n for field_name, field in form.fields.items():\n if field_name in form.cleaned_data:\n form_value = form.cleaned_data[field_name]\n display_value = None\n if isinstance(form_value, models.Model):\n ctype = ContentType.objects.get_for_model(form_value)\n form_value = '{0}{1}.{2}:{3}'.format(\n _CONTENT_TYPE_PREFIX,\n ctype.app_label,\n ctype.model,\n form_value.pk\n )\n elif isinstance(form_value, UploadedFile):\n file_name = _fs.get_available_name(form_value.name)\n file_path = _fs.path(file_name)\n with open(file_path, 'wb+') as destination:\n for chunk in form_value.chunks():\n destination.write(chunk)\n form_value = file_path\n display_value = file_name\n data.append({\n 'name': field_name,\n 'label': force_text(field.label) if field.label else None,\n 'value': form_value,\n 'display_value': display_value,\n })\n return data", "def clean(self):\n print(self.data)\n print(self.errors)\n cleaned_data = self.cleaned_data\n print(cleaned_data)\n return cleaned_data", "def post_dict(self):\n\n if isinstance(self.request_content, bytes):\n contents = self.request_content.decode('utf-8')\n else:\n contents = self.request_content\n\n # The POST dict will contain a list of values for each key.\n # None of our parameters are lists, however, so we map [val] --> val\n # If the list contains multiple entries, we pick the first one\n try:\n post_dict = 
six.moves.urllib.parse.parse_qs(contents, keep_blank_values=True)\n return {\n key: list_val[0]\n for key, list_val in post_dict.items()\n }\n\n except: # lint-amnesty, pylint: disable=bare-except\n return dict()", "def getFormData(page):\n soup = BeautifulSoup(page, 'html.parser')\n viewstate = soup.find('input', {'id': '__VIEWSTATE' })['value']\n generator = soup.find('input', {'id': '__VIEWSTATEGENERATOR'})['value']\n validation = soup.find('input', {'id': '__EVENTVALIDATION' })['value']\n return (viewstate, generator, validation)", "def convert_for_form(data):\n if \"name\" in data:\n data[\"full_name\"] = data[\"name\"].get(\"value\")\n try:\n data[\"given_names\"] = data[\"name\"].get(\n \"value\").split(\",\")[1].strip()\n except IndexError:\n data[\"given_names\"] = \"\"\n data[\"family_name\"] = data[\"name\"].get(\"value\").split(\",\")[0].strip()\n data[\"display_name\"] = data[\"name\"].get(\"preferred_name\")\n data[\"status\"] = data[\"name\"].get(\"status\", \"\").lower()\n if \"urls\" in data:\n data[\"websites\"] = []\n for url in data[\"urls\"]:\n if \"description\" not in url:\n data[\"websites\"].append({\"webpage\": url[\"value\"]})\n else:\n if url[\"description\"].lower() == \"twitter\":\n data[\"twitter_url\"] = url[\"value\"]\n elif url[\"description\"].lower() == \"blog\":\n data[\"blog_url\"] = url[\"value\"]\n elif url[\"description\"].lower() == \"linkedin\":\n data[\"linkedin_url\"] = url[\"value\"]\n del data[\"urls\"]\n if \"field_categories\" in data:\n data[\"research_field\"] = data['field_categories']\n if \"positions\" in data:\n data[\"institution_history\"] = []\n for position in data[\"positions\"]:\n if not any(\n [\n key in position for key in ('name', 'rank',\n 'start_year', 'end_year')\n ]\n ):\n if 'email' in position:\n # Only email available, take as public_email\n data[\"public_email\"] = position.get(\"email\")\n continue\n pos = {}\n pos[\"name\"] = position.get(\"institution\", {}).get(\"name\")\n pos[\"rank\"] = position.get(\"rank\", \"\")\n pos[\"start_year\"] = position.get(\"start_date\", \"\")\n pos[\"end_year\"] = position.get(\"end_date\", \"\")\n pos[\"current\"] = True if position.get(\"status\") else False\n pos[\"old_email\"] = position.get(\"old_email\", \"\")\n if position.get(\"email\"):\n pos[\"email\"] = position.get(\"email\", \"\")\n if not data.get(\"public_email\"):\n data[\"public_email\"] = position.get(\"email\")\n data[\"institution_history\"].append(pos)\n data[\"institution_history\"].reverse()\n if 'advisors' in data:\n advisors = data['advisors']\n data['advisors'] = []\n for advisor in advisors:\n adv = {}\n adv[\"name\"] = advisor.get(\"name\", \"\")\n adv[\"degree_type\"] = advisor.get(\"degree_type\", \"\")\n data[\"advisors\"].append(adv)\n if \"ids\" in data:\n for id in data[\"ids\"]:\n try:\n if id[\"type\"] == \"ORCID\":\n data[\"orcid\"] = id[\"value\"]\n elif id[\"type\"] == \"BAI\":\n data[\"bai\"] = id[\"value\"]\n elif id[\"type\"] == \"INSPIRE\":\n data[\"inspireid\"] = id[\"value\"]\n except KeyError:\n # Protect against cases when there is no value in metadata\n pass", "def _get_data(self):\n data = {}\n\n for name, field in self._get_fields().items():\n if isinstance(field, fields.Factory):\n # skip for factories for now\n continue\n value = getattr(self, name)\n raw_value = field.to_raw(value)\n if isinstance(field, fields.Secret):\n data[f\"__{name}\"] = raw_value\n else:\n data[name] = raw_value\n\n return data", "def parse_post_request(request):\n # type: (django.http.HttpRequest) -> 
Dict[str, Any]\n request_data = json.loads(request.body)\n parameters_dict = {\n PARAMETER_MESSAGE: request_data.get('message'),\n PARAMETER_ENTITY_NAME: request_data.get('entity_name'),\n PARAMETER_STRUCTURED_VALUE: request_data.get('structured_value'),\n PARAMETER_FALLBACK_VALUE: request_data.get('fallback_value'),\n PARAMETER_BOT_MESSAGE: request_data.get('bot_message'),\n PARAMETER_TIMEZONE: request_data.get('timezone'),\n PARAMETER_LANGUAGE_SCRIPT: request_data.get('language_script', ENGLISH_LANG),\n PARAMETER_SOURCE_LANGUAGE: request_data.get('source_language', ENGLISH_LANG),\n PARAMETER_MIN_DIGITS: request_data.get('min_number_digits'),\n PARAMETER_MAX_DIGITS: request_data.get('max_number_digits'),\n PARAMETER_NUMBER_UNIT_TYPE: request_data.get('unit_type'),\n PARAMETER_LOCALE: request_data.get('locale'),\n PARAMETER_RANGE_ENABLED: request_data.get('range_enabled')\n }\n\n return parameters_dict", "def get_register_form_data(cls, pipeline_kwargs):\r\n # Details about the user sent back from the provider.\r\n details = pipeline_kwargs.get('details')\r\n\r\n # Get the username separately to take advantage of the de-duping logic\r\n # built into the pipeline. The provider cannot de-dupe because it can't\r\n # check the state of taken usernames in our system. Note that there is\r\n # technically a data race between the creation of this value and the\r\n # creation of the user object, so it is still possible for users to get\r\n # an error on submit.\r\n suggested_username = pipeline_kwargs.get('username')\r\n\r\n return {\r\n 'email': cls.get_email(details) or '',\r\n 'name': cls.get_name(details) or '',\r\n 'username': suggested_username,\r\n }", "def get_extra_payload(form):", "def prepare_pipeline_dict(request_body, user):\n form_data = json.loads(request_body)\n form_data[\"user\"] = user.pk\n return form_data", "def get_cleaned_data(form, keys_to_remove=[], values_to_remove=[]):\n if not values_to_remove:\n values_to_remove = get_ignorable_form_values()\n\n cleaned_data = copy.copy(form.cleaned_data)\n cleaned_data = clean_dict(\n cleaned_data,\n keys=list(set(cleaned_data.keys()) - set(keys_to_remove)),\n values=values_to_remove\n )\n\n ordered_cleaned_data = OrderedDict()\n for key in form.fields.keys():\n if key in cleaned_data:\n ordered_cleaned_data[key] = cleaned_data[key]\n\n return ordered_cleaned_data", "def clean(self) -> Dict:\n form_data = super().clean()\n\n # # Process Excel file using pandas read_excel\n try:\n self.data_frame = services.load_df_from_excelfile(\n self.files['data_file'],\n form_data['sheet'])\n except Exception as exc:\n self.add_error(\n None,\n _('File could not be processed: {0}').format(str(exc)))\n return form_data\n\n # Check the validity of the data frame\n self.validate_data_frame()\n\n return form_data", "def getform():\n form = cgi.FieldStorage()\n host = form.getvalue('host')\n user = form.getvalue('user')\n passwd = form.getvalue('passwd')\n cert = form.getvalue('cert')\n proxy = form.getvalue('proxy')\n name = form.getvalue('name')\n return (host, user, passwd, cert, proxy, name)", "def get_dynamic_form_fields(self) -> Mapping[str, Any] | None:\n form_fields: Mapping[str, Any] | list[Any] | None = self.data.get(\"dynamic_form_fields\")\n if not form_fields:\n return None\n\n # Although this can be done with dict comprehension, looping for clarity.\n if isinstance(form_fields, list):\n fields = {}\n for field in form_fields:\n if \"name\" in field:\n fields[field[\"name\"]] = field\n return fields\n return form_fields", "def 
deliver_post(data, access=None):\n\n schema = get_post_schema(data)\n return deliver_fields(schema, data, access)", "def form(self):\n\t\treturn self._form", "def postprocess(self, inputs: Dict[str, Any], **kwargs) -> Dict[str, Any]:\n return inputs", "def post_contents(self):\r\n return self._post", "def payload_from_form(form, prefix='', delete=False):\n\n prefix = f'{prefix}-' if prefix else ''\n payload = {f'{prefix}{k}': form[k].value() for k, v in form.fields.items() if form[k].value()}\n if getattr(form.instance, 'id'):\n payload['id'] = form.instance.id\n\n if delete:\n payload['delete'] = True\n return payload", "def get_form_kwargs(self, form_key):\n kwargs = {\n \"initial\": {\"multiform_key\": form_key, **self.get_initial(form_key)},\n }\n if (\n self.request.method in [\"POST\", \"PUT\"]\n and self.request.POST[\"multiform_key\"] == form_key\n ):\n kwargs.update(\n {\"data\": self.request.POST, \"files\": self.request.FILES,}\n )\n return kwargs", "def postprocess(self, data, pagination):\n self.inject_data_hook(data)\n # Serialize ``data`` to python data structures\n python_data = self.serialize_to_python(data)\n # finalize any pending data processing\n self.finalize_pending(data)\n # Package the python_data to a dictionary\n return self.package(python_data, pagination)", "def __call__(self):\n \n form, error, appstruct = self.validate()\n \n data = None\n if error is not None:\n data = self.failure(error)\n if appstruct is not None:\n data = self.success(appstruct)\n return data if data is not None else {}", "def form_data(self, password1, password2=None):\n form_data = {'old_password': self.pwd}\n if password2 is None:\n password2 = password1\n form_data[self.password_field + '1'] = password1\n form_data[self.password_field + '2'] = password2\n return form_data", "def validate(self, data):\n # calling subserializer validate method (fields, and presets)\n data = super(FormidableSerializer, self).validate(data)\n # we check every field define in presets are define inside the form.\n if 'fields' in data and 'presets' in data:\n data = self.check_presets_cohesion(data)\n return data", "def get_form_kwargs(self):\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs", "def process_form_submission(self, form):\n return self.lookup_response_class.process_form_submission(self, form)", "def serialize_to_python(self, data):\n # NOTE: The request level field selection doesn not work if the\n # handler's ``template`` attribute uses ``django-preserialize``'s\n # pseudo selectors\n # See\n # <https://github.com/bruth/django-preserialize#my-model-has-a-ton-of-fields-and-i-dont-want-to-type-them-all-out-what-do-i-do>\n # It only works when the ``fields`` are defined one by one in a list.\n field_selection = set(self.request.GET.getlist('field'))\n if field_selection:\n intersection = field_selection.intersection(\n set(self.template['fields'])\n )\n template = {key: value for key, value in self.template.items()}\n template['fields'] = intersection\n return preserializer.serialize(data, **template)\n\n return preserializer.serialize(data, **self.template)", "def ingest_form_vars(request):\n data = {}\n for param, value in request.arguments.items():\n for i, item in enumerate(value):\n item = item.decode('utf-8')\n item = strings.as_numeric(item)\n value[i] = item\n data[param] = value[0] if len(value) == 1 else value\n return data", "def complete_form_data():\n\n missing_fields = {\n 'link' : 'http://bvsalud.org',\n 'originator' : 'BIREME',\n 'source_type': 
1,\n 'source_language': 1,\n 'originator_location' : 1,\n\n 'main-descriptor-content_type-object_id-TOTAL_FORMS' : '1',\n\n 'main-descriptor-content_type-object_id-0-id' : '',\n 'main-descriptor-content_type-object_id-0-text' : 'malaria',\n 'main-descriptor-content_type-object_id-0-code' : '^d8462',\n 'main-descriptor-content_type-object_id-0-status' : '0',\n\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS' : '1',\n 'main-resourcethematic-content_type-object_id-0-thematic_area' : '1',\n 'main-resourcethematic-content_type-object_id-0-status' : '0',\n }\n\n complete_form_data = minimal_form_data()\n complete_form_data.update(missing_fields)\n\n return complete_form_data", "def decode_input_data(self, rawdata):\n return self.get_content_type().loads(rawdata, self)", "def _getPostData(data):\n\tparams = data.split(\"\\r\\n\\r\\n\")[1]\n\treturn params.split(\"&\")", "def get_session_form_data_as_dict(self, request, job_name):\n data_dict = OrderedDict()\n for value_dict in self.get_session_form_data_as_list(request, job_name):\n data_dict[value_dict['name']] = value_dict['value']\n return data_dict" ]
[ "0.79507184", "0.76475173", "0.738958", "0.73285085", "0.7258168", "0.6955681", "0.6792374", "0.65343493", "0.65144867", "0.6490603", "0.6459408", "0.6415936", "0.63819605", "0.63031137", "0.6298642", "0.6277921", "0.62373143", "0.6232082", "0.62192225", "0.60904664", "0.6056065", "0.6030949", "0.60233426", "0.59961677", "0.5961773", "0.5942794", "0.59405684", "0.5911468", "0.5902201", "0.58929735", "0.5868904", "0.5819991", "0.57929116", "0.5771258", "0.57638115", "0.5761413", "0.5756662", "0.5751492", "0.5738831", "0.5735935", "0.5733567", "0.57205945", "0.57038295", "0.56908286", "0.5674853", "0.56596625", "0.5657592", "0.5654229", "0.56441927", "0.56214845", "0.56173366", "0.5604944", "0.55999845", "0.55869734", "0.5584914", "0.5581603", "0.5567569", "0.5565684", "0.55569243", "0.5556009", "0.5545823", "0.5509508", "0.55063385", "0.547971", "0.54491097", "0.5447577", "0.5441229", "0.54289657", "0.54244065", "0.54194754", "0.5417856", "0.5408662", "0.5407582", "0.5394647", "0.5386529", "0.53654724", "0.535761", "0.535322", "0.5351853", "0.5345795", "0.53357124", "0.5326908", "0.5321672", "0.5315899", "0.530742", "0.53065944", "0.5304472", "0.5286497", "0.5285198", "0.5283171", "0.5274629", "0.5269422", "0.52693063", "0.52627575", "0.5259702", "0.52590364", "0.5253005", "0.52503073", "0.5234241", "0.52318424", "0.52236915" ]
0.0
-1
maps must match for joining to work
def mapsMatch(m1,m2):
    same = True
    f1 = file(m1,'r').readlines()
    f2 = file(m2,'r').readlines()
    for i, row in enumerate(f1):
        row = row.strip().split()
        row2 = f2[i].strip().split()
        if row[0] <> row2[0]:
            same = False
            break
    return same
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_mappingtojoin(self):\n usersaddresses = sql.join(users, addresses, users.c.user_id == addresses.c.user_id)\n m = mapper(User, usersaddresses, primary_key=[users.c.user_id])\n q = create_session().query(m)\n l = q.all()\n self.assert_result(l, User, *user_result[0:2])", "def test_if_map_are_empty(table_one, empty_hash_table):\n assert left_join(table_one, empty_hash_table) == [{'fond': 'enamored'}, {'guide': 'usher'}, {'diligent': 'employed'}, {'wrath': 'anger'}]", "def dojoin(ipath1,ipath2,opath):\n r1 = '%s.map' % ipath1\n r2 = '%s.map' % ipath2\n if not mapsMatch(r1,r2):\n print '### maps %s and %s do not match' % (r1,r2)\n sys.exit(1)\n outpath = '%s.map' % opath\n shutil.copyfile(r1,outpath)\n r1 = '%s.eigenstratgeno' % ipath1\n r2 = '%s.eigenstratgeno' % ipath2\n outpath = '%s.eigenstratgeno' % opath\n joinRows(r1,r2,outpath)\n outpath = '%s.ind' % opath\n r1 = '%s.ind' % ipath1\n r2 = '%s.ind' % ipath2\n joinInds(r1,r2,outpath)", "def test_mappingtoouterjoin(self):\n result = [\n {'user_id' : 7, 'address_id' : 1},\n {'user_id' : 8, 'address_id' : 2},\n {'user_id' : 8, 'address_id' : 3},\n {'user_id' : 8, 'address_id' : 4},\n {'user_id' : 9, 'address_id':None}\n ]\n\n j = join(users, addresses, isouter=True)\n m = mapper(User, j, allow_null_pks=True, primary_key=[users.c.user_id, addresses.c.address_id])\n q = create_session().query(m)\n l = q.all()\n self.assert_result(l, User, *result)", "def map():", "def smap(self, records, task):\n for key, json in records:\n record = happy.json.decode(json)\n if record.has_key(self.joinkey1):\n record['__joinorder__'] = 1\n task.collect(record[self.joinkey1], 1, happy.json.encode(record))\n if record.has_key(self.joinkey2):\n record['__joinorder__'] = 2\n task.collect(record[self.joinkey2], 2, happy.json.encode(record))", "def _merge_mapper(mapper1, mapper2):\n if len(mapper1) > 0:\n if len(mapper2) > 0:\n clusters1 = mapper1['cluster']\n clusters2 = mapper2['cluster']\n clusters = np.unique(np.concatenate((clusters1, clusters2), 0))\n\n mapper1['cluster'] = clusters\n mapper1['links'] += mapper2['links']\n else:\n mapper1 = mapper2\n return mapper1", "def requires_mapping(self):", "def testCorrectJoin(self):\n b_tree = OOBTree()\n b_tree.update({1: \"Monkey D. Luffy\", 2: \"Roronoa Zoro\", 3: \"Nami\"})\n failed_counter = 0\n key = 1\n data = {\"from\":\"East Blue\"}\n (mod_data, mod_tree, failed_counter) = self.processing.join(b_tree, key, data, failed_counter)\n self.assertEqual(mod_data, {\"from\":\"East Blue\", \"right_data\":\"Monkey D. 
Luffy\"})\n self.assertEqual(len(mod_tree), 2)\n self.assertEqual(failed_counter, 0)", "def applyMapping(self):\n pass", "def _test_map_repeatability():\n map1 = map.Map(config.MAP_HEIGHT, config.MAP_WIDTH, 3)\n map1.random_seed = libtcod.random_save(0)\n _build_map(map1)\n\n map2 = map.Map(config.MAP_HEIGHT, config.MAP_WIDTH, 3)\n map2.random_seed = map1.random_seed\n _build_map(map2)\n\n assert map1.terrain == map2.terrain\n for i in range(len(map1.rooms)):\n assert map1.rooms[i] == map2.rooms[i]", "def edge_mapping(self):\n ...", "def compatible(self, other):\n return (hasattr(other, 'map') and super(rmap, self).compatible(other))", "def _rewrite_join(self, node: saldag.Join):\n\n left_in_rel = node.get_left_in_rel()\n right_in_rel = node.get_right_in_rel()\n\n left_join_cols = node.left_join_cols\n right_join_cols = node.right_join_cols\n\n num_join_cols = len(left_join_cols)\n\n out_join_cols = node.out_rel.columns[:num_join_cols]\n key_cols_coll_sets = []\n for i in range(len(left_join_cols)):\n key_cols_coll_sets.append(utils.merge_coll_sets(\n left_join_cols[i].coll_sets, right_join_cols[i].coll_sets))\n out_join_cols[i].coll_sets = key_cols_coll_sets[i]\n\n abs_idx = len(left_join_cols)\n for in_col in left_in_rel.columns:\n if in_col not in set(left_join_cols):\n for key_col_coll_sets in key_cols_coll_sets:\n node.out_rel.columns[abs_idx].coll_sets = \\\n utils.merge_coll_sets(key_col_coll_sets, in_col.coll_sets)\n abs_idx += 1\n\n for in_col in right_in_rel.columns:\n if in_col not in set(right_join_cols):\n for key_col_coll_sets in key_cols_coll_sets:\n node.out_rel.columns[abs_idx].coll_sets = \\\n utils.merge_coll_sets(key_col_coll_sets, in_col.coll_sets)\n abs_idx += 1", "def _do_mapping(self):\n pass", "def mergelots(bigdict, tblstojoin, joincol, how='outer'):\n for tbl in tblstojoin:\n if tbl == tblstojoin[0]:\n bigtbl = bigdict[tbl].copy()\n else:\n bigtbl = bigtbl.merge(bigdict[tbl], how=how, on=joincol)\n return bigtbl", "def natural_join(*Rs):\n for rs in product(*Rs):\n #need to test all combintions of table rows to see if they conflict\n if all([dict_match(r, s) for r, s in combinations(rs, 2)]):\n yield reduce(dict_merge, rs, {})", "def join(self, a, *args):\n mapping = self._mapping\n set_a = mapping.setdefault(a, [a])\n\n for arg in args:\n set_b = mapping.get(arg)\n if set_b is None:\n set_a.append(arg)\n mapping[arg] = set_a\n elif set_b is not set_a:\n if len(set_b) > len(set_a):\n set_a, set_b = set_b, set_a\n set_a.extend(set_b)\n for elem in set_b:\n mapping[elem] = set_a", "def receiverMapping():", "def joined(self, a, b):\n mapping = self._mapping\n try:\n return mapping[a] is mapping[b]\n except KeyError:\n return False", "def _sanitize_joins(self) -> None:\n\n self.primaryjoin = _deep_deannotate(\n self.primaryjoin, values=(\"parententity\", \"proxy_key\")\n )\n if self.secondaryjoin is not None:\n self.secondaryjoin = _deep_deannotate(\n self.secondaryjoin, values=(\"parententity\", \"proxy_key\")\n )", "def Map(a, b):\n out = {}\n for key, value in a.items():\n if key in b:\n out[value] = b[key]\n return out", "def test_double_same_mappers(self):\n\n mapper(Address, addresses)\n mapper(Order, orders, properties={\n 'items':relation(Item, secondary=order_items, lazy=False, order_by=items.c.id),\n })\n mapper(Item, items)\n mapper(User, users, properties = dict(\n addresses = relation(Address, lazy=False),\n open_orders = relation(Order, primaryjoin = and_(orders.c.isopen == 1, users.c.id==orders.c.user_id), lazy=False),\n closed_orders = relation(Order, 
primaryjoin = and_(orders.c.isopen == 0, users.c.id==orders.c.user_id), lazy=False)\n ))\n q = create_session().query(User)\n\n def go():\n assert [\n User(\n id=7,\n addresses=[Address(id=1)],\n open_orders = [Order(id=3, items=[Item(id=3), Item(id=4), Item(id=5)])],\n closed_orders = [Order(id=1, items=[Item(id=1), Item(id=2), Item(id=3)]), Order(id=5, items=[Item(id=5)])]\n ),\n User(\n id=8,\n addresses=[Address(id=2), Address(id=3), Address(id=4)],\n open_orders = [],\n closed_orders = []\n ),\n User(\n id=9,\n addresses=[Address(id=5)],\n open_orders = [Order(id=4, items=[Item(id=1), Item(id=5)])],\n closed_orders = [Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])]\n ),\n User(id=10)\n\n ] == q.all()\n self.assert_sql_count(testing.db, go, 1)", "def _natural_join_step(self, op: data_algebra.data_ops_types.OperatorPlatform, *, data_map: Dict[str, Any]):\n if op.node_name != \"NaturalJoinNode\":\n raise TypeError(\n \"op was supposed to be a data_algebra.data_ops.NaturalJoinNode\"\n )\n inputs = [self._compose_polars_ops(s, data_map=data_map) for s in op.sources]\n assert len(inputs) == 2\n how = op.jointype.lower()\n if how == \"full\":\n how = \"outer\"\n if how != \"right\":\n coalesce_columns = (\n set(op.sources[0].columns_produced()).intersection(op.sources[1].columns_produced()) \n - set(op.on_a))\n orphan_keys = [c for c in op.on_b if c not in set(op.on_a)]\n input_right = inputs[1]\n if len(orphan_keys) > 0:\n input_right = input_right.with_columns([\n pl.col(c).alias(f\"{c}_da_join_tmp_key\") for c in orphan_keys\n ])\n res = inputs[0].join(\n input_right,\n left_on=op.on_a,\n right_on=op.on_b,\n how=how,\n suffix = \"_da_right_tmp\",\n )\n if len(coalesce_columns) > 0:\n res = res.with_columns([\n pl.when(pl.col(c).is_null())\n .then(pl.col(c + \"_da_right_tmp\"))\n .otherwise(pl.col(c))\n .alias(c)\n for c in coalesce_columns\n ])\n if len(orphan_keys) > 0:\n res = res.rename({f\"{c}_da_join_tmp_key\": c for c in orphan_keys})\n else:\n # simulate right join with left join\n coalesce_columns = (\n set(op.sources[0].columns_produced()).intersection(op.sources[1].columns_produced()) \n - set(op.on_b))\n orphan_keys = [c for c in op.on_a if c not in set(op.on_b)]\n input_right = inputs[0]\n if len(orphan_keys) > 0:\n input_right = input_right.with_columns([\n pl.col(c).alias(f\"{c}_da_join_tmp_key\") for c in orphan_keys\n ])\n res = inputs[1].join(\n input_right,\n left_on=op.on_b,\n right_on=op.on_a,\n how=\"left\",\n suffix = \"_da_left_tmp\",\n )\n if len(coalesce_columns) > 0:\n res = res.with_columns([\n pl.when(pl.col(c + \"_da_left_tmp\").is_null())\n .then(pl.col(c))\n .otherwise(pl.col(c + \"_da_left_tmp\"))\n .alias(c)\n for c in coalesce_columns\n ])\n if len(orphan_keys) > 0:\n res = res.rename({f\"{c}_da_join_tmp_key\": c for c in orphan_keys})\n res = res.select(op.columns_produced())\n return res", "def join_op(it1, it2):\n\n d = defaultdict(list)\n for tpl in it1:\n d[tpl[0]].append(tpl)\n for tpl in it2:\n matches = d[tpl[0]]\n for match in matches:\n yield match + tpl[1:]", "def test_mapping_switch():\n\tassert nset != oset", "def test_if_two_tables(table_one, table_two):\n assert left_join(table_one, table_two) == [['fond', 'enamored', 'averse'], ['guide', 'usher', 'follow'], ['diligent', 'employed', 'idle'], ['wrath', 'anger', 'deligth']]", "def left_join(table_one, table_two):\n joined = {}\n\n if type(table_one) is not HashTable or type(table_two) is not HashTable:\n raise TypeError('You must use hash tables!')\n\n for key in table_one:\n 
joined[key] = [table_one.get(key), 'NULL']\n\n for key in table_two:\n if key in joined:\n joined[key][1] = table_two.get(key)\n\n return joined", "def _rewrite_join(self, node: saldag.Join):\n\n if node.is_mpc:\n out_rel = node.out_rel\n key_col_idx = 0\n # oversimplifying here. what if there are multiple singleton\n # coll_sets?\n singleton_coll_sets = filter(\n lambda s: len(s) == 1,\n out_rel.columns[key_col_idx].coll_sets)\n singleton_coll_sets = sorted(list(singleton_coll_sets))\n if singleton_coll_sets:\n trusted_party = next(iter(singleton_coll_sets[0]))\n hybrid_join_op = saldag.HybridJoin.from_join(node, trusted_party)\n parents = hybrid_join_op.parents\n for par in parents:\n par.replace_child(node, hybrid_join_op)", "def joins((u,v,o)):\r\n return { W : ((u,v), (u-1,v)),\r\n S : ((u,v), (u,v-1)) }[o]", "def test_left_join_method_is_successful(setup1, setup2):\n assert left_join(setup1, setup2) == [['Apple', 'Jack']]", "def sjoin(left_df, right_df, how=..., op=..., lsuffix=..., rsuffix=...):\n ...", "def test_join(self):\n s = djset()\n s.add([1, 2, 3])\n s.add([4, 5, 6])\n s.add([2, 5])\n self.assertEquals({1, 2, 3, 4, 5, 6}, s.data[1])\n self.assertFalse(2 in s.data)", "def spatial_join_facilities(left,\n right,\n lid_property,\n rid_property,\n lsimilarity_properties,\n rsimilarity_properties,\n similarity_weights=None,\n distance=150,\n dist_epsg='EPSG:5070',\n merge_unmatched=True):\n\n # Reproject to EPSG:5070 (NAD83 / Conus Albers)\n left = left.to_crs('EPSG:5070')\n right = right.to_crs('EPSG:5070')\n\n lprefix = 'left_'\n rprefix = 'right_'\n\n # Add prefixes\n left = left.add_prefix('{}_'.format(lprefix))\n right = right.add_prefix('{}_'.format(rprefix))\n def prefix(prefix, name):\n return '{}_{}'.format(prefix, name)\n\n # Reset geometry\n left['geometry'] = left[prefix(lprefix, 'geometry')]\n right['geometry'] = right[prefix(rprefix, 'geometry')]\n\n # Save point location to new field\n left['point_geometry'] = left['geometry']\n\n # Buffer\n left['geometry'] = left.buffer(distance)\n\n joined = gpd.sjoin(left, right, how='left', op='intersects')\n\n # Map columns to account for prefix\n pre_lid_property = '{}_{}'.format(lprefix, lid_property)\n pre_rid_property = '{}_{}'.format(rprefix, rid_property)\n lsimilarity_properties = ['{}_{}'.format(lprefix, p)\n for p in lsimilarity_properties]\n rsimilarity_properties = ['{}_{}'.format(rprefix, p)\n for p in rsimilarity_properties]\n\n # Deduplication - use a similarity index on name and address\n # and choose the matched item that has the highest score.\n sim_prop_len = len(lsimilarity_properties)\n assert sim_prop_len == len(rsimilarity_properties)\n if similarity_weights is None:\n similarity_weights = [1 / sim_prop_len for x in lsimilarity_properties]\n\n def get_similarity_score(row):\n score = 0.0\n for ((l, r), w) in zip(zip(lsimilarity_properties, rsimilarity_properties), similarity_weights):\n # For joins that don't match, right properties give float 'nan' value.\n if type(row[r]) is str:\n score += (fuzz.ratio(row[l], row[r]) * w)\n return score\n\n joined['similarity'] = joined.apply(get_similarity_score, axis = 1)\n\n joined_and_matched = joined[~joined[pre_rid_property].isnull()]\n joined_and_unmatched = joined[joined[pre_rid_property].isnull()]\n\n # Deduplicate left's object_ids\n deduplicated = joined_and_matched.sort_values(by=[pre_lid_property, 'similarity'], ascending=False) \\\n .drop_duplicates(subset=[pre_lid_property])\n\n # Deduplicate any 'right' facilities that were matched to more than one\n # 
'left' facility. Do this by creating another dataframe containing the 'right'\n # facility and it's closest match, joining back with the dataframe,\n # and reseting matches on any where the identified 'left' ID isn't the same\n # as the row's 'left' ID.\n right_matches = deduplicated[[pre_rid_property, pre_lid_property, 'similarity']]\n right_matches = right_matches.sort_values(by=[pre_rid_property, 'similarity'], ascending=False)\n right_matches = right_matches.drop_duplicates(subset=[pre_rid_property])\n right_matches = right_matches.rename({pre_lid_property: 'matched_left_id'}, axis=1)\n deduplicated = deduplicated.merge(right_matches, on=pre_rid_property)\n deduplicated.loc[\n deduplicated[pre_lid_property] != deduplicated['matched_left_id'],\n pre_rid_property\n ] = np.NaN\n\n # Rejoin unmatched.\n deduplicated = pd.concat([deduplicated, joined_and_unmatched])\n\n # Join back 'right' unmatched if desired.\n if merge_unmatched:\n matched_right_ids = deduplicated[pre_rid_property]\n unmatched_right = right[~right[pre_rid_property].isin(matched_right_ids)]\n\n all_facilities = pd.concat([deduplicated, unmatched_right])\n else:\n all_facilities = deduplicated\n\n result = all_facilities.rename({ pre_lid_property: lid_property,\n pre_rid_property: rid_property }, axis=1)\n\n return result[[lid_property, rid_property]]", "def intercambiar(mapa, mapa2):\n for e in mapa.bloqueadas:\n mapa2.bloqueadas.append(e)", "def find_map(x1, x2, allow_double=False, join=True):\n kdtree = cKDTree(nm.vstack([x1, x2]))\n cmap = kdtree.query_pairs(eps, output_type='ndarray')\n\n dns1 = nm.where(cmap[:, 1] < x1.shape[0])[0]\n dns2 = nm.where(cmap[:, 0] >= x1.shape[0])[0]\n\n if (dns1.size + dns2.size):\n output('double node(s) in:')\n for dn in dns1:\n idxs = cmap[dn, :]\n output('x1: %d %d -> %s %s' % (idxs[0], idxs[1],\n x1[idxs[0], :], x1[idxs[1], :]))\n for dn in dns2:\n idxs = cmap[dn, :]\n output('x2: %d %d -> %s %s' % (idxs[0], idxs[1],\n x2[idxs[0], :], x2[idxs[1], :]))\n\n if not allow_double:\n raise ValueError('double node(s)! 
(see above)')\n\n cmap[:, 1] -= x1.shape[0]\n\n return cmap if join else (cmap[:, 0], cmap[:, 1])", "def _resolve_join_columns(left_table, right_table, indices):\n return [ merge_columns(left_table.columns[li], right_table.columns[ri]) for li, ri in indices ]", "def ana_merge_senzory_map(datas):\n#TODO: improve senzory map merging\n return iter(datas.viewvalues()).next()['senzory_map']", "def merge_maps(self, map_2d):\n x = map_2d.data.max(0, keepdim=True)[0]\n y = map_2d.data.max(1, keepdim=True)[0]\n return x, y", "def inner_join(sorted1, sorted2, key1, key2):\n p1 = 0\n p2 = 0\n result = []\n\n while (p1 < len(sorted1) and p2 < len(sorted2)):\n # if entries\n if sorted1[p1][key1] == sorted2[p2][key2]:\n entry = {}\n entry.update(sorted1[p1])\n entry.update(sorted2[p2])\n result.append(entry)\n p2 += 1\n elif sorted1[p1][key1] < sorted2[p2][key2]:\n p1 += 1\n elif sorted1[p1][key1] > sorted2[p2][key2]:\n p2 += 1\n return result", "def test_combine_mappings(self):\r\n\r\n self.tmp_dir = mkdtemp(dir=\"./\", suffix=\"/\")\r\n\r\n combine_mappings(\r\n fasta,\r\n denoiser_mapping,\r\n denoised_seqs,\r\n otu_picker_map,\r\n self.tmp_dir)\r\n\r\n observed_otu_map = \"\".join(\r\n list(open(self.tmp_dir + \"/denoised_otu_map.txt\")))\r\n\r\n expected_otu_map = \"\"\"1:\\tS1_1\\tS1_2\\tS2_4\\tS2_5\r\n2:\\tS2_3\\tS1_6\r\n\"\"\"\r\n self.assertEqual(observed_otu_map, expected_otu_map)\r\n\r\n observed_fasta = \"\".join(\r\n list(open(self.tmp_dir + \"/denoised_all.fasta\")))\r\n expected_fasta = \"\"\">S1_1 Read1\r\nAAA\r\n>S1_2 Read2\r\nTTT\r\n>S2_3 Read3\r\nGGG\r\n\"\"\"\r\n self.assertEqual(observed_fasta, expected_fasta)", "def join_map_and_trim(mapped, trimmed):\n out_file = \"%sfinal.fastq\" % os.path.commonprefix([mapped, trimmed])\n if not os.path.exists(out_file):\n with open(out_file, \"w\") as out_handle:\n for fname in [mapped, trimmed]:\n with open(fname) as in_handle:\n for line in in_handle:\n out_handle.write(line)\n return out_file", "def _join():\n df = pd.DataFrame({'key': ['K0', 'K1', 'K2', 'K3', 'K4', 'K5'],\n 'A': ['A0', 'A1', 'A2', 'A3', 'A4', 'A5']})\n other = pd.DataFrame({'key': ['K0', 'K1', 'K2'],\n 'B': ['B0', 'B1', 'B2']})\n print(df.join(other, lsuffix='_caller', rsuffix='_other')) # 为重复 column 添加前缀\n print(df.set_index('key').join(other.set_index('key')))\n print(df.join(other.set_index('key'), on='key', how='right')) # left,right表示以哪边的index为准\n print(df.join(other.set_index('key'), on='key', how='inner')) # inner,outer 表示交集、并集", "def test_join(trees, leaves, maps, nodeA, nodeB):\n cladeA = get_leaf_set(nodeA)\n cladeB = get_leaf_set(nodeB)\n cladeAB = cladeA.union(cladeB)\n\n if len(cladeA.intersection(cladeB)) > 0:\n raise Exception(\"Nodes are not disjoint on their leaf sets!\\n\")\n\n nodeAinTs = []\n nodeBinTs = []\n for i, tree in enumerate(trees):\n leaf = leaves[i]\n\n if cladeA == leaf:\n nodeAinTs.append(nodeA)\n else:\n nodeAinTs.append(get_node_from_clade(tree, maps[i], cladeA))\n\n if cladeB == leaf:\n nodeBinTs.append(nodeB)\n else:\n nodeBinTs.append(get_node_from_clade(tree, maps[i], cladeB))\n\n nAinTs = []\n for nodeAinT in nodeAinTs:\n nAinTs.append(nodeAinT is not None)\n\n if sum(nAinTs) < 1:\n raise Exception(\"Node A was not found in any tree!\\n\")\n\n nBinTs = []\n for nodeBinT in nodeBinTs:\n nBinTs.append(nodeBinT is not None)\n\n if sum(nBinTs) < 1:\n raise Exception(\"Node B was not found in any tree!\\n\")\n\n violates = False\n for i, tree1 in enumerate(trees[:-1]):\n map1 = maps[i]\n nAinT1 = nAinTs[i]\n nBinT1 = nBinTs[i]\n nodeAinT1 
= nodeAinTs[i]\n nodeBinT1 = nodeBinTs[i]\n j = i\n for tree2 in trees[i+1:]:\n j = j + 1\n map2 = maps[j]\n nAinT2 = nAinTs[j]\n nBinT2 = nBinTs[j]\n nodeAinT2 = nodeAinTs[j]\n nodeBinT2 = nodeBinTs[j]\n if (sum([nAinT1, nAinT2]) > 0) and (sum([nBinT1, nBinT2]) > 0):\n if nAinT1 and nAinT2:\n # nodeA in *both* T1 and T2\n if nBinT1 and nBinT2:\n # Case 1: nodeB in *both* T1 and T2\n # Valid if nodeA and nodeB are siblings in both T1 & T2\n node1 = get_node_from_clade(tree1, map1, cladeAB)\n node2 = get_node_from_clade(tree2, map2, cladeAB)\n if (node1 is None) or (node2 is None):\n violates = True\n elif nBinT1:\n # Case 2: Node B in T1 only\n # Valid if nodeA and nodeB are siblings in T1\n node = get_node_from_clade(tree1, map1, cladeAB)\n if node is None:\n violates = True\n elif nBinT2:\n # Case 3: Node B in T2 only\n # Valid if nodeA and nodeB are siblings in T2\n node = get_node_from_clade(tree2, map2, cladeAB)\n if node is None:\n violates = True\n else:\n raise Exception(\"Node B not found in either tree!\\n\")\n elif nAinT1:\n # nodeA in T1 only\n if nBinT1 and nBinT2:\n # Case 4: nodeB in *both* T1 and T2\n node = get_node_from_clade(tree1, map1, cladeAB)\n if node is None:\n violates = True\n elif nBinT1:\n # Case 5: Node B in T1 only\n # Valid if nodeA and nodeB are siblings in T1\n node = get_node_from_clade(tree1, map1, cladeAB)\n if node is None:\n violates = True\n elif nBinT2:\n # Case 6: Node B in T2 only\n # Do join in both trees and test for compatibility\n # TODO: Remove deep copies!\n t1 = deepcopy(tree1)\n t2 = deepcopy(tree2)\n nA = deepcopy(nodeAinT1)\n nB = deepcopy(nodeBinT2)\n [t1, t2] = join_nodes_in_both_trees(t1, nA, cladeA,\n t2, nB, cladeB,\n test=True)\n if t1 is not None:\n violates = are_two_trees_incompatible(t1, t2)\n else:\n raise Exception(\"Node B not found in either tree!\\n\")\n elif nAinT2:\n # nodeA in T2 only\n if nBinT1 and nBinT2:\n # Case 7: nodeB in *both* T1 and T2\n node = get_node_from_clade(tree2, map2, cladeAB)\n if node is None:\n violates = True\n elif nBinT1:\n # Case 8 (reverse of Case 6): Node B in T1 only\n # Do join in both trees and test for compatibility\n # TODO: Remove deep copies!\n t1 = deepcopy(tree1)\n t2 = deepcopy(tree2)\n nA = deepcopy(nodeAinT2)\n nB = deepcopy(nodeBinT1)\n [t1, t2] = join_nodes_in_both_trees(t1, nB, cladeB,\n t2, nA, cladeA,\n test=True)\n if t1 is not None:\n violates = are_two_trees_incompatible(t1, t2)\n elif nBinT2:\n # Case 9: Node B in T2 only\n # Only valid if nodeA and nodeB are siblings in T2\n node = get_node_from_clade(tree2, map2, cladeAB)\n if node is None:\n violates = True\n else:\n raise Exception(\"Node B not found in either tree!\\n\")\n else:\n raise Exception(\"Node A not found in either tree!\\n\")\n\n if violates:\n return violates\n\n return violates", "def joint_map(*mappings):\n\n def fget(self):\n return mappings[0].fget(self)\n\n def fset(self, value):\n for m in mappings:\n m.fset(self, value)\n\n return property(fget=fget, fset=fset)", "def _plan_joins(projection, condition_fields, relations, db):\n joinmap = {}\n added = set()\n relset = set(relations)\n for qname in projection + list(condition_fields):\n if qname not in added:\n rel, _, col = qname.rpartition('.')\n relset.add(rel)\n joinmap.setdefault(rel, []).append(col)\n added.add(qname)\n # add necessary relations to span all requested relations\n keymap = _make_keymap(db)\n relset.update(_pivot_relations(relset, keymap, db))\n # always add keys\n for relation in relset:\n for field in db.schema[relation]:\n if 
field.is_key:\n qname = f'{relation}.{field.name}'\n if qname not in added:\n joinmap.setdefault(relation, []).append(field.name)\n # finally ensure joins occur in a valid order\n joined_keys = set()\n joins = []\n while joinmap:\n changed = False\n for rel in list(joinmap):\n if not joins or joined_keys.intersection(joinmap[rel]):\n joins.append((rel, joinmap.pop(rel)))\n joined_keys.update(keymap[rel])\n changed = True\n break\n if not changed:\n raise TSQLError('infinite loop detected!')\n\n return joins", "def cross_join(left, right, suffixes=(\"_left\", \"_right\")):\n left[\"_TMP_KEY\"] = 1\n right[\"_TMP_KEY\"] = 1\n res = pd.merge(left, right, on=\"_TMP_KEY\", suffixes=suffixes).drop(\"_TMP_KEY\", axis=1)\n left.drop(\"_TMP_KEY\", axis=1, inplace=True)\n right.drop(\"_TMP_KEY\", axis=1, inplace=True)\n return res", "def mapping_for_switch(mapping):\n return {key[0]: value for key, value in mapping.items()}", "def _add_mapping(self, mother_element: GraphElement,\n daughter_element: GraphElement) -> None:\n pass", "def concat(column_based_table_1: dict[str, list[str]], column_based_table_2: dict[str, list[str]]) -> dict[str, list[str]]:\n combined_data_table: dict[str, list[str]] = {}\n for column in column_based_table_1:\n combined_data_table[column] = column_based_table_1[column]\n keys_list = list(combined_data_table.keys())\n for column in column_based_table_2:\n if column in keys_list:\n column_data = combined_data_table[column]\n column_data_2 = column_based_table_2[column]\n # append to list\n for item in column_data_2:\n column_data.append(item)\n combined_data_table[column] = column_data\n else:\n combined_data_table[column] = column_based_table_2[column]\n return combined_data_table", "def _series_merging_map(self, map_list, feature_option=\"sift\"):\n print(\" --- Start ---\")\n # Transform state into 3 specified values\n for i in range(len(map_list)):\n map_list[i] = cv2.cvtColor(map_list[i], cv2.COLOR_RGB2GRAY)\n map_list[i] = MF._transform_state(map_list[i])\n \n\n map_ref = map_list[0]\n for i in range(len(map_list)-1):\n map_align = map_list[i+1]\n\n \n if feature_option == \"orb\":\n orb = cv2.ORB_create()\n key_points_1, descriptor_1 = orb.detectAndCompute(map_ref, None)\n key_points_2, descriptor_2 = orb.detectAndCompute(map_align, None)\n \n elif feature_option == \"surf\":\n surf = cv2.xfeatures2d.SURF_create(400)\n key_points_1, descriptor_1 = surf.detectAndCompute(map_ref, None)\n key_points_2, descriptor_2 = surf.detectAndCompute(map_align, None)\n else:\n siftDetector = cv2.xfeatures2d.SIFT_create()\n key_points_1, descriptor_1 = siftDetector.detectAndCompute(map_ref, None)\n key_points_2, descriptor_2 = siftDetector.detectAndCompute(map_align, None)\n\n bf = cv2.BFMatcher()\n matches = bf.knnMatch(descriptor_1, descriptor_2, k=2)\n\n good = []\n for m, n in matches:\n if m.distance < 0.75*n.distance:\n good.append(m)\n \n pts_1, pts_2 = [], []\n for i in good:\n query_idx = i.queryIdx\n train_idx = i.trainIdx\n\n pts_1.append([\n key_points_1[query_idx].pt[0],\n key_points_1[query_idx].pt[1],\n ])\n pts_2.append([\n key_points_2[train_idx].pt[0],\n key_points_2[train_idx].pt[1],\n ])\n \n pts1 = np.array(pts_1)\n pts2 = np.array(pts_2)\n\n # relation, value, _ = RMM._ransac_find_rotation_translation(pts_set_1=pts2, pts_set_2=pts1, sigma=0.5, max_iter=5000)\n # print(\"- Inlier Percent: %f\"%value)\n # # Because the coordinates between the maps and the SIFT features are different:\n # # SIFT Features: Right: +x, Down: +y\n # # Maps: Down: +x, Right: +y\n # # 
Hence the dx and dy should be changed.\n # dx = relation[1]\n # dy = relation[0]\n # dyaw = relation[2]\n # print(\"- (x, y, t): (%f, %f, %f)\"%(dx,dy,dyaw))\n\n # # index, agr, dis = RMM._similarity_index(x=[dy, dx, dyaw], map1=map_ref, map2=map_align)\n # # print(\"Similarity Index: %f\\nAgree Number: %f\\nDisargee Number: %f\"%(index, agr, dis))\n # index, agr, dis, _ = RMM._similarity_index_2(x=[dx, dy, dyaw], map1=map_ref, map2=map_align)\n # print(\"- Similarity Index: %f\\n- Agree Number: %f\\n- Disargee Number: %f\"%(index, agr, dis))\n \n # map_merged = MF._merging_map(dx=dx, dy=dy, dtheta=dyaw, map1=map_ref, map2=map_align)\n # map_ref = map_merged.astype(np.uint8)\n # map_ref = MF._modify_map_size(merged_map=map_ref)\n\n relation, value, _ = RANSAC_Map_Merging()._ransac_find_all(pts_set_1=pts2, pts_set_2=pts1, sigma=5, max_iter=2000)\n dx = relation[1]\n dy = relation[0]\n dyaw = relation[2]\n dr = relation[3]\n print(\"- Inlier Percent: %f\"%value)\n print(\"- (dx, dy, dyaw, dr) = %f, %f, %f, %f\"%(dx,dy,dyaw, dr))\n map_merged = MAP_Function()._merging_map_ratio(dx=dx, dy=dy, dtheta=dyaw, dr=dr, map1=map_ref, map2=map_align)\n map_ref = map_merged.astype(np.uint8)\n map_ref = MF._modify_map_size(merged_map=map_ref)\n\n # return map_ref, (dx, dy, dyaw)\n return map_ref, (dx, dy, dyaw, dr)", "def test_merge_outer_multipolygon_way_2():\n data = cache_query(ways=[16001, 16002], deps=True)\n assert data['ways']['16001']['relations'].keys() == ['16001']\n assert data['ways']['16002'] == None\n\n data = cache_query(relations=[16001], full=True)\n assert sorted(data['relations']['16001']['ways'].keys()) == ['16001', '16011']\n\n assert query_row(db_conf, 'osm_landusages', 16001) == None\n park_16001 = query_row(db_conf, 'osm_landusages', -16001)\n assert park_16001['type'] == 'park'\n assert_almost_equal(park_16001['geometry'].area, 12779350582, -1)\n assert query_row(db_conf, 'osm_roads', 16002) == None", "def test_map_sample_ids(self):\r\n expected = ['s4', 's1', 's2', 's3']\r\n actual = map_sample_ids(self.sample_ids1, self.sample_id_map1)\r\n self.assertEqual(actual, expected)\r\n expected = ['s1', 's2', 's3', 's4']\r\n actual = map_sample_ids(self.sample_ids2, self.sample_id_map1)\r\n self.assertEqual(actual, expected)\r\n expected = ['s1', 's2', 's3', 's4']\r\n actual = map_sample_ids(self.sample_ids3, self.sample_id_map1)\r\n self.assertEqual(actual, expected)\r\n # bad sample id raises KeyError\r\n self.assertRaises(KeyError, map_sample_ids, ['abcd', 'aaa', 'ccc'],\r\n self.sample_id_map1)", "def remap_ids(self, id_map: Dict[int, int]) -> None:", "def product_map(xs1, xs2):\n return jax.vmap(lambda x1: jax.vmap(lambda x2: pair_product(x1, x2))(xs2))(xs1)", "def map_primary_2_secondary_ANs(ids_2_map, Primary_2_Secondary_IDs_dict=None, read_from_flat_files=False, ENSPs_only=False, no_ENSPs=False):\n if Primary_2_Secondary_IDs_dict is None:\n ### don't read this from flat files (VERY slow) if there is a DB and low_memory then use DB\n Primary_2_Secondary_IDs_dict = get_Primary_2_Secondary_IDs_dict_from_prim(ids_2_map, read_from_flat_files)\n Primary_2_Secondary_IDs_dict_userquery = {}\n for id_ in ids_2_map:\n try:\n sec = Primary_2_Secondary_IDs_dict[id_]\n except KeyError:\n sec = False\n if sec: # sec is a list\n if ENSPs_only:\n for sec_id in sec:\n try:\n if int(sec_id.split(\".\")[0]) > 1:\n Primary_2_Secondary_IDs_dict_userquery[id_] = sec_id\n except:\n pass\n elif no_ENSPs:\n for sec_id in sec:\n try:\n if not int(sec_id.split(\".\")[0]) > 1:\n 
Primary_2_Secondary_IDs_dict_userquery[id_] = sec_id\n except:\n pass\n else: # take all IDs\n Primary_2_Secondary_IDs_dict_userquery[id_] = sec\n return Primary_2_Secondary_IDs_dict_userquery", "def _partial_remap(self):\n \n if not self.has_remap():\n return\n \n del_keys = {}\n keep_ids = set()\n\n for key, val in self.overlap_map.items():\n if key in self.mapping1:\n # find inputs that need to be remapped\n if self.mapping1[key] not in del_keys:\n del_keys[self.mapping1[key]] = set()\n del_keys[self.mapping1[key]].add(key)\n keep_ids.add(self.mapping1[key])\n else:\n keep_ids.add(key)\n new_overlap = {}\n\n # handle new overlaps since mapping could cause merge\n for (key2, val2) in val:\n new_key = key2\n if key2 in self.mapping2:\n new_key = self.mapping2[key2]\n if new_key not in new_overlap:\n new_overlap[new_key] = 0\n new_overlap[new_key] += val2\n \n # update overlap list\n new_overlap_set = set()\n for body, overlap in new_overlap.items():\n new_overlap_set.add((body, overlap)) \n self.overlap_map[key] = new_overlap_set\n \n temp_overlap = self.overlap_map.copy()\n \n # merge rows mapping to same body, remove old body\n for newbody, bodylist in del_keys.items():\n self.overlap_map[newbody] = set()\n for bodyold in bodylist:\n self._merge_row(self.overlap_map[newbody], temp_overlap[bodyold])\n if bodyold not in keep_ids:\n del self.overlap_map[bodyold]\n \n self.mapping1 = None\n self.mapping2 = None", "def map_secondary_2_primary_ANs(ids_2_map, Secondary_2_Primary_IDs_dict=None, read_from_flat_files=False):\n if Secondary_2_Primary_IDs_dict is None:\n ### don't read this from flat files (VERY slow) if there is a DB and low_memory then use DB\n Secondary_2_Primary_IDs_dict = get_Secondary_2_Primary_IDs_dict_from_sec(ids_2_map, read_from_flat_files)\n Secondary_2_Primary_IDs_dict_userquery = {}\n for id_ in ids_2_map:\n try:\n prim = Secondary_2_Primary_IDs_dict[id_]\n except KeyError:\n prim = False\n if prim:\n Secondary_2_Primary_IDs_dict_userquery[id_] = prim\n return Secondary_2_Primary_IDs_dict_userquery", "def map_inclusion(A, B):\n \n def _map(a):\n if a in B:\n status = \"MATCH\"\n else:\n status = \"MISS\"\n return (a, status)\n \n return map(_map, A)", "def relation_map(X, Y):\n info = {}\n for x,y in zip(X, Y):\n if x in info.keys():\n if y not in info[x]:\n info[x] = info[x] + [y]\n else: \n info[x] = [y]\n\n return info", "def join_distributions(a, b):\n assert a.keys() == b.keys()\n return {k: a[k] + b[k] for k in a}", "def mconcat(a, b):\r\n if a is None:\r\n return b\r\n if b is None:\r\n return a\r\n for key in b.keyset:\r\n value=get(b,key)\r\n put(a,key,value)\r\n return a", "def find_intersection(wire_one_map, wire_two_map):\n return set(wire_one_map.keys()) & set(wire_two_map.keys())", "def _combine(mappings):\n return {k: v for d in mappings for k, v in d.items()}", "def _get_mapper_0(model):\n # build the maps\n eids_all = (\n list(model.elements.keys()) +\n list(model.masses.keys()) +\n list(model.rigid_elements.keys())\n )\n eid_map = {eid : eid for eid in eids_all}\n nid_map = {nid : nid for nid in model.point_ids}\n cid_map = {cid : cid for cid in model.coord_ids}\n mid_map = {mid : mid for mid in model.material_ids}\n spc_map = _dicts_key_to_key((model.spcs, model.spcadds))\n mpc_map = _dicts_key_to_key((model.mpcs, model.mpcadds))\n method_map = _dict_key_to_key(model.methods)\n properties_map = _dict_key_to_key(model.properties)\n rigid_elements_map = _dict_key_to_key(model.rigid_elements)\n cmethod_map = _dict_key_to_key(model.cMethods)\n flfact_map 
= _dict_key_to_key(model.flfacts)\n flutter_map = _dict_key_to_key(model.flutters)\n caero_map = _dict_key_to_key(model.caeros)\n freq_map = _dict_key_to_key(model.frequencies)\n\n dload_map = _dicts_key_to_key((model.dload_entries, model.dloads))\n load_map = _dicts_key_to_key((model.loads, model.load_combinations))\n lseq_map = load_map # wrong???\n temp_map = load_map # wrong???\n\n tstep_map = _dict_key_to_key(model.tsteps)\n tstepnl_map = _dict_key_to_key(model.tstepnls)\n suport1_map = _dict_key_to_key(model.suport1)\n #suport_map = {}\n\n nlparm_map = _dict_key_to_key(model.nlparms)\n #nlpci_map = _dict_key_to_key(model.nlpcis)\n table_sdamping_map = _dict_key_to_key(model.tables_sdamping)\n dconadd_map = _dict_key_to_key(model.dconadds)\n dconstr_map = _dict_key_to_key(model.dconstrs)\n dessub_map = dconadd_map\n for key, value in dconstr_map.items():\n if key in dessub_map:\n raise NotImplementedError()\n dessub_map[key] = value\n dresp_map = _dict_key_to_key(model.dresps)\n gust_map = _dict_key_to_key(model.gusts)\n trim_map = _dict_key_to_key(model.trims)\n tic_map = _dict_key_to_key(model.tics)\n csschd_map = _dict_key_to_key(model.csschds)\n tranfer_function_map = _dict_key_to_key(model.transfer_functions)\n\n mapper = {\n 'elements' : eid_map,\n 'nodes' : nid_map,\n 'coords' : cid_map,\n 'materials' : mid_map,\n 'properties' : properties_map,\n 'rigid_elements': rigid_elements_map,\n 'spcs' : spc_map,\n 'mpcs' : mpc_map,\n 'METHOD' : method_map,\n 'CMETHOD' : cmethod_map,\n 'FLFACT' : flfact_map,\n 'FMETHOD' : flutter_map,\n 'caeros' : caero_map,\n 'FREQUENCY' : freq_map,\n\n 'DLOAD' : dload_map,\n 'LOAD' : load_map,\n 'LOADSET' : lseq_map,\n 'TSTEP' : tstep_map,\n 'TSTEPNL' : tstepnl_map,\n 'SUPORT1' : suport1_map,\n 'NLPARM' : nlparm_map,\n 'SDAMPING' : table_sdamping_map,\n 'DESSUB' : dessub_map,\n 'DESOBJ' : dresp_map,\n 'GUST' : gust_map,\n 'TRIM' : trim_map,\n 'IC' : tic_map,\n 'CSSCHD' : csschd_map,\n 'TFL' : tranfer_function_map,\n #'DESSUB' : dessub_map,\n # bad...\n 'TEMPERATURE(LOAD)' : temp_map,\n 'TEMPERATURE(INITIAL)' : temp_map,\n #'DATAREC' : datarec_map,\n #'ADAPT' : adapt_map,\n #'SUPER' : super_map,\n #'BOUTPUT' : boutput_map,\n #'OUTRCV' : outrcv_map,\n }\n\n return mapper", "def map_similar(self, loc1, loc2, aim, player):\n enemy_map = {}\n rows = len(self.map)\n cols = len(self.map[0])\n size = (rows, cols)\n for row in range(rows):\n for col in range(cols):\n row0, col0 = self.dest_offset(loc1, (row, col), size)\n row1, col1 = self.dest_offset(loc2, self.offset_aim((row, col), aim), size)\n # compare locations\n ilk0 = self.map[row0][col0]\n ilk1 = self.map[row1][col1]\n if ilk0 == 0 and ilk1 != player:\n # friendly ant not in same location\n return None\n elif ilk0 > 0 and (ilk1 < 0 or ilk1 == player):\n # enemy ant not in same location\n return None\n elif ilk0 < 0 and ilk1 != ilk0:\n # land or water not in same location\n return None\n if ilk0 >= 0 and enemy_map != None:\n enemy_map[ilk0] = ilk1\n return enemy_map", "def chain_maps(*args):\n def merge(d1, d2):\n d1.update(d2)\n return d1\n\n return reduce(merge, reversed(args), {})", "def left_join_list_two():\n return[\n ['wrath', 'anger', None],\n ['fond', 'enamored', 'averse'],\n ['guide', 'usher', 'jam'],\n ['outfit', 'garb', None],\n ['diligent', 'employed', 'idle'],\n ]", "def test_handles_duplicate_ids(t, _get_relationshipmap_diff):\n device = Mock(name=\"device\")\n\n om1 = ObjectMap({\"id\": \"eth0\"})\n om2 = ObjectMap({\"id\": \"eth0\"})\n om3 = ObjectMap({\"id\": \"eth0\"})\n objmaps = 
[\n om1,\n om2,\n om3,\n ]\n\n relmap = RelationshipMap(\n relname=\"interfaces\",\n modname=\"Products.ZenModel.IpInterface\",\n plugin_name=\"test.plugin\",\n objmaps=objmaps,\n )\n\n processed = _process_relationshipmap(relmap, device)\n\n t.assertEqual(processed.plugin_name, \"test.plugin\")\n t.assertEqual(len(processed.maps), 3)\n t.assertEqual(om1.id, \"eth0\")\n t.assertEqual(processed.maps[0].id, \"eth0\")\n t.assertEqual(processed.maps[0].plugin_name, \"test.plugin\")\n t.assertEqual(om2.id, \"eth0\")\n t.assertEqual(processed.maps[1].id, \"eth0_2\")\n t.assertEqual(om3.id, \"eth0\")\n t.assertEqual(processed.maps[2].id, \"eth0_3\")", "def after_map(self, map):\n return map", "def after_map(self, map):\n return map", "def _columns_are_mapped(self, *cols: ColumnElement[Any]) -> bool:\n\n secondary = self._init_args.secondary.resolved\n for c in cols:\n if secondary is not None and secondary.c.contains_column(c):\n continue\n if not self.parent.persist_selectable.c.contains_column(\n c\n ) and not self.target.c.contains_column(c):\n return False\n return True", "def best_map(l1, l2):\n if len(l1) != len(l2):\n print(\"L1.shape must == L2.shape\")\n exit(0)\n \n label1 = np.unique(l1)\n n_class1 = len(label1)\n \n label2 = np.unique(l2)\n n_class2 = len(label2)\n \n n_class = max(n_class1, n_class2)\n G = np.zeros((n_class, n_class))\n \n for i in range(0, n_class1):\n for j in range(0, n_class2):\n ss = l1 == label1[i]\n tt = l2 == label2[j]\n G[i, j] = np.count_nonzero(ss & tt)\n \n A = la.linear_assignment(-G)\n \n new_l2 = np.zeros(l2.shape)\n for i in range(0, n_class2):\n new_l2[l2 == label2[A[i][1]]] = label1[A[i][0]]\n return new_l2.astype(int)", "def join_data(self, base_data, join_data, base_field, join_fields):\n for data in base_data:\n extra = join_data[data[base_field]]\n for field in join_fields:\n data[field] = extra[field]\n \n return base_data", "def test_read_mapping_file_multiple(reference_multi):\n content, reference = reference_multi\n from_names = list(reference.keys())\n to_names = []\n block_names = []\n\n for k in reference:\n to_names.extend(reference[k].keys())\n for to in reference[k]:\n block_names.extend(reference[k][to].keys())\n force_fields = case_to_dummy_ffs(from_names + to_names, block_names,\n {(0, 'X1'): [(0, 'A')], (0, 'X2'): [(0, 'B')], (0, 'X3'): [(0, 'D')]},\n {(0, 'A'): {(0, 'X1'): 1.0}, (0, 'B'): {(0, 'X2'): 1.0}, (0, 'C'): {(0, 'X2'): 1.0}, (0, 'D'): {(0, 'X3'): 1.0}},\n [])\n mappings = vermouth.map_input.read_backmapping_file(content, force_fields)\n compare_old_new_mappings(mappings, reference)", "def test_recompile_on_othermapper(self):\n\n from sqlalchemy.orm import mapperlib\n \n mapper(User, users)\n compile_mappers()\n assert mapperlib._new_mappers is False\n \n m = mapper(Address, addresses, properties={'user':relation(User, backref=\"addresses\")})\n \n assert m._Mapper__props_init is False\n assert mapperlib._new_mappers is True\n u = User()\n assert User.addresses\n assert mapperlib._new_mappers is False", "def distance_map(xs1, xs2):\n return jax.vmap(lambda x1: jax.vmap(lambda x2: euclidean_distance(x1, x2))(xs2))(xs1)", "def preMerge(self, t):\n\t\tif t.table.LookupList:\n\t\t\tlookupMap = {i:id(v) for i,v in enumerate(t.table.LookupList.Lookup)}\n\t\t\tt.table.LookupList.mapLookups(lookupMap)\n\t\t\tif t.table.FeatureList:\n\t\t\t\t# XXX Handle present FeatureList but absent LookupList\n\t\t\t\tt.table.FeatureList.mapLookups(lookupMap)\n\n\t\tif t.table.FeatureList and t.table.ScriptList:\n\t\t\tfeatureMap = {i:id(v) for 
i,v in enumerate(t.table.FeatureList.FeatureRecord)}\n\t\t\tt.table.ScriptList.mapFeatures(featureMap)", "def mapper(record):\n matrix, row, col, value = record\n if matrix == A_MATRIX:\n # For all A(i,j) emit key (j, k) for k=1 to number of columns in B\n for k in range(0, B_COLS):\n mr.emit_intermediate((row, k), [matrix, col, value])\n else:\n # For all B(j, k) emit key (j, i) for i=1 to number of rows in B\n for i in range(0, A_ROWS):\n mr.emit_intermediate((i, col), [matrix, row, value])", "def crossover(self,\n ind1: Dict[str, Union[str, Dict[str, List[int]], Callable]],\n ind2: Dict[str, Union[str, Dict[str, List[int]], Callable]]\n ) -> Dict[str, Union[str, Dict[str, List[int]], Callable]]:\n ind1 = deepcopy(ind1)\n ind2 = deepcopy(ind2)\n\n mask = random.choices(list(self.grammar.keys()),\n k=random.randrange(len(self.grammar) - 2))\n for key in mask:\n ind1[\"genome\"][key], ind2[\"genome\"][key] = ind2[\"genome\"][key], ind1[\"genome\"][key]\n return ind1", "def mapfn(k, v):\n for row in v:\n # completar\n pass", "def map_params(self, **map_params):\n return self.where(map_params=map_params)", "def CanonicalFromDMAP2 (D0, D1):\n\n if butools.checkInput:\n if D0.shape[0]!=2:\n raise Exception(\"CanonicalFromDMAP2: size is not 2!\")\n if not CheckDMAPRepresentation(D0, D1):\n\t raise Exception(\"CanonicalFromDMAP2: Input is not a valid DMAP representation!\")\n\n ev = la.eigvals(D0)\n ix = np.argsort(-np.abs(np.real(ev)))\n ev = ev[ix]\n\n s1=ev[0]\n s2=ev[1]\n\n if s2>=0:\n G0, G1 = CanonicalFromMAP2 (D0-ml.eye(2),D1)\n G0 = G0 + ml.eye(2)\n return (G0, G1)\n\n #s2 is negative\n av = DRPSolve (la.inv(ml.eye(2)-D0)*D1)\n\n gamma = la.eigvals(la.inv(ml.eye(2)-D0)*D1)\n ix = np.argsort(-np.abs(np.real(gamma)))\n gamma = gamma[ix]\n gamma = gamma[1]\n\n w1=1.0/(s1-s2)*(np.sum(D0,1)-s2*ml.ones((2,1)))\n w2=ml.ones((2,1))-w1\n\n W=np.hstack((w1, w2))\n A=(1.0-s1)*(av*W)\n a1=A[0,0]\n\n if gamma>=0:\n a=-(1/(2*(-1+s1)*(-1+s1+s2)**2))*(1-4*s1+a1*s1+5*s1**2-a1*s1**2-2*s1**3-2*s2-a1*s2+5*s1*s2-3*s1**2*s2+s2**2+a1*s2**2-s1*s2**2-gamma+3*s1*gamma-a1*s1*gamma-3*s1**2*gamma+a1*s1**2*gamma+s1**3*gamma+s2*gamma+a1*s2*gamma-2*s1*s2*gamma+s1**2*s2*gamma-a1*s2**2*gamma+math.sqrt((-1+s1+s2)**2*((-1+s1**2*(-2+gamma)+gamma+s2*(1+a1-a1*gamma)+s1*(3-a1-s2-2*gamma+a1*gamma))**2-4*(-1+s1)*(-s1**3*(-1+gamma)+a1*(-1+s2)*s2*(-1+gamma)+s1**2*(-2+a1+s2+2*gamma-a1*gamma)+s1*(1-a1-s2-gamma+a1*gamma)))))\n b=1+(a*(-1+s1+s2-s1*s2)*gamma)/((a-1)*(-s1*s2+a*(-1+s1+s2)))\n\n G0=ml.matrix([[s1+s2, a*(1-s1-s2)], [s1*s2/(a*(s1+s2-1)), 0]])\n G1=ml.matrix([[(1-a)*(1-s1-s2), 0], [b*(1+s1*s2/(a*(1-s1-s2))), (1-b)*(1+s1*s2/(a*(1-s1-s2)))]])\n else:\n #gamma<0\n a=(a1*s1-a1*s1**2+s2-a1*s2-3*s1*s2+2*s1**2*s2-s2**2+a1*s2**2+s1*s2**2+s1*gamma-a1*s1*gamma-2*s1**2*gamma+a1*s1**2*gamma+s1**3*gamma+a1*s2*gamma-a1*s2**2*gamma+math.sqrt(-4*(-1+s1)*s1*s2*(-1+s1+s2)*(a1*(s1-s2)*(-1+gamma)+(-1+s1)*(s2+(-1+s1)*gamma))+(a1*(-s1+s1**2+s2-s2**2)*(-1+gamma)+(-1+s1)*((-1+2*s1)*s2+s2**2+(-1+s1)*s1*gamma))**2))/(2*(-1+s1+s2)*(a1*(s1-s2)*(-1+gamma)+(-1+s1)*(s2+(-1+s1)*gamma)))\n b=-((a*(1-s1)*(1-s2)*gamma)/((a-1)*(-a+a*s1+a*s2-s1*s2)))\n\n G0=ml.matrix([[s1+s2, a*(1-s1-s2)],[s1*s2/(a*(s1+s2-1)), 0]])\n G1=ml.matrix([[0, (1-a)*(1-s1-s2)],[b*(1-s1*s2/(a*(s1+s2-1))), (1-b)*(1-s1*s2/(a*(s1+s2-1)))]])\n return (G0, G1)", "def include(self, map):\n self.map.update(map)", "def get_equivalent_mapping(self, mapping):\n if not mapping.endswith(\".rmap\"):\n log.warning(\"Invalid comparison context\", repr(self.name), \"for\", repr(mapping))\n return None\n 
return self", "def use_idmap(df, idmap, newname, oldcols, remove=False):\n tmpname = '_new_%s' % newname\n idmap[tmpname] = idmap.index\n df = df.merge(idmap, how='left', left_on=newname, right_on=oldcols)\n df[newname] = df[tmpname]\n del df[tmpname]\n if remove:\n for colname in oldcols:\n del df[colname]\n return df", "def test_propconflict(self):\n mapper(Address, addresses)\n mapper(User, users,\n properties={\n 'addresses':relation(Address, backref='email_address')\n })\n self.assertRaises(exceptions.ArgumentError, compile_mappers)", "def secondary_keys_dicts(self):", "def map(self):\r\n pass", "def test_customjoin_deprecated(self):\n\n m = mapper(User, users, properties={\n 'orders':relation(mapper(Order, orders, properties={\n 'items':relation(mapper(Item, orderitems))\n }))\n })\n\n q = create_session().query(m)\n l = q.select((orderitems.c.item_name=='item 4'), from_obj=[users.join(orders).join(orderitems)])\n self.assert_result(l, User, user_result[0])", "def compare_stat_categorie_maps_to_player_record(self,sc_map):\n player_record = PlayerRecord()\n\n mapped_sc = []\n\n for sc in sc_map:\n if hasattr(player_record, sc_map[sc]):\n mapped_sc.append(sc_map[sc])\n else:\n print(\"No player record attribute found for \"+ str(sc_map[sc]))\n \n self.assertEqual(len(mapped_sc), len(sc_map))", "def sreduce(self, key, values, task):\n list1 = []\n found_file1 = False\n found_file2 = False\n outer_file1 = (self.outer=='left' or self.outer=='both')\n outer_file2 = (self.outer=='right' or self.outer=='both')\n for json in values:\n record = happy.json.decode(json)\n order = record['__joinorder__']\n newrec = {}\n for key in record.keys():\n newrec[key] = record[key]\n if (order==1):\n found_file1 = True\n list1.append(newrec)\n else:\n try:\n found_file2 = True\n for i in xrange(len(list1)):\n r = list1[i]\n emitrec = {}\n emitrec.update(newrec)\n emitrec.update(r)\n emitrec['__jointype__'] = 'inner'\n task.collect(key, happy.json.encode(emitrec))\n if outer_file2 and not found_file1:\n newrec['__jointype__'] = 'right'\n task.collect(key, happy.json.encode(newrec))\n except:\n logger.error(\"JOIN FAILED ON RECORD: (%s, %s)\" % (key, json))\n if outer_file1 and not found_file2:\n for i in xrange(len(list1)):\n r = list1[i]\n r['__jointype__'] = 'left'\n task.collect(key, happy.json.encode(r))", "def test_expand_failures_two_otu_maps(self):\r\n expected_f1 = ['seq1', 'seq2', 'seq5', 'seq6', 'seq7', 'seq8']\r\n\r\n actual = expand_failures(self.failures1,\r\n expand_otu_map_seq_ids(self.otu_map2, self.otu_map1))\r\n self.assertItemsEqual(actual, expected_f1)", "def join_featuresets(featureset1, featureset2):\n joined_instances = {}\n names = []\n for audio_name in featureset1.keys():\n if audio_name in featureset2:\n names.append(audio_name)\n for name in names:\n joined_vec = join_feature_vectors(featureset1[name], featureset2[name])\n joined_instances[name] = joined_vec\n return joined_instances", "def test_merge_outer_multipolygon_way_1():\n park_16001 = query_row(db_conf, 'osm_landusages', -16001)\n assert park_16001['type'] == 'park'\n assert_almost_equal(park_16001['geometry'].area, 12779350582, -1)\n assert query_row(db_conf, 'osm_roads', 16002)['type'] == 'residential'", "def findMatches3(personDict,matches,skepticalMatches,additionalMatches,personDict2):\n dictConsidered = personDict['ALIAS']\n for alias in dictConsidered:\n if alias == \"\":\n continue\n pairs = itertools.combinations(dictConsidered[alias],2)\n for p in pairs:\n k = tuple(sorted(p))\n if (k not in matches) and (k 
not in skepticalMatches) and (k not in additionalMatches):\n info1 = personDict['EnterpriseID'][p[0]]\n info2 = personDict['EnterpriseID'][p[1]]\n \n info1b = personDict2['EnterpriseID'][p[0]]\n info2b = personDict2['EnterpriseID'][p[1]]\n score = getScorePair(info1b,info2b)\n if score>=7:\n additionalMatches[k] = score\n\n return additionalMatches", "def add_ta_alias_to_map(ta_aliases, ta_map):\n\n for tup in ta_aliases:\n ta1, ta2 = tup\n s = ta_map[ta1]\n s.update(ta_map[ta2])\n # point key of all elements of the set to the same set.\n for x in s:\n ta_map[x] = s\n\n return ta_map", "def test_check_map_bcs_and_added_demultiplex(self):\r\n\r\n # Combinations of bc plus added demultiplex field need to be unique\r\n # but can be duplicated if both options are being used.\r\n header, mapping_data = check_map(\r\n self.valid_mapping_data_bcs_and_added_demultiplex,\r\n barcode_type=4, added_demultiplex_field=\"Added_Demultiplex\")\r\n\r\n expected_header =\\\r\n ['SampleID', 'BarcodeSequence', 'LinkerPrimerSequence',\r\n 'Added_Demultiplex', 'Description']\r\n expected_mapping_data =\\\r\n [['s1', 'AAAA', 'ATTCGATART', '1', 's1_description'],\r\n ['s2', 'AAAA', 'ATTCGATART', '2', 's2_description'],\r\n ['s3', 'CCCC', 'YATGCTGCCTCCCGTAGGAGT', '1', 's3_description']]\r\n\r\n self.assertEquals(header, expected_header)\r\n self.assertEquals(mapping_data, expected_mapping_data)", "def _match_dims(poly1, poly2, copy=None):\r\n if copy is None:\r\n copy = True\r\n\r\n if copy:\r\n p1 = deepcopy(poly1)\r\n p2 = deepcopy(poly2)\r\n else:\r\n p1 = poly1\r\n p2 = poly2\r\n\r\n dim1 = poly1.multi_index.spatial_dimension\r\n dim2 = poly2.multi_index.spatial_dimension\r\n if dim1 >= dim2:\r\n poly2.expand_dim(dim1)\r\n else:\r\n poly1.expand_dim(dim2)\r\n return poly1, poly2", "def test_map_reduce_custom_output(self):\n\n class Family(Document):\n id = IntField(primary_key=True)\n log = StringField()\n\n class Person(Document):\n id = IntField(primary_key=True)\n name = StringField()\n age = IntField()\n family = ReferenceField(Family)\n\n Family.drop_collection()\n Person.drop_collection()\n\n # creating first family\n f1 = Family(id=1, log=\"Trav 02 de Julho\")\n f1.save()\n\n # persons of first family\n Person(id=1, family=f1, name=\"Wilson Jr\", age=21).save()\n Person(id=2, family=f1, name=\"Wilson Father\", age=45).save()\n Person(id=3, family=f1, name=\"Eliana Costa\", age=40).save()\n Person(id=4, family=f1, name=\"Tayza Mariana\", age=17).save()\n\n # creating second family\n f2 = Family(id=2, log=\"Av prof frasc brunno\")\n f2.save()\n\n # persons of second family\n Person(id=5, family=f2, name=\"Isabella Luanna\", age=16).save()\n Person(id=6, family=f2, name=\"Sandra Mara\", age=36).save()\n Person(id=7, family=f2, name=\"Igor Gabriel\", age=10).save()\n\n # creating third family\n f3 = Family(id=3, log=\"Av brazil\")\n f3.save()\n\n # persons of thrird family\n Person(id=8, family=f3, name=\"Arthur WA\", age=30).save()\n Person(id=9, family=f3, name=\"Paula Leonel\", age=25).save()\n\n # executing join map/reduce\n map_person = \"\"\"\n function () {\n emit(this.family, {\n totalAge: this.age,\n persons: [{\n name: this.name,\n age: this.age\n }]});\n }\n \"\"\"\n\n map_family = \"\"\"\n function () {\n emit(this._id, {\n totalAge: 0,\n persons: []\n });\n }\n \"\"\"\n\n reduce_f = \"\"\"\n function (key, values) {\n var family = {persons: [], totalAge: 0};\n\n values.forEach(function(value) {\n if (value.persons) {\n value.persons.forEach(function (person) {\n family.persons.push(person);\n 
family.totalAge += person.age;\n });\n family.persons.sort((a, b) => (a.age > b.age))\n }\n });\n\n return family;\n }\n \"\"\"\n cursor = Family.objects.map_reduce(\n map_f=map_family,\n reduce_f=reduce_f,\n output={\"replace\": \"family_map\", \"db_alias\": \"test2\"},\n )\n\n # start a map/reduce\n next(cursor)\n\n results = Person.objects.map_reduce(\n map_f=map_person,\n reduce_f=reduce_f,\n output={\"reduce\": \"family_map\", \"db_alias\": \"test2\"},\n )\n\n results = list(results)\n collection = get_db(\"test2\").family_map\n\n assert collection.find_one({\"_id\": 1}) == {\n \"_id\": 1,\n \"value\": {\n \"persons\": [\n {\"age\": 17, \"name\": \"Tayza Mariana\"},\n {\"age\": 21, \"name\": \"Wilson Jr\"},\n {\"age\": 40, \"name\": \"Eliana Costa\"},\n {\"age\": 45, \"name\": \"Wilson Father\"},\n ],\n \"totalAge\": 123,\n },\n }\n\n assert collection.find_one({\"_id\": 2}) == {\n \"_id\": 2,\n \"value\": {\n \"persons\": [\n {\"age\": 10, \"name\": \"Igor Gabriel\"},\n {\"age\": 16, \"name\": \"Isabella Luanna\"},\n {\"age\": 36, \"name\": \"Sandra Mara\"},\n ],\n \"totalAge\": 62,\n },\n }\n\n assert collection.find_one({\"_id\": 3}) == {\n \"_id\": 3,\n \"value\": {\n \"persons\": [\n {\"age\": 25, \"name\": \"Paula Leonel\"},\n {\"age\": 30, \"name\": \"Arthur WA\"},\n ],\n \"totalAge\": 55,\n },\n }" ]
[ "0.6610568", "0.63814634", "0.61559236", "0.6086679", "0.58406264", "0.5825378", "0.5764346", "0.57383496", "0.57372516", "0.5714955", "0.56882495", "0.5687337", "0.5668063", "0.56529844", "0.56490266", "0.56440884", "0.56300807", "0.56268895", "0.5618193", "0.56174135", "0.5594393", "0.55926186", "0.55753213", "0.55718327", "0.55450535", "0.55341583", "0.5510678", "0.5491279", "0.54816043", "0.5471625", "0.54629284", "0.53700435", "0.53652936", "0.536051", "0.53459245", "0.5334854", "0.52826864", "0.527058", "0.52683365", "0.52641106", "0.5263875", "0.52577704", "0.5246494", "0.52340347", "0.52330357", "0.52168655", "0.5190186", "0.5184711", "0.51756185", "0.5167633", "0.5165961", "0.51647735", "0.5163893", "0.51591796", "0.5140158", "0.51279414", "0.5125189", "0.51246387", "0.5117534", "0.51135516", "0.5113022", "0.51013356", "0.50866103", "0.5086574", "0.50721204", "0.5071452", "0.5070639", "0.5053341", "0.505328", "0.5047561", "0.5047561", "0.5035174", "0.5034508", "0.5028116", "0.5025883", "0.5025434", "0.5023351", "0.5008709", "0.49983746", "0.49959323", "0.49946883", "0.49938416", "0.49901626", "0.4989787", "0.49889836", "0.49788746", "0.49776477", "0.49689013", "0.4963771", "0.49629545", "0.49582982", "0.49571913", "0.49507812", "0.49459633", "0.49398887", "0.4937157", "0.4927049", "0.49157262", "0.491379", "0.4911123" ]
0.54061365
31
geno codes must be appended row (marker) wise in subject (.ind file) order
def joinRows(r1,r2,outfname):
    outf = open(outfname,'w')
    f1 = file(r1,'r')
    f2 = file(r2,'r')
    for row1 in f1:
        if row1.strip() > '':
            row2 = f2.next()
            outf.write('%s%s\n' % (row1.strip(),row2.strip()))
    outf.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gencode_dic(gencode_file,gene_type_dic):\n gen_dic = {}\n for i in range(1,len(gencode_file)):\n words_gen = gencode_file[i].strip().split('\\t')\n chr_no = words_gen[2]\n trans_id = words_gen[1]\n cds_info = words_gen[13]\n cde_info = words_gen[14]\n gene_type = gene_type_dic[trans_id]\n gene_name = words_gen[12]\n TSS_start = int(words_gen[4])\n TSS_end = int(words_gen[5])\n CDS_start = int(words_gen[6])\n CDS_end = int(words_gen[7])\n strand = words_gen[3]\n start_list = [int(x) for x in words_gen[9].split(',')[:-1]]\n end_list = [int(x) for x in words_gen[10].split(',')[:-1]]\n exon_no = int(words_gen[8])\n# if (chr_no,trans_id) in gen_dic: #Some trans_id are not unique, especially transcripts in chrX and chrY\n# print trans_id\n interval_list = [P.closedopen(start_list[x],end_list[x]) for x in range(0,exon_no)]\n interval_merge = P.empty()\n for i in range(0,len(interval_list)):\n interval_merge = interval_merge | interval_list[i]\n if gene_type == 'protein_coding':\n if (cds_info == 'cmpl') and (cde_info == 'cmpl'):\n # print (interval_merge)\n gen_dic.setdefault((chr_no,strand),[]).append([TSS_start,TSS_end,CDS_start,CDS_end,\\\n gene_name,gene_type,interval_merge])\n else:\n gen_dic.setdefault((chr_no,strand),[]).append([TSS_start,TSS_end,CDS_start,CDS_end,\\\n gene_name,gene_type,interval_merge])\n return gen_dic", "def readGenos(self,genofile):\n self.gen = np.zeros((len(self.ped),len(self.mark)))\n self.gen[:] = np.nan\n marklist = None\n with open(genofile,'r') as fin:\n for line in fin:\n if line.startswith('#'):\n if not marklist: marklist = line.strip('#').strip().split()\n continue\n l = line.strip().split()\n if len(l) < 1: continue\n try: irow = self.ped[l[self.nc]]['rank']\n except KeyError:\n continue\n for i,mark in enumerate(self.marklist):\n if mark not in self.mark: continue\n icol = self.mark[mark]['rank']\n if self.ia == 1:\n a = l[i+self.ic]\n elif self.ia == 2:\n a = self.tbase012(l[i+self.ic],mark)\n elif self.ia == 3:\n a = self.tbase012(l[i*2+self.ic]+l[i*2+1+self.ic],mark)\n if a not in ['0','1','2']: a = np.nan\n else: a = int(a)\n self.gen[irow,icol] = a", "def getReadOnGeneFile(rnameList, len_param):\n log.info(\"Select reads that are on genes\")\n for ch in rnameList:\n tcount = 0\n \n geneS = {}#gene start\n geneE = {}#gene end\n g_direct = {}#gene direction\n readS = {}#read start\n readE = {}#read End\n readDic = {}#readDic[id] = read\n sortGeneId = {}\n sortReadId = {}\n genefile = os.path.join(working_dir, 'removeOverlap.'+ch+'.gff')\n readfile = os.path.join(working_dir, 'MappedRead.'+ch+'.sam')\n rgfile = os.path.join(working_dir, 'ReadOnGeneList.'+ch+'.tab')\n log.info(\"Generate \" + rgfile)\n f=open(rgfile, \"w\") \n \n geneS, geneE, g_direct = getGFFStartEnd(genefile, len_param)\n sortGeneId = sortId(geneS)\n \n readS, readE,readDic = getSAMStartEnd(readfile)\n sortReadId = sortId(readS)\n ys = 0\n \n for x in range(len(sortGeneId)):\n \n gID = sortGeneId[x]#gene id\n gs = geneS.get(gID)#gene start\n ge = geneE.get(gID)#gene end\n gd = g_direct.get(gID)\n glineList = []\n sameG = False\n \n for y in range(ys,len(sortReadId)):\n rID = sortReadId[y]\n rs = readS.get(rID)\n re = readE.get(rID)\n if rs >= gs:\n if re <= ge:\n f.write(gID)\n f.write('\\t')\n f.write(str(gs))\n f.write('\\t')\n f.write(str(ge))\n f.write('\\t')\n f.write(gd)\n f.write('\\t')\n f.write(rID)\n f.write('\\t')\n f.write(str(rs))\n f.write('\\t')\n f.write(str(re))\n f.write('\\t')\n f.write(readDic.get(rID))\n elif re > ge:\n ys = y\n break\n elif rs > ge:\n ys = 
y\n break\n f.close()", "def filtraFileDiAnn(fileInput, geneNames):\n\n\t#---------------------\n\t# Creazione di una lista dove ogni elemento e' una riga del file \n\t# Ogni elem e' una lista di informazioni divise per colonne \n\t#\n\t# formato di un elemento di lines:\n\t#\n\t#\tPOSIZIONE \t\t\tCONTENUTO\n\t#\t\t0\t\t\t\t\tcromosoma\n\t#\t\t3\t\t\t\t\tstart\n\t#\t\t4\t\t\t\t\tend\n\t#\t\t6\t\t\t\t\tstrand\n\t#\t\t8\t\t\t\t\tgene_id\n\t#\t\t9\t\t\t\t\ttranscript_id\n\t#\t\t10\t\t\t\t\texon_number\n\t#\t\t11\t\t\t\t\tgene_name\n\t#\t\t12\t\t\t\t\ttranscript_name\t\n\t#\n\n\n\tstringa \t= '\\texon\\t'\n\tlines \t\t= []\n\tdictGeneChr = {}\n\t\n\t# Indici per il file di annotazione\n\t#\n\tidx_cromosoma = 0\n\tidx_geneName = 11\n\tidx_start = 3\n\tidx_end = 4\n\t\n\tfor x in open(fileInput):\n\t\triga = x.strip(';\\n').replace('; ','\\t').split('\\t')\n\n\t\tif not geneNames.has_key(riga[idx_geneName]):\n\t\t\tcontinue\n\t\t\t\t\n\t\t# Creazione del dizionario dei gene_name per ogni cromosoma\n\t\t#\n\t\tkey_geneChr = riga[idx_geneName] + '\\t' + riga[idx_cromosoma]\n\t\tif not dictGeneChr.has_key(key_geneChr):\n\t\t\tdictGeneChr[key_geneChr] = [riga[idx_start], riga[idx_end]]\n\t\telse:\n\t\t\t\n\t\t\t# Si aggiona il valore dello start del gene se si trova un \n\t\t\t# valore piu' piccolo\n\t\t\t#\n\t\t\tif int(dictGeneChr[key_geneChr][0]) > int(riga[idx_start]):\n\t\t\t\tdictGeneChr[key_geneChr][0] = riga[idx_start]\n\t\t\t\t\n\t\t\t# Si aggiorna il valore dell'end del gene se si trova un\n\t\t\t# valore piu' grande\n\t\t\t#\n\t\t\tif int(dictGeneChr[key_geneChr][1]) < int(riga[idx_end]):\t\n\t\t\t\tdictGeneChr[key_geneChr][1] = riga[idx_end]\n\t\t\n\t\t# Si filtra il file considerando solamente le regioni di tipo \"exon\"\n\t\t#\n\t\tif stringa in x:\n\t\t\tlines.append(riga)\n\n\treturn [lines, dictGeneChr]", "def collectMarkers(self, ingeno):\n with open(ingeno,'r') as fin:\n for line in fin:\n if line.startswith('#'):\n l = line.strip('#').strip().split()\n for i,e in enumerate(l):\n self.mark[e] = {'chrom':'0',\n 'pos':i,\n 'alleles': [],\n 'rank':i}\n self.marklist.append(e)\n break\n else:\n l = line.strip().split()\n if self.ia == 3:\n for i in xrange(0,len(l[self.ic:])//2):\n self.mark[str(i)] = {'chrom':'0',\n 'pos':i,\n 'alleles': [],\n 'rank':i}\n self.marklist.append(str(i))\n else:\n for i,e in enumerate(l[self.ic:]):\n self.mark[str(i)] = {'chrom':'0',\n 'pos':i,\n 'alleles': [],\n 'rank':i}\n self.marklist.append(str(i))", "def annotate_ISM(data_df, REFERENCE, position_list, reference_genbank_name=\"data/covid-19-genbank.gb\"):\n seq_list = data_df['sequence'].values.tolist()\n \n seq_index = []\n index = 0\n for base in REFERENCE[1]:\n if base == '-':\n seq_index.append(index)\n else:\n index += 1\n seq_index.append(index)\n reference_local_index_map = np.array(seq_index)\n mapped_reference_index = []\n for index, entropy in position_list:\n mapped_reference_index.append((index, reference_local_index_map[index], entropy))\n REFERENCE_ISM = ''.join([REFERENCE[1][item[0]] for item in position_list])\n logging.info('Reference ISM: {}.'.format(REFERENCE_ISM))\n \n gene_dict = load_gene_dict(reference_genbank_name)\n reference_raw = REFERENCE[1].replace('-', '')\n res = OrderedDict()\n res['Ref position'] = []\n res['Entropy'] = []\n res['Gene'] = []\n res['Is silent'] = []\n res['AA position'] = []\n for align_index, ref_index, entropy in mapped_reference_index:\n codon, codon_idx, name, codon_pos = find_SNP(ref_index, gene_dict, reference_raw)\n base_freq = 
Counter([item[align_index] for item in seq_list]).most_common()\n for alt_base, count in base_freq:\n if alt_base != reference_raw[ref_index-1]:\n break\n if codon is None:\n if_silence = True\n else:\n alt_codon = list(codon)\n alt_codon[codon_idx] = alt_base\n alt_codon = ''.join(alt_codon)\n ref_aa = translate(codon)\n ism_aa = translate(alt_codon)\n if ref_aa == ism_aa:\n if_silence = True\n else:\n if_silence = False\n res['Ref position'].append(ref_index)\n res['Entropy'].append(entropy)\n if name is None:\n name = 'Non-coding'\n res['Gene'].append(name)\n res['Is silent'].append(if_silence)\n if codon_pos is None:\n res['AA position'].append('NaN')\n else:\n res['AA position'].append('{}{}{}'.format(ref_aa, codon_pos, ism_aa))\n annotation_df = pd.DataFrame.from_dict(res)\n return annotation_df", "def _add_transform_genes(self):\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # 'MA' backshift (q)\n self._loci_list += ['AR_order', 'I_order', 'MA_order']", "def feature_table(chr_id, source, orient, genes, transcripts, cds, exons, unk):\n for gname, ginfo in genes.items():\n line = [str(chr_id), \n 'gbk_to_gff',\n ginfo[3],\n str(ginfo[0]),\n str(ginfo[1]),\n '.',\n ginfo[2],\n '.',\n 'ID='+str(gname)+';Name='+str(gname)+';Note='+ginfo[-1]]\n print '\\t'.join(line) \n ## construct the transcript line is not defined in the original file \n t_line = [str(chr_id), 'gbk_to_gff', source, 0, 1, '.', ginfo[2], '.'] \n\n if not transcripts:\n t_line.append('ID=Transcript:'+str(gname)+';Parent='+str(gname))\n\n if exons: ## get the entire transcript region from the defined feature\n t_line[3] = str(exons[gname][0][0])\n t_line[4] = str(exons[gname][0][-1])\n elif cds:\n t_line[3] = str(cds[gname][0][0])\n t_line[4] = str(cds[gname][0][-1])\n print '\\t'.join(t_line) \n\n if exons:\n exon_line_print(t_line, exons[gname], 'Transcript:'+str(gname), 'exon')\n\n if cds:\n exon_line_print(t_line, cds[gname], 'Transcript:'+str(gname), 'CDS')\n if not exons:\n exon_line_print(t_line, cds[gname], 'Transcript:'+str(gname), 'exon')\n\n else: ## transcript is defined \n for idx in transcripts[gname]: \n t_line[2] = idx[3]\n t_line[3] = str(idx[0])\n t_line[4] = str(idx[1])\n t_line.append('ID='+str(idx[2])+';Parent='+str(gname))\n print '\\t'.join(t_line) \n \n ## feature line print call \n if exons:\n exon_line_print(t_line, exons[gname], str(idx[2]), 'exon')\n if cds:\n exon_line_print(t_line, cds[gname], str(idx[2]), 'CDS')\n if not exons:\n exon_line_print(t_line, cds[gname], str(idx[2]), 'exon')\n\n if len(genes) == 0: ## feature entry with fragment information \n \n line = [str(chr_id), 'gbk_to_gff', source, 0, 1, '.', orient, '.'] \n fStart = fStop = None \n\n for eid, ex in cds.items(): \n fStart = ex[0][0] \n fStop = ex[0][-1]\n\n for eid, ex in exons.items(): \n fStart = ex[0][0] \n fStop = ex[0][-1]\n\n if fStart or fStart:\n\n line[2] = 'gene'\n line[3] = str(fStart)\n line[4] = str(fStop)\n line.append('ID=Unknown_Gene_' + str(unk) + ';Name=Unknown_Gene_' + str(unk))\n print \"\\t\".join(line)\n\n if not cds:\n line[2] = 'transcript'\n else:\n line[2] = 'mRNA'\n line[8] = 'ID=Unknown_Transcript_' + str(unk) + ';Parent=Unknown_Gene_' + str(unk)\n print \"\\t\".join(line)\n \n if exons:\n exon_line_print(line, cds[None], 'Unknown_Transcript_' + str(unk), 'exon')\n \n if cds:\n exon_line_print(line, cds[None], 'Unknown_Transcript_' + str(unk), 
'CDS')\n if not exons:\n exon_line_print(line, cds[None], 'Unknown_Transcript_' + str(unk), 'exon')\n \n unk +=1 \n\n return unk", "def translate_sequence(sequence, genetic_code = {'GUC': 'V', 'ACC': 'T', 'GUA': 'V', 'GUG': 'V', 'ACU': 'T', 'AAC': 'N', 'CCU': 'P', 'UGG': 'W', 'AGC': 'S', 'AUC': 'I', 'CAU': 'H', 'AAU': 'N', 'AGU': 'S', 'GUU': 'V', 'CAC': 'H', 'ACG': 'T', 'CCG': 'P', 'CCA': 'P', 'ACA': 'T', 'CCC': 'P', 'UGU': 'C', 'GGU': 'G', 'UCU': 'S', 'GCG': 'A', 'UGC': 'C', 'CAG': 'Q', 'GAU': 'D', 'UAU': 'Y', 'CGG': 'R', 'UCG': 'S', 'AGG': 'R', 'GGG': 'G', 'UCC': 'S', 'UCA': 'S', 'UAA': '*', 'GGA': 'G', 'UAC': 'Y', 'GAC': 'D', 'UAG': '*', 'AUA': 'I', 'GCA': 'A', 'CUU': 'L', 'GGC': 'G', 'AUG': 'M', 'CUG': 'L', 'GAG': 'E', 'CUC': 'L', 'AGA': 'R', 'CUA': 'L', 'GCC': 'A', 'AAA': 'K', 'AAG': 'K', 'CAA': 'Q', 'UUU': 'F', 'CGU': 'R', 'CGC': 'R', 'CGA': 'R', 'GCU': 'A', 'GAA': 'E', 'AUU': 'I', 'UUG': 'L', 'UUA': 'L', 'UGA': '*', 'UUC': 'F'}, start_pos = 0):\n #find first orf\n #first_orf_seq = find_first_orf(sequence)\n\n # ensure sequence is uppercase\n seq = sequence.upper()\n\n #translate the sequence\n protein = \"\"\n for i in range(0, len(seq) - (len(seq) % 3), 3):\n codon = seq[i:i + 3]\n if genetic_code[codon] == \"*\":\n break\n protein += genetic_code[codon]\n return protein", "def main():\n infile, outfile = get_args()\n seqs = mod.read_fasta(infile)\n\n coding_seqs = remove_guff(seqs)\n \n out = open(outfile, \"w\")\n for key, value in coding_seqs.items():\n out.write(\">\" + key + \"\\n\" + value + \"\\n\")\n out.close()", "def create_DESeqRscript_replicates(infile=\"/projects/dowellde/groseq/data/replicates/gffcoverage/set1andset2.coverage.protein_coding\",columns=\"[10, 15, 11, 14]\", type_transcript=\"gffcoverage\", conditions=\"['DMS0', 'DMSO', 'Nutlin', 'Nutlin']\", condition1=\"DMSO\", condition2=\"Nutlin\",title_of_names_column=\"group\"):\n\n f = open(infile)\n headers = f.readline()\n headers = headers.strip(\"\\n\")\n headers = headers.split(\"\\t\")\n f.close()\n infile_dir = infile.split(\"/\")[:-1]\n infile_dir = \"/\".join(infile_dir)+\"/\"\n infile_root = infile.split(\"/\")[-1].strip(\".txt\")\n\tset_conditions = set(eval(conditions))\n\tset_conditions = list(set_conditions)\n outfile = infile_dir+infile_root+\".\"+condition1+condition2+type_transcript\n write_file = outfile+\".R\"\n print write_file\n wf = open(write_file ,\"w\")\n R_dump_file = outfile+\".Rout\"\n graph_file = outfile+\".png\"\n outfileallinputs = outfile+\".res.txt\"\n outfilesig = outfile+\".resSig.txt\"\n outfilesig_orderpval = outfile+\".resSig_pvalue.txt\"\n wf.write('sink(\"'+R_dump_file+'\")\\n')\n wf.write('library( DESeq )\\n')\n wf.write('data <- read.delim(\"'+infile+r'\", sep=\"\\t\", header=TRUE)'+\"\\n\")#need to check that \\t comes out like it should. 
Might write it wrong.\n\tcolumns_list = []\n\tcolumns = eval(columns)\n\tline = \", \".join(map(str,columns))\n wf.write('countsTable <- subset(data, select=c('+line+'))\\n')\n wf.write('rownames(countsTable) <- data$'+title_of_names_column+'\\n')\n\tconditions = eval(conditions)\n line = '\", \"'.join(conditions)\n wf.write('conds <- c(\"'+line+'\")\\n')\n wf.write('cds <- newCountDataSet( countsTable, conds )\\n')\n wf.write('cds <- estimateSizeFactors( cds )\\n')\n wf.write('sizeFactors(cds)\\n')\n wf.write(\"cds <- estimateDispersions( cds )\\n\")\n wf.write('res <- nbinomTest( cds, \"'+condition1+'\", \"'+condition2+'\" )\\n')\n wf.write('plotDE <- function( res ) plot(res$baseMean,res$log2FoldChange, log=\"x\", pch=20, cex=.1, col = ifelse( res$padj < .1, \"red\", \"black\" ) )\\n')\n wf.write(\"png('\"+graph_file+\"')\\n\")\n wf.write('plotDE( res )\\n')\n wf.write('dev.off()\\n')\n wf.write('resSig <- res[ res$padj < .1, ]\\n')\n wf.write('write.table(res, file = \"'+outfileallinputs+r'\", append = FALSE, sep = \"\\t\")'+\"\\n\")\n wf.write('write.table(resSig, file = \"'+outfilesig+r'\", append = FALSE, sep = \"\\t\")'+\"\\n\")\n wf.write('write.table(resSig[ order(resSig$pval), ], file = \"'+outfilesig_orderpval+r'\", append = FALSE, sep = \"\\t\")'+\"\\n\")\n wf.write('sink()\\n')", "def preprocess_gene(gene_id,data_dict,t2g_mapping,out_paths,locks):\n \n # features = ['read_id','transcript_id','transcriptomic_position','reference_kmer','norm_mean','start_idx','end_idx'] # columns in the eventalign file per read.\n\n events = []\n condition_labels = []\n run_labels = []\n read_ids = []\n genomic_coordinates = []\n \n # Concatenate\n# if len(data_dict) == 0:\n# return\n\n\n for read_index,events_per_read in data_dict.items():\n# if len(events_per_read) > 0:\n # ===== transcript to gene coordinates ===== # TODO: to use gtf.\n# tx_ids = [tx_id.decode('UTF-8').split('.')[0] for tx_id in events_per_read['transcript_id']]\n tx_ids = [tx_id for tx_id in events_per_read['transcript_id']] \n tx_positions = events_per_read['transcriptomic_position']\n genomic_coordinate = list(itemgetter(*zip(tx_ids,tx_positions))(t2g_mapping)) # genomic_coordinates -- np structured array of 'chr','gene_id','genomic_position','kmer'\n genomic_coordinate = np.array(genomic_coordinate,dtype=np.dtype([('chr','<U2'),('gene_id','<U15'),('genomic_position','<i4'),('g_kmer','<U5')]))\n # ===== \n\n # Based on Ensembl, remove transcript version.\n\n events_per_read['transcript_id'] = tx_ids\n events_per_read = np.array(events_per_read,dtype=np.dtype([('transcript_id', 'S15'), ('transcriptomic_position', '<i8'), ('reference_kmer', 'S5'), ('norm_mean', '<f8')]))\n\n #\n\n events += [events_per_read]\n genomic_coordinates += [genomic_coordinate]\n n_events_per_read = len(events_per_read)\n# else:\n# print(read_index,len(events_per_read))\n\n events = np.concatenate(events)\n genomic_coordinates = np.concatenate(genomic_coordinates)\n \n # Sort and split # \n# idx_sorted = np.lexsort((events['reference_kmer'],genomic_coordinates['genomic_position'],genomic_coordinates['gene_id']))\n# key_tuples, index = np.unique(list(zip(genomic_coordinates['gene_id'][idx_sorted],genomic_coordinates['genomic_position'][idx_sorted],events['reference_kmer'][idx_sorted])),return_index = True,axis=0) #'chr',\n# y_arrays = np.split(events['norm_mean'][idx_sorted], index[1:])\n# # read_id_arrays = np.split(events['read_id'][idx_sorted], index[1:])\n# g_kmer_arrays = np.split(genomic_coordinates['g_kmer'][idx_sorted], index[1:])\n\n 
idx_sorted = np.argsort(genomic_coordinates['genomic_position'])\n unique_positions, index = np.unique(genomic_coordinates['genomic_position'][idx_sorted],return_index = True)\n y_arrays = np.split(events['norm_mean'][idx_sorted], index[1:])\n # read_id_arrays = np.split(events['read_id'][idx_sorted], index[1:])\n g_kmer_arrays = np.split(genomic_coordinates['g_kmer'][idx_sorted], index[1:])\n g_positions_arrays = np.split(genomic_coordinates['genomic_position'][idx_sorted], index[1:])\n\n # Prepare\n # print('Reformating the data for each genomic position ...')\n data = defaultdict(dict)\n # for each position, make it ready for json dump\n# data = dict(zip(key_tuples, y_arrays))\n\n asserted = True\n# for key_tuple,y_array,g_kmer_array in zip(key_tuples,y_arrays,g_kmer_arrays):\n for position,y_array,g_kmer_array,g_positions_array in zip(unique_positions,y_arrays,g_kmer_arrays,g_positions_arrays):\n# gene_id,position,kmer = key_tuple \n if (len(set(g_kmer_array)) == 1) and ('XXXXX' in set(g_kmer_array)) or (len(y_array) == 0):\n continue\n \n if 'XXXXX' in set(g_kmer_array):\n y_array = y_array[g_kmer_array != 'XXXXX'] \n assert len(y_array) == len(g_kmer_array) - (g_kmer_array=='XXXXX').sum()\n g_kmer_array = g_kmer_array[g_kmer_array != 'XXXXX'] \n \n try:\n assert len(set(g_kmer_array)) == 1\n assert {position} == set(g_positions_array)\n except:\n asserted = False\n break\n kmer = set(g_kmer_array).pop()\n\n data[position] = {kmer: list(y_array)} #,'read_ids': [read_id.decode('UTF-8') for read_id in read_id_array]}\n \n # write to file.\n log_str = '%s: %s' %(gene_id,asserted)\n\n with locks['json'], open(out_paths['json'],'a') as f:\n\n pos_start = f.tell()\n f.write('{')\n f.write('\"%s\":' %gene_id)\n ujson.dump(data, f)\n f.write('}\\n')\n pos_end = f.tell()\n\n with locks['index'], open(out_paths['index'],'a') as f:\n f.write('%s,%d,%d\\n' %(gene_id,pos_start,pos_end))\n \n with locks['readcount'], open(out_paths['readcount'],'a') as f: #todo: repeats no. 
of tx >> don't want it.\n n_reads = len(data_dict)\n f.write('%s,%d\\n' %(gene_id,n_reads))\n \n with locks['log'], open(out_paths['log'],'a') as f:\n f.write(log_str + '\\n')", "def main():\n args = get_args()\n FILE = args.FILE\n annotations = args.annotations\n outfile = args.outfile\n \n \n if not os.path.isfile(FILE):\n die('\"{}\" is not a file'.format(FILE))\n if not os.path.isfile(annotations):\n die('\"{}\" is not a file'.format(annotations))\n if os.path.isfile(FILE) and os.path.isfile(annotations):\n reader = csv.DictReader(open(FILE), delimiter = '\\t', fieldnames = (\"qseqid\", \"sseqid\", \"pident\", \"length\", \"mismatch\", \"gapopen\", \"qstart\", \"qend\", \"sstart\", \"send\", \"evalue\", \"bitscore\"))\n reader_a = csv.DictReader(open(annotations), fieldnames = (\"centroid\", \"domain\", \"kingdom\", \"phylum\", \"class\", \"order\", \"genus\", \"species\"))\n reader_b = csv.reader(open(annotations, 'r'))\n anno_dict = {}\n for row in reader_b:\n key1 = row[0]\n anno_dict[key1] = row[1:]\n\n #print(anno_dict)\n \n \"\"\"for dct in map(dict, reader_a):\n genus = (f\"{dct['genus']}\")\n species = (f\"{dct['species']}\")\n if genus == \"\": \n print(\"NA\")\n else:\n print(genus)\n if species == \"\":\n print(\"NA\")\n else:\n print(species)\"\"\"\n for dct in map(dict, reader):\n seq_id = (f\"{dct['sseqid']}\") \n pident = (f\"{dct['pident']}\")\n #print(seq_id)\n for dct_a in map(dict, reader_a):\n genus = (f\"{dct_a['genus']}\")\n species = (f\"{dct_a['species']}\")\n if any(seq_id == key for key in anno_dict): \n \"\"\"print(seq_id)\n print(pident)\n print(genus)\n print(species)\n #find a way to print genus and species of seq_id\n \"\"\"\n \n else:\n warn('Cannot find seq \"{}\" in lookup'.format(seq_id))\n \"\"\"for line_a in reader_a:\n an_id = (line_a['centroid']) \n print('\"{}\" is an_id'.format(an_id)) \n for line in reader:\n seq_id = (line['sseqid'])\n print('\"{}\" is seq_id'.format(seq_id))\n if seq_id == an_id:\n print(\"hi\")\n else:\n warn('Cannot find seq \"{}\" in lookup'.format(seq_id))\n \"\"\"\n #pprint.pprint(dict_list)\n #pprint.pprint(dict_list_a)\n #for key, value in d1.items():\n #if key is 'sseqid':\n #print(value)\n #print(dict_list_a['centroid']) ", "def addRG(in_files,args):\n #define readgroup header lines by combining the following\n\n \"\"\"\n -\n read group\n ID*\n Unique read group identifier. The value of the ID field is used in the RG tags of alignment records.\n SM*\n Sample (use pool name where a pool is being sequenced)\n LB\n Library\n DS\n Description\n PU\n Platform unit (e.g. 
lane for Illumina or slide for SOLiD); should be a full, unambiguous identifier\n PI\n Predicted median insert size (maybe different from the actual median insert size)\n CN\n Name of sequencing center producing the read.\n DT\n Date the run was produced (ISO 8601 date or date/time).\n PL\n Platform/technology used to produce the read.\"\"\"\n\n with open(args.barcodes,'r') as barcodes:\n sam_out= open(in_files['header'],'a')\n header = barcodes.readline().split('\\t')\n for line in barcodes:\n RG = ['@RG']\n split_line = line.split('\\t')\n if args.species and 'Species' in header:\n if split_line[(header.index('Species'))] != args.species:\n continue\n fc = split_line[(header.index('Flowcell'))]\n lane = split_line[(header.index('Lane'))]\n sample = split_line[(header.index('Sample'))]\n RG.append('ID:%s_%s_%s'%(fc,lane,sample))\n RG.append('SM:%s'%(sample))\n RG.append('LB:%s_%s'%(fc,sample))\n RG.append('PL:ILLUMINA\\n')\n sam_out.write('\\t'.join(RG))\n sam_out.close()\n return in_files", "def display_algn_seq():\n \n import os\n choice = input('Enter the name of the file: ')\n filepath = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Data', choice)\n with open(filepath,'r') as file:\n seq_list = []\n for line in file:\n if line[:6] == 'SEQRES':\n line_split = line.split()[4:]\n seq_list.append(line_split)\n \n filepath1 = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', 'outfile1')\n with open(filepath1, 'w') as outfile:\n for i in seq_list:\n outfile.writelines(i)\n \n filepath2 = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', 'outfile2')\n j = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', 'outfile1')\n with open(j, 'r') as fil:\n d = {'CYS':'C','ASP':'D','SER':'S','GLN':'Q','LYS':'K','ILE':'I','PRO':'P','THR':'T','PHE':'F','ASN':'N',\n 'GLY':'G','HIS':'H','LEU':'L','ARG':'R','TRP':'W','TER':'*','ALA':'A','VAL':'V','GLU':'E','TYR':'Y',\n 'MET':'M','XAA':'X'}\n with open(filepath2, 'w') as outf:\n for line in fil:\n if len(line) %3 == 0:\n upper_seq = line.upper()\n single_seq = ''\n for i in range(int(len(upper_seq)/3)):\n single_seq += d[upper_seq[3*i:3*i+3]]\n outf.write(single_seq) \n return single_seq\n else:\n print(\"ERROR: Line was not a factor of 3 in length!\")", "def addOmimAnnotation(merged_data, OmimAnnotationFile):\n omim_genes = dict.fromkeys(list(OmimAnnotationFile['ENSID']))\n has_omim = []\n for index, row in merged_data.iterrows():\n human_ensid = str(row['Human ENSID'])\n if human_ensid in omim_genes:\n has_omim.append('t')\n else:\n has_omim.append('f')\n\n merged_data['Has Omim Annotation'] = has_omim\n return", "def make_tag_data_raw_fast(mdp,filename):\n #\n fin = open(filename,'r')\n iter = 0\n for line in fin:\n lsp = line.split(' ')\n if len(lsp) > 1: # skip empty lines\n if lsp[0] == \"comb_path\":\n update_params(mdp,lsp)\n if not mdp.flag_out_open: ## -- try to open output file\n try:\n if mdp.flag_overwrite == \"True\": ## check string value!\n ## -- open save file for read+write\n try:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.save_file.seek(0) # go to beginning\n mdp.save_file.truncate() # delete whatever was there before\n except IOError:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'w')\n mdp.save_file.close()\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.flag_out_open = True\n #for num,key in zip(mdp.corr_num,mdp.key):\n # corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",num)\n 
mdp.flag_overwrite= False\n else:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.save_file.seek(0,2) # seek the end of file\n mdp.flag_out_open = True\n #for num,key in zip(mdp.corr_num,mdp.key):\n # corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",num)\n except (AttributeError):\n print \"Attempted to open invalid output file\"\n ## -- try open output file\n for file in glob.glob(mdp.input_path):\n # get sign which corrects for boundary condition\n tvals = file.split('/')[-1].split('_')[3].split('t')\n try:\n ## flip sign if requested\n bcsign = ((int(tvals[1])+int(tvals[2])) != (int(tvals[1])+int(tvals[2])) % mdp.corr_len)\n except IndexError:\n ## 2-point function\n bcsign = False\n try:\n # open correlator file\n mdp.corr_file = open(file,'r')\n except IOError:\n print \"Could not open file \",file\n continue\n ## -- get tag\n ## baryons:\n #mdp.tag = '_'+file.split('/')[-1].split('_')[1][1:]+'_r'+file.split('/')[-1].split('_')[4][-1]\n ## with time source tag\n #mdp.tag = file.split('/')[-1].split('_')[3][:3]\\\n # +'_'+file.split('/')[-1].split('_')[1][1:]+'_'+file.split('/')[-1].split('_')[4][0]\\\n # +file.split('/')[-1].split('_')[4][3:]\n ## no time source tag\n mdp.tag = '_'+file.split('/')[-1].split('_')[1][1:]+'_'+file.split('/')[-1].split('_')[4][0]\\\n +file.split('/')[-1].split('_')[4][3:]\n #print file,',',mdp.tag\n iter+=1\n ##endif ! flag_out_open\n\n #save_data_fast(mdp)\n save_data_fast_bc(mdp,bcsign)\n mdp.corr_file.close()\n if iter%400 == 0:\n print \"file\",iter\n max_iter = None\n if not(max_iter is None) and iter==max_iter:\n print \"reached max file iterations, ending loop...\"\n break\n ## end comb_path\n pass\n\n elif lsp[0] == \"for\": # indicates when to get correlator\n lsp.pop(0)\n update_params(mdp,lsp)\n try:\n # open correlator file\n mdp.corr_file = open(mdp.input_path + '/' + mdp.input_fname,'r')\n except IOError:\n print \"Could not open file \",mdp.input_fname\n continue\n print mdp.input_fname\n if not mdp.flag_out_open:\n try:\n if mdp.flag_overwrite:\n ## -- open save file for read+write\n try:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.save_file.seek(0) # go to beginning\n mdp.save_file.truncate() # delete whatever was there before\n except IOError:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'w')\n mdp.save_file.close()\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.flag_out_open = True\n #for num,key in zip(mdp.corr_num,mdp.key):\n # corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",num)\n mdp.flag_overwrite= False\n else:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.save_file.seek(0,2) # seek the end of file\n mdp.flag_out_open = True\n #for num,key in zip(mdp.corr_num,mdp.key):\n # corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",num)\n #except (IOError):\n # pass\n except (AttributeError):\n print \"Attempted to open invalid output file\"\n ##endif ! flag_out_open\n save_data_fast(mdp)\n mdp.corr_file.close()\n ##else \"for\" not found in control file\n else:\n update_params(mdp,lsp)\n ##endif lsp[0]==for\n ##endif len(lsp) > 1\n try:\n mdp.save_file.close()\n mdp.flag_out_open = False\n except (IOError,AttributeError):\n pass\n fin.close()\n return", "def _seq(codes, seq_file):\n \n seq_temp = 'oma_temporary_sequences.fasta'\n if os.path.isfile(seq_temp):\n info('Indexing pre-existed temporary protein sequences ('\n 'oma_temporary_sequences.fasta) ... 
')\n seqs = SeqIO.index(seq_temp, 'fasta')\n else:\n info('Parsing OMA protein sequences (oma-seqs.fa.gz) ... ')\n handle = gzip.open(seq_file, 'rt') if _gz(seq_file) else open(seq_file)\n records = SeqIO.parse(handle, 'fasta')\n seqs = {record.id: record for record in records if\n record.id[:5] in codes}\n SeqIO.write(seqs.values(), seq_temp, 'fasta')\n handle.close()\n return seqs", "def lees_inhoud():\r\n\r\n try:\r\n bestand = open(\"Mus_musculus.GRCm38.pep.all.fa\")\r\n headers = []\r\n seqs = []\r\n seq = \"\"\r\n for line in bestand:\r\n line = line.strip()\r\n if \">\" in line:\r\n if seq != \"\":\r\n seqs.append(seq)\r\n seq = \"\"\r\n headers.append(line)\r\n else:\r\n seq += line.strip()\r\n seqs.append(seq)\r\n if headers == \"\":\r\n print(\"Weet u zeker of u wel het goede bestand heeft? Er zijn namelijk geen headers.\")\r\n return headers, seqs\r\n except FileNotFoundError:\r\n print(\"Het gegeven bestand bestaat niet, \"\r\n \"check of het bestand in het mapje met de code zit en of het wel FASTA is.\")\r\n exit()", "def dictagnum(kind, fname):\n\n with open(fname, 'r') as g:\n g.next()\n g.next()\n m = g.next()\n startdict = agline(m)\n genold = startdict['gen']\n\n f = open(fname)\n f.next()\n f.next()\n d = {}\n y = '1'\n nb = []\n for l in f:\n adict = agline(l)\n ks = kind + 's'\n gen = adict['gen']\n well = adict['well']\n\n if adict['gen'] not in d:\n d[gen] = []\n \n if gen != genold:\n d[genold].append(sum(nb))\n nb = []\n else: \n if adict['well'] != y:\n d[gen].append(sum(nb))\n nb = []\n \n if kind == 'charge':\n if adict[ks] == 'x':\n nb.append(0)\n elif int(adict[ks]) >= 0 and (adict['charget'] == 'c' or \n adict['charget'] == 'o'):\n nb.append(1)\n elif adict[ks] == '-':\n pass\n #print('nb', nb)\n\n if kind == 'escd' or kind == 'escm':\n if adict[ks] == '':\n nb.append(0)\n elif int(adict[ks]) >= 0:\n nb.append(1)\n elif adict[ks] == '-':\n pass\n\n y = adict['well']\n genold = adict['gen']\n \n d[gen].append(sum(nb))\n \n return(d)", "def translate(self,line):\n \n def trans(g,m):\n if g[0] != g[1]: return '1'\n if g[0] == m[0]: return '0'\n if g[0] == m[1]: return '2'\n return 'nan'\n\n l = line.strip().split()\n animal,genos = l[1],l[6:]\n if len(self.mark['marklist']) > 0:\n lmark = self.mark['marklist']\n rgen = [animal]+[trans(genos[i*2:i*2+2],self.mark[name]['a1']+self.mark[name]['a2']) for i,name in enumerate(lmark)]\n else:\n rgen = [animal]+genos\n return rgen # [sample,a1,a2,a3,....,an]", "def read_gff(gff):\n genome = getseq(args.genome)\n dictoftranscripts = {}\n for k in open(gff):\n if not k.startswith(\"#\"):\n lines = k.strip().split(\"\\t\")\n if lines[2] == \"exon\":\n strand = lines[6]\n chromosome = lines[0]\n start = lines[3]\n end = lines[4]\n transcriptid = re.search(\"Parent=transcript:(.*)\", lines[8]).group(1)\n if transcriptid + \"#\" + chromosome in dictoftranscripts:\n dictoftranscripts[transcriptid + \"#\" + chromosome].extend([start, end])\n else:\n dictoftranscripts[transcriptid + \"#\" + chromosome] = []\n dictoftranscripts[transcriptid + \"#\" + chromosome].extend([start, end])\n\n for key, value in dictoftranscripts.iteritems():\n value.sort()\n print value\n for coord1 in value:\n\n for coord2 in value[1:]:\n #print coord1, coord2\n if int(coord1) != int(value[-1]) and value.index(coord2) != value.index(coord1)+1 and value.index(coord2) > value.index(coord1):\n\n exon1_start = int(coord1)\n exon1_end = int(coord2)\n #print exon1_start, exon1_end\n #print key.split(\"#\")[1]\n #print value.index(coord1), 
value.index(coord2)\n exon_seq = genome.get(key.split(\"#\")[1],\"NA\")\n\n if exon_seq != \"NA\":\n sequence_exon = exon_seq[exon1_start:exon1_end+1]\n #print exon1_start, exon1_end, sequence_exon\n for start, end, strand, frame, pro in translate(sequence_exon):\n junction =\n print start, end, strand, frame, pro", "def write_coord_seq():\n \n import os\n choice = input('Enter the name of the file: ')\n filepath = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Data', choice)\n lis = []\n with open(filepath, 'r') as file:\n for line in file:\n if line[:4] == 'ATOM':\n line_split = line.split()\n lis.append(line_split[3:4])\n choice1 = input('Enter name for the output file: ')\n filepath1 = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', choice1)\n with open(filepath1, 'w') as myfile:\n for i in lis:\n myfile.writelines(i)\n print('Done!')\n \n with open(choice, 'r') as myfile:\n header = ''\n for line in myfile:\n if line.startswith(\"TITLE\"): \n head_split = line.split()\n header = header + ' '.join(head_split[1:])\n \n choice2 = input('Enter output file name with a .fasta extension: ')\n filepath2 = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', choice2)\n z = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', choice1)\n with open(z, 'r') as file:\n with open(filepath2, 'w') as output:\n for i in file:\n output.writelines('>' + header + '\\n' + i)\n print('>' + header + '\\n' + i)\n print('Fasta file generated!')", "def locateSigCells(run,ion,codeLoc,testing=0):\n\n singleCount = 0\n\n # Read in the galaxy's box\n boxfile = '{0:s}_GZa{1:s}.{2:s}.h5'.format(run.galID,run.expn,ion.name)\n box = pd.read_hdf(boxfile, 'data')\n if testing==1:\n print('Box read in')\n\n # Read in the LOS info from lines.info\n los_info = np.loadtxt('lines.info',skiprows=2)\n\n # Determine which transition to use\n # Only need on, use the transition with the weaker oscillator strength since\n # this will retain mroe cells than the other transitions\n waves = fi.transition_name(ion.name,testing)\n\n # Open the output file\n outfile = '{0:s}.{1:s}.{2:s}.i{3:d}.abs_cells.h5'.format(run.galID,\n run.expn,ion.name,int(run.incline))\n header = ['wave','LOS','D','cellID','redshift','logN','dobbler_b',\n 'x', 'y', 'z', 'vx', 'vy', 'vz',\n 'r', 'nH', 'temperature', 'cell_size', 'SNII', 'SNIa', \n 'alpha_Zmet', 'ion_density', 'fullLogNstart', 'fullLogNend']\n\n # Open the summary file\n sumfile = '{0:s}_a{1:s}_{2:s}_i{3:d}_absCellsSummary.h5'.format(run.galID,\n run.expn,ion.name,int(run.incline))\n sumheader = ['los', 'impact', 'phi', 'wave','startNumCells', 'startEW', 'startlogN', \n 'endNumCells', 'endEW', 'endlogN']\n summary = pd.DataFrame(columns=sumheader)\n\n # Write a header to the output file\n numcols = len(header)\n\n # Create a blank row of zeros to build the array with\n d = pd.DataFrame(columns=header)\n\n # Make a version of Mockspec.runpars that has zero SNR\n # Needed for sigcells\n lf.quiet_mockspec()\n\n # Get a list of LOS that have a sysabs file associated with it\n #sysabs_losnum = glob.glob('*los*sysabs')\n #sysabs_losnum.sort()\n \n if testing==1:\n print('Sysabs files aggregated')\n print('Number of sysabs files: ', len(sysabs_losnum))\n\n flog = open('sig_cells.log', 'w')\n falselog = open('falseDetections.log','w')\n falselog.write('los\\tEWsysabs\\tv-\\tv+\\tInitNumCells\\n')\n\n # Loop over lines of sight\n sysabsFilename = '{0:s}.{1:s}.los{2:s}.sysabs'\n for i in range(0,run.nlos):\n\n #losnum = 
sysabs_losnum[i].split('.')[2].split('los')[1]\n num = i+1\n losnum = '{0:04d}'.format(num)\n\n linesfile = '{0:s}.{1:s}.los{2:s}.lines'.format(run.galID,ion.name,losnum)\n\n # See if sysabs file exists for this losnum\n sysabsfname = sysabsFilename.format(run.galID,\n ion.name,losnum)\n try:\n f = open(sysabsfname,'r')\n except IOError:\n continue\n\n # Get the column density of the LOS from the lines file\n logNinitial = lf.linesLogN(linesfile)\n\n # Make sure the .lines file has cells in it\n numCells = 0\n with open(linesfile) as f:\n for line in f:\n numCells += 1\n\n # There is always the galaxy redshift\n if numCells==1:\n # If there are no cells, continue to the next LOS\n continue \n\n # Get the impact paramter of this LOS\n imp = los_info[num-1,1]\n phi = los_info[num-1,2]\n \n # Loop over the different transitions\n for wave in waves:\n losSummary = pd.Series(index=sumheader)\n losSummary['wave'] = wave\n losSummary['los'] = num\n losSummary['startNumCells'] = numCells\n losSummary['impact'] = imp\n losSummary['phi'] = phi\n losSummary['startlogN'] = lf.linesLogN(linesfile)\n\n # Copy the original lines file\n command = 'cp {0:s} {0:s}.tmp'.format(linesfile)\n sp.call(command, shell=True)\n\n # Perform the velocity cut\n lf.velcut(linesfile, testing=testing)\n\n # Find the significant cells\n endCut,startEW,endEW = sg.sigcells(linesfile,run.sigcellsCut,codeLoc,\n flog,falselog,wave,testing=testing)\n\n # Get the properties of the cells\n # Open the lines.final file\n finalLinesFile = linesfile.replace('.lines',\n '.{0:s}.lines.final'.format(wave))\n logNfinal = lf.linesLogN(finalLinesFile)\n losSummary['endlogN'] = logNfinal\n losSummary['startEW'] = startEW\n losSummary['endEW'] = endEW\n\n final_file = open(finalLinesFile)\n final_file.readline()\n\n endNumCells = 0\n \n # Loop over the significant cells\n for line in final_file:\n endNumCells += 1\n l = line.split()\n redshift = float(l[0])\n column = float(l[1])\n doppler = float(l[2])\n cellID = int(float(l[3]))\n \n # Get the cell's properties from the boxfile\n index = cellID-1\n x = box['x'].iloc[index]\n y = box['y'].iloc[index]\n z = box['z'].iloc[index]\n \n # Calculate the galactocentric distance\n r = np.sqrt(x*x + y*y + z*z)\n \n cell = pd.Series(index=header)\n cell['wave'] = wave\n cell['LOS'] = num\n cell['D'] = imp\n cell['cellID'] = cellID\n cell['redshift'] = redshift\n cell['logN'] = column \n cell['dobbler_b'] = doppler\n cell['x'] = x\n cell['y'] = y \n cell['z'] = z \n cell['vx'] = box['vx'].iloc[index]\n cell['vy'] = box['vy'].iloc[index]\n cell['vz'] = box['vz'].iloc[index]\n cell['r'] = r\n cell['nH'] = np.log10(box['nH'].iloc[index])\n cell['temperature'] = np.log10(box['temperature'].iloc[index])\n cell['cell_size'] = box['cell_size'].iloc[index]\n cell['SNII'] = np.log10(box['SNII'].iloc[index])\n cell['SNIa'] = np.log10(box['SNIa'].iloc[index])\n cell['alpha_Zmet'] = box['alpha_Zmet'].iloc[index]\n cell['ion_density'] = np.log10(box['nIon'].iloc[index])\n cell['fIon'] = np.log10(box['fIon'].iloc[index])\n cell['fullLogNstart'] = logNinitial\n cell['fullLogNend'] = logNfinal\n \n # Append to the main array\n d = d.append(cell,ignore_index=True)\n \n losSummary['endNumCells'] = endNumCells\n summary = summary.append(losSummary,ignore_index=True)\n\n # Rename the original .lines file back to its full name\n command = 'cp {0:s}.tmp {0:s}'.format(linesfile)\n sp.call(command, shell=True)\n \n # Write the outfile\n d.to_hdf(outfile,'data',mode='w')\n summary.to_hdf(sumfile,'data',mode='w')\n\n print('For 
{0:s}, {1:d} LOS are dominated by one cell'.format(ion.name,\n singleCount))\n\n flog.close()\n falselog.close()", "def _add_transform_genes(self):\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # 'MA' backshift (q)\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # Seasonal 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # Seasonal 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # Seasonal 'MA' backshift (q)\n self._loci_list += ['AR_order', 'I_order', 'MA_order',\n 'ssn_AR_order', 'ssn_I_order', 'ssn_MA_order']", "def _makeimap(self):\n self.map_['source'] = 'NAOJ'\n self.map_['provider'] = 'NRO'\n self.map_['instrument'] = 'NORH'\n self.map_['phyobs'] = ''", "def writeIntrons(self, filenameout):\n printed = {}\n with open(filenameout, \"w\") as out:\n for features in self.feature_dictionary:\n intro = self.feature_dictionary[features].getIntrons()\n print(self.feature_dictionary[features].introns)\n for introns in intro:\n if \"-\".join([str(i) for i in introns]) not in printed:\n out.write(self.feature_dictionary[features].chromosome_name + \"\\t\"\n + str(introns[0]) + \"\\t\" + str(introns[1]) + \"\\t\" + self.feature_dictionary[features].strand + \"\\n\")\n printed[\"-\".join([str(i) for i in introns])] = 0", "def inizializzazione(fileInput, geneNames):\n\t\n\tdictTranscript \t= {}\n\tdictGenes \t\t= {}\n\tdictEsoni \t\t= {}\n\tdictIntroni \t= {}\n\tdictGeneChr \t= {}\n\n\t# - Filtraggio file di annotazione in input per 'exon' e per nome gene\n\t# - Calcolo delle coordinate dei geni nei cromosomi\n\t#\n\tlines, dictGeneChr = filtraFileDiAnn(fileInput, geneNames)\n\t\n\t\n\t# Indici all'interno del dizionario degli esoni\n\t#\n\tidx_starts \t= 0\n\tidx_ends \t= 1\n\tidx_strand \t= 2\n\t\n\t# Indici all'interno del dizionario dei Geni\n\t#\n\tidx_transcripts = 2\n\n\n\t# Creazione dei dizionari utili alla risoluzione del problema B\n\t#\n\tfor riga in lines:\n\t\tcromosoma \t\t= riga[0]\n\t\tstart_esone \t= riga[3]\n\t\tend_esone \t\t= riga[4]\n\t\tstrand \t\t\t= riga[6]\n\t\tgeneName \t\t= riga[11]\n\t\ttranscriptName \t= riga[12]\n\t\t\n\t\tTranscriptID \t= riga[9]\n\t\tGeneID \t\t\t= riga[8]\n\t\n\t\t# Creazione del dizionario dei transcritti\n\t\t#\n\t\tdictTranscript[TranscriptID] = [transcriptName, GeneID]\n\t\t\n\t\t# Creazione del dizionario dei geni\n\t\t#\n\t\tif not dictGenes.has_key(GeneID):\t\t\t\t\t\t\t\t\t\t# Se il GeneID non e' presente..\n\t\t\tdictGenes[GeneID] = [geneName, cromosoma, [TranscriptID]]\t\t\t# ..nel dizionario (come key)\n\t\telif TranscriptID not in dictGenes[GeneID][idx_transcripts]:\t\t\t# Se il GeneID e' presente ma non lo e'..\n\t\t\tdictGenes[GeneID][idx_transcripts].append(TranscriptID)\t\t\t\t# ..il TranscriptID questo si aggiunge alla lista\n\t\t\n\t\t# Creazione del dizionario degli esoni\n\t\t#\n\t\tif not dictEsoni.has_key(TranscriptID):\t\t\t\t\t\t \t# Se il TranscriptID non e' presente.. 
\n\t\t\tdictEsoni[TranscriptID] = [[start_esone],[end_esone],strand] \t# ..nel dizionario (come key)\n\t\telse:\n\t\t\tdictEsoni[TranscriptID][idx_starts].append(start_esone)\t\t\t \t# Il TranscriptID e' gia' presente quindi..\n\t\t\tdictEsoni[TranscriptID][idx_ends].append(end_esone)\t\t\t \t# ..si aggiunge l'esone alla lista degli esoni\n\t\t\t\n\t\t\t\n\t# Creazione del dizionario degli introni\n\t#\n\tfor TranscriptID in dictEsoni:\n\t\tesoniPerTranscript = len(dictEsoni[TranscriptID][idx_starts])\t \t# Si valuta il nr di esoni per TranscriptID corrente\n\t\t\n\t\tif int(esoniPerTranscript) > 1:\n\t\t\tstart_introni \t= []\t\t\t\t\t\t\t\t\t\t\t # Si preparano le variabili necessarie\n\t\t\tend_introni \t= []\n\t\t\t\n\t\t\tstart_esoni \t= []\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\tend_esoni \t\t= []\n\t\t\t\n\t\t\t# Si considera lo strand relativo al TranscriptID\n\t\t\t#\n\t\t\tif dictEsoni[TranscriptID][idx_strand] == '+':\t\t\t\t\t \t# Strand positivo -> esoni scritti in ordine crescente\n\t\t\t\tstrand = True\n\t\t\t\tstart_esoni = dictEsoni[TranscriptID][idx_starts]\n\t\t\t\tend_esoni \t= dictEsoni[TranscriptID][idx_ends]\n\t\t\t\t\n\t\t\telse:\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t# Strand negativo -> esoni scritti in ordine inverso..\n\t\t\t\tstrand = False\t\t\t\t\t\t\t\t\t\t\t\t \t# ..e per comodita' sono invertiti in ordine crescente\n\t\t\t\tstart_esoni = dictEsoni[TranscriptID][idx_starts][::-1] \t \n\t\t\t\tend_esoni \t= dictEsoni[TranscriptID][idx_ends][::-1]\n\n\t\t\t# Calcolo delle regioni introniche\n\t\t\t#\n\t\t\ti = 0\n\t\t\twhile i < int(esoniPerTranscript) - 1:\t\t\t\t\t\t\t \t# Per ogni coppia di esoni\n\t\t\t\tif (int(start_esoni[i+1]) - int(end_esoni[i])) > 2:\t\t\t \t# Se la regione tra due esoni consecutivi e' > 2..\n\t\t\t\t\tstart_introni.append(int(end_esoni[i]) + 1)\t\t\t \t# ..(considerando che gli estremi dell'introne sono..\n\t\t\t\t\tend_introni.append(int(start_esoni[i+1]) - 1)\t\t \t \t#..interni a quelli dei due esoni consecutivi correnti)\n\t\t\t\ti += 1\n\t\t\t\n\t\t\tif not strand:\t\t\t\t\t\t\t\t\t\t\t\t \t# Si mantiene traccia del fatto che derivano da un..\n\t\t\t\tstart_introni.reverse()\t\t\t\t\t\t\t\t\t \t# ..TranscriptID con strand negativo..\n\t\t\t\tend_introni.reverse()\t\t\t\t\t\t\t\t\t\t\t# ..(si inverte l'ordine degli introni)\n\t\t\n\t\t\tdictIntroni[TranscriptID] = [start_introni, end_introni]\n\n\n\t# Si eliminano i geni che non presentano regioni introniche:\n\t# \t- dalla lista di tutti i geni si rimuovono quelli che hanno introni;\n\t#\t- dal dizionario si rimuovono quelli rimasti nella lista.\n\t#\n\ttuttiIGeni = geneNames.keys()\n\tfor TranscriptID in dictIntroni:\n\t\tgeneID = dictTranscript[TranscriptID][1]\n\t\tnomeGene = dictGenes[geneID][0]\n\t\t\n\t\tif nomeGene in tuttiIGeni:\n\t\t\ttuttiIGeni.remove(nomeGene)\n\n\n\tfor nomeGene in tuttiIGeni:\n\t\tdel geneNames[nomeGene]\n\t\tprint 'Il gene %s non presenta regioni introniche.' 
% nomeGene\n\n\n\treturn [dictTranscript, dictGenes, dictEsoni, dictIntroni, dictGeneChr]", "def make_data_raw (mdp,do_makedata,filename):\n #\n fin = open(filename,'r')\n for line in fin:\n lsp = line.split(' ')\n if len(lsp) > 1: # skip empty lines\n if lsp[0] == \"for\": # indicates when to get correlator\n lsp.pop(0)\n update_params(mdp,lsp)\n ## -- do_makedata tells it to go ahead with generating a new data output file\n ## -- otherwise, just saves parameters to metadata\n if do_makedata:\n try:\n # open correlator file\n mdp.corr_file = open(mdp.input_path + '/' + mdp.input_fname,'r')\n except IOError:\n print \"Could not open file \",mdp.input_fname\n continue\n print mdp.input_fname,',',mdp.tag\n if not mdp.flag_out_open:\n try:\n if mdp.flag_overwrite:\n ## -- open save file for read+write\n try:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.save_file.seek(0) # go to beginning\n mdp.save_file.truncate() # delete whatever was there before\n except IOError:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'w')\n mdp.save_file.close()\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.flag_out_open = True\n # write first header\n #corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",\\\n # int(mdp.corr_num.strip(\"[]\").split(',')[0]))\n #uf.write_header(mdp.save_file,corr_key,mdp.corr_len)\n #uf.write_section(mdp.save_file,mdp.key)\n for num,key in zip(mdp.corr_num,mdp.key):\n corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",num)\n uf.write_header(mdp.save_file,corr_key,mdp.corr_len)\n uf.write_section(mdp.save_file,key)\n mdp.flag_overwrite= False\n else:\n mdp.save_file = open(mdp.output_path + '/' + mdp.output_fname,'r+')\n mdp.save_file.seek(0,2) # seek the end of file\n mdp.flag_out_open = True\n # write another header\n #corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",\\\n # int(mdp.corr_num.strip(\"[]\").split(',')[0]))\n #uf.write_header(mdp.save_file,corr_key,mdp.corr_len)\n #uf.write_section(mdp.save_file,mdp.key)\n for num,key in zip(mdp.corr_num,mdp.key):\n corr_key=uf.get_str_key(mdp.corr_file,\"correlator_key\",num)\n uf.write_header(mdp.save_file,corr_key,mdp.corr_len)\n uf.write_section(mdp.save_file,key)\n #except (IOError):\n # pass\n except (AttributeError):\n print \"Attempted to open invalid output file\"\n ##endif ! 
flag_out_open\n save_data(mdp)\n mdp.corr_file.close()\n ##endif do_makedata\n ##else \"for\" not found in control file\n else:\n update_params(mdp,lsp)\n ##endif lsp[0]==for\n ##endif len(lsp) > 1\n try:\n mdp.save_file.close()\n mdp.flag_out_open = False\n except (IOError,AttributeError):\n pass\n fin.close()\n return", "def assign_seqs(file_data,\r\n ids_bcs_added_field,\r\n bc_lens,\r\n all_bcs,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n start_index=1,\r\n write_unassigned_reads=False,\r\n disable_bc_correction=False,\r\n added_demultiplex_field=None):\r\n\r\n log_data = initialize_log_data(ids_bcs_added_field)\r\n bc_freqs = defaultdict(int)\r\n\r\n seq_counts = 0\r\n enum_val = start_index\r\n corrected_bc_count = [0, 0]\r\n\r\n if file_data['qual_files']:\r\n for curr_fasta, curr_qual in zip(file_data['fasta_files'],\r\n file_data['qual_files']):\r\n for fasta_data, qual_data in izip(parse_fasta(curr_fasta),\r\n MinimalQualParser(curr_qual, full_header=True)):\r\n\r\n seq_counts += 1\r\n fasta_label, fasta_seq = fasta_data\r\n qual_label, qual_seq = qual_data\r\n\r\n bc, corrected_bc, num_errors, added_field =\\\r\n get_demultiplex_data(ids_bcs_added_field,\r\n fasta_label, fasta_seq, bc_lens, all_bcs, barcode_type,\r\n max_bc_errors, disable_bc_correction, added_demultiplex_field)\r\n\r\n bc_freqs[bc] += 1\r\n\r\n sample_id, log_id, bc_corrected_result =\\\r\n get_output_ids(ids_bcs_added_field,\r\n corrected_bc, num_errors, added_field, max_bc_errors,\r\n enum_val)\r\n if bc_corrected_result == 'corrected':\r\n corrected_bc_count[0] += 1\r\n if bc_corrected_result == 'not_corrected':\r\n corrected_bc_count[1] += 1\r\n\r\n label_line = get_label_line(sample_id, fasta_label, bc,\r\n corrected_bc, num_errors)\r\n\r\n if sample_id.startswith(\"Unassigned\") and\\\r\n write_unassigned_reads:\r\n write_fasta_line(file_data['unassigned_seqs_f'],\r\n fasta_seq, label_line, True, len(bc))\r\n write_qual_line(file_data['unassigned_qual_f'],\r\n list(qual_seq), label_line, True, len(bc))\r\n elif not sample_id.startswith(\"Unassigned\"):\r\n write_fasta_line(file_data['demultiplexed_seqs_f'],\r\n fasta_seq, label_line, keep_barcode, len(bc))\r\n write_qual_line(file_data['demultiplexed_qual_f'],\r\n list(qual_seq), label_line, keep_barcode, len(bc))\r\n\r\n if log_id:\r\n log_data[log_id] += 1\r\n\r\n enum_val += 1\r\n\r\n else:\r\n for curr_fasta in file_data['fasta_files']:\r\n for fasta_label, fasta_seq in parse_fasta(curr_fasta):\r\n seq_counts += 1\r\n bc, corrected_bc, num_errors, added_field =\\\r\n get_demultiplex_data(ids_bcs_added_field,\r\n fasta_label, fasta_seq, bc_lens, all_bcs, barcode_type,\r\n max_bc_errors, disable_bc_correction, added_demultiplex_field)\r\n\r\n bc_freqs[bc] += 1\r\n\r\n sample_id, log_id, bc_corrected_result =\\\r\n get_output_ids(ids_bcs_added_field,\r\n corrected_bc, num_errors, added_field, max_bc_errors,\r\n enum_val)\r\n\r\n if bc_corrected_result == 'corrected':\r\n corrected_bc_count[0] += 1\r\n if bc_corrected_result == 'not_corrected':\r\n corrected_bc_count[1] += 1\r\n\r\n label_line = get_label_line(sample_id, fasta_label, bc,\r\n corrected_bc, num_errors)\r\n\r\n if sample_id.startswith(\"Unassigned\") and\\\r\n write_unassigned_reads:\r\n write_fasta_line(file_data['unassigned_seqs_f'],\r\n fasta_seq, label_line, True, len(bc))\r\n elif not sample_id.startswith(\"Unassigned\"):\r\n write_fasta_line(file_data['demultiplexed_seqs_f'],\r\n fasta_seq, label_line, keep_barcode, len(bc))\r\n\r\n if log_id:\r\n 
log_data[log_id] += 1\r\n\r\n enum_val += 1\r\n\r\n return log_data, bc_freqs, seq_counts, corrected_bc_count", "def format_data_gene(outfile, zipcodes, encode, fitFile=\"win40.csv\", window=40, isGroup=True, place=\"All\", fold=1):\n seed(2012)\n span = window/2 ## define half of the window size\n pars_data = import_pars()\n genome = map2genome()\n output = open(outfile, 'w')\n data_size = 0\n\n probv, probs = 0, 0\n if \"ProbVS\" in encode:\n probv = read_table(fitFile, \"pbv\")\n probs = read_table(fitFile, \"pbs\")\n\n ## headers\n output.write(\"label,gene,pos\")\n if \"SeqIndex\" in encode:\n for j in xrange(-span, span):\n output.write(\",char%s\"%j)\n if \"SeqBinary\" in encode:\n for j in xrange(-span, span):\n output.write(\",A%s,U%s,C%s,G%s\"%(j,j,j,j))\n if \"SeqGC\" in encode:\n output.write(\",GC,AU\")\n if \"SeqDiNu\" in encode:\n for nu1 in ['A','U','C','G']:\n for nu2 in ['A','U','C','G']:\n output.write(\",%s%s\"%(nu1, nu2))\n if \"SeqRatio\" in encode:\n output.write(\",A,U,C,G\")\n if \"PredSS3\" in encode:\n for j in xrange(-span, span):\n output.write(\",SpL%s,SpR%s,SpU%s\"%(j,j,j))\n if \"PredSS2\" in encode:\n for j in xrange(-span, span):\n output.write(\",pP%s,pU%s\"%(j,j))\n if \"PARS\" in encode:\n for j in xrange(-span, span):\n output.write(\",pars%s\"%j)\n if \"PARS2\" in encode:\n for j in xrange(-span, span):\n output.write(\",pars2%s\"%j)\n if \"LogVS\" in encode:\n for j in xrange(-span, span):\n output.write(\",lV%s,lS%s\"%(j,j))\n if \"ProbVS\" in encode:\n for j in xrange(-span, span):\n output.write(\",pV%s,pS%s\"%(j,j))\n output.write(\"\\n\")\n\n for gene, zipcode, region in zipcodes:\n lens = get3length(genome[gene])\n pars_gene = pars_data[gene]\n seq = pars_gene[\"FOLD_SEQ\"]\n ss = pars_gene[\"FOLD_SS\"]\n lr = ss_to_lr(ss)\n prob_l = pars_gene[\"FOLD_PROB_L\"]\n prob_r = pars_gene[\"FOLD_PROB_R\"]\n score = pars_gene[\"PARS\"]\n v1 = pars_gene[\"V1\"]\n s1 = pars_gene[\"S1\"]\n\n pv, ps = 0, 0\n if \"ProbVS\" in encode:\n pv = [float(val) for val in probv[gene]]\n ps = [float(val) for val in probs[gene]]\n\n ## the index of region is begin with 1 and close on both end\n region_begin, region_end = [int(val)+lens[0] for val in region.split('~')]\n print gene, zipcode, region_begin, region_end, len(seq)==sum(lens)\n\n ## generate postive and negative sample index list\n region_list = range(max(span, region_begin-1), min(region_end, len(seq)-span))\n\n if len(region_list) == 0:\n print zipcode, \"is removed, please use smaller window size.\"\n continue\n\n neg_list = range(span, region_begin-1) + range(region_end, len(seq)-span)\n\n if isGroup: ## group by gene\n counter = 0\n for gene1, zipcode1, region1 in zipcodes:\n if gene1 != gene: ## not the same RNA, so go on\n continue\n counter += 1 ## count the number of zipcodes on this RNA\n if zipcode1 == zipcode and counter != 1: ## not the first one\n neg_list = [] ## clear negtive list\n break\n r_b1, r_e1 = [int(val)+lens[0] for val in region1.split('~')]\n for i in range(r_b1-1, r_e1):\n if i in neg_list:\n neg_list.remove(i)\n\n #region_list.extend(random.sample(neg_list, len(region_list)))\n region_list.extend(neg_list)\n\n split_name = gene\n if fold > 1:\n split_name = \"fold_%s\"%(choice(range(fold))+1)\n elif not isGroup:\n split_name = \"%s-%s\"%(gene,zipcode)\n\n for i in region_list:\n if place == \"All\":\n pass\n elif place == \"5UTR\":\n if i >= lens[0]:\n continue\n elif place == \"CDS\":\n if i < lens[0] or i >= lens[0]+lens[1]:\n continue\n elif place == \"3UTR\":\n if i < lens[0] 
+ lens[1]:\n continue\n\n ## region [RL, i, RR); span [WL, i, WR)\n RL = region_begin - 1; RR = region_end\n WL = i - span; WR = i + span\n\n if RL <= i and i <= RR:\n label = 1\n else:\n label = -1\n\n ## begin package\n ele_list = [label, split_name, i+1]\n\n if \"SeqIndex\" in encode:\n for j in xrange(WL, WR):\n ele_list.append(\"ACGU\".find(seq[j]) + 1) ## return index\n if \"SeqBinary\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([int(seq[j]=='A'), int(seq[j]=='U'),\n int(seq[j]=='C'), int(seq[j]=='G')])\n if \"SeqGC\" in encode:\n ele_list.append((seq.count('G',WL,WR)+seq.count('C',WL,WR))/float(window))\n ele_list.append((seq.count('A',WL,WR)+seq.count('U',WL,WR))/float(window))\n if \"SeqDiNu\" in encode:\n for nu1 in ['A','U','C','G']:\n for nu2 in ['A','U','C','G']:\n ele_list.append(sum([int(seq[i]==nu1 and seq[i+1]==nu2)\n for i in xrange(WL,WR-1)])/float(window-1))\n if \"SeqRatio\" in encode:\n for nu in ['A','U','C','G']:\n ele_list.append(seq.count(nu,WL,WR)/float(window))\n if \"PredSS3\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([prob_l[j], prob_r[j], (1-prob_l[j]-prob_r[j])])\n if \"PredSS2\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([int(ss[j]!='.'), int(ss[j]=='.')])\n #ele_list.extend([prob_l[j]+prob_r[j], 1-prob_l[j]-prob_r[j]])\n if \"PARS\" in encode:\n for j in xrange(WL, WR):\n ele_list.append((score[j]+7)/14.0) ## normalize\n if \"PARS2\" in encode:\n for j in xrange(WL, WR):\n ele_list.append((score[j])**2/49.0) ## normalize\n if \"LogVS\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([math.log(v1[j]+1,2), math.log(s1[j]+1,2)])\n if \"ProbVS\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([pv[j], ps[j]])\n output.write(\",\".join([str(ele) for ele in ele_list])+\"\\n\")\n data_size += 1\n output.close()\n return data_size", "def anno_gene_stats(anno_gene, loc_file, gene_file, isConvert):\r\n LocationNum = collections.Counter()\r\n LocationGene = collections.defaultdict(list)\r\n\r\n\r\n GeneCatSample = collections.defaultdict(lambda: collections.defaultdict(list))\r\n CatGeneSample = collections.defaultdict(lambda: collections.defaultdict(list))\r\n\r\n allLocations = set()\r\n anno_h = open(anno_gene, \"r\")\r\n for line in anno_h:\r\n lines = line.strip().split(\"\\t\")\r\n sample, location, number, gene = lines[:4]\r\n number = int(number)\r\n\r\n ### whether convert the category to \"Exon\" or \"Intron\"\r\n if isConvert == \"True\":\r\n if location == \"Intron\":\r\n newLoc = \"Intron\"\r\n else:\r\n newLoc = \"Exon\"\r\n elif isConvert == \"False\":\r\n newLoc = location\r\n else:\r\n print(\"Please check whether convert the original category to 'Intron' or 'Exon' based on True of False.\")\r\n sys.exit(1)\r\n\r\n allLocations.add(newLoc)\r\n ### get the dict of gene -> location -> sample\r\n genes = gene.split(\",\")\r\n for g in genes:\r\n GeneCatSample[g][newLoc].append(sample)\r\n\r\n ### get the location -> gene -> sample\r\n CatGeneSample[newLoc][g].append(sample)\r\n anno_h.close()\r\n\r\n\r\n ## output gene and number in samples\r\n ### sort all locations\r\n sortedAllLocation = sorted(list(allLocations))\r\n\r\n gene_h = open(gene_file, \"w\")\r\n\r\n headerSample = [l + \"_samples\" for l in sortedAllLocation]\r\n gene_h.write(\"Gene\\tTotal\\t%s\\t%s\\n\" % (\"\\t\".join(sortedAllLocation), \"\\t\".join(headerSample)))\r\n\r\n GeneRecord = {}\r\n GeneNumber = {}\r\n\r\n allGenes = sorted(list(GeneCatSample.keys()))\r\n for ge in allGenes:\r\n ### get the number and samples for each 
location of each gene\r\n GeneNum = []\r\n GeneSample = []\r\n\r\n for loc in sortedAllLocation:\r\n if loc in GeneCatSample[ge]:\r\n samples = GeneCatSample[ge][loc]\r\n ##############################\r\n ####### unique for samples\r\n samples = sorted(list(set(samples)))\r\n sampleNum = len(samples)\r\n else:\r\n sampleNum = 0\r\n samples = [\"-\"]\r\n\r\n GeneNum.append(sampleNum)\r\n GeneSample.append(samples)\r\n\r\n GeneNumSum = sum(GeneNum)\r\n CatNumOut = \"\\t\".join([str(g) for g in GeneNum])\r\n CatSampleOut = \"\\t\".join([\",\".join(s) for s in GeneSample])\r\n\r\n record = \"%s\\t%d\\t%s\\t%s\\t\" % (ge, GeneNumSum, CatNumOut, CatSampleOut)\r\n GeneNumber[ge] = GeneNumSum\r\n GeneRecord[ge] = record\r\n \r\n ### output\r\n GeneNumSorted = sort_dict_value(GeneNumber)\r\n for g, n in GeneNumSorted:\r\n r = GeneRecord[g]\r\n gene_h.write(\"%s\\n\" % r)\r\n\r\n gene_h.close() \r\n\r\n\r\n ### location and genes\r\n loc_h = open(loc_file, \"w\")\r\n loc_h.write(\"Location\\tGeneNumber\\tGenes\\tSampleNumber\\tSamples\\n\")\r\n for loc in sortedAllLocation:\r\n geneSample = CatGeneSample[loc]\r\n genes = sorted(list(geneSample.keys()))\r\n geneNum = len(genes)\r\n samNum = 0\r\n samList = []\r\n for ge in geneSample:\r\n sam = geneSample[ge]\r\n samList.append(sam)\r\n samNum += len(sam)\r\n samOut = \";\".join([\",\".join(s) for s in samList])\r\n loc_h.write(\"%s\\t%d\\t%s\\t%d\\t%s\\n\" % (loc, geneNum, \",\".join(genes), samNum, samOut))\r\n loc_h.close()", "def insert_nmers_at(pos, nmer, seq):\n nmer_list = list(itertools.product('ATGC', repeat=nmer))\n\n # combine all nmers at pos together\n out_seqs = (''.join(flatten(\n (itertools.islice(seq, 0, pos), x, itertools.islice(seq, pos, None))\n )) for x in nmer_list)\n\n # name format Ins_(nmer)_(@pos)\n out_names = (\"Ins_{}_{}\".format(''.join(x), pos) for x in nmer_list)\n\n # generate sam tag\n # out_list will automatically place ins at last possible base\n if pos == 0:\n out_sam = \"{}I{}M\".format(nmer, len(seq))\n elif pos >= len(seq):\n out_sam = \"{}M{}I\".format(len(seq), nmer)\n else:\n out_sam = \"{}M{}I{}M\".format(pos, nmer, len(seq) - pos)\n\n # MD Tag is constant\n out_md = \"100\"\n\n # pad reference sequence\n out_ref = ''.join(flatten(\n (itertools.islice(seq, 0, pos), itertools.repeat('_', nmer), itertools.islice(seq, pos, None))))\n\n # combine everything together and repeat elements as needed!\n return(list(zip(\n out_names,\n itertools.repeat(out_sam, len(nmer_list)),\n itertools.repeat(out_md, len(nmer_list)),\n itertools.repeat(out_ref, len(nmer_list)),\n out_seqs)))", "def generatePositivePHASLoci(options,whole_mapped_data,phase,cycle):\n out_filename=options.output_directory_per_run+\"/\"+options.input_filename+\"_\"+str(phase)+\"_\"+str(cycle)+\".positive_phase_loci\"\n fhw=open(out_filename,\"w\")\n for chromosome in sorted(whole_mapped_data):\n filename=options.output_directory_per_run+\"/\"+options.input_filename+\"_\"+str(phase)+\"_\"+str(cycle)+\"_\"+chromosome+\".regionsOfInterest.concentrated\"\n try:\n fhr=open(filename,\"r\")\n except FileNotFoundError:\n continue\n flag_reg=1000\n window_start,window_end=0,0\n for line in fhr:\n \"\"\"pvalue=float(line.strip().split()[-1])\n if pvalue>=options.pvalue_cutoff:continue\"\"\"\n register,start,end=map(int,line.strip().split()[:3])\n if register==flag_reg:\n if window_end>start:\n window_end=end\n else:\n fhw.write(chromosome+\"\\t\"+str(window_start)+\"\\t\"+str(window_end)+\"\\n\")\n window_start=start\n window_end=end\n else:\n if 
flag_reg!=1000:\n fhw.write(chromosome+\"\\t\"+str(window_start)+\"\\t\"+str(window_end)+\"\\n\")\n window_start=start\n window_end=end\n flag_reg=register\n fhr.close()\n fhw.write(chromosome+\"\\t\"+str(window_start)+\"\\t\"+str(window_end)+\"\\n\")\n fhw.close()", "def main(args):\n fn = open(args.filename,\"r+\")\n for i, line in enumerate(fn, start = 1):\n f = open(\"string_examples_%i.txt\" %i,'w+')\n check = letter_check(line.rstrip())\n if check == 0:\n print('Sequence:', line.rstrip(), ' includes letters other than A,C,T or G, please revise this sequence')\n else:\n panda = create_panda(line.rstrip())\n LingC = calculate_LC(line.rstrip())\n f.write(line)\n f.write(str(LingC))\n f.close()\n panda.to_csv('data%i.csv' %i)", "def compatibility_g_a(gen, anot):\n print(\"Checking compatibility of genome with annotation file\")\n r_code = 0\n for seq in gen:\n if seq not in anot:\n print(\"WARN\\t{} sequence not found in annotaion file\".format(seq))\n r_code = 1\n for seq in anot:\n if seq not in gen:\n print(\"FAIL\\t{} sequence in annotation \"\n \"but not in genome.\".format(seq))\n r_code = 2\n elif anot[seq] > gen[seq]:\n print(\"FAIL\\tannotation interval on {} sequence is out of \"\n \"reference range.\".format(seq))\n r_code = 2\n print()\n return r_code", "def table_key(self, reindex_dict):\n reindexed_marks = []\n for m in self.component1.marks:\n new_m = reindex_dict.get(m)\n if new_m == None:\n if len(reindex_dict) == 0:\n new_m = 0\n else:\n new_m = max(reindex_dict.values())+1\n reindex_dict[m] = new_m\n reindexed_marks.append(new_m)\n return tuple( [self.component1.genus] + sorted(reindexed_marks) )", "def generate_data(input_file):\n \n mol_mass_list = []\n inchi_list = []\n SMILES_list = []\n identifier_list = []\n inchi_key1_list = [] \n inchi_key2_list = [] \n mol_formula_list = []\n NA_list = []\n \n pre_SMILES_list = []\n identifier_list = []\n all_lines = input_file.split('\\n')\n if all_lines[-1] == '':\n all_lines = all_lines[:-1]\n for line in all_lines:\n line = line.split('\\t')\n\n #Convert to mol and remove invalid structures \n smile_string = ''\n id_string = ''\n m = line[0]\n id_name = line[1]\n mol = Chem.MolFromSmiles(m)\n if mol != None:\n smile_string += m\n id_string += id_name\n pre_SMILES_list += [smile_string]\n \n #Source identifiers\n identifier_list += [id_string]\n \n pre_inchi_list = []\n for smile in pre_SMILES_list:\n #Generate mol\n m = Chem.MolFromSmiles(smile)\n #SMILES, canonical\n sm = Chem.MolToSmiles(m)\n SMILES_list += [sm]\n #Monoisotopic mass\n mol_weigth = Descriptors.ExactMolWt(m)\n mol_mass_list += [mol_weigth]\n #Mol Forumula\n mol_formula = rdMolDescriptors.CalcMolFormula(m)\n mol_formula_list += [mol_formula]\n # InChI \n inchi = rdinchi.MolToInchi(m)\n pre_inchi_list += [inchi[0]] \n \n \n # InChIKey1 and InChIKey2\n for inchi in pre_inchi_list:\n if not str(inchi).startswith('InCh'):\n inchi = 'NA'\n inchi_list += [inchi]\n \n pre_inchi_key_list =[]\n for inchi2 in inchi_list: \n if inchi2 == 'NA':\n inchi_key = \"NA-NA\"\n pre_inchi_key_list += [inchi_key]\n if inchi2 != 'NA':\n inchi_key = rdinchi.InchiToInchiKey(inchi2)\n pre_inchi_key_list += [inchi_key]\n \n for inchi_key in pre_inchi_key_list:\n inchi_key = inchi_key.split('-')\n inchi_key2 = inchi_key[1]\n inchi_key2_list += [inchi_key2]\n inchi_key1 = inchi_key[0]\n inchi_key1_list += [inchi_key1]\n\n # NA list \n nr_of_structures = len(SMILES_list)\n NA_list += ['NA'] * nr_of_structures\n\n overall_list = [mol_mass_list]+[inchi_list]+[SMILES_list]+\\\n 
[identifier_list]+[inchi_key2_list]+[inchi_key1_list]+[mol_formula_list]+\\\n [NA_list]+[NA_list]+[NA_list]+[NA_list]\n \n return overall_list", "def geneSpecificRecord (self, orfList, headList, num):\n sequenceInfo = []\n for gene in orfList: # Finds target gene in each genome\n sequenceInfo.append(gene[num]) # ***any gene can be utilized***\n longestLength = max(len(s) for s in sequenceInfo) # gets longest seq to match length with gap characters\n paddedSequences = [s.ljust(longestLength, '-') for s in sequenceInfo] # Adds gap characters\n \n records = (SeqRecord(Seq(s), id = str(paddedSequences.index(s))) for s in paddedSequences) #creating a SeqRecord\n return(records)", "def create_DESeqRscript_no_replicates(infile=\"/projects/dowellde/groseq/data/set1/clipped_fastqM10/samfiles/sortedbamfiles/lncRNAs/compare_cov_fileless1neg_istead.txt\",column1=11, column2=14, type_transcript=\"lncRNAs\", condition1=\"DMS0\", condition2=\"Nutlin\", title_of_names_column=\"name\", order_flip=\"N\"):\n\n\tf = open(infile)\n\theaders = f.readline()\n\theaders = headers.strip(\"\\n\")\n\theaders = headers.split(\"\\t\")\n\tf.close()\n\tinfile_dir = infile.split(\"/\")[:-1]\n\tinfile_dir = \"/\".join(infile_dir)+\"/\"\n\tinfile_root = infile.split(\"/\")[-1].strip(\".txt\")\n\theadercondition1 = headers[column1-1]#adjust for the fact python starts counting with 0 and R with 1\n\theadercondition2 = headers[column2-1]#adjust for the fact python starts counting with 0 and R with 1\n\tif order_flip==\"N\":\n\t\toutfile = infile_dir+infile_root+\".\"+headercondition1+headercondition2+type_transcript\n\telse:\n\t\toutfile = infile_dir+infile_root+\".\"+headercondition2+headercondition1+type_transcript\n\twrite_file = outfile+\".R\"\n\tprint write_file\n\twf = open(write_file ,\"w\")\n\tR_dump_file = outfile+\".Rout\"\n\tgraph_file = outfile+\".png\"\n\toutfileallinputs = outfile+\".res.txt\"\n\toutfilesig = outfile+\".resSig.txt\"\n\toutfilesig_orderpval = outfile+\".resSig_pvalue.txt\"\n\twf.write('sink(\"'+R_dump_file+'\")\\n')\n\twf.write('library( DESeq )\\n')\n\twf.write('data <- read.delim(\"'+infile+r'\", sep=\"\\t\", header=TRUE)'+\"\\n\")#need to check that \\t comes out like it should. 
Might write it wrong.\n\twf.write('countsTable <- subset(data, select=c('+str(column1-1)+','+str(column2-1)+'))\\n')\n\twf.write('rownames(countsTable) <- data$'+title_of_names_column+'\\n')\n\twf.write('conds <- c(\"'+condition1+'\", \"'+condition2+'\")\\n')\n\twf.write('cds <- newCountDataSet( countsTable, conds )\\n')\n\twf.write('cds <- estimateSizeFactors( cds )\\n')\n\twf.write('sizeFactors(cds)\\n')\n\twf.write(\"cds <- estimateDispersions( cds, method='blind', sharingMode='fit-only' )\\n\")\n\tif order_flip==\"N\":\n\t\twf.write('res <- nbinomTest( cds, \"'+condition1+'\", \"'+condition2+'\" )\\n')\n\telse:\n\t\twf.write('res <- nbinomTest( cds, \"'+condition2+'\", \"'+condition1+'\" )\\n')\n\twf.write('plotDE <- function( res ) plot(res$baseMean,res$log2FoldChange, log=\"x\", pch=20, cex=.1, col = ifelse( res$padj < .1, \"red\", \"black\" ) )\\n')\n\twf.write(\"png('\"+graph_file+\"')\\n\")\n\twf.write('plotDE( res )\\n')\n\twf.write('dev.off()\\n')\n\twf.write('resSig <- res[ res$padj < .1, ]\\n')\n\twf.write('write.table(res, file = \"'+outfileallinputs+r'\", append = FALSE, sep = \"\\t\")'+\"\\n\")\n\twf.write('write.table(resSig, file = \"'+outfilesig+r'\", append = FALSE, sep = \"\\t\")'+\"\\n\")\n\twf.write('write.table(resSig[ order(resSig$pval), ], file = \"'+outfilesig_orderpval+r'\", append = FALSE, sep = \"\\t\")'+\"\\n\")\n\twf.write('sink()\\n')", "def automark(self):\n\n index_list = self.ui.tableWidget.selectionModel().selectedIndexes()\n rows = []\n for i in index_list:\n rows.append(i.row())\n rows = list(set(rows)) # duplicate rows due to multiple columns\n if len(rows) == 0:\n return\n selected_files = []\n filenames = \"\"\n for r in rows:\n if self.allfiles[r][2] is not None and self.allfiles[r][2] != \"\":\n selected_files.append(self.allfiles[r])\n filenames += self.allfiles[r][1] + \" \"\n ui_se = DialogGetStartAndEndMarks(self.case['name'], filenames)\n ok = ui_se.exec()\n if not ok:\n return\n start_mark = ui_se.get_start_mark()\n end_mark = ui_se.get_end_mark()\n if start_mark == \"\" or end_mark == \"\":\n Message(self.app, _(\"Warning\"), _('Cannot have blank text marks'), \"warning\").exec()\n return\n msg = _(\"Auto assign text to case: \") + self.case['name']\n msg += _(\"\\nUsing \") + start_mark + _(\" and \") + end_mark + _(\"\\nIn files:\\n\")\n msg += filenames\n warning_msg = \"\"\n already_assigned = \"\"\n entries = 0\n cur = self.app.conn.cursor()\n for f in selected_files:\n cur.execute(\"select name, id, fulltext, memo, owner, date from source where id=?\",\n [f[0]])\n currentfile = cur.fetchone()\n text = currentfile[2]\n text_starts = [match.start() for match in re.finditer(re.escape(start_mark), text)]\n text_ends = [match.start() for match in re.finditer(re.escape(end_mark), text)]\n # Add new code linkage items to database\n already_assigned = \"\"\n for start_pos in text_starts:\n text_end_iterator = 0\n try:\n while start_pos >= text_ends[text_end_iterator]:\n text_end_iterator += 1\n except IndexError:\n text_end_iterator = -1\n warning_msg += _(\"Auto assign. Could not find an end mark: \") + f[1] + \" \" + end_mark + \"\\n\"\n if text_end_iterator >= 0:\n pos1 = text_ends[text_end_iterator]\n item = {'caseid': self.case['caseid'], 'fid': f[0],\n 'pos0': start_pos, 'pos1': pos1,\n 'owner': self.app.settings['codername'],\n 'date': datetime.datetime.now().astimezone().strftime(\"%Y-%m-%d %H:%M:%S\"), 'memo': \"\"}\n # Check if already assigned to case_text\n sql = \"select id from case_text where caseid=? and fid=? and pos0=? 
and pos1=?\"\n cur.execute(sql, [item['caseid'], item['fid'], item['pos0'], item['pos1']])\n res = cur.fetchone()\n if res is None:\n sql = \"insert into case_text (caseid,fid,pos0,pos1,owner,date,memo) values(?,?,?,?,?,?,?)\"\n cur.execute(sql, (item['caseid'], item['fid'], item['pos0'], item['pos1'],\n item['owner'], item['date'], item['memo']))\n entries += 1\n self.app.conn.commit()\n else:\n already_assigned = _(\"\\nAlready assigned.\")\n # Update messages and table widget\n self.get_files()\n self.fill_table()\n # Text file is loaded in browser then update the highlights\n self.load_case_text()\n self.highlight()\n msg += \"\\n\" + str(entries) + _(\" sections found.\")\n Message(self.app, _(\"File added to case\"), msg + \"\\n\" + warning_msg + \"\\n\" + already_assigned).exec()\n self.parent_textEdit.append(msg)\n self.parent_textEdit.append(warning_msg)\n self.app.delete_backup = False", "def find_sequence(filename_pdb, filename_txt):\n with open(filename_pdb, \"r\") as pdb_file, open(filename_txt, \"a\") as seq_file:\n ca_lines = []\n sequence = \"\"\n lines = pdb_file.readlines()\n\n amino_acids = {\"ALA\": \"A\", \"GLY\": \"G\", \"GLU\": \"E\", \"ARG\": \"R\",\n \"TRP\": \"W\", \"TYR\": \"Y\", \"SER\": \"S\", \"ASN\": \"N\",\n \"ASP\": \"D\", \"CYS\": \"C\", \"GLN\": \"Q\", \"HIS\": \"H\",\n \"ILE\": \"I\", \"LEU\": \"L\", \"LYS\": \"K\", \"MET\": \"M\",\n \"PHE\": \"F\", \"PRO\": \"P\", \"THR\": \"T\", \"VAL\": \"V\"}\n\n for line in lines:\n if line[12:16].strip() == \"CA\":\n ca_lines.append(line)\n sequence = sequence + amino_acids[line[17:20]]\n\n new_sequence = \"\"\n for aa in sequence:\n new_sequence += aa\n if len(new_sequence.replace(\"\\n\", \"\")) % 70 == 0:\n new_sequence += \"\\n\"\n\n\n seq_file.write(f\">{filename_pdb[11:18]}\\n\")\n seq_file.write(new_sequence)\n seq_file.write(\"\\n\")", "def compose_g_carpa(\n in_carpa_path: str,\n temp_carpa_path: str,\n words_mapping: MappingType,\n carpa_path: str,\n log_file: TextIO,\n):\n bos_symbol = words_mapping[\"<s>\"]\n eos_symbol = words_mapping[\"</s>\"]\n unk_symbol = words_mapping[\"<unk>\"]\n with open(in_carpa_path, \"r\", encoding=\"utf8\") as f, open(\n temp_carpa_path, \"w\", encoding=\"utf8\"\n ) as outf:\n current_order = -1\n num_oov_lines = 0\n for line in f:\n line = line.strip()\n col = line.split()\n if current_order == -1 and not re.match(r\"^\\\\data\\\\$\", line):\n continue\n if re.match(r\"^\\\\data\\\\$\", line):\n log_file.write(r\"Processing data...\\n\")\n current_order = 0\n outf.write(line + \"\\n\")\n elif re.match(r\"^\\\\[0-9]*-grams:$\", line):\n current_order = int(re.sub(r\"\\\\([0-9]*)-grams:$\", r\"\\1\", line))\n log_file.write(f\"Processing {current_order} grams...\\n\")\n outf.write(line + \"\\n\")\n elif re.match(r\"^\\\\end\\\\$\", line):\n outf.write(line + \"\\n\")\n elif not line:\n if current_order >= 1:\n outf.write(\"\\n\")\n else:\n if current_order == 0:\n outf.write(line + \"\\n\")\n else:\n if len(col) > 2 + current_order or len(col) < 1 + current_order:\n raise Exception(f'Bad line in arpa lm \"{line}\"')\n prob = col.pop(0)\n is_oov = False\n for i in range(current_order):\n try:\n col[i] = str(words_mapping[col[i]])\n except KeyError:\n is_oov = True\n num_oov_lines += 1\n break\n if not is_oov:\n rest_of_line = \" \".join(col)\n outf.write(f\"{prob}\\t{rest_of_line}\\n\")\n carpa_proc = subprocess.Popen(\n [\n thirdparty_binary(\"arpa-to-const-arpa\"),\n f\"--bos-symbol={bos_symbol}\",\n f\"--eos-symbol={eos_symbol}\",\n f\"--unk-symbol={unk_symbol}\",\n 
temp_carpa_path,\n carpa_path,\n ],\n stdin=subprocess.PIPE,\n stderr=log_file,\n stdout=log_file,\n env=os.environ,\n )\n carpa_proc.communicate()\n os.remove(temp_carpa_path)", "def main():\n\n args = get_args()\n seq = args.seq.upper()\n codon_to_aa = {\n 'AAA': 'K',\n 'AAC': 'N',\n 'AAG': 'K',\n 'AAU': 'N',\n 'ACA': 'T',\n 'ACC': 'T',\n 'ACG': 'T',\n 'ACU': 'T',\n 'AGA': 'R',\n 'AGC': 'S',\n 'AGG': 'R',\n 'AGU': 'S',\n 'AUA': 'I',\n 'AUC': 'I',\n 'AUG': 'M',\n 'AUU': 'I',\n 'CAA': 'Q',\n 'CAC': 'H',\n 'CAG': 'Q',\n 'CAU': 'H',\n 'CCA': 'P',\n 'CCC': 'P',\n 'CCG': 'P',\n 'CCU': 'P',\n 'CGA': 'R',\n 'CGC': 'R',\n 'CGG': 'R',\n 'CGU': 'R',\n 'CUA': 'L',\n 'CUC': 'L',\n 'CUG': 'L',\n 'CUU': 'L',\n 'GAA': 'E',\n 'GAC': 'D',\n 'GAG': 'E',\n 'GAU': 'D',\n 'GCA': 'A',\n 'GCC': 'A',\n 'GCG': 'A',\n 'GCU': 'A',\n 'GGA': 'G',\n 'GGC': 'G',\n 'GGG': 'G',\n 'GGU': 'G',\n 'GUA': 'V',\n 'GUC': 'V',\n 'GUG': 'V',\n 'GUU': 'V',\n 'UAA': 'Stop',\n 'UAC': 'Y',\n 'UAG': 'Stop',\n 'UAU': 'Y',\n 'UCA': 'S',\n 'UCC': 'S',\n 'UCG': 'S',\n 'UCU': 'S',\n 'UGA': 'Stop',\n 'UGC': 'C',\n 'UGG': 'W',\n 'UGU': 'C',\n 'UUA': 'L',\n 'UUC': 'F',\n 'UUG': 'L',\n 'UUU': 'F',\n }\n\n k = 3\n\n # 1: for loop\n # protein = ''\n # for codon in [seq[i:i + k] for i in range(0, len(seq), k)]:\n # aa = codon_to_aa.get(codon, '-')\n # if aa == 'Stop':\n # break\n # protein += aa\n\n # 2: list comprehension, slice to remove Stop\n # codons = [seq[i:i + k] for i in range(0, len(seq), k)]\n # aa = [codon_to_aa.get(codon, '-') for codon in codons]\n # if 'Stop' in aa:\n # aa = aa[:aa.index('Stop')]\n # print(''.join(aa))\n\n # 3: L.C. -> map(), slice -> takewhile\n # codons = map(lambda i: seq[i:i + k], range(0, len(seq), k))\n # aa = map(lambda codon: codon_to_aa.get(codon, '-'), codons)\n # print(''.join(takewhile(lambda c: c != 'Stop', aa)))\n\n # 4: combine map()\n # aa = map(lambda c: codon_to_aa.get(c, '-'),\n # map(lambda i: seq[i:i + k], range(0, len(seq), k)))\n # print(''.join(takewhile(lambda c: c != 'Stop', aa)))\n\n # 5: combine all\n # print(''.join(\n # takewhile(\n # lambda c: c != 'Stop',\n # map(lambda c: codon_to_aa.get(c, '-'),\n # map(lambda i: seq[i:i + k], range(0, len(seq), k))))))\n\n # 6: Seq\n print(str(Seq(args.seq).translate()).replace('*', ''))", "def _add_transform_genes(self):\n pass", "def convert_bismark_add_strand_and_seq(indf, outfn):\n logger.debug(f'Start add strand and seq to bismark cov file, total len={len(indf)}')\n\n outf = gzip.open(outfn, 'wt')\n\n for index, row in tqdm(indf.iterrows(), total=len(indf), desc='Bismark_cov'):\n # if report_num and index % report_num == 0:\n # logger.debug(f'processed index={index}')\n chr = row['chr']\n start = int(row['start']) # Keep raw 1-based format of bismark results\n ret = get_dna_base_from_reference(chr, start - 1, ref_fasta=ref_fasta)\n if ret[5] == 'C': # strand is +\n strand = '+'\n elif ret[5] == 'G':\n strand = '-'\n else:\n raise Exception(f'We can not identify this bg-truth file with non-CG results, such as row={row}')\n\n outstr = '\\t'.join([chr, str(start), strand, str(row['mcount']), str(row['ccount']), ret[4:7]])\n outf.write(f'{outstr}\\n')\n outf.close()\n logger.info(f'save to {outfn}')\n\n logger.debug(f'Finish add strand info task')", "def _gto_from_ccdata(self):\n\n gbasis = self.ccdata.gbasis\n lines = []\n\n for no, basis in enumerate(gbasis):\n lines.append(f\"{no + 1:3d} 0\")\n for prims in basis:\n lines.append(f\"{prims[0].lower():s} {len(prims[1]):5d} 1.00\")\n for prim in prims[1]:\n lines.append(f\"{prim[0]:15.9e} 
{prim[1]:15.9e}\")\n lines.append('')\n lines.append('')\n return lines", "def parse(self):\n with open(self.ofilename_pos, \"w\") as ofile_pos,\\\n open(self.ofilename_neg, \"w\") as ofile_neg:\n \n for (count, (code, header, sequence)) in enumerate(\n self.next_sequence()):\n # pick the right output file\n if code in self.codes: ofile = ofile_pos\n else: ofile = ofile_neg\n ofile.write(f\"{header}\\n{sequence}\\n\")", "def split_decode_file():\n # split files by chromosome\n header = []\n current_chrom = 'chr1'\n # file_template = decode_folder + '/{}.deCODE_2019.GRCh38.txt'\n file_template = decode_folder + '/{}.deCODE_2019_hg19.txt'\n decode_file = decode_folder + '/aau1043_DataS3_hg19_liftOver.bed'\n w = open(file_template.format(current_chrom), 'a')\n print('NOTE: appending to map files, not overwriting. may cause duplicates')\n with open(decode_file, 'r') as f:\n for line in f:\n # save the header info\n if line.startswith('#'):\n header.append(line)\n # save the column labels\n elif line.startswith('Chr'):\n header.append('# ' + line)\n # write header to first file now\n w.write(''.join(header))\n # the remaining lines are data\n else:\n # get the chromosome for the current line\n ch = line.split()[0]\n # if the chromosome matches the open file, write to it\n if ch == current_chrom:\n w.write(line)\n # if a new chromosome arises, switch to a new writefile\n else:\n w.close()\n current_chrom = ch\n w = open(file_template.format(current_chrom), 'a')\n # write header to file\n w.write(''.join(header))\n w.write(line)\n\n # close the last open file\n w.close()", "def convert_matrix(infile, names,refdict,nosamples):\n \n if infile.endswith(\".gz\"):\n inf = gzip.open(infile, \"rb\")\n \n else:\n inf = open(infile, \"r\")\n for line in inf:\n line = line.rsplit()\n if line[0] == \"chromosome\":\n pass # header\n else:\n \n\n chrom = line[0]\n start = line[1]\n stop = line[2]\n TE = line[4]\n n_te = str(len(TE.split(\",\")))\n tes=TE.split(\",\")\n tefam=[]\n tesuperfamily=[]\n \n \n for i in xrange(len(tes)):\n \n tefam.append(refdict[tes[i]][0])\n \n tesuperfamily.append(refdict[tes[i]][1])\n \n \n superfamily=list(set(tesuperfamily))\n if 'Unknown' in superfamily:\n superfamily.remove('Unknown')\n if not superfamily:\n superfamily.append('Unknown')\n \n pos = line[5].split(\",\")\n neg = line[6].split(\",\")\n#missing = 305-(len(pos)+len(neg))/305\n te_id = \"\\t\".join([chrom, start, stop])\n status = get_status(pos, neg, names)\n column_ordered = []\n for i in names:\n column_ordered.append(status[i])\n noNA = filter(lambda x: x != \"NA\", status.values()) \n noNA = map(int, noNA)\n pos_count = sum(noNA)\n l = len(noNA)\n neg_count = l - pos_count\n TE_present=pos_count\n TE_absent=neg_count\n if(pos_count < neg_count):\n Minor_allele=\"presence\"\n\n else:\n Minor_allele=\"absence\"\n#print Minor_allele\n q20=int(0.2*nosamples)\n q80=int(0.8*nosamples)\n if (TE_absent < q20):\n Absence_classification=\"True deletion\"\n elif (TE_absent > q80):\n Absence_classification=\"No insertion\"\n else:\n Absence_classification=\"NA\"\n original_call_deletion = 'T'\n MAF=float(min(TE_present, TE_absent))/nosamples\n #print int(min(TE_present, TE_absent)) ,MAF\n if(MAF < 0.025):\n Frequency_classification = \"Rare\"\n else:Frequency_classification =\"Common\"\n print(te_id + \"\\t\" + TE + \"\\t\" + \",\".join(tefam) + \"\\t\" +\",\".join(superfamily) + \"\\t\" +n_te + \"\\t\" + str(pos_count) + \"\\t\" + str(neg_count) + \"\\t\" +str(Minor_allele) + \"\\t\" +original_call_deletion + \"\\t\" 
+str(Absence_classification) + \"\\t\" +str(MAF) + \"\\t\" +str(Frequency_classification) + \"\\t\"+\"\\t\".join(column_ordered))\n inf.close()", "def encode(self, seq):", "def generate_inv_index(people):\n pass", "def parse_sam(in_file, out_file, read_type , strand):\n out_handle = open(out_file , 'a')\n if strand == 'watson':\n nt = ['C']\n else:\n nt = ['G']\n count = 0\n # print 'Warning, only works for forward mapped reads'\n mismatch = 0\n clip_count_total = 0\n for line in open(in_file, 'r'):\n modulo_line_no = count % 2\n #alternates between 0 and 1\n if line.startswith('@'):\n continue\n split_line = line.rstrip('\\n').split('\\t')\n #skip read pairs with improper flags.\n #TODO: do this filtering in mark_PCR_duplicates or elsewhere with access to pysam.\n if split_line[1] not in ['0', '99', '147']:\n mismatch += 1\n count += 1\n # continue\n char_count = ''\n clip_count = 0\n for char in split_line[5]:\n if not char.isalpha():\n char_count += char\n elif char == 'S':\n clip_count += int(char_count)\n else:\n char_count = ''\n if clip_count > 6:\n clip_count_total += 1\n count += 1\n # continue\n header = split_line[0].split('|')\n #meth_post list can be present for both R1 and R2 the last Samtools tag added should be the RN:Z: tag, look\n #to the right of this tag only\n meth_pos_list = split_line[0][split_line[0].rindex(':Z:'):].split('|')[1:]\n out_line = [header[0]]\n out_line += split_line[1:9]\n seq = list(split_line[9])\n try:\n meth_pos = [int(n) for n in meth_pos_list[-modulo_line_no].split(',')]\n for n in meth_pos:\n if n >= len(seq):\n break\n if seq[n] not in ['T','A']:\n break\n seq[n] = nt[-modulo_line_no]\n except ValueError:\n pass\n out_line += [''.join(seq)]\n out_line += split_line[10:]\n for item in header[1:]:\n if ':' in item and item not in out_line:\n out_line.append(item)\n # out_line += header[3:6]\n out_handle.write('\\t'.join(out_line) + '\\n')\n count += 1\n print('%s mismatches out of %s' % (mismatch, count))\n print('%s reads out of %s soft clipped more than 5' % (clip_count_total, count))", "def save_data (mdp):\n for num,key in zip(mdp.corr_num,mdp.key):\n lnum = find_corr(mdp,int(num)) # get the line number of the correlator, if possible\n if lnum > -1:\n cdat = extract_data(mdp,lnum) # found the correlator, save to array\n try: # write it to file\n ## -- organizing is too slow, just write to end of file\n mdp.save_file.seek(0,2) # seek the end of file\n mdp.save_file.write( key + ' ' + \\\n ' '.join('{:e}'.format(cdat[x]) for x in range(0,mdp.corr_len))+'\\n')\n #lsec = uf.find_data_section(mdp.save_file,key)\n #mdp.save_file.write( key + ' ' + ' '.\\\n #uf.ins_line(mdp.save_file, key + ' ' + ' '.\\\n # join('{:e}'.format(cdat[x]) for x in range(0,mdp.corr_len))\\\n # , lsec[1]+1\\\n # )\n #write_fname(mdp,lsec[0])\n except IndexError:\n print \"-- In file\",mdp.corr_file.name\n print \"Could not extract data from file\"\n else:\n print \"-- In file\",mdp.corr_file.name\n print \"Failed to find correlator #\",num", "def writeHeader( self ):\n for k in self.secondaryTargets.keys():\n fileName = self.treyGene[k] + \"-GenesinCommon.txt\" \n with open( fileName, 'w' ) as out:\n out.write(\"%s\\t%s\\t%s\\n\" %(\"Gene_trey\", \"Gene\", \"Gene_inCommon\" ))\n out.close()", "def merge_evio_skims(run, seqno, slices):\n inset = {\"BCAL-LED\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.BCAL-LED.evio\",\n \"DIRC-LED\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.DIRC-LED.evio\",\n \"FCAL-LED\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.FCAL-LED.evio\",\n \"CCAL-LED\": 
\"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.CCAL-LED.evio\",\n \"random\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.random.evio\",\n \"omega\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.omega.evio\",\n \"sync\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.sync.evio\",\n \"ps\": \"hd_rawdata_{0:06d}_{1:03d}+{2},{3}.ps.evio\",\n }\n outset = {\"BCAL-LED\": \"BCAL-LED_{0:06d}_{1:03d}.evio\",\n \"DIRC-LED\": \"DIRC-LED_{0:06d}_{1:03d}.evio\",\n \"FCAL-LED\": \"FCAL-LED_{0:06d}_{1:03d}.evio\",\n \"CCAL-LED\": \"CCAL-LED_{0:06d}_{1:03d}.evio\",\n \"random\": \"random_{0:06d}_{1:03d}.evio\",\n \"omega\": \"omega_{0:06d}_{1:03d}.evio\",\n \"sync\": \"sync_{0:06d}_{1:03d}.evio\",\n \"ps\": \"ps_{0:06d}_{1:03d}.evio\",\n }\n badslices = []\n slicepatt = re.compile(r\"([1-9][0-9]*),([1-9][0-9]*)/\")\n for iset in inset:\n ofile = outset[iset].format(run, seqno)\n ifiles = []\n for sl in slices:\n ifile = \"{0},{1}/\".format(sl[0], sl[1]) +\\\n inset[iset].format(run, seqno, sl[0], sl[1])\n if iset == \"sync\" and not os.path.exists(ifile):\n print(\"Warning in merge_evio_skims - \",\n \"missing sync event skim \",\n \"in slice {0},{1}\".format(sl[0], sl[1])\n )\n continue\n elif iset == \"omega\" and not os.path.exists(ifile):\n print(\"Warning in merge_evio_skims - \",\n \"missing omega event skim \",\n \"in slice {0},{1}\".format(sl[0], sl[1])\n )\n continue\n ifiles.append(ifile)\n cmd = subprocess.Popen([\"eviocat\", \"-o\", ofile] + ifiles,\n stderr=subprocess.PIPE)\n elog = cmd.communicate()\n if cmd.returncode != 0:\n for eline in elog[1].decode(\"ascii\").split('\\n'):\n badslice = slicepatt.search(eline)\n if badslice:\n badslices.append(\"{0},{1}\".format(badslice.group(1),\n badslice.group(2)))\n sys.stderr.write(eline + '\\n')\n sys.stderr.write(\"Error on output file {0}\".format(ofile) +\n \" - evio file merging failed!\\n\")\n sys.stderr.flush()\n continue\n odir = output_area + \"/\" + iset + \"/{0:06d}\".format(run)\n upload(ofile, odir)\n return badslices", "def paired_interval_extend(uniq_fragment,fragment_cov,gtf_dic):\n out_dic = {}\n total_reads = 0\n for key in uniq_fragment.keys():\n chr_no = key[0]\n #print (frag_start,frag_end)\n frag_strand = key[3]\n interval_comp = uniq_fragment[key][0]\n complete_info = uniq_fragment[key][1]\n frag_cov = fragment_cov[key]\n total_reads += frag_cov\n geneNA = 'NA'\n geneType = 'NA'\n geneRegion = 'NA'\n flag = 0\n for trans in gtf_dic[(chr_no,frag_strand)]:\n frag_start,frag_end = key[1:3]\n # for trans in gtf_dic[('chr1','-')]:\n # if chr_no == 'chr1' and frag_strand == '-':\n if frag_start > trans[0] and frag_end < trans[1]:\n #print 'Hello!'\n # print (trans)\n geneNA = trans[4]\n geneType = trans[5]\n if geneType == 'protein_coding':\n CDS_start,CDS_end = trans[2:4]\n if frag_start >= CDS_start and frag_end <= CDS_end:\n geneRegion = 'CDS'\n elif frag_strand == '+':\n if frag_end <= CDS_start:\n geneRegion = '5UTR'\n elif frag_start < CDS_start and frag_end > CDS_start:\n geneRegion = '5UTR-CDS'\n elif frag_start < CDS_end and frag_end > CDS_end:\n geneRegion = 'CDS-3UTR'\n elif frag_start >= CDS_end:\n geneRegion = '3UTR'\n elif frag_strand == '-':\n if frag_end <= CDS_start:\n geneRegion = '3UTR'\n elif frag_start < CDS_start and frag_end > CDS_start:\n geneRegion = 'CDS-3UTR'\n elif frag_start < CDS_end and frag_end > CDS_end:\n geneRegion = '5UTR-CDS'\n elif frag_start >= CDS_end:\n geneRegion = '5UTR'\n else:\n geneRegion = 'Null'\n # print 
(frag_start,frag_end,CDS_start,CDS_end,geneNA,geneRegion)\n#------------------------------------------------------------------------------ intersect of fragments interval and exons interval\n frag_intersect = interval_comp & trans[-1]\n interval_comp_length = sum([interval_comp[a].upper- interval_comp[a].lower for a in range(0,len(interval_comp))])\n # print (interval_comp)\n # print (frag_intersect)\n#------------------------------------------------------------------------------ fragments located in introns\n if frag_intersect == P.empty(): \n flag = 1\n start_out = []\n length_out = []\n for interval_region in list(interval_comp):\n start_out.append(str(int(interval_region.lower - frag_start)))\n length_out.append(str(int(interval_region.upper - interval_region.lower)))\n out_dic.setdefault((chr_no,frag_start,frag_end,frag_strand),[]).append((chr_no,str(frag_start),str(frag_end),\\\n geneNA,geneType,frag_strand,\\\n str(frag_start),str(frag_end),'intron',str(len(start_out)),\\\n ','.join(length_out),','.join(start_out),str(frag_cov),flag,complete_info))\n else:\n if complete_info == 'complete':\n flag = 3\n #print interval_comp\n#------------------------------------------------------------------------------ reduce alignment noise\n frag_intersect_length = sum([frag_intersect[a].upper-frag_intersect[a].lower for a in range(0,len(frag_intersect))])\n absolute_diff = abs(frag_intersect_length-interval_comp_length)\n if absolute_diff == 0:\n#------------------------------------------------------------------------------ \n start_region = []\n length_region = []\n for region in frag_intersect:\n start_region.append(str(int(region.lower - frag_start)))\n length_region.append(str(int(region.upper - region.lower)))\n out_dic.setdefault((chr_no,frag_start,frag_end,frag_strand),[]).append((chr_no,str(frag_start),str(frag_end),\\\n geneNA,geneType,frag_strand,\\\n str(frag_start),str(frag_end),geneRegion,str(len(start_region)),\\\n ','.join(length_region),','.join(start_region),str(frag_cov),flag,complete_info))\n else:\n start_region = []\n length_region = []\n for region in interval_comp:\n start_region.append(str(int(region.lower - frag_start)))\n length_region.append(str(int(region.upper - region.lower)))\n out_dic.setdefault((chr_no,frag_start,frag_end,frag_strand),[]).append((chr_no,str(frag_start),str(frag_end),geneNA,geneType,\\\n frag_strand,str(frag_start),str(frag_end),'intron-containing',str(len(start_region)),\\\n ','.join(length_region),','.join(start_region),str(frag_cov),flag,complete_info))\n else:\n #print interval_comp\n #print frag_intersect\n#------------------------------------------------------------------------------ fragments boundaries located in exons\n #print frag_intersect[0][0],frag_start,frag_intersect[-1][1],frag_end\n #print abs_position\n # print (P.closedopen(frag_start,frag_end),trans[-1])\n interval_update = P.closedopen(frag_start,frag_end) & trans[-1]\n # print (interval_update)\n frag_trans_length = sum([interval_update[a].upper-interval_update[a].lower for a in range(0,len(interval_update))])\n absolute_diff = abs(frag_trans_length-interval_comp_length)\n #print absolute_diff\n #print geneRegion\n #print interval_comp\n #print abs_position\n if absolute_diff <= 300: #insert sequence length <=200nt\n #print frag_trans_length,interval_comp_length\n #print geneRegion\n flag = 2\n start_out = []\n length_out = []\n for interval_region in list(interval_update):\n start_out.append(str(int(interval_region.lower - frag_start)))\n 
length_out.append(str(int(interval_region.upper - interval_region.lower)))\n out_dic.setdefault((chr_no,frag_start,frag_end,frag_strand),[]).append((chr_no,str(frag_start),str(frag_end),\\\n geneNA,geneType,frag_strand,\\\n str(frag_start),str(frag_end),geneRegion,str(len(start_out)),\\\n ','.join(length_out),','.join(start_out),str(frag_cov),flag,complete_info))\n else:\n # print (trans)\n flag = 1\n start_out = []\n length_out = []\n for interval_region in list(interval_comp):\n start_out.append(str(int(interval_region.lower - frag_start)))\n length_out.append(str(int(interval_region.upper - interval_region.lower)))\n out_dic.setdefault((chr_no,frag_start,frag_end,frag_strand),[]).append((chr_no,str(frag_start),str(frag_end),\\\n geneNA,geneType,frag_strand,\\\n str(frag_start),str(frag_end),'intron-containing',str(len(start_out)),\\\n ','.join(length_out),','.join(start_out),str(frag_cov),flag,complete_info))\n if flag == 0:\n start_out = []\n length_out = []\n for interval_region in list(interval_comp):\n start_out.append(str(int(interval_region.lower - frag_start)))\n length_out.append(str(int(interval_region.upper - interval_region.lower)))\n out_dic[(chr_no,frag_start,frag_end,frag_strand)] = [(chr_no,str(frag_start),str(frag_end),'intergenic','intergenic',frag_strand,\\\n str(frag_start),str(frag_end),geneRegion,str(len(start_out)),\\\n ','.join(length_out),','.join(start_out),str(frag_cov),flag,complete_info)]\n print ('Total treated fragments: ' + str(total_reads))\n return out_dic", "def readMappedData(options,phase):\n whole_mapped_data={}\n mapped_data_per_size_per_register={}\n alignment_filename=options.output_directory+\"/\"+options.input_filename+\"_bowtie1.bwt\"\n fhr=open(alignment_filename,\"r\")\n for line in fhr:\n try:\n read_id, strand, chromosome, coordinate, sequence, quality, mapped_times = line.strip().split()\n except ValueError:\n print(line)\n continue\n try:\n coordinate=int(coordinate)\n mapped_times=int(mapped_times)+1\n length=len(sequence)\n except ValueError:\n print(line)\n continue\n if strand==\"-\":\n coordinate+=2\n if chromosome not in whole_mapped_data:\n whole_mapped_data[chromosome]={}\n if coordinate not in whole_mapped_data[chromosome]: \n whole_mapped_data[chromosome][coordinate]=0\n whole_mapped_data[chromosome][coordinate]+=1\n \n if phase!=length:\n continue\n if chromosome not in mapped_data_per_size_per_register:\n mapped_data_per_size_per_register[chromosome]={}\n register=coordinate % length\n if register not in mapped_data_per_size_per_register[chromosome]:\n mapped_data_per_size_per_register[chromosome][register]={}\n if coordinate not in mapped_data_per_size_per_register[chromosome][register]:\n mapped_data_per_size_per_register[chromosome][register][coordinate]=0\n mapped_data_per_size_per_register[chromosome][register][coordinate]+=1\n if mapped_data_per_size_per_register[chromosome][register][coordinate]>2:\n print(\"Trouble with alignments\",length,chromosome,register,coordinate)\n \n return whole_mapped_data,mapped_data_per_size_per_register", "def _read_bu_wig(self, handle, sequence_name):\n self.chromosomes[sequence_name] = () \n for line in handle:\n position, total, reverse, forward = line.rstrip(\"\\r\\n\").split()\n self.coverage[(sequence_name, position)] = (int(reverse), int(forward))", "def print_results(\n ID, IndelSNPSeq, OGSeq\n ):\n \n # make sure sequences are divisible by three\n if len(OGSeq) % 3 != 0:\n # determine the number of Ns (remainder) to add to create a codon length gene\n remainder=3-(len(OGSeq)%3)\n 
# add Ns to end of Seq\n OGSeq=OGSeq+(remainder*'N')\n if len(IndelSNPSeq) % 3 != 0:\n # determine the number of Ns (remainder) to add to create a codon length gene\n remainder=3-(len(IndelSNPSeq)%3)\n # add Ns to end of Seq\n IndelSNPSeq=IndelSNPSeq+(remainder*'N')\n\n # make OGSeq and IndelSNPSeq the same length\n OGlen = len(OGSeq)\n IndelSNPSeqLen = len(IndelSNPSeq)\n if OGlen > IndelSNPSeqLen:\n difference=OGlen-IndelSNPSeqLen\n IndelSNPSeq=IndelSNPSeq+(difference*'N')\n elif OGlen < IndelSNPSeqLen:\n difference=IndelSNPSeqLen-OGlen\n OGSeq=OGSeq+(difference*'N')\n\n # reassign seqs with the amino acid sequence\n OGSeq = OGSeq.translate()\n IndelSNPSeq = IndelSNPSeq.translate()\n \n # result lists\n res_arr = []\n res_arr.append([\"Pos\", \"Old\", \"New\"])\n # summarize differences between the two AA strings\n for AA in range(0, (int(len(OGSeq))), 1):\n SeqAA = OGSeq[AA:AA+1]\n IndelSNPSeqAA = IndelSNPSeq[AA:AA+1]\n #if strings are not equal\n if SeqAA != IndelSNPSeqAA:\n temp_arr = []\n temp_arr.append(AA+1)\n temp_arr.append(SeqAA[0])\n temp_arr.append(IndelSNPSeqAA[0])\n res_arr.append(temp_arr)\n\n # check if res_arr has no new additions and exit if so\n if res_arr==[[\"Pos\", \"Old\", \"New\"]]:\n sys.exit()\n\n # convert to numpy array\n res_arr = np.asarray(res_arr)\n\n # print old and new sequence\n print(\">\"+ID+\"|old\"+\"\\n\"+OGSeq)\n print(\">\"+ID+\"|new\"+\"\\n\"+IndelSNPSeq+\"\\n\")\n\n # print np array line by line tab delimited and show differences\n # between the old and new sequence\n for c0, c1, c2 in res_arr:\n print(\"{}\\t{}\\t{}\".format(c0, c1, c2))\n\n # determine percentage of sequence that differences from original sequence\n percentage=float((len(OGSeq)/(len(res_arr)-1))*100)\n print(\"\\n\"+str(len(res_arr)-1)+\"/\"+str(len(OGSeq)), \\\n \"\\nnucleotide differences between old and new sequence\")", "def parse_geno_file(folder,return_flag):\n\n perc_alt = defaultdict(list)\n perc_ref = defaultdict(list)\n abs_alt = defaultdict(list)\n abs_ref = defaultdict(list)\n\n perc_alt_inv = defaultdict(dict)\n perc_ref_inv = defaultdict(dict)\n abs_alt_inv = defaultdict(dict)\n abs_ref_inv = defaultdict(dict)\n\n for geno_file in glob.glob(folder+'*_test_summary.tsv'):\n strain = geno_file.split('/')[-1].split('_')[0]\n #print strain\n prev_coordinate = \"0\"\n count = 0\n alt_allele = {}\n amb_allele = {}\n ref_allele = {}\n flag = 0 \n\n TEMP_HANDLE = open(geno_file,'r')\n for line in TEMP_HANDLE:\n line = line.rstrip('\\n')\n\n if(line[0]!='v'): ## Skip the header\n coordinate = line.split('\\t')[0].split('::')[-1]\n if(coordinate != prev_coordinate):\n #prev_coordinate = coordinate\n count = count + 1\n if(count == 1):\n if(line.split('\\t')[-3]!='alt'): ## No reads supporting the alternate allele\n flag = 1 \n alt_allele[coordinate] = 0\n amb_allele[coordinate] = int(line.split('\\t')[-1])\n #print line\n else:\n alt_allele[coordinate] = int(line.split('\\t')[-1])\n if(count == 2):\n amb_allele[coordinate] = int(line.split('\\t')[-1])\n if(count == 3):\n if(line.split('\\t')[-3]!='ref'): ## No reads supporting the reference allele (all are ambiguous)\n ref_allele[coordinate] = 0\n else:\n ref_allele[coordinate] = int(line.split('\\t')[-1])\n prev_coordinate = coordinate\n count = 0\n if(flag == 1): ## The case where there are no alternate allele reads, counter is incremented to account for changed numbering\n count = count + 1 \n flag = 0 \n\n \n for key in alt_allele:\n if(alt_allele[key]+ref_allele[key]!= 0): ## Check to see if the denominator is not 
zero\n abs_alt[strain].append(float(alt_allele[key]))\n abs_ref[strain].append(float(ref_allele[key]))\n perc_alt[strain].append(float(alt_allele[key])/(alt_allele[key]+ref_allele[key]))\n perc_ref[strain].append(float(ref_allele[key])/(alt_allele[key]+ref_allele[key]))\n\n\n abs_alt_inv[strain][key] = float(alt_allele[key])\n abs_ref_inv[strain][key] = float(ref_allele[key])\n perc_alt_inv[strain][key] = float(alt_allele[key])/(alt_allele[key]+ref_allele[key])\n perc_ref_inv[strain][key] = float(ref_allele[key])/(alt_allele[key]+ref_allele[key])\n \n \n\n ## Keep only the common inversions, i.e. those between MC and the rest \n all_inversions = []\n common_inversions = []\n abs_alt_set = defaultdict(list)\n perc_alt_set = defaultdict(list)\n\n abs_alt_inv_set = defaultdict(dict)\n perc_alt_inv_set = defaultdict(dict)\n abs_ref_inv_set = defaultdict(dict)\n perc_ref_inv_set = defaultdict(dict)\n\n Rock = ['AC', 'CL','CM','CN','TI','PN','MC']\n Sand = ['MZ','DC','LF','MP','MS','CV']\n\n\n sand_inversions = []\n rock_inversions = []\n\n for strain in abs_alt_inv.keys():\n for inversion in abs_alt_inv[strain].keys():\n if(strain in Rock):\n rock_inversions.append(inversion)\n else:\n sand_inversions.append(inversion)\n all_inversions.append(inversion)\n \n \n common_inversions_sand = Counter(sand_inversions)\n common_inversions_rock = Counter(rock_inversions)\n #count_sand = 0\n common_inversions = Counter(all_inversions)\n return_inversions = []\n \n \n #print common_inversions\n for inversion in common_inversions.keys():\n if(common_inversions[inversion]==13):\n return_inversions.append(inversion)\n for strain in abs_alt_inv.keys():\n abs_alt_set[strain].append(abs_alt_inv[strain][inversion])\n perc_alt_set[strain].append(perc_alt_inv[strain][inversion])\n\n abs_alt_inv_set[strain][inversion] = abs_alt_inv[strain][inversion]\n perc_alt_inv_set[strain][inversion] = perc_alt_inv[strain][inversion]\n abs_ref_inv_set[strain][inversion] = abs_ref_inv[strain][inversion]\n perc_ref_inv_set[strain][inversion] = perc_ref_inv[strain][inversion]\n\n\n for inversion in abs_alt_inv_set['MC']:\n alternate_allele_sum_rock = 0\n reference_allele_sum_rock = 0\n alternate_allele_sum_sand = 0\n reference_allele_sum_sand = 0 \n for strain in Rock:\n alternate_allele_sum_rock = alternate_allele_sum_rock + abs_alt_inv_set[strain][inversion]\n reference_allele_sum_rock = reference_allele_sum_rock + abs_ref_inv_set[strain][inversion]\n\n for strain in Sand:\n alternate_allele_sum_sand = alternate_allele_sum_sand + abs_alt_inv_set[strain][inversion]\n reference_allele_sum_sand = reference_allele_sum_sand + abs_ref_inv_set[strain][inversion]\n\n abs_alt_set['Rock'].append(alternate_allele_sum_rock)\n perc_alt_set['Rock'].append(float((alternate_allele_sum_rock)/(alternate_allele_sum_rock + reference_allele_sum_rock)))\n \n abs_alt_set['Sand'].append(alternate_allele_sum_sand)\n perc_alt_set['Sand'].append(float((alternate_allele_sum_sand)/(alternate_allele_sum_sand + reference_allele_sum_sand)))\n \n with open('log_file.txt','a') as LOG_FILE:\n if(float((alternate_allele_sum_rock)/(alternate_allele_sum_rock + reference_allele_sum_rock))>float(sys.argv[2]) or float((alternate_allele_sum_sand)/(alternate_allele_sum_sand + reference_allele_sum_sand))>float(sys.argv[2])):\n print >> LOG_FILE,inversion \n \n\n print \"Sand : \"+str(count_sand)\n\n if return_flag == True:\n #print len([abs_alt_inv_set,abs_ref_inv_set,perc_alt_inv_set,perc_ref_inv_set])\n return perc_alt_inv_set\n else:\n return 
[abs_alt_set,perc_alt_set]", "def write_pos_seqs(infilepath, dbdirpath, outfilepath, prot_name=None):\n with open(infilepath) as i, open(outfilepath, 'w') as o:\n acc_dict = {}\n last_db_name = None\n for line in i:\n split_line = line.split(',')\n db_name = split_line[2]\n acc = split_line[4]\n acc_list = [split_line[4]]\n if db_name != last_db_name:\n acc_dict[db_name] = [acc]\n elif db_name == last_db_name:\n acc_dict[db_name].append(acc)\n last_db_name = db_name\n \n for key in acc_dict.keys():\n o.write(get_fas_from_db_dir(key, acc_dict[key], dbdirpath,\n prot_name))", "def temp_generate_position_label_data_matrix_All_label():\n temp_position_label = OrderedDict()\n with open(\"%s/temp_label_final_raw.txt\" % args.filter2_only_snp_vcf_dir, 'rU') as csv_file:\n print \"Reading temporary label positions file: %s/temp_label_final_raw.txt \\n\" % args.filter2_only_snp_vcf_dir\n csv_reader = csv.reader(csv_file, delimiter='\\t')\n next(csv_reader, None)\n for row in csv_reader:\n temp_position_label[row[0]] = row[1:]\n f33=open(\"%s/temp_Only_filtered_positions_for_closely_matrix.txt\" % args.filter2_only_snp_vcf_dir, 'w+')\n print_string_header = \"\\t\"\n for i in vcf_filenames:\n print_string_header = print_string_header + os.path.basename(i) + \"\\t\"\n f33.write('\\t' + print_string_header.strip() + '\\n')\n for value in temp_position_label:\n lll = ['reference_unmapped_position', 'LowFQ', 'LowFQ_DP', 'LowFQ_QUAL', 'LowFQ_DP_QUAL', 'LowFQ_QUAL_DP', 'HighFQ_DP', 'HighFQ_QUAL', 'HighFQ_DP_QUAL', 'HighFQ_QUAL_DP', 'HighFQ', 'LowFQ_proximate_SNP', 'LowFQ_DP_proximate_SNP', 'LowFQ_QUAL_proximate_SNP', 'LowFQ_DP_QUAL_proximate_SNP', 'LowFQ_QUAL_DP_proximate_SNP', 'HighFQ_DP_proximate_SNP', 'HighFQ_QUAL_proximate_SNP', 'HighFQ_DP_QUAL_proximate_SNP', 'HighFQ_QUAL_DP_proximate_SNP', 'HighFQ_proximate_SNP', '_proximate_SNP']\n ref_var = ['reference_allele', 'VARIANT']\n if set(ref_var) & set(temp_position_label[value]):\n if set(lll) & set(temp_position_label[value]):\n print_string = \"\"\n for i in temp_position_label[value]:\n print_string = print_string + \"\\t\" + i\n STRR2 = value + print_string + \"\\n\"\n f33.write(STRR2)\n f33.close()\n csv_file.close()\n\n \"\"\"\n Read temp_Only_filtered_positions_for_closely_matrix file and generate a matrix of positions that are being filtered just because of FQ\n \"\"\"\n temp_position_label_FQ = OrderedDict()\n with open(\"%s/temp_Only_filtered_positions_for_closely_matrix.txt\" % args.filter2_only_snp_vcf_dir, 'rU') as csv_file:\n print \"Reading temporary Only_filtered_positions label file: %s/temp_Only_filtered_positions_for_closely_matrix.txt \\n\" % args.filter2_only_snp_vcf_dir\n csv_reader = csv.reader(csv_file, delimiter='\\t')\n\n next(csv_reader, None)\n for row in csv_reader:\n temp_position_label_FQ[row[0]] = row[1:]\n f44=open(\"%s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir, 'w+')\n print_string_header = \"\\t\"\n for i in vcf_filenames:\n print_string_header = print_string_header + os.path.basename(i) + \"\\t\"\n f44.write('\\t' + print_string_header.strip() + '\\n')\n for value in temp_position_label_FQ:\n #lll = ['reference_unmapped_position', 'LowFQ', 'LowFQ_DP', 'LowFQ_QUAL', 'LowFQ_DP_QUAL', 'LowFQ_QUAL_DP', 'HighFQ_DP', 'HighFQ_QUAL', 'HighFQ_DP_QUAL', 'HighFQ_QUAL_DP', 'HighFQ', 'LowFQ_proximate_SNP', 'LowFQ_DP_proximate_SNP', 'LowFQ_QUAL_proximate_SNP', 'LowFQ_DP_QUAL_proximate_SNP', 'LowFQ_QUAL_DP_proximate_SNP', 'HighFQ_DP_proximate_SNP', 'HighFQ_QUAL_proximate_SNP', 
'HighFQ_DP_QUAL_proximate_SNP', 'HighFQ_QUAL_DP_proximate_SNP', 'HighFQ_proximate_SNP', '_proximate_SNP']\n lll = ['LowFQ']\n #ref_var = ['reference_allele', 'VARIANT']\n if set(lll) & set(temp_position_label_FQ[value]):\n print_string = \"\"\n for i in temp_position_label_FQ[value]:\n print_string = print_string + \"\\t\" + i\n STRR2 = value + print_string + \"\\n\"\n f44.write(STRR2)\n f44.close()\n csv_file.close()\n\n ## Perform Sed\n subprocess.call([\"sed -i 's/_filter2_final.vcf_no_proximate_snp.vcf//g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/reference_unmapped_position/0/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/reference_allele/1/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/VARIANT/2/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n 
subprocess.call([\"sed -i 's/HighFQ_DP_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ/3/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_FQ.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n \"\"\"\n Read temp_Only_filtered_positions_for_closely_matrix file and generate a matrix of positions that are being filtered just because of Dp\n \"\"\"\n temp_position_label_DP = OrderedDict()\n with open(\"%s/temp_Only_filtered_positions_for_closely_matrix.txt\" % args.filter2_only_snp_vcf_dir, 'rU') as csv_file:\n print \"Reading temporary Only_filtered_positions label file: %s/temp_Only_filtered_positions_for_closely_matrix.txt \\n\" % args.filter2_only_snp_vcf_dir\n csv_reader = csv.reader(csv_file, delimiter='\\t')\n next(csv_reader, None)\n for row in csv_reader:\n temp_position_label_DP[row[0]] = row[1:]\n f44=open(\"%s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir, 'w+')\n print_string_header = \"\\t\"\n for i in vcf_filenames:\n print_string_header = print_string_header + os.path.basename(i) + \"\\t\"\n f44.write('\\t' + print_string_header.strip() + '\\n')\n for value in temp_position_label_DP:\n #lll = ['reference_unmapped_position', 'LowFQ', 'LowFQ_DP', 'LowFQ_QUAL', 'LowFQ_DP_QUAL', 'LowFQ_QUAL_DP', 'HighFQ_DP', 'HighFQ_QUAL', 'HighFQ_DP_QUAL', 'HighFQ_QUAL_DP', 'HighFQ', 'LowFQ_proximate_SNP', 'LowFQ_DP_proximate_SNP', 'LowFQ_QUAL_proximate_SNP', 'LowFQ_DP_QUAL_proximate_SNP', 'LowFQ_QUAL_DP_proximate_SNP', 'HighFQ_DP_proximate_SNP', 'HighFQ_QUAL_proximate_SNP', 'HighFQ_DP_QUAL_proximate_SNP', 'HighFQ_QUAL_DP_proximate_SNP', 'HighFQ_proximate_SNP', '_proximate_SNP']\n lll = ['HighFQ_DP']\n #ref_var = ['reference_allele', 'VARIANT']\n if set(lll) & set(temp_position_label_FQ[value]):\n print_string = \"\"\n for i in temp_position_label_FQ[value]:\n print_string = print_string + \"\\t\" + i\n STRR2 = value + print_string + \"\\n\"\n f44.write(STRR2)\n f44.close()\n csv_file.close()\n\n\n #Perform Sed\n subprocess.call([\"sed -i 's/_filter2_final.vcf_no_proximate_snp.vcf//g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/reference_unmapped_position/0/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/reference_allele/1/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/VARIANT/2/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n 
subprocess.call([\"sed -i 's/LowFQ_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_proximate_SNP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL_DP/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_QUAL/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ_DP/3/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/LowFQ/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n subprocess.call([\"sed -i 's/HighFQ/4/g' %s/temp_Only_filtered_positions_for_closely_matrix_DP.txt\" % args.filter2_only_snp_vcf_dir], shell=True)\n\n\n \"\"\"\n Read each Sample columns and calculate the percentage of each label to generate barplot statistics.\n This will give a visual explanation of how many positions in each samples were filtered out because of different reason\n \"\"\"\n\n c_reader = csv.reader(open('%s/temp_Only_filtered_positions_for_closely_matrix.txt' % args.filter2_only_snp_vcf_dir, 'r'), delimiter='\\t')\n columns = list(zip(*c_reader))\n counts = 1\n end = len(vcf_filenames) + 1\n f_bar_count = open(\"%s/bargraph_counts.txt\" % args.filter2_only_snp_vcf_dir, 'w+')\n f_bar_perc = open(\"%s/bargraph_percentage.txt\" % args.filter2_only_snp_vcf_dir, 'w+')\n 
f_bar_count.write(\"Sample\\tunmapped_positions\\treference_allele\\ttrue_variant\\tOnly_low_FQ\\tOnly_DP\\tOnly_low_MQ\\tother\\n\")\n f_bar_perc.write(\"Sample\\tunmapped_positions_perc\\ttrue_variant_perc\\tOnly_low_FQ_perc\\tOnly_DP_perc\\tOnly_low_MQ_perc\\tother_perc\\n\")\n for i in xrange(1, end, 1):\n \"\"\" Bar Count Statistics: Variant Position Count Statistics \"\"\"\n true_variant = columns[i].count('VARIANT')\n unmapped_positions = columns[i].count('reference_unmapped_position')\n reference_allele = columns[i].count('reference_allele')\n Only_low_FQ = columns[i].count('LowFQ')\n Only_DP = columns[i].count('HighFQ_DP')\n Only_low_MQ = columns[i].count('HighFQ')\n low_FQ_other_parameters = columns[i].count('LowFQ_QUAL_DP_proximate_SNP') + columns[i].count('LowFQ_DP_QUAL_proximate_SNP') + columns[i].count('LowFQ_QUAL_proximate_SNP') + columns[i].count('LowFQ_DP_proximate_SNP') + columns[i].count('LowFQ_proximate_SNP') + columns[i].count('LowFQ_QUAL_DP') + columns[i].count('LowFQ_DP_QUAL') + columns[i].count('LowFQ_QUAL') + columns[i].count('LowFQ_DP')\n high_FQ_other_parameters = columns[i].count('HighFQ_QUAL_DP_proximate_SNP') + columns[i].count('HighFQ_DP_QUAL_proximate_SNP') + columns[i].count('HighFQ_QUAL_proximate_SNP') + columns[i].count('HighFQ_DP_proximate_SNP') + columns[i].count('HighFQ_proximate_SNP') + columns[i].count('HighFQ_QUAL_DP') + columns[i].count('HighFQ_DP_QUAL') + columns[i].count('HighFQ_QUAL')\n other = low_FQ_other_parameters + high_FQ_other_parameters\n total = true_variant + unmapped_positions + reference_allele + Only_low_FQ + Only_DP + low_FQ_other_parameters + high_FQ_other_parameters + Only_low_MQ\n filename_count = i - 1\n bar_string = \"%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n\" % (os.path.basename(vcf_filenames[filename_count].replace('_filter2_final.vcf_no_proximate_snp.vcf', '')), unmapped_positions, reference_allele, true_variant, Only_low_FQ, Only_DP, Only_low_MQ, other)\n f_bar_count.write(bar_string)\n\n \"\"\" Bar Count Percentage Statistics: Variant Position Percentage Statistics \"\"\"\n try:\n true_variant_perc = float((columns[i].count('VARIANT') * 100) / total)\n except ZeroDivisionError:\n true_variant_perc = 0\n try:\n unmapped_positions_perc = float((columns[i].count('reference_unmapped_position') * 100) / total)\n except ZeroDivisionError:\n unmapped_positions_perc = 0\n try:\n reference_allele_perc = float((columns[i].count('reference_allele') * 100) / total)\n except ZeroDivisionError:\n reference_allele_perc = 0\n try:\n Only_low_FQ_perc = float((columns[i].count('LowFQ') * 100) / total)\n except ZeroDivisionError:\n Only_low_FQ_perc = 0\n try:\n Only_DP_perc = float((columns[i].count('HighFQ_DP') * 100) / total)\n except ZeroDivisionError:\n Only_DP_perc = 0\n try:\n Only_low_MQ_perc = float((columns[i].count('HighFQ') * 100) / total)\n except ZeroDivisionError:\n Only_low_MQ_perc = 0\n try:\n low_FQ_other_parameters_perc = float(((columns[i].count('LowFQ_QUAL_DP_proximate_SNP') + columns[i].count('LowFQ_DP_QUAL_proximate_SNP') + columns[i].count('LowFQ_QUAL_proximate_SNP') + columns[i].count('LowFQ_DP_proximate_SNP') + columns[i].count('LowFQ_proximate_SNP') + columns[i].count('LowFQ_QUAL_DP') + columns[i].count('LowFQ_DP_QUAL') + columns[i].count('LowFQ_QUAL') + columns[i].count('LowFQ_DP')) * 100) / total)\n except ZeroDivisionError:\n low_FQ_other_parameters_perc = 0\n try:\n high_FQ_other_parameters_perc = float(((columns[i].count('HighFQ_QUAL_DP_proximate_SNP') + columns[i].count('HighFQ_DP_QUAL_proximate_SNP') + 
columns[i].count('HighFQ_QUAL_proximate_SNP') + columns[i].count('HighFQ_DP_proximate_SNP') + columns[i].count('HighFQ_proximate_SNP') + columns[i].count('HighFQ_QUAL_DP') + columns[i].count('HighFQ_DP_QUAL') + columns[i].count('HighFQ_QUAL')) * 100) / total)\n except ZeroDivisionError:\n high_FQ_other_parameters_perc = 0\n\n other_perc = float(low_FQ_other_parameters_perc + high_FQ_other_parameters_perc)\n bar_perc_string = \"%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n\" % (os.path.basename(vcf_filenames[filename_count].replace('_filter2_final.vcf_no_proximate_snp.vcf', '')), unmapped_positions_perc, true_variant_perc, Only_low_FQ_perc, Only_DP_perc, Only_low_MQ_perc, other_perc)\n f_bar_perc.write(bar_perc_string)", "def main(inCL=None):\n headList = [] # Stores header of coronavirus sequences in fasta file\n orfList = [] # Stores sequences containing ORFs of coronavirus sequences in fasta file\n validNucs = ['A', 'C', 'G', 'T']\n myReader = FastAreader('Combined-ALL-SARS-CoV.fasta') \n for head, seq in myReader.readFasta(): # Using fastAreader to read in .fasta files\n headList.append(head)\n for i in seq:\n if i not in validNucs: # Removing non-valid bases\n seq = seq.replace(i,\"\") \n orf = OrfFinder(seq, 300, True) # Includes the largest ORF greater than 300 nucleotides within a stop codon\n geneOrfList = orf.getOrfs()\n geneSeq = [] # Stores ORF sequences\n for openFrame in geneOrfList:\n geneSeq.append(seq[openFrame[1]-1:openFrame[2]-1])\n orfList.append(geneSeq)\n # Calls methods to create SeqRecords and then .py file to print gene trees\n myPhylo = GeneTree() \n for i in range(0,4,1): # Loops to print the first four gene trees of every sequence\n records = myPhylo.geneSpecificRecord(orfList, headList, i) # Creates list of SeqRecords that represent a sequence\n # alignments = myPhylo.fastaToPhylip(records) # Makes a .phy file using a .fasta file\n print(\"GENE \" + str(i+1) + \":\")\n # printTree = myPhylo.printGeneTree() # Prints Gene Trees\n x = 0\n print('\\n\\n============================================ K E Y ============================================\\n')\n for header in headList: # Loops through headers to print key\n header = header.split(',')\n header = header[0]\n print(\"{} = {}\" .format(x, header)) # Prints each line containing the header\n x += 1", "def insert_row_info( self, ncbi_code, row_info, row_annot, row_annot_match_col ):\n\t\tif row_annot == None:\n\t\t\tprint \"Row annotation file not suppled as 'row_annot' parameter. 
Attempting automated download from MicrobesOnline\"\n\t\t\tif ncbi_code != None:\n\t\t\t\tprint \"Downloading gene information for NCBI taxonomy ID:\", ncbi_code\n\t\t\t \turl = \"http://www.microbesonline.org/cgi-bin/genomeInfo.cgi?tId=\"+ncbi_code+\";export=tab\"\n\t\t\t\tsave_name=ncbi_code+\"_geneInfo.tab\"\n\t\t\t\tself.dlfile(url,save_name)\n\t\t\t\t\n\t\t\t\twith open( save_name, 'r' ) as f:\n\t\t\t\t\trow_annot = pd.read_csv( f, sep=\"\\t\" )\n\t\t\t \tleft_on=\"egrin2_row_name\"\n\t\t\t \tif row_annot_match_col == None:\n\t\t\t \t\trow_annot_match_col=\"sysName\"\n\t\t\t \t# join with row_annot\n\t\t\t \trow_table = pd.merge( row_info, row_annot, left_on=left_on, right_on=row_annot_match_col )\t\n\t\t \telse:\n\t\t \t\tprint \"WARNING: could not fetch additional gene information for NCBI taxonomy ID:\", ncbi_code\n\t\t\t \t# TODO:\n\t\t\t \t# Check whether documents are already present in the collection before insertion\n\t\t\t \t# In case where they are present, update them\n\t\t\t \trow_table = row_info\n\t \telse:\n\t\t\trow_annot = pd.read_csv( open( row_annot, 'rb' ), sep=\"\\t\" )\t\n\t\t\t# join with row_annot\n\t\t \trow_table = pd.merge( row_info, row_annot, left_on=left_on, right_on=row_annot_match_col )\t\n\n\t\t# write to mongoDB collection \n\t\trow_info_collection = self.db.row_info\n\t\t# Check whether documents are already present in the collection before insertion\n\t\td = row_table.to_dict( 'records' )\n\n\t\tif row_info_collection.count() > 0:\n\t\t\td_f = filter( None, [ self.check4existence( row_info_collection, i ) for i in d ] )\n\t\telse:\n\t\t\td_f = d\n\n\t\tprint \"%s new records to write\" % len( d_f )\n\n\t\tif len(d_f) > 0:\n\t\t\trow_info_collection.insert( d_f )\n\n\t\treturn row_info_collection", "def exon_desc(gff3, fasta):\n seqs = {}\n for defline, seq in LocusPocus.fasta.parse(fasta):\n exonpos = defline[1:].split(' ')[1]\n seqs[exonpos] = seq\n\n rnaid_to_accession = dict()\n reported_exons = {}\n exons, cdss = [], {}\n start, stop = None, None\n moltypes = ['mRNA', 'tRNA', 'ncRNA', 'transcript', 'primary_transcript',\n 'V_gene_segment', 'D_gene_segment', 'J_gene_segment',\n 'C_gene_segment']\n for entry in gff3:\n for moltype in moltypes:\n if ('\\t%s\\t' % moltype) in entry:\n accession = re.search(r'accession=([^;\\n]+)', entry).group(1)\n tid = re.search(r'ID=([^;\\n]+)', entry).group(1)\n rnaid_to_accession[tid] = accession\n\n if '\\texon\\t' in entry:\n exons.append(entry)\n elif '\\tCDS\\t' in entry:\n fields = entry.split('\\t')\n pos = '%s_%s-%s%s' % (fields[0], fields[3], fields[4], fields[6])\n cdss[pos] = entry\n elif '\\tstart_codon\\t' in entry:\n start = entry\n elif '\\tstop_codon\\t' in entry:\n stop = entry\n elif entry.startswith('###'):\n if len(exons) == 0:\n continue\n xcept = False\n for exonpos in cdss:\n if ';exception=ribosomal slippage' in cdss[exonpos]:\n xcept = True\n if xcept:\n exons, cdss = [], {}\n start, stop = None, None\n continue\n assert start, 'No start codon for exon(s): %s' % exons[0]\n assert stop, 'No stop codon for exon(s): %s' % exons[0]\n for exon in exons:\n fields = exon.split('\\t')\n assert len(\n fields) == 9, 'entry does not have 9 fields: %s' % exon\n mrnaid = re.search(r'Parent=([^;\\n]+)', fields[8]).group(1)\n exonpos = '%s_%s-%s%s' % (fields[0],\n fields[3], fields[4], fields[6])\n if exonpos in reported_exons:\n continue\n exonlength = int(fields[4]) - int(fields[3]) + 1\n exonseq = seqs[exonpos]\n assert len(exonseq) == exonlength, \\\n 'exon \"%s\": length mismatch; gff=%d, fa=%d' % 
(\n exonpos, exonlength, len(exonseq))\n gccontent = gc_content(exonseq)\n gcskew = gc_skew(exonseq)\n ncontent = n_content(exonseq)\n context = exon_context(exon, start, stop)\n phase = None\n remainder = None\n if context == 'cds':\n cexon = cdss[exonpos]\n phase = int(cexon.split('\\t')[7])\n remainder = (exonlength - phase) % 3\n values = '%s %s %d %.3f %.3f %.3f %s %r %r' % (\n exonpos, rnaid_to_accession[mrnaid], exonlength, gccontent,\n gcskew, ncontent, context, phase, remainder)\n reported_exons[exonpos] = 1\n yield values.split(' ')\n exons, cdss = [], {}\n start, stop = None, None", "def fetchRefSeqIntervalsIndexed(genome='hg18',proteinCodingOnly=False,verbose=False):\n cursor=gbdbConnect(gbdbname=genome)\n select=\"SELECT * FROM refGene\"\n if verbose:\n sys.stderr.write(\"Fetching RefSeq Sequences...\\n\")\n cursor.execute(select)\n rows=cursor.fetchall()\n output={}\n for chr in genomelib.chr_names:\n output[chr]={}\n output[chr]['+']=[]\n output[chr]['-']=[]\n if verbose:\n sys.stderr.write(\"Creating index by chr and strand...\\n\")\n \n for row in rows:\n if proteinCodingOnly and not row['name'].startswith('NM'):\n continue\n try:\n exonStarts = map(int,row['exonStarts'].rstrip().split(\",\")[:-1])\n exonEnds = map(int,row['exonEnds'].rstrip().split(\",\")[:-1])\n except:\n print \"\\t\".join([\"%s:%s\" % (k,v) for k,v in row.iteritems()])\n start = int(row['txStart'])\n exonOffsets = [x-start for x in exonStarts]\n exonLengths = []\n for i in xrange(len(exonStarts)):\n exonLengths.append(exonEnds[i]-exonStarts[i]+1)\n if row['chrom'] in genomelib.chr_names:\n output[row['chrom']][row['strand']].append(intervallib.SplicedInterval(row['chrom'],row['txStart'],row['txEnd'],row['strand'],\",\".join([str(x) for x in exonLengths]),\",\".join([str(x) for x in exonOffsets]),name=row['name2']))\n \n #Sort \n if verbose:\n sys.stderr.write(\"Sorting:\\n\")\n tstart = time.time()\n for key in output.keys():\n if verbose:\n sys.stderr.write(\"\\t%s\\t\" % key)\n output[key]['+'].sort()\n output[key]['-'].sort()\n tend = time.time()\n if verbose:\n sys.stderr.write('%0.2f sec\\n' % (tend-tstart))\n tstart = time.time()\n return output", "def tx_exon_aln_v_data():\n return [\"BRAF\", \"NM_004333.4\", 1802, 1921, \"NC_000007.13\", 140453074,\n 140453193, -1, \"splign\", 780494, 1927263]", "def getReadSamFile(read_file,rnameList):\n size = len(rnameList)\n prev = 0\n ends = range(0, size, 20)\n ends += [size]\n ends.pop(0)\n \n \n \n for i in ends:\n chrs = rnameList[prev:i]\n f = []\n ch_p = ''\n jj = 0\n for j in range(0,i-prev):\n samfile = os.path.join(working_dir, 'MappedRead.'+chrs[j]+'.sam')\n log.info('Generating ' + samfile)\n f.append(open(samfile, \"w\"))\n for line in open(read_file, \"r\"):\n \n itemList = line[:-1].split('\\t')\n \n if len(itemList) < 11:\n continue\n #print itemList\n if itemList[0][0:1] == '@':\n continue\n line_ch = itemList[2]\n if line_ch == '*':\n continue\n if int(itemList[1]) & 0b100 != 0:\n continue\n \n if ch_p != line_ch:\n for j in range(0,i-prev):\n if chrs[j] == line_ch:\n f[j].write(line)\n jj = j\n ch_p = line_ch\n continue\n #end for j in range(0,i-prev):\n elif ch_p == line_ch:\n f[jj].write(line)\n '''\n for j in range(0,i-prev):\n if chrs[j] == line_ch:\n f[j].write(line)\n continue\n '''\n for fp in f:\n fp.close()\n prev = i", "def merge_in(self, other, convert_to_string=True):\n assert isinstance(other, ExtendedAlignment)\n #_LOG.debug(\"Merging started ...\")\n if other.is_empty():\n return\n me = 0\n she = 0 # Assumption: alignments 
are female!\n me_len = self.get_length() if not self.is_empty() else 0\n she_len = other.get_length()\n insertion = -1\n\n merged_insertion_columns = 0\n\n ''' Add sequences from her to my alignment '''\n for f in other.fragments:\n self.fragments.add(f)\n if convert_to_string:\n self.from_string_to_bytearray()\n\n selfother = {}\n for k, v in other.items():\n # assert(k not in self,\n # \"Merging overlapping alignments not implemented\")\n if k not in self:\n selfother[k] = bytearray(v, encoding=\"utf8\")\n while True:\n ''' Check exit conditions'''\n if me == me_len and she == she_len:\n break\n\n ''' Check the 5 possible statuses between she and I '''\n if she != she_len and other.is_insertion_column(she):\n if me != me_len and self.is_insertion_column(me):\n ''' We both have a series of insertion columns'''\n start = me\n while(me != me_len and self.is_insertion_column(me) and\n she != she_len and other.is_insertion_column(she)):\n me += 1\n she += 1\n merged_insertion_columns += 1\n run = me - start\n self.col_labels[start:me] = list(range(\n insertion, insertion-run, -1))\n else:\n ''' Hers is a series of insertion columns'''\n start = she\n while she != she_len and other.is_insertion_column(she):\n she += 1\n run = she - start\n ins = bytearray(b\"-\") * run\n for seq in self.values():\n seq[me:me] = ins\n self._col_labels[me:me] = list(range(\n insertion, insertion - run, -1))\n insertion -= run\n me += run\n me_len += run\n elif me != me_len and self.is_insertion_column(me):\n ''' Mine is a series of insertion column'''\n start = me\n while me != me_len and self.is_insertion_column(me):\n me += 1\n run = me - start\n ins = bytearray(b\"-\") * run\n for v in selfother.values():\n v[start:start] = ins\n self.col_labels[start:me] = list(\n range(insertion, insertion-run, -1))\n insertion -= run\n elif(she == she_len or (me != me_len and\n self.col_labels[me] < other.col_labels[she])):\n ''' My column is not present (i.e. was allgap) in the\n \"other\"'''\n start = me\n while(me < me_len and (she == she_len or me != me_len and\n self.col_labels[me] < other.col_labels[she])):\n me += 1\n run = me - start\n ins = bytearray(b\"-\") * run\n for v in selfother.values():\n v[start:start] = ins\n elif(me == me_len or (she != she_len and\n self.col_labels[me] > other.col_labels[she])):\n ''' Her column is not present (i.e. was allgap) in \"me\"'''\n start = she\n while(she < she_len and (me == me_len or she != she_len and\n self.col_labels[me] > other.col_labels[she])):\n she += 1\n run = she - start\n ins = bytearray(b\"-\") * run\n for seq in self.values():\n seq[me:me] = ins\n self._col_labels[me:me] = other.col_labels[start:she]\n me += run\n me_len += run\n elif self.col_labels[me] == other.col_labels[she]:\n ''' A shared column'''\n while(me < me_len and she < she_len and\n self.col_labels[me] == other.col_labels[she]):\n she += 1\n me += 1\n else:\n raise \"hmmm, we thought this should be impossible? 
%d %d\" % (\n me, she)\n\n self.update(selfother)\n\n if convert_to_string:\n self.from_bytearray_to_string()\n #_LOG.debug(\"Merging finished ...\")\n\n return merged_insertion_columns", "def sequence(self):\n inigen = IniGen()\n fields = algorithm_fields.algorithms['sequence']\n\n output_uuid_map = {}\n\n # set up global parameters\n algorithm_path = fields['path']\n enabled = \"True\"\n inigen.emit_global(algorithm_path, enabled)\n\n label = \"SEQ\"\n for t in ['C','L']:\n run_label = label+'_'+t\n t1Mag_label = '{0}1MAG'.format(t)\n t2Mag_label = '{0}2MAG'.format(t)\n t3Mag_label = '{0}3MAG'.format(t)\n t1Ang_label = '{0}1ANG'.format(t)\n t2Ang_label = '{0}2ANG'.format(t)\n t3Ang_label = '{0}3ANG'.format(t)\n distillate_label = \"{0}-ALL\".format(t)\n\n # header\n inigen.emit_run_header(run_label, CHUNKING, MINTIME, MAXTIME)\n\n # body\n dep_1Mag_label = t1Mag_label\n dep_1Mag_name = fields['deps'][0]\n dep_1Mag_uuid = self.uuid_map[t1Mag_label]\n\n dep_2Mag_label = t2Mag_label\n dep_2Mag_name = fields['deps'][1]\n dep_2Mag_uuid = self.uuid_map[t2Mag_label]\n\n dep_3Mag_label = t3Mag_label\n dep_3Mag_name = fields['deps'][2]\n dep_3Mag_uuid = self.uuid_map[t3Mag_label]\n\n dep_1Ang_label = t1Ang_label\n dep_1Ang_name = fields['deps'][3]\n dep_1Ang_uuid = self.uuid_map[t1Ang_label]\n\n dep_2Ang_label = t2Ang_label\n dep_2Ang_name = fields['deps'][4]\n dep_2Ang_uuid = self.uuid_map[t2Ang_label]\n\n dep_3Ang_label = t3Ang_label\n dep_3Ang_name = fields['deps'][5]\n dep_3Ang_uuid = self.uuid_map[t3Ang_label]\n \n deps = [[dep_1Mag_label, dep_1Mag_name, dep_1Mag_uuid],\n [dep_2Mag_label, dep_2Mag_name, dep_2Mag_uuid],\n [dep_3Mag_label, dep_3Mag_name, dep_3Mag_uuid],\n [dep_1Ang_label, dep_1Ang_name, dep_1Ang_uuid],\n [dep_2Ang_label, dep_2Ang_name, dep_2Ang_uuid],\n [dep_3Ang_label, dep_3Ang_name, dep_3Ang_uuid]]\n\n param_section_name = fields['params'][0]\n param_section_value = \"Production/{0}/{1}/{2}\".format(self.location, self.name, distillate_label)\n param_name_name = fields['params'][1]\n param_name_value = \"SEQ\"\n params = [[param_section_name, param_section_value], [param_name_name, param_name_value]]\n\n outputs = fields['outputs']\n\n emitted = inigen.emit_run_body(deps, params, outputs)\n\n output_uuid_map[\"ZER_{0}ANG\".format(t)] = emitted[-9][-36:]\n output_uuid_map[\"ZER_{0}MAG\".format(t)] = emitted[-8][-36:]\n output_uuid_map[\"POS_{0}ANG\".format(t)] = emitted[-7][-36:]\n output_uuid_map[\"POS_{0}MAG\".format(t)] = emitted[-6][-36:]\n output_uuid_map[\"NEG_{0}ANG\".format(t)] = emitted[-5][-36:]\n output_uuid_map[\"NEG_{0}MAG\".format(t)] = emitted[-4][-36:]\n output_uuid_map[\"UNB_{0}NEG\".format(t)] = emitted[-3][-36:]\n output_uuid_map[\"UNB_{0}ZER\".format(t)] = emitted[-2][-36:]\n\n filename = \"{0}/SEQ_{1}.ini\".format(self.dirname, self.name)\n inigen.generate_file(filename)\n return output_uuid_map", "def generate_jaccard0_isoseq_bed(self):\n all = set(self.isoseqid2exonlen.keys())\n notwant = set(self.isoseqid2besttransidB.keys())\n want = all - notwant\n want_lines = []\n with open(\"../data/pacbio/\" + self.name + \".B.j0.bed\", 'w') as f:\n for line in self.linesPacBioBed:\n (chrom, chromStart, chromEnd, name, score, strand, thickStart, thickEnd, itemRgb, blockCount, blockSizes, blockStarts) = line.rstrip().split(\"\\t\")\n if name in want:\n f.write(line)", "def _process_genotypes(self, limit):\n if self.testMode:\n g = self.testgraph\n else:\n g = self.graph\n model = Model(g)\n line_counter = 0\n\n raw = '/'.join((self.rawdir, 'genotype'))\n 
logger.info(\"building labels for genotypes\")\n geno = Genotype(g)\n fly_tax = 'NCBITaxon:7227'\n with open(raw, 'r') as f:\n f.readline() # read the header row; skip\n filereader = csv.reader(f, delimiter='\\t', quotechar='\\\"')\n for line in filereader:\n line_counter += 1\n\n (genotype_num, uniquename, description, name) = line\n\n # if self.testMode is True:\n # if int(object_key) not in self.test_keys.get('genotype'):\n # continue\n\n # add the internal genotype to pub mapping\n genotype_id = 'MONARCH:FBgeno'+str(genotype_num)\n self.idhash['genotype'][genotype_num] = genotype_id\n\n if description == '':\n description = None\n\n if not self.testMode \\\n and limit is not None and line_counter > limit:\n pass\n else:\n if self.testMode and \\\n int(genotype_num) not in \\\n self.test_keys['genotype']:\n continue\n\n model.addIndividualToGraph(\n genotype_id, uniquename,\n Genotype.genoparts['intrinsic_genotype'],\n description)\n # we know all genotypes are in flies\n # FIXME we assume here they are in melanogaster,\n # but that isn't necessarily true!!!\n # TODO should the taxon be == genomic background?\n geno.addTaxon(fly_tax, genotype_id)\n genotype_iid = self._makeInternalIdentifier(\n 'genotype', genotype_num)\n model.addComment(\n genotype_id, genotype_iid)\n if name.strip() != '':\n model.addSynonym(genotype_id, name)\n\n return", "def _makeimap(self):\n self.map_['source'] = 'GOES'\n self.map_['provider'] = 'NOAA'\n self.map_['instrument'] = 'SUVI'\n self.map_['physobs'] = 'flux'", "def find_aligned_codons(aln):\n # throw out codons with non mod 3 gaps\n ind2 = []\n for i in range(0, aln.alignlen(), 3):\n bad = False\n\n for key, val in aln.iteritems():\n codon = val[i:i+3]\n if \"-\" in codon and codon != \"---\":\n bad = True\n break\n\n if not bad:\n ind2.extend([i, i+1, i+2])\n\n return ind2", "def createExon(strand_p, five_p_utr, cds_cod, three_p_utr):\n exon_pos = []\n if strand_p == '+': \n utr5_start, utr5_end = 0, 0\n if five_p_utr != []:\n utr5_start, utr5_end = five_p_utr[-1][0], five_p_utr[-1][1] \n cds_5start, cds_5end = cds_cod[0][0], cds_cod[0][1]\n jun_exon = []\n if cds_5start-utr5_end == 0 or cds_5start-utr5_end == 1:\n jun_exon = [utr5_start, cds_5end] \n if len(cds_cod) == 1:\n five_prime_flag = 0\n if jun_exon != []:\n five_p_utr = five_p_utr[:-1]\n five_prime_flag = 1\n for utr5 in five_p_utr:\n exon_pos.append(utr5)\n jun_exon = []\n utr3_start, utr3_end = 0, 0\n if three_p_utr != []: \n utr3_start = three_p_utr[0][0]\n utr3_end = three_p_utr[0][1]\n if utr3_start-cds_5end == 0 or utr3_start-cds_5end == 1:\n jun_exon = [cds_5start, utr3_end]\n three_prime_flag = 0\n if jun_exon != []: \n cds_cod = cds_cod[:-1]\n three_p_utr = three_p_utr[1:]\n three_prime_flag = 1\n if five_prime_flag == 1 and three_prime_flag == 1:\n exon_pos.append([utr5_start, utr3_end])\n if five_prime_flag == 1 and three_prime_flag == 0:\n exon_pos.append([utr5_start, cds_5end])\n cds_cod = cds_cod[:-1]\n if five_prime_flag == 0 and three_prime_flag == 1:\n exon_pos.append([cds_5start, utr3_end])\n for cds in cds_cod:\n exon_pos.append(cds)\n for utr3 in three_p_utr:\n exon_pos.append(utr3)\n else: \n if jun_exon != []:\n five_p_utr = five_p_utr[:-1]\n cds_cod = cds_cod[1:]\n for utr5 in five_p_utr:\n exon_pos.append(utr5)\n exon_pos.append(jun_exon) if jun_exon != [] else ''\n jun_exon = []\n utr3_start, utr3_end = 0, 0\n if three_p_utr != []:\n utr3_start = three_p_utr[0][0]\n utr3_end = three_p_utr[0][1]\n cds_3start = cds_cod[-1][0]\n cds_3end = cds_cod[-1][1]\n if 
utr3_start-cds_3end == 0 or utr3_start-cds_3end == 1: \n jun_exon = [cds_3start, utr3_end]\n if jun_exon != []:\n cds_cod = cds_cod[:-1]\n three_p_utr = three_p_utr[1:]\n for cds in cds_cod:\n exon_pos.append(cds)\n exon_pos.append(jun_exon) if jun_exon != [] else ''\n for utr3 in three_p_utr:\n exon_pos.append(utr3)\n elif strand_p == '-':\n utr3_start, utr3_end = 0, 0 \n if three_p_utr != []:\n utr3_start = three_p_utr[-1][0]\n utr3_end = three_p_utr[-1][1]\n cds_3start = cds_cod[0][0]\n cds_3end = cds_cod[0][1]\n jun_exon = []\n if cds_3start-utr3_end == 0 or cds_3start-utr3_end == 1:\n jun_exon = [utr3_start, cds_3end] \n if len(cds_cod) == 1: \n three_prime_flag = 0\n if jun_exon != []:\n three_p_utr = three_p_utr[:-1]\n three_prime_flag = 1\n for utr3 in three_p_utr:\n exon_pos.append(utr3)\n jun_exon = []\n (utr5_start, utr5_end) = (0, 0)\n if five_p_utr != []:\n utr5_start = five_p_utr[0][0]\n utr5_end = five_p_utr[0][1]\n if utr5_start-cds_3end == 0 or utr5_start-cds_3end == 1:\n jun_exon = [cds_3start, utr5_end]\n five_prime_flag = 0\n if jun_exon != []:\n cds_cod = cds_cod[:-1]\n five_p_utr = five_p_utr[1:]\n five_prime_flag = 1\n if three_prime_flag == 1 and five_prime_flag == 1:\n exon_pos.append([utr3_start, utr5_end])\n if three_prime_flag == 1 and five_prime_flag == 0:\n exon_pos.append([utr3_start, cds_3end])\n cds_cod = cds_cod[:-1]\n if three_prime_flag == 0 and five_prime_flag == 1:\n exon_pos.append([cds_3start, utr5_end]) \n for cds in cds_cod:\n exon_pos.append(cds)\n for utr5 in five_p_utr:\n exon_pos.append(utr5)\n else:\n if jun_exon != []:\n three_p_utr = three_p_utr[:-1]\n cds_cod = cds_cod[1:]\n for utr3 in three_p_utr:\n exon_pos.append(utr3) \n if jun_exon != []:\n exon_pos.append(jun_exon)\n jun_exon = []\n (utr5_start, utr5_end) = (0, 0)\n if five_p_utr != []:\n utr5_start = five_p_utr[0][0]\n utr5_end = five_p_utr[0][1] \n cds_5start = cds_cod[-1][0]\n cds_5end = cds_cod[-1][1]\n if utr5_start-cds_5end == 0 or utr5_start-cds_5end == 1:\n jun_exon = [cds_5start, utr5_end]\n if jun_exon != []:\n cds_cod = cds_cod[:-1]\n five_p_utr = five_p_utr[1:]\n for cds in cds_cod:\n exon_pos.append(cds)\n if jun_exon != []:\n exon_pos.append(jun_exon) \n for utr5 in five_p_utr:\n exon_pos.append(utr5)\n return exon_pos", "def sequence_to_zhuyin(self, sequence, add_eos=False, add_sos=False):\n\t\tindex_sequence = [self.zhuyin_ind['SOS']] if add_sos else []\n\n\t\tfor char in self.split_sequence(sequence):\n\t\t\tch = pinyin(char, style=Style.BOPOMOFO)[0][0][0]\n\t\t\tif ch not in self.zhuyin_table:\n\t\t\t\tindex_sequence.append((self.zhuyin_ind['UNK']))\n\t\t\telse:\n\t\t\t\tindex_sequence.append(self.zhuyin_ind[ch])\n\n\t\tif add_eos:\n\t\t\tindex_sequence.append(self.zhuyin_ind['EOS'])\n\n\t\treturn index_sequence", "def build_inverse_barcode_map(seqs):\r\n inverse_map = {}\r\n map_count = defaultdict(int)\r\n for (label, seq) in seqs:\r\n (map_id, seq_id) = label.split()[:2]\r\n map_id = map_id.split(\"_\")[0]\r\n inverse_map[seq_id] = map_id\r\n map_count[map_id] += 1\r\n\r\n return (inverse_map, map_count)", "def main():\n\n # Accept up to three command-line arguments\n input_terms = \"<input_GO_terms_file>\"\n input_annotations = \"<input_gene_associations_file>\"\n output_filename = \"<output_filename>\"\n\n\n # The first two arguments are required GO terms file ending with .obo\n # and gene association GAF file ending with .gaf\n if len(sys.argv) < 3:\n sys.exit(\"Please provide required GO terms .obo file and gene \" +\n \"assocatiion .gaf file.\")\n elif not 
sys.argv[1].endswith(\".obo\"):\n sys.exit(\"Please provide a GO terms .obo file.\")\n elif not sys.argv[2].endswith(\".gaf\"):\n sys.exit(\"Please provide a gene association .gaf file.\")\n else:\n input_terms = sys.argv[1]\n input_annotations = sys.argv[2]\n\n\n # Check if the provided import .obo or .gaf files exist\n if not input_terms:\n sys.exit(input_terms + \" not found. Check the file path and try again.\")\n elif not input_annotations:\n sys.exit(input_annotations + \" not found. Check the file path and try again.\")\n elif len(sys.argv) == 3:\n output_filename = \"results.tsv\"\n sys.stdout = open(\"results.tsv\", \"w\")\n elif len(sys.argv) == 4:\n output_filename = sys.argv[3] + \".tsv\"\n sys.stdout = open(output_filename, \"w\")\n\n\n # parse id and is_valeus and make a go_dict\n split_input_terms = split_terms(input_terms)\n go_dict = {}\n for record in split_input_terms:\n (go_id, is_a) = parse_go_term(record)\n key_go_dict = \"\".join(go_id)\n go_dict[key_go_dict] = is_a\n\n\n # Export an annotation gene information to tsv format into the output file\n gene_association_map = map_protein_to_go(input_annotations)\n for protein, go_ids in sorted(gene_association_map.items()):\n print(protein, end=\"\")\n\n for go_id in sorted(go_ids):\n parent_go_ids = find_parent_terms(go_id, go_dict)\n\n count = 0\n for parent_go_id in sorted(parent_go_ids):\n\n if count == 0:\n print(\"\\t\", go_id, \"\\t\", parent_go_id)\n count += 1\n else:\n print(\"\\t\", parent_go_id, sep=\"\\t\")\n\n sys.stdout.close()", "def annotate_indel_on_db(row, fasta, dbsnp, clnvr, chr_prefixed):\n chr = row[\"chr\"]\n pos = row[\"pos\"]\n idl_type = row[\"is_ins\"]\n idl_seq = row[\"indel_seq\"]\n\n # obj representing the indel in reference genome\n idl = curate_indel_in_genome(fasta, chr, pos, idl_type, idl_seq, chr_prefixed)\n # obj representing report of the indel\n report = IndelSnpFeatures(chr, pos, idl_type, idl_seq)\n\n # search for equivalent indels over pos +/- search_window nt\n search_window = 50\n start, end = pos - search_window, pos + search_window\n chr_vcf = row[\"chr\"].replace(\"chr\", \"\")\n\n for record in dbsnp.fetch(chr_vcf, start, end, parser=pysam.asTuple()):\n bambinos = vcf2bambino(record)\n for bb in bambinos:\n if idl_type == bb.idl_type and len(idl_seq) == len(bb.idl_seq):\n # indel on db representing in reference genome\n db_idl = curate_indel_in_genome(\n fasta, chr, bb.pos, bb.idl_type, bb.idl_seq, chr_prefixed\n )\n if idl == db_idl:\n rs = record[2]\n report.add_dbsnp_id(rs)\n report.add_dbsnp_freq(dbsnp_freq(record))\n # report.add_dbsnp_origin(dbsnp_origin(record))\n report.add_dbsnp_common(dbsnp_common(record))\n\n for record in clnvr.fetch(chr_vcf, start, end, parser=pysam.asTuple()):\n bambinos = vcf2bambino(record)\n for bb in bambinos:\n if idl_type == bb.idl_type and len(idl_seq) == len(bb.idl_seq):\n db_idl = curate_indel_in_genome(\n fasta, chr, bb.pos, bb.idl_type, bb.idl_seq, chr_prefixed\n )\n if idl == db_idl:\n id = record[2]\n report.add_clnvr_id(id)\n report.add_clnvr_freq(clnvr_freq(record))\n # report.add_clnvr_origin(clnvr_origin(record))\n report.add_clnvr_info(cln_info(record))\n\n return report", "def intervals2wig(iter,sampleName=\"\",outDir=os.getcwd(),scratchDir=os.getcwd()):\n seqs = {}\n count = 0\n print \"Preparing Dictionary of alignments\\nEach '.' 
is 10000 alignments\"\n for interval in iter:\n count = count+1\n if count % 10000 == 0:\n sys.stdout.write(\".\")\n if count % 100000 == 0:\n print \"\\n%d\" % (count)\n if not seqs.has_key(interval.chr):\n seqs[interval.chr]={'+':scratchDir+\"/\"+GenRandom(),'-':scratchDir+\"/\"+GenRandom()}\n FILE = open(seqs[interval.chr][interval.strand],'a')\n for i in range(interval.start,len(interval)+1):\n print >>FILE, \"%d\\t%d\" % (i,interval.readcount)\n print \"Done preparing dictionary, Begin sort and write\"\n chrKeys = seqs.keys()\n chrKeys.sort()\n for chr in chrKeys:\n print \"Printing \" + chr\n strands = seqs[chr].keys()\n for strand in strands:\n INPUT = open(seqs[chr][strand],'r')\n filename = outDir + \"/%s_%s_%s.wig\" % (sampleName,chr,strand)\n OUTPUT = open(filename,'w')\n OUTPUT.write(\"track type=wiggle_0 name='%s_%s_%s' description='Wiggle Track for read alignment of %s sample to %s'\\n\" % (sampleName,chr,strand,sampleName,chr))\n print strand\n positions = {}\n while True:\n line = INPUT.readline()\n if not line: break\n pos,obs = line.split(\"\\t\")\n pos,obs = int(pos),int(obs)\n try: positions[pos]=positions[pos]+obs\n except KeyError: positions[pos]=obs\n posKeys = positions.keys()\n posKeys.sort()\n for pos in posKeys:\n wigLine = \"%s\\t%d\\t%d\\t%d\" % (chr,int(pos),int(pos)+1,positions[pos])\n print >>OUTPUT, wigLine\n os.remove(seqs[chr][strand])\n return", "def processFiles(fileName):\n print fileName\n count_t1 = 0\n inFile=open(fileName,'r')\n all_angleList = Counter()\n rep_angleList = Counter()\n all_lengthsList = Counter()\n maxDist_List = Counter()\n global xCord, yCord, zCord\n aminoAcidName={}\n xCord={}\n yCord={}\n zCord={}\n seq_number={}\n counter=0\n for i in inFile:\n if (i[0:6].rstrip()==\"NUMMDL\"):\n numOfModels=i[10:14].rstrip()\n if ((i[0:6].rstrip()==\"ENDMDL\")or (i[0:6].rstrip()=='TER')):\n break\n if (i[0:6].rstrip()==\"MODEL\" and int(i[10:14].rstrip())>1):\n break\n \n if(i[0:4].rstrip())==\"ATOM\" and(i[13:15].rstrip())==\"CA\" and(i[16]=='A'or i[16]==' ')and i[17:20]!= \"UNK\" :\n aminoAcidName[counter]=int(aminoAcidLabel[i[17:20]])\n xCord[counter]=(float(i[30:38]))\n yCord[counter]=(float(i[38:46]))\n zCord[counter]=(float(i[46:54]))\n seq_number[counter]=str(i[22:27])\n counter+=1\n\n protLen=len(yCord)\n initialLabel=[]\n sortedLabel=[]\n sortedIndex=[]\n outDist={}\n for m in range(0,3):\n initialLabel.append(0)\n sortedLabel.append(0)\n sortedIndex.append(0)\n\n for i in range(0,protLen-2):\n for j in range(i+1,protLen-1):\n for k in range(j+1, protLen):\n global i1,j1,k1\n i1=i\n j1=j\n k1=k\n keepLabelIndex={}\n keepLabelIndex[aminoAcidName[i]]=i\n keepLabelIndex[aminoAcidName[j]]=j\n keepLabelIndex[aminoAcidName[k]]=k\n initialLabel[0]=aminoAcidName[i]\n initialLabel[1]=aminoAcidName[j]\n initialLabel[2]=aminoAcidName[k]\n sortedLabel=list(initialLabel)\n sortedLabel.sort(reverse=True)\n\n #Perform Rule- based labelling\n\n if (sortedLabel[0]==sortedLabel[1])and(sortedLabel[1]==sortedLabel[2]):\n dist1_2Temp=calcDist(i,j)\n dist1_3Temp=calcDist(i,k)\n dist2_3Temp=calcDist(j,k)\n if dist1_2Temp>=(max(dist1_2Temp,dist1_3Temp,dist2_3Temp)):\n indexOf0=i\n indexOf1=j\n indexOf2=k\n elif dist1_3Temp>=(max(dist1_2Temp,dist1_3Temp,dist2_3Temp)):\n indexOf0=i\n indexOf1=k\n indexOf2=j\n else:\n indexOf0=j\n indexOf1=k\n indexOf2=i\n elif(aminoAcidName[i]!=aminoAcidName[j])and(aminoAcidName[i]!=aminoAcidName[k]) and(aminoAcidName[j]!=aminoAcidName[k]): \n for index_ in range(0,3):\n 
sortedIndex[index_]=keepLabelIndex[sortedLabel[index_]]\n indexOf0=sortedIndex[0]\n indexOf1=sortedIndex[1]\n indexOf2=sortedIndex[2]\n elif(sortedLabel[0]==sortedLabel[1])and(sortedLabel[1]!=sortedLabel[2]):\n indexOf2=keepLabelIndex[sortedLabel[2]]\n indices=indexFind(indexOf2,i,j,k)\n a=indexOf2\n b=indices[0]\n c=indices[1]\n dist1_3Temp=calcDist(b,a)\n dist2_3Temp=calcDist(c,a)\n if dist1_3Temp>=dist2_3Temp:\n indexOf0=indices[0]\n indexOf1=indices[1] \n else:\n indexOf0=indices[1]\n indexOf1=indices[0]\n elif(sortedLabel[0]!=sortedLabel[1])and(sortedLabel[1]==sortedLabel[2]):\n indexOf0=keepLabelIndex[sortedLabel[0]]\n indices=indexFind(indexOf0,i,j,k)\n if calcDist(indexOf0,indices[0])>= calcDist(indexOf0,indices[1]):\n indexOf1=indices[0]\n indexOf2=indices[1] \n else:\n indexOf2=indices[0]\n indexOf1=indices[1]\n dist01=calcDist(indexOf0,indexOf1)\n s2=dist01/2\n dist02=calcDist(indexOf0,indexOf2)\n s1=dist02\n dist12=dist01\n dist03=calcDist(indexOf1,indexOf2)\n\n # All lengths calculation \n all_lengthsList[round(dist01,round_off_to)] += 1\n all_lengthsList[round(dist02,round_off_to)] += 1\n all_lengthsList[round(dist03,round_off_to)] += 1\n\n maxDist_List[round(max(dist01,dist02,dist03),round_off_to)] +=1\n\n s3=(((xCord[indexOf0]+xCord[indexOf1])/2-xCord[indexOf2])**2\n +((yCord[indexOf0]+yCord[indexOf1])/2-yCord[indexOf2])**2\n +((zCord[indexOf0]+zCord[indexOf1])/2-zCord[indexOf2])**2)**0.5\n \n \n Theta1=180*(math.acos((s1**2-s2**2-s3**2)/(2*s2*s3)))/3.14\n if Theta1<=90:\n all_angleList[round(Theta1,round_off_to)] +=1\n rep_angleList[round(Theta1,round_off_to)] +=1\n else:\n all_angleList[round(abs(180-Theta1),round_off_to)] +=1\n rep_angleList[round(abs(180-Theta1),round_off_to)] +=1\n \n #if Theta1>90: \n # Theta1=abs(180-Theta1)\n #print 'Second Theta1, ',Theta1\n #Theta 2\n dist02=calcDist(indexOf1,indexOf0)\n s1=dist02\n dist01=calcDist(indexOf1,indexOf2)\n s2=dist01/2\n s3=(((xCord[indexOf1]+xCord[indexOf2])/2-xCord[indexOf0])**2\n +((yCord[indexOf1]+yCord[indexOf2])/2-yCord[indexOf0])**2\n +((zCord[indexOf1]+zCord[indexOf2])/2-zCord[indexOf0])**2)**0.5\n \n Theta2=180*(math.acos((s1**2-s2**2-s3**2)/(2*s2*s3)))/3.14 \n #if Theta2 > 90:\n # Theta2 = abs(180-Theta2)\n if Theta2<=90:\n all_angleList[round(Theta2,round_off_to)] +=1\n else:\n all_angleList[round(abs(180-Theta2),round_off_to)] +=1\n\n #Theta 3\n dist02=calcDist(indexOf2,indexOf1)\n s1=dist02\n dist01=calcDist(indexOf2,indexOf0)\n s2=dist01/2\n s3=(((xCord[indexOf2]+xCord[indexOf0])/2-xCord[indexOf1])**2+\n ((yCord[indexOf2]+yCord[indexOf0])/2-yCord[indexOf1])**2+\n ((zCord[indexOf2]+zCord[indexOf0])/2-zCord[indexOf1])**2)**0.5\n \n Theta3=180*(math.acos((s1**2-s2**2-s3**2)/(2*s2*s3)))/3.14 \n #if Theta3 > 90:\n # Theta3 = abs(180-Theta3)\n if Theta3<=90:\n all_angleList[round(Theta3,round_off_to)] +=1\n else:\n all_angleList[round(abs(180-Theta3),round_off_to)] +=1\n # Either writting output to a file or using dictionary or \n # counter will save you from memory exceptions in this case.\n #all_angleList[round(Theta1,round_off_to)] +=1\n #all_angleList[round(Theta2,round_off_to)] +=1\n #all_angleList[round(Theta3,round_off_to)] +=1\n\n #rep_angleList[round(Theta1,round_off_to)] +=1\n\n count_t1 = count_t1+1\n\n print 'count_t1:',count_t1\n\n return [all_angleList,rep_angleList,all_lengthsList,maxDist_List]", "def writeMarkerGenes(self, location):\n try:\n writer = pd.ExcelWriter(location+\"marker_genes.xlsx\", engine=\"xlsxwriter\")\n for key in self.marker_genes:\n 
self.marker_genes.get(key).to_excel(writer, sheet_name=key)\n writer.save()\n except:\n print(\"Please run getMarkerGenes first to get marker genes. This step is needed to write them to excel.\")", "def makeSNPMap(snpfile, referencemap):\n\tbimfile = open(snpfile, \"r\") # open the input file\n\tmapfile = open(referencemap, \"r\")\n\toutfilename = re.sub(r'\\.bim', '.markerpos', snpfile)\n\tposfilename = re.sub(r'\\.bim', '.snp_locations', snpfile)\n\toutfile = open(outfilename, \"w\")\n\tposfile = open(posfilename, \"w\")\n\t# Initialize variables \n\tpreviousCM = 0\n\tpreviousPos = 0\n\ti=0\n\tbimline = bimfile.readline().strip().split() # Pos 1 is rsID, Pos 3 is location\n\tfor mapline in mapfile:\n\t\tif len(bimline) == 0:\n\t\t\tbreak\t\t\n\t\tif i==0:\n\t\t\ti+=1\n\t\t\tcontinue\n\t\tmapline = mapline.strip().split()\n\t\t# Three cases: 1. SNP pos gt map pos\n\t\twhile int(bimline[3]) < int(mapline[0]): # This means that the BIM file is behind the map file, so need to write output here with the interopolation\n\t\t# of the previous values\n\t\t\tdiffCM = float(mapline[2]) - float(previousCM)\n\t\t\tdiffpos = float(mapline[0]) - float(previousPos)\n\t\t\tmulti = (float(bimline[3]) - float(previousPos))/diffpos\n\t\t\tcmout = multi*diffCM + float(previousCM)\n\t\t\tif cmout < 0: # this should not happen so if it does dump data and quit\n\t\t\t\tprint i\n\t\t\t\tprint cmout\n\t\t\t\tprint diffCM\n\t\t\t\tprint diffpos\n\t\t\t\tprint previousCM\n\t\t\t\tprint previousPos\n\t\t\t\tprint bimline\n\t\t\t\tprint mapline\n\t\t\t\texit()\n\n\t\t\toutfile.write( str(cmout) +\"\\n\")\n\t\t\tposfile.write( str(bimline[3]) + \"\\t\" + str(cmout) + \"\\n\")\n\t\t\tbimline = bimfile.readline().strip().split()\n\t\t\tif len(bimline) == 0:\n\t\t\t\tbreak\t\t\n\t\tif len(bimline) ==0:\n\t\t\tbreak\n\t\tif bimline[3] == mapline[0]: # write out genetic position\n\t\t\toutfile.write( mapline[2]+ \"\\n\")\n\t\t\tposfile.write( str(bimline[3]) + \"\\t\" + mapline[2] + \"\\n\")\n\t\t\tbimline = bimfile.readline().strip().split()\n\t\n\t\t#if bimline[3] > mapline[0]: # read next line in the map file\n\t\t#\tpreviousCM = mapline[2]\n\t\t#\tpreviousPos = mapline[0]\n\t\t#\tcontinue\n\t\t# Hits this and continues if bimline is above mapline\n\t\tpreviousCM = mapline[2]\n\t\tpreviousPos = mapline[0]\n\t\ti += 1\n\toutfile.close()\n\treturn(outfile.name)", "def read_write_protein_files(dir_path, heme_files):\n for i in number_of_files:\n# seqs = {}\n input_files = (dir_path + heme_files[i])\n f = open(input_files)\n count = 0\n# output_file = (dir_path + heme_files[i] + \".txt\")\n# g = open(output_file, \"x\")\n with open(input_files) as f:\n for line in f:\n if line.startswith('>'):\n name = line[1:].rstrip('\\n')\n count = count + 1\n seqs =[]\n else: # sequence, not header\n seqs[name] = seqs[name] + line\n# sequences += line[:-1]\n# output_file = open(\"out_\" + str(count) + \"_.txt\", \"a\")\n# output_file.write(str(len(sequences)))\n print(\"Number of proteins read:\" + count)\n f.close", "def write_out_ITS_GFF(busco, prefix, out): # this is a long function\n\n try:\n busco_hits = parse_tab_outfile(busco)\n except:\n raise ValueError(\"something wrong with gff in file\")\n GFF_out = open(out, \"w\")\n \n ############################################################################\n EOG_counter = 0\n for i in busco_hits:\n if i.startswith(\"#\"): #allows line to have comment.\n continue\n if not i.strip():\n continue #if the last line is blank\n EOG_counter = EOG_counter+1\n EOG, scaff, start, stop = 
i.split(\"\\t\")\n # check start is less than stop. Fix if not. \n if int(start) > int(stop):\n temp_start = stop\n temp_stop = start\n start = temp_start\n stop = temp_stop\n if int(start) == 0:\n start = \"1\"\n line = \"%s\\t%s_Busco_gene\\t%d\\t%s\\t%s\\t.\\t+\\t.\\t%s\\n\" %(scaff,\\\n prefix,EOG_counter, start, stop, EOG)\n GFF_out.write(line)\n \n \n\n #close the write file\n GFF_out.close()", "def fillSongsArray():\r\n counter = 1\r\n notealt = 0.0\r\n frequenz = 0\r\n notencounter = 0\r\n\r\n file2write.write(\"\\n{\")\r\n for instrument in midi_data.instruments:\r\n while counter == 1:#first line of the instrument e.g piano it will only save the treble clef and NOT the bass clef\r\n for note in instrument.notes:\r\n if note.start - notealt >= 0.15: #If the note is a break it will save it as such\r\n value = dauer/((note.start - notealt)*1000)\r\n y = round(value)\r\n file2write.write(\"{0,\")\r\n file2write.write(str(y+1))\r\n file2write.write(\"},\")\r\n\r\n else:\r\n frequenz = int(pretty_midi.note_number_to_hz(note.pitch)) #convert the midi-note-number to a frequency with function of the library\r\n value = dauer/((note.end - note.start)*1000) #calculates the duration of the note\r\n x = round(value)\r\n file2write.write(\"{\")\r\n file2write.write(str(frequenz))\r\n file2write.write(\",\")\r\n file2write.write(str(x))\r\n file2write.write(\"},\")\r\n notealt = note.end\r\n counter += 1\r\n file2write.write(\"},\")\r\n #file2write.write(\"};\\n\")\r", "def add_from_file(writer):\n with open(\"to_test_db\", \"rb\") as reader:\n lines = reader.readlines()\n place = \"out\"\n codes = []\n code = []\n for i, line in enumerate(lines):\n if \"<--NEW CODE-->\" in line:\n if \"lang\" in place:\n codes += [(q_id, ans_id, \"\".join(code), lang)]\n place = \"q_id\"\n elif \"<--language\" in line:\n place = \"lang\"\n lang = line.split(\"<--language=\")[1].split(\"-->\")[0]\n elif \"q_id\" in place:\n q_id = line.split(\"<--question_id=\")[1].split(\"-->\")[0]\n place = \"a_id\"\n elif \"a_id\" in place:\n ans_id = line.split(\"<--ans_id=\")[1].split(\"-->\")[0]\n place = \"code\"\n code = []\n elif \"code\" in place:\n code += [line]\n codes += [(q_id, ans_id, \"\".join(code), lang)]\n for next_id, entry in enumerate(codes):\n (q_id, ans_id, code, lang) = entry\n print next_id, entry\n writer.add_document(question_id=return_unicode(int(q_id)), answer_id=return_unicode(int(ans_id)), code=return_unicode(code), language=return_unicode(lang),code_id=return_unicode(next_id))\n CR_DOCS_DB.insert({\"question_id\": return_unicode(int(q_id)), \"answer_id\": return_unicode(int(ans_id)), \"code\": return_unicode(code), \"language\": return_unicode(lang), \"code_id\": return_unicode(next_id)})\n return len(codes)", "def populate_code_list():\n\tletter_code_ST = \"JZIHGFEDCBA\"\n\tletter_code_FG = \"XWUTRQPNMLK\"\n\tfor pos in range(\n\t len(letter_code_ST)): #Interestingly, the values start from 0\n\t\tcode_ST.append(pos) # Number first\n\t\tcode_ST.append(letter_code_ST[pos])\n\tfor pos in range(len(letter_code_FG)):\n\t\tcode_FG.append(pos)\n\t\tcode_FG.append(letter_code_FG[pos])", "def make_iob(txt, ents, etypes):\r\n index = 0\r\n for i in ents:\r\n start = txt.index(i, index) #get the start of the entity\r\n tmp1, tmp2 = txt[:start], txt[start:]\r\n tmp1 += \" eeeeeeeeeeeeeeeeeeee \"\r\n txt = ' '.join([tmp1, tmp2])\r\n index = start + len(i) + len(\" eeeeeeeeeeeeeeeeeeee \")\r\n \r\n line_tokens = word_tokenize(txt)#tokenize the text\r\n \r\n #get the starting positions of the entities\r\n 
starts = []\r\n try: #in order to handle the last case where list.index doesnt finds anything\r\n while line_tokens.index(\"eeeeeeeeeeeeeeeeeeee\") > -1:\r\n tmp = line_tokens.index(\"eeeeeeeeeeeeeeeeeeee\")\r\n starts.append(tmp)\r\n del line_tokens[tmp]\r\n except ValueError:\r\n pass\r\n \r\n line_iob = ['O'] * len(line_tokens)# the iob tags of the whole text\r\n \r\n for i in range(0, len(ents)):\r\n #tokenize the entities\r\n entity_tokens = word_tokenize(ents[i])\r\n tmp = 'I-'+etypes[i]\r\n entity_iob = [tmp] * len(entity_tokens)\r\n entity_iob[0] = \"B-\" + etypes[i]\r\n \r\n #make changes to the iob tags to match the entities\r\n for j in range(0, len(entity_iob)):\r\n line_iob[starts[i] + j] = entity_iob[j]\r\n \r\n #the format is: token IOB-etypes\r\n for i in range(0, len(line_tokens)):\r\n output.write(\"{}\\t{}\\n\".format(line_tokens[i], line_iob[i]))\r\n output.write('\\n')#new document\r", "def write_output(output,fasta,CDR1_pos,CDR2_pos):\n # fasta file is the igblast input file\n with open(output, 'w') as f:\n header = \"\\t\".join(['Name', 'CDRL1_kabat_AA', 'CDRL2_kabat_AA'])\n f.write(header + '\\n')\n for record in SeqIO.parse(fasta, \"fasta\"):\n ID = str(record.id)\n seq = str(record.seq)\n CDR1_aa=''\n CDR2_aa = ''\n CDR1_index = CDR1_pos[ID]\n CDR2_index = CDR2_pos[ID]\n if CDR1_index != []:\n CDR1_start, CDR1_end = fix_aa_pos((int(CDR1_index[0]) - 1), int(CDR1_index[1]))\n CDR1_nuc = seq[CDR1_start:CDR1_end]\n CDR1_aa = translation(CDR1_nuc)\n if CDR2_index != []:\n CDR2_start, CDR2_end = fix_aa_pos((int(CDR2_index[0]) - 1), int(CDR2_index[1]))\n CDR2_nuc = seq[CDR2_start:CDR2_end]\n CDR2_aa = translation(CDR2_nuc)\n f.write(\"\\t\".join([ID, CDR1_aa, CDR2_aa]) + '\\n')", "def add_building_output_locations(self,dictionary, start,end,step): \n \"\"\"\n Given a dictionary of building footprints and associated nodes,element and sides, add the values \n to the netcdf grid file.\n \n The nodes, elements and sides associated with each footprint correspond to the there index in the RiCOM grid file\n \n Dictionary format:\n {id1: {'nodes': [n1, n2,...nn] }, {'elements': [e1,e2,...,en] },{'sides': [s1,s2,...,sn]}, id2: {}, id3 {}, ...., idn {} } \n \n idn = the id of the building footprint that the node, elements and sides belong to\n \n \"\"\"\n \n if (dictionary != {}):\n maxNodes = 0\n maxElements = 0\n maxSides = 0\n nodesAll = []\n elementsAll = []\n sidesAll = []\n id = []\n perimeter = []\n type = []\n for row in dictionary.iteritems(): \n id.append(row[0]) \n n = row[1]['nodes'] \n e = row[1]['elements']\n s = row[1]['sides']\n perimeter.append(row[1]['perimeter'])\n \n if row[1]['type'] == \"BUILDINGS_AS_HOLES\":\n typeNUM = 1\n elif row[1]['type'] == \"BUILDINGS_GRIDDED\":\n typeNUM = 2\n\n elif row[1]['type'] == \"BUILDINGS_AS_POINTS\":\n typeNUM = 3\n else:\n typeNUM = 0\n type.append(typeNUM)\n \n nodesAll.extend(n)\n elementsAll.extend(e)\n sidesAll.extend(s)\n if maxNodes < len(n): maxNodes = len(n)\n if maxElements < len(e): maxElements = len(e)\n if maxSides < len(s): maxSides = len(s)\n \n \n #remove repeated elements, sides and nodes\n nodesAll = list(set(nodesAll))\n elementsAll = list(set(elementsAll))\n sidesAll = list(set(sidesAll))\n \n print \"# elements = %s\" % len(elementsAll)\n print \"# sides = %s\" % len(sidesAll)\n print \"# nodes = %s\" % len(nodesAll)\n\n \n #initialise arrays for entry into netcdf file\n nodes = zeros((len(dictionary),maxNodes))\n elements = zeros((len(dictionary),maxElements))\n sides = zeros((len(dictionary),maxSides)) 
\n \n i = 0\n for row in dictionary.iteritems(): \n nodes[i,0:(len(row[1]['nodes']))] = row[1]['nodes']\n elements[i,0:(len(row[1]['elements']))] = row[1]['elements']\n sides[i,0:(len(row[1]['sides']))] = row[1]['sides']\n i+=1 \n \n #create dimensions\n try: self.buildings.createDimension('max_number_nodes',maxNodes)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.buildings.createDimension('max_number_elements',maxElements)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.buildings.createDimension('max_number_sides',maxSides)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.buildings.createDimension('number_of_buildings',len(dictionary))\n except Exception, e: print \"WARNING: %s\" % e \n try: self.building_nodes.createDimension('number_of_nodes',len(nodesAll))\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_elements.createDimension('number_of_elements',len(elementsAll))\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_sides.createDimension('number_of_sides',len(sidesAll))\n except Exception, e: print \"WARNING: %s\" % e\n \n \n #create variables\n try: building_id = self.buildings.createVariable(varname = 'building_id',datatype = 'i', dimensions=('number_of_buildings',)) \n except Exception, e:\n building_id = self.buildings.variables['building_id']\n print \"WARNING: %s\" % e\n \n try: building_wkt = self.buildings.createVariable(varname = 'building_wkt',datatype = str, dimensions=('number_of_buildings',)) \n except Exception, e:\n building_wkt = self.buildings.variables['building_wkt'] \n print \"WARNING: %s\" % e\n\n try: building_perimeter = self.buildings.createVariable(varname = 'building_perimeter',datatype = 'd', dimensions=('number_of_buildings',)) \n except Exception, e:\n building_perimeter = self.buildings.variables['building_perimeter'] \n print \"WARNING: %s\" % e\n\n\n try: building_type = self.buildings.createVariable(varname = 'building_type',datatype = 'i', dimensions=('number_of_buildings',)) \n except Exception, e:\n building_type = self.buildings.variables['building_type'] \n print \"WARNING: %s\" % e\n\n try: building_nodes = self.buildings.createVariable(varname = 'building_nodes',datatype = 'i', dimensions=('number_of_buildings','max_number_nodes',)) \n except Exception, e:\n building_nodes = self.buildings.variables['building_nodes'] \n print \"WARNING: %s\" % e\n \n try: building_elements = self.buildings.createVariable(varname = 'building_elements',datatype = 'i', dimensions=('number_of_buildings','max_number_elements',)) \n except Exception, e:\n building_elements = self.buildings.variables['building_elements']\n print \"WARNING: %s\" % e\n \n try: building_sides = self.buildings.createVariable(varname = 'building_sides',datatype = 'i', dimensions=('number_of_buildings','max_number_sides',)) \n except Exception, e:\n building_sides = self.buildings.variables['building_sides']\n print \"WARNING: %s\" % e\n \n building_nodes[:] = nodes\n building_elements[:] = elements\n building_sides[:] = sides\n building_id[:] = array(id) \n building_perimeter[:] = array(perimeter)\n building_type[:] = array(type)\n #Set the attributes\n self.building_nodes.start = start\n self.building_nodes.finish = end\n self.building_nodes.step = step\n self.building_elements.start = start\n self.building_elements.finish = end\n self.building_elements.step = step\n self.building_sides.start = start\n self.building_sides.finish = end\n self.building_sides.step = step\n \n #assign the data\n output_ids = 
{'nodes': [], 'elements': [], 'sides': []}\n try: output_ids['nodes'] = self.building_nodes.createVariable(varname = 'id',datatype = 'i', dimensions=('number_of_nodes',))\n except Exception, e:\n output_ids['nodes'] = self.building_nodes.variables['id']\n print \"WARNING: %s\" % e\n try: output_ids['elements'] = self.building_elements.createVariable(varname = 'id',datatype = 'i', dimensions=('number_of_elements',))\n except Exception, e:\n output_ids['elements'] = self.building_elements.variables['id']\n print \"WARNING: %s\" % e\n try: output_ids['sides'] = self.building_sides.createVariable(varname = 'id',datatype = 'i', dimensions=('number_of_sides',))\n except Exception, e:\n output_ids['sides'] = self.building_sides.variables['id']\n print \"WARNING: %s\" % e\n \n \n output_ids['nodes'][:] = array(nodesAll)\n output_ids['elements'][:] = array(elementsAll)\n output_ids['sides'][:] = array(sidesAll)\n \n \n self.buildingsAdded = True\n else:\n #create dimensions\n try: self.buildings.createDimension('number_of_buildings',0)\n except Exception, e: print \"WARNING: %s\" % e \n try: self.building_nodes.createDimension('number_of_nodes',0)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_elements.createDimension('number_of_elements',0)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_sides.createDimension('number_of_sides',0)\n except Exception, e: print \"WARNING: %s\" % e \n self.buildingsAdded = True", "def _getSeqName(header):\n seqNameList = []\n unMapped=''\n for sn in header['sq']:\n if (sn[0].startswith('GL')) or (sn[0].startswith('chrUn')):\n unMapped += \" %s\" % sn[0]\n else:\n seqNameList.append(sn[0]) # first column is seqName\n\n if unMapped != '': \n seqNameList.append(unMapped)\n\n return seqNameList", "def get_ids_bcs_added_field(header,\r\n mapping_data,\r\n barcode_type=\"golay_12\",\r\n added_demultiplex_field=None):\r\n sample_id_ix = header.index(\"SampleID\")\r\n bc_ix = header.index(\"BarcodeSequence\")\r\n if added_demultiplex_field:\r\n added_demultiplex_ix = header.index(added_demultiplex_field)\r\n\r\n ids_bcs_added_field = {}\r\n\r\n for line in mapping_data:\r\n\r\n if barcode_type == 0:\r\n curr_bc = ''\r\n else:\r\n curr_bc = line[bc_ix]\r\n if added_demultiplex_field:\r\n curr_added_field = line[added_demultiplex_ix]\r\n else:\r\n curr_added_field = ''\r\n\r\n ids_bcs_added_field[(upper(curr_bc), curr_added_field)] =\\\r\n line[sample_id_ix]\r\n\r\n return ids_bcs_added_field", "def format_data_perm(outfile, zipcodes, encode, fitFile=\"win40.csv\", window=40, fold=1, rep=10):\n seed(2012)\n span = window/2 ## define half the window size\n pars_data = import_pars()\n genome = map2genome()\n output = open(outfile, 'w')\n\n probv, probs = 0, 0\n if \"ProbVS\" in encode:\n probv = read_table(fitFile, \"pbv\")\n probs = read_table(fitFile, \"pbs\")\n\n ## headers\n output.write(\"label,gene,pos\")\n if \"SeqIndex\" in encode:\n for j in xrange(-span, span):\n output.write(\",char%s\"%j)\n if \"SeqBinary\" in encode:\n for j in xrange(-span, span):\n output.write(\",A%s,U%s,C%s,G%s\"%(j,j,j,j))\n if \"SeqGC\" in encode:\n output.write(\",GC,AU\")\n if \"SeqDiNu\" in encode:\n for nu1 in ['A','U','C','G']:\n for nu2 in ['A','U','C','G']:\n output.write(\",%s%s\"%(nu1, nu2))\n if \"SeqRatio\" in encode:\n output.write(\",A,U,C,G\")\n if \"PredSS3\" in encode:\n for j in xrange(-span, span):\n output.write(\",SpL%s,SpR%s,SpU%s\"%(j,j,j))\n if \"PredSS2\" in encode:\n for j in xrange(-span, span):\n 
output.write(\",pP%s,pU%s\"%(j,j))\n if \"PARS\" in encode:\n for j in xrange(-span, span):\n output.write(\",pars%s\"%j)\n if \"PARS2\" in encode:\n for j in xrange(-span, span):\n output.write(\",pars2%s\"%j)\n if \"LogVS\" in encode:\n for j in xrange(-span, span):\n output.write(\",lV%s,lS%s\"%(j,j))\n if \"ProbVS\" in encode:\n for j in xrange(-span, span):\n output.write(\",pV%s,pS%s\"%(j,j))\n output.write(\"\\n\")\n\n data_size = 0\n for gene, zipcode, region in zipcodes:\n lens = get3length(genome[gene])\n pars_gene = pars_data[gene]\n seq = pars_gene[\"FOLD_SEQ\"]\n ss = pars_gene[\"FOLD_SS\"]\n lr = ss_to_lr(ss)\n prob_l = pars_gene[\"FOLD_PROB_L\"]\n prob_r = pars_gene[\"FOLD_PROB_R\"]\n score = pars_gene[\"PARS\"]\n v1 = pars_gene[\"V1\"]\n s1 = pars_gene[\"S1\"]\n\n split_name = gene\n if fold > 1:\n split_name = \"fold_%s\"%(choice(range(fold))+1)\n else:\n split_name = \"%s-%s\"%(gene,zipcode)\n\n pv, ps = 0, 0\n if \"ProbVS\" in encode:\n pv = [float(val) for val in probv[gene]]\n ps = [float(val) for val in probs[gene]]\n\n ## the index of region is begin with 1 and close on both end\n region_begin, region_end = [int(val)+lens[0] for val in region.split('~')]\n print gene, zipcode, region_begin, region_end, len(seq)==sum(lens)\n for i in xrange(region_begin-1, region_end):\n if i < span or i >= len(seq) - span:\n continue\n ## region [RL, i, RR); span [WL, i, WR)\n RL = region_begin - 1; RR = region_end\n WL = i - span; WR = i + span\n if RL <= i and i <= RR:\n label = 1\n else:\n label = -1\n ele_list = [label, split_name, i+1]\n\n ## permuate `rep' times to generate negative set\n neg_list = [[-1, split_name, i+1] for k in xrange(rep)]\n neg_idx = [sample(range(WL,WR),WR-WL) for k in xrange(rep)]\n\n if \"SeqIndex\" in encode:\n for j in xrange(WL, WR):\n ele_list.append(\"ACGU\".find(seq[j]) + 1) ## return index\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].append(\"ACGU\".find(seq[j]) + 1) ## return index\n if \"SeqBinary\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([int(seq[j]=='A'), int(seq[j]=='U'), int(seq[j]=='C'), int(seq[j]=='G')])\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].extend([int(seq[j]=='A'), int(seq[j]=='U'), int(seq[j]=='C'), int(seq[j]=='G')])\n if \"SeqGC\" in encode:\n ele_list.append((seq.count('G',WL,WR)+seq.count('C',WL,WR))/float(window))\n ele_list.append((seq.count('A',WL,WR)+seq.count('U',WL,WR))/float(window))\n if \"SeqDiNu\" in encode:\n for nu1 in ['A','U','C','G']:\n for nu2 in ['A','U','C','G']:\n ele_list.append(sum([int(seq[i]==nu1 and seq[i+1]==nu2)\n for i in xrange(WL,WR-1)])/float(window-1))\n if \"SeqRatio\" in encode:\n for nu in ['A','U','C','G']:\n ele_list.append(seq.count(nu,WL,WR)/float(window))\n for k in xrange(rep):\n neg_list[k].extend(ele_list[-4:])\n if \"PredSS3\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([prob_l[j], prob_r[j], (1-prob_l[j]-prob_r[j])])\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].extend([prob_l[j], prob_r[j], (1-prob_l[j]-prob_r[j])])\n if \"PredSS2\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([int(ss[j]!='.'), int(ss[j]=='.')])\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].extend([int(ss[j]!='.'), int(ss[j]=='.')])\n if \"PARS\" in encode:\n for j in xrange(WL, WR):\n ele_list.append((score[j]+7)/14.0) ## normalize\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].append((score[j]+7)/14.0) ## normalize\n if \"PARS2\" in encode:\n for j in xrange(WL, WR):\n 
ele_list.append((score[j])**2/49.0) ## normalize\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].append((score[j])**2/49.0) ## normalize\n if \"LogVS\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([math.log(v1[j]+1,2), math.log(s1[j]+1,2)])\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].extend([math.log(v1[j]+1,2), math.log(s1[j]+1,2)])\n if \"ProbVS\" in encode:\n for j in xrange(WL, WR):\n ele_list.extend([pv[j], ps[j]])\n for k in xrange(rep):\n for j in neg_idx[k]:\n neg_list[k].extend([pv[j], ps[j]])\n output.write(\",\".join([str(ele) for ele in ele_list])+\"\\n\")\n for k in xrange(rep):\n output.write(\",\".join([str(ele) for ele in neg_list[k]])+\"\\n\")\n data_size += 2\n output.close()\n return data_size", "def order_ideal(self, gens):", "def print_rep_seqs(mapping, seqs, out_fp):\r\n out_fh = open(out_fp + \"/prefix_dereplicated.fasta\", \"w\")\r\n for s in (get_representatives(mapping, seqs.iteritems())):\r\n out_fh.write(s.to_fasta())\r\n out_fh.close()", "def gen_ep_data(self,ntrials,trlen):\n ## instruction\n # for each trial, generate random instruction encoding sequence\n i_encoding_input = np.array([\n np.random.permutation(np.arange(1,self.nmaps+1)) \n for i in range(ntrials)\n ])\n i_test_input = np.zeros([ntrials,trlen])\n i_input = np.concatenate([\n i_encoding_input,i_test_input],\n 1).astype(int).reshape(-1) # (ntrials,trlen+)\n ## stimulus\n x_encoding_input = i_encoding_input\n x_test_input = np.random.randint(1,self.nmaps+1,[ntrials,trlen])\n x_input = np.concatenate([i_encoding_input,x_test_input],1)\n ''' \n embed x_input: \n [ntrials,nmaps+trlen] -> s_input [ntrials*(nmaps+trlen),edim]\n explicit loop required for flatten and embedd x_input\n because if switchmaps=1, matrix is resorted between trials\n and therefore same stimulus token integers correspond to\n different stimulus embeddings on different trials\n '''\n s_input = -np.ones([ntrials,(self.nmaps+trlen),self.stimdim])\n for trialn,x_input_trial in enumerate(x_input): \n if self.switchmaps: self.resort_emat()\n s_input[trialn] = self.emat[x_input_trial]\n \n # format output\n i_input = tr.unsqueeze(tr.LongTensor(i_input),1)\n s_input = tr.unsqueeze(tr.Tensor(np.concatenate(s_input)),1)\n yseq = tr.unsqueeze(tr.LongTensor(x_input.reshape(-1)),1)\n if return_trial_flag:\n tr_flag = np.concatenate([i*np.ones(self.nmaps+trlen) for i in range(ntrials)])\n tr_flag = tr.unsqueeze(tr.LongTensor(tr_flag),1)\n return tr_flag,i_input,s_input,yseq,\n else:\n return i_input,s_input,yseq", "def identify_expressed_gRNA_families(gRNAs, mRNAs, init_seq_len):\n gRNA_families = {'family_no':[], 'family_end':[], 'family_id':[]}\n strand_name = {'coding':'', 'template':'t'}\n index = []\n\n gRNAs['gene_mRNA_end'] = gRNAs['mRNA_end']+gRNAs['rel_pos'].apply(lambda x: 0 if x is pd.NA else x)\n gRNAs['gene_mRNA_end'] = gRNAs['gene_mRNA_end'].astype('Int32')\n gRNAs['tmp'] = gRNAs.apply(lambda x: x['cassette_label']+strand_name[x['strand']], axis=1)\n\n for mRNA_name, mRNA in sorted(mRNAs.items()):\n # get all gRNAs with an init_pos for this mRNA\n # nonexpressed gRNAs can be in an editing group if they have a init_seq. 
this is because\n # they have transcripts in the init_position but not enough to be called expressed\n # gRNAs without an init_seq have no transcripts within the initiation site\n # these are added to a group below\n mask1 = gRNAs['mRNA_name'] == mRNA_name\n mask2 = gRNAs['init_seq'].notnull()\n g = gRNAs[mask1 & mask2]\n\n # positions where the start of expressed gRNAs align to mRNA\n a = np.zeros(mRNA['length']+100)\n i = np.array(g['gene_mRNA_end']-1, dtype=int)\n for ii in range(init_seq_len):\n a[i-ii] = 1\n a = ''.join([str(int(i)) for i in a])\n g_end = 'gene_mRNA_end'\n\n tmp_g = []\n family_no = 0\n\n # find regions where groups of gRNAs anchor to mRNA starting from 3' end of edited mRNA\n for m in re.finditer('1+', a):\n s, e = m.start(0), m.end(0)\n # get all gRNAs that anchor at this region\n anchor_group = g[(g[g_end] >= s) & (g[g_end] <= e)]\n\n if len(anchor_group) == 0:\n continue\n\n # for each cassette position of these gRNAs create a dictionary of cassette position and editing position\n cas_pos = {}\n for _, gRNA in anchor_group.iterrows():\n pos = gRNA['tmp']\n if pos not in cas_pos:\n cas_pos[pos] = gRNA[g_end]\n cas_pos[pos] = max(gRNA[g_end], cas_pos[pos])\n\n # group gRNAs with the same cassette position ordered by editing position\n for pos, end in sorted(cas_pos.items(), key=lambda kv: kv[1]):\n group = anchor_group.query('tmp == @pos')\n index.extend(group.index.values)\n gRNA_families['family_no'].extend([family_no]*len(group))\n gRNA_families['family_end'].extend([end]*len(group))\n gRNA_families['family_id'].extend([f'{mRNA_name}-{pos}-{int(end)}']*len(group))\n tmp_g.append((family_no, end, f'{mRNA_name}-{pos}-{int(end)}'))\n family_no += 1\n\n # gRNAs without an init_seq\n mask2 = gRNAs['init_seq'].isnull()\n unknown = gRNAs[mask1 & mask2]\n # for each unknown gRNA\n for idx, gRNA in unknown.iterrows():\n # search for a group that ends just after mRNA_end of this unknown gRMA\n for f_no, gene_mRNA_end, family_id in sorted(tmp_g, key=itemgetter(1)):\n [g_mRNA_name, g_pos, g_end] = family_id.split('-')\n if g_mRNA_name == mRNA_name and gRNA['mRNA_end']-1 <= gene_mRNA_end and gRNA['cassette_label'] == g_pos:\n index.append(idx)\n gRNA_families['family_no'].append(f_no)\n gRNA_families['family_end'].append(gene_mRNA_end)\n gRNA_families['family_id'].append(f'{family_id}')\n break\n else:\n # no suitable gRNA found, so make a unique family for this non-expressed gRNA \n index.append(idx)\n gRNA_families['family_no'].append(family_no)\n gRNA_families['family_end'].append(gRNA['mRNA_end'])\n gRNA_families['family_id'].append(f'{mRNA_name}-{gRNA[\"cassette_label\"]}-{gRNA[\"mRNA_end\"]}')\n family_no += 1\n\n gRNAs = gRNAs.drop(['tmp'], axis=1)\n gRNAs = gRNAs.join(pd.DataFrame(gRNA_families, index=index))\n gRNAs['family_no'] = gRNAs['family_no'].astype('Int64')\n gRNAs['family_end'] = gRNAs['family_end'].astype('Int64')\n return gRNAs", "def test_ISM_suffix(self):\n conn, cursor = get_db_cursor()\n build = \"toy_build\"\n database = \"scratch/toy.db\"\n run_info = talon.init_run_info(database, build)\n talon.get_counters(database)\n\n edge_dict = init_refs.make_edge_dict(cursor)\n location_dict = init_refs.make_location_dict(build, cursor)\n transcript_dict = init_refs.make_transcript_dict(cursor, build)\n gene_starts = init_refs.make_gene_start_or_end_dict(cursor, build, \"start\")\n gene_ends = init_refs.make_gene_start_or_end_dict(cursor, build, \"end\")\n\n chrom = \"chr1\"\n strand = \"+\"\n positions = [ 500, 600, 900, 1000 ]\n edge_IDs = [4]\n vertex_IDs = 
[4, 5]\n v_novelty = [0, 0]\n\n all_matches = talon.search_for_ISM(edge_IDs, transcript_dict)\n gene_ID, transcript_ID, novelty, start_end_info = talon.process_ISM(chrom, \n positions, \n strand, edge_IDs,\n vertex_IDs, \n all_matches, \n transcript_dict,\n gene_starts, gene_ends, \n edge_dict, location_dict, \n run_info)\n\n correct_gene_ID = fetch_correct_ID(\"TG1\", \"gene\", cursor) \n\n assert gene_ID == correct_gene_ID\n assert start_end_info[\"vertex_IDs\"] == [3, 4, 5, 6]\n assert start_end_info[\"edge_IDs\"] == [3, 4, 5]\n assert start_end_info[\"start_novelty\"] == 0 # because the exon is known\n assert start_end_info[\"end_novelty\"] == 0\n assert transcript_dict[frozenset(start_end_info[\"edge_IDs\"])] != None\n conn.close()", "def write_exons(input_file, gtf_chrom_dict):\n\n dotexon = open(input_file + '.exon', 'w')\n if input_file[0:3] == 'ref':\n interval_best_matches = MatchingDicts.ref_best_matches\n dotexon.write(\"ExonID\\tChromosome\\tReference(Coordinates[strand]|Transcript[exon_number])\\tMatch_Type\\t\" +\n \"Query(Best_Match_Coordinates|Transcript[exon_number])\\tShared\\tBase_Difference\\tNotes\\n\")\n else:\n interval_best_matches = MatchingDicts.interval_best_matches\n dotexon.write(\"ExonID\\tChromosome\\tQuery(Coordinates[strand]|Transcript[exon_number])\\tMatch_Type\\t\" +\n \"Reference(Best_Match_Coordinates|Transcript[exon_number])\\tShared\\tBase_Difference\\tNotes\\n\")\n gtf_exons = {}\n for chrom in gtf_chrom_dict:\n for strand in gtf_chrom_dict[chrom]:\n len_after = len(gtf_exons) + len(gtf_chrom_dict[chrom][strand][1])\n gtf_exonc = gtf_exons.copy()\n gtf_exons.update(gtf_chrom_dict[chrom][strand][1])\n if len(gtf_exons) < len_after:\n print(\"Dictionary was OVERRITTEN\");\n ids = [(keyid, valid.id, valid.chrom, valid.strand, valid.begin, valid.end, \"next\", gtf_exonc[keyid].id, gtf_exonc[keyid].chrom, gtf_exonc[keyid].strand, gtf_exonc[keyid].begin, gtf_exonc[keyid].end) for keyid, valid in gtf_chrom_dict[chrom][strand][1].items() if keyid in gtf_exonc]\n print(ids)\n exit()\n for exon_id in sorted(gtf_exons):\n exon = gtf_exons[exon_id]\n cinter = Interval(exon.begin, exon.end, exon.gtf_interval)\n bests = interval_best_matches.get(cinter, None)\n # If a match (best match) was found write each match in .exon file\n if bests:\n for bintr, bval in bests.items():\n dotexon.write('{}\\t{}\\t{}-{}[{}]|{}[{}]\\t{}\\t{}-{}[{}]|{}\\t{}\\t({},{})\\t({})\\n'.format(\n exon_id, exon.chrom, cinter.begin, cinter.end - 1, cinter.data.strand, exon.transcript_id,\n cinter.data.transcriptIds[exon.transcript_id], bval[1], bintr.begin, bintr.end - 1,\n bintr.data.strand, '|'.join(['{}[{}]'.format(k, v) for k, v in bintr.data.transcriptIds.items()]),\n bval[0], bintr.begin - cinter.begin, cinter.end - bintr.end, NOTES[cinter.data.note]\n ))\n else:\n dotexon.write('{}\\t{}\\t{}-{}[{}]|{}[{}]\\tNovel\\t-\\t-\\t-\\t-\\n'.format(\n exon_id, exon.chrom, cinter.begin, cinter.end - 1, cinter.data.strand, exon.transcript_id,\n cinter.data.transcriptIds[exon.transcript_id]\n ))\n dotexon.close()" ]
[ "0.5696215", "0.5680898", "0.5666527", "0.55099", "0.54899204", "0.5471716", "0.5390018", "0.53788966", "0.53691655", "0.5336136", "0.533134", "0.5327106", "0.5315752", "0.5311932", "0.52991354", "0.5275869", "0.52664924", "0.5263865", "0.5257725", "0.5240358", "0.52117866", "0.52117014", "0.5200846", "0.51969147", "0.5186463", "0.51788986", "0.5177499", "0.51739126", "0.5158566", "0.51377827", "0.5119651", "0.5119612", "0.51143736", "0.5114319", "0.50892824", "0.5084778", "0.5078221", "0.50691956", "0.5061533", "0.50601196", "0.50505966", "0.5041351", "0.5039474", "0.50293976", "0.5025712", "0.5012891", "0.5008466", "0.50065356", "0.4999278", "0.49864262", "0.4975078", "0.49705136", "0.49677595", "0.4966175", "0.4962623", "0.49617845", "0.49617156", "0.49613082", "0.49611077", "0.4958771", "0.49467802", "0.49419504", "0.49360064", "0.4928192", "0.4927958", "0.4922701", "0.49196377", "0.4916733", "0.49088278", "0.48984554", "0.48869985", "0.488622", "0.48790067", "0.48774683", "0.48767424", "0.4870542", "0.4869029", "0.4853134", "0.48511752", "0.4851019", "0.4847011", "0.4840794", "0.48381582", "0.48370194", "0.48318142", "0.4830319", "0.48280558", "0.4826662", "0.4823158", "0.48191258", "0.48154023", "0.4814801", "0.480988", "0.48079246", "0.48047358", "0.48023865", "0.4801876", "0.48015502", "0.47990912", "0.47990906", "0.47923726" ]
0.0
-1
individual data must be appended in same order as genos are being added
def joinInds(r1,r2,outfname):
    outf = open(outfname,'w')
    f1 = file(r1,'r')
    f2 = file(r2,'r')
    for row1 in f1:
        outf.write('%s\n' % (row1.strip()))
    for row1 in f2:
        outf.write('%s\n' % (row1.strip()))
    outf.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def apilar(self,dato):\r\n\t\tself.elementos.append(dato)\r\n\t\tself.len += 1", "def concatenate_data():", "def append(self, batch: Batch):", "def append(self, data):\n if self._expand_mode:\n new_keys = set(data.keys()) - self.keys & self._keys\n self._expand(new_keys)\n self._cubas = None\n self._append(data)\n self._number_of_items += 1", "def _genotype_updated(self):\n if self.data.get(\"GT\", None) is None:\n self.gt_alleles = None\n self.called = None\n self.ploidy = None\n else:\n self.gt_alleles = []\n for allele in ALLELE_DELIM.split(str(self.data[\"GT\"])):\n if allele == \".\":\n self.gt_alleles.append(None)\n else:\n self.gt_alleles.append(int(allele))\n self.called = all([al is not None for al in self.gt_alleles])\n self.ploidy = len(self.gt_alleles)", "def append(self, data):\n self.data_list.append(data)", "def _add_data_to_model(self, qinfos):\n if len(qinfos) == 0:\n return\n new_points = np.empty((0, self.domain_dim))\n new_vals = np.empty(0)\n for i in range(len(qinfos)):\n new_points = np.concatenate((new_points,\n qinfos[i].point.reshape(-1, self.domain_dim)), axis=0)\n new_vals = np.append(new_vals, [qinfos[i].val], axis=0)\n if self.gp is not None:\n self.gp.add_data(new_points, new_vals)", "def store_in_bag(self, data):\n # timestamp is (s, nanos): data[\"ts\"], data[\"tnanos\"]\n\n self.bag.add(data)\n\n # Ensure that all data have the same timestamp and are not None\n # Also there can't be more than a sample per second.\n if self.bag.is_full():\n if random() > 0.99999:\n print(\"Telemetry data: \", data[\"topic\"])\n print(\"Bag data: \", self.bag.print_data())\n\n # Then flush the data to process it and empty the bag\n data = self.bag.get_data()\n self.on_full(data)", "def push(self, data: Dict[str, np.ndarray]) -> None:\n for key, value in data.items():\n self.data[key].extend(value)\n\n if self._keys is None:\n self._keys = list(self.data.keys())", "def handle_data(self, data):\r\n self.fed.append(data)", "def insert_data(self):\n\n pass", "def _add_transform_genes(self):\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # 'MA' backshift (q)\n self._loci_list += ['AR_order', 'I_order', 'MA_order']", "def add_gene_ids(self, genes_list):\n orig_num_genes = len(self.genes)\n\n for g in list(set(genes_list)):\n if not self.genes.has_id(g):\n new_gene = GenePro(id=g, pdb_file_type=self.pdb_file_type, root_dir=self.genes_dir)\n if self.model:\n self.model.genes.append(new_gene)\n else:\n self.genes.append(new_gene)\n\n log.info('Added {} genes to GEM-PRO project'.format(len(self.genes)-orig_num_genes))", "def new_sample(self):\n\n self.u_seq.append([])\n self.r_exp.append(0)", "def __treeview_append(self, newbuffer):\n\t\tnew_iter = None\r\n\t\tfor data in newbuffer:\r\n\t\t\tdev = self.dev_dict.get(data[1], self.dev_dict.get(data[2], \"?\"))\t# try to get device/adapter\r\n\t\t\turl = data[0].replace('\\\\','')\t\t\t\t\t\t# remove Backslashes from link/url (filter 1)\n\t\t\turlinfo = urlparse.urlparse(url)\t\t\t\t\t# get protocol\n\t\t\tproto, port = urlinfo.scheme, urlinfo.port\t\t\t\t# \n\t\t\tdata = (self.capture_index, url, proto, dev)\t\t\t\t# create enhanced data\n\t\t\tself.capture_index += 1\t\t\t\t\t\t\t# increase capture index\n\t\t\tdup = (self.capture_last == data[1:])\t\t\t\t\t# is this a duplicate of last entry?\n\t\t\tself.capture_last = data[1:]\t\t\t\t\t\t# store this entry for next duplicate 
check\n\t\t\tif self.settings[\"del_dups\"] and dup: continue\t\t\t\t# if option set; skip adding of duplicates\n\t\t\tnew_iter = self.model1.append( (data,'#888888',dup,False,) + data[0:] )\t# add data (hidden + columns) as new row", "def _add_transform_genes(self):\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # 'MA' backshift (q)\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # Seasonal 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # Seasonal 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # Seasonal 'MA' backshift (q)\n self._loci_list += ['AR_order', 'I_order', 'MA_order',\n 'ssn_AR_order', 'ssn_I_order', 'ssn_MA_order']", "def __iadd__(self, population):\n self.chromosome_list += (to_chromosome(chromosome) for chromosome in population)", "def _save_data(self):\n super()._save_data()\n if self.data:\n self.state['inserted_elements'] = len(SeaLevelRiseMeasure.objects.bulk_create(self.data))\n self.logger.info('Successfully saved %d elements.' % self.state['inserted_elements'])\n else:\n self.logger.info('No elements were saved because no elements were available.')\n self.data = None", "def mutate_append(self, gene):\n gene.chromosome.append(self.tactics.mutate_select())", "def addData(self,data):\n\t\tif isinstance(data,list):\n\t\t\tif isinstance(data[0],dict):\n\t\t\t\tself.data.extend(data)\n\t\t\telif isinstance(data[0],list):\t\n\t\t\t\tfor r in data:\n\t\t\t\t\tacc= dict()\n\t\t\t\t\tfor h in self.header:\n\t\t\t\t\t\tacc[h]=r[self.header.index(h)]\t\n\t\t\t\t\tself.data.append(acc) \n\t\t\telse:\n\t\t\t\tself.data.append(dict(zip(self.header,data)))\n\t\telif isinstance(data,dict):\n\t\t\tself.data.append(data)\n\t\telse:\n\t\t\traise datatools.WrongTypeError(data)", "def insert(self, data):\r\n pass", "def gstruc_add(tstr):\n\n # data: large list of data, one element per position (with padding)\n # count: the number of elements in DATA we're using currently, also\n # the index of the next one to start with.\n # x/y: the x/y position for the gaussians\n\n global BTRACK, GSTRUC, NPIX\n\n if type(tstr) is list:\n print('list input')\n import pdb; pdb.set_trace()\n \n # Add new elements\n if len(tstr)+GSTRUC['count'] > len(GSTRUC['x']):\n print('Adding more elements to GSTRUC')\n for n in ['x','y']:\n GSTRUC[n] = np.hstack((GSTRUC[n],np.zeros(100000,int)-1))\n # Stuff in the new data\n count = GSTRUC['count']\n GSTRUC['data'].append(tstr)\n GSTRUC['x'][count] = tstr['x']\n GSTRUC['y'][count] = tstr['y']\n GSTRUC['ngauss'] += len(tstr['par'])//3\n GSTRUC['count'] += 1\n \n if type(GSTRUC['data'][-1]) is list:\n print('problem')\n import pdb; pdb.set_trace()", "def _add_population(self, info, index, population):\n info[index] = [self._representation.decode(item) for\n item in population]", "def append(self, data):\n # Check to see if main_list is full\n if self.num_elements == len(self.main_list):\n # Increase size of main_list\n self._expand_main_list()\n\n # Add element to mains_list\n self.main_list[self.num_elements] = data\n\n # Increment num elements counter\n self.num_elements += 1", "def add_building_output_locations(self,dictionary, start,end,step): \n \"\"\"\n Given a dictionary of building footprints and associated nodes,element and sides, add the values \n to the netcdf grid file.\n \n The nodes, elements and sides associated with each footprint correspond to 
the there index in the RiCOM grid file\n \n Dictionary format:\n {id1: {'nodes': [n1, n2,...nn] }, {'elements': [e1,e2,...,en] },{'sides': [s1,s2,...,sn]}, id2: {}, id3 {}, ...., idn {} } \n \n idn = the id of the building footprint that the node, elements and sides belong to\n \n \"\"\"\n \n if (dictionary != {}):\n maxNodes = 0\n maxElements = 0\n maxSides = 0\n nodesAll = []\n elementsAll = []\n sidesAll = []\n id = []\n perimeter = []\n type = []\n for row in dictionary.iteritems(): \n id.append(row[0]) \n n = row[1]['nodes'] \n e = row[1]['elements']\n s = row[1]['sides']\n perimeter.append(row[1]['perimeter'])\n \n if row[1]['type'] == \"BUILDINGS_AS_HOLES\":\n typeNUM = 1\n elif row[1]['type'] == \"BUILDINGS_GRIDDED\":\n typeNUM = 2\n\n elif row[1]['type'] == \"BUILDINGS_AS_POINTS\":\n typeNUM = 3\n else:\n typeNUM = 0\n type.append(typeNUM)\n \n nodesAll.extend(n)\n elementsAll.extend(e)\n sidesAll.extend(s)\n if maxNodes < len(n): maxNodes = len(n)\n if maxElements < len(e): maxElements = len(e)\n if maxSides < len(s): maxSides = len(s)\n \n \n #remove repeated elements, sides and nodes\n nodesAll = list(set(nodesAll))\n elementsAll = list(set(elementsAll))\n sidesAll = list(set(sidesAll))\n \n print \"# elements = %s\" % len(elementsAll)\n print \"# sides = %s\" % len(sidesAll)\n print \"# nodes = %s\" % len(nodesAll)\n\n \n #initialise arrays for entry into netcdf file\n nodes = zeros((len(dictionary),maxNodes))\n elements = zeros((len(dictionary),maxElements))\n sides = zeros((len(dictionary),maxSides)) \n \n i = 0\n for row in dictionary.iteritems(): \n nodes[i,0:(len(row[1]['nodes']))] = row[1]['nodes']\n elements[i,0:(len(row[1]['elements']))] = row[1]['elements']\n sides[i,0:(len(row[1]['sides']))] = row[1]['sides']\n i+=1 \n \n #create dimensions\n try: self.buildings.createDimension('max_number_nodes',maxNodes)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.buildings.createDimension('max_number_elements',maxElements)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.buildings.createDimension('max_number_sides',maxSides)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.buildings.createDimension('number_of_buildings',len(dictionary))\n except Exception, e: print \"WARNING: %s\" % e \n try: self.building_nodes.createDimension('number_of_nodes',len(nodesAll))\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_elements.createDimension('number_of_elements',len(elementsAll))\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_sides.createDimension('number_of_sides',len(sidesAll))\n except Exception, e: print \"WARNING: %s\" % e\n \n \n #create variables\n try: building_id = self.buildings.createVariable(varname = 'building_id',datatype = 'i', dimensions=('number_of_buildings',)) \n except Exception, e:\n building_id = self.buildings.variables['building_id']\n print \"WARNING: %s\" % e\n \n try: building_wkt = self.buildings.createVariable(varname = 'building_wkt',datatype = str, dimensions=('number_of_buildings',)) \n except Exception, e:\n building_wkt = self.buildings.variables['building_wkt'] \n print \"WARNING: %s\" % e\n\n try: building_perimeter = self.buildings.createVariable(varname = 'building_perimeter',datatype = 'd', dimensions=('number_of_buildings',)) \n except Exception, e:\n building_perimeter = self.buildings.variables['building_perimeter'] \n print \"WARNING: %s\" % e\n\n\n try: building_type = self.buildings.createVariable(varname = 'building_type',datatype = 'i', 
dimensions=('number_of_buildings',)) \n except Exception, e:\n building_type = self.buildings.variables['building_type'] \n print \"WARNING: %s\" % e\n\n try: building_nodes = self.buildings.createVariable(varname = 'building_nodes',datatype = 'i', dimensions=('number_of_buildings','max_number_nodes',)) \n except Exception, e:\n building_nodes = self.buildings.variables['building_nodes'] \n print \"WARNING: %s\" % e\n \n try: building_elements = self.buildings.createVariable(varname = 'building_elements',datatype = 'i', dimensions=('number_of_buildings','max_number_elements',)) \n except Exception, e:\n building_elements = self.buildings.variables['building_elements']\n print \"WARNING: %s\" % e\n \n try: building_sides = self.buildings.createVariable(varname = 'building_sides',datatype = 'i', dimensions=('number_of_buildings','max_number_sides',)) \n except Exception, e:\n building_sides = self.buildings.variables['building_sides']\n print \"WARNING: %s\" % e\n \n building_nodes[:] = nodes\n building_elements[:] = elements\n building_sides[:] = sides\n building_id[:] = array(id) \n building_perimeter[:] = array(perimeter)\n building_type[:] = array(type)\n #Set the attributes\n self.building_nodes.start = start\n self.building_nodes.finish = end\n self.building_nodes.step = step\n self.building_elements.start = start\n self.building_elements.finish = end\n self.building_elements.step = step\n self.building_sides.start = start\n self.building_sides.finish = end\n self.building_sides.step = step\n \n #assign the data\n output_ids = {'nodes': [], 'elements': [], 'sides': []}\n try: output_ids['nodes'] = self.building_nodes.createVariable(varname = 'id',datatype = 'i', dimensions=('number_of_nodes',))\n except Exception, e:\n output_ids['nodes'] = self.building_nodes.variables['id']\n print \"WARNING: %s\" % e\n try: output_ids['elements'] = self.building_elements.createVariable(varname = 'id',datatype = 'i', dimensions=('number_of_elements',))\n except Exception, e:\n output_ids['elements'] = self.building_elements.variables['id']\n print \"WARNING: %s\" % e\n try: output_ids['sides'] = self.building_sides.createVariable(varname = 'id',datatype = 'i', dimensions=('number_of_sides',))\n except Exception, e:\n output_ids['sides'] = self.building_sides.variables['id']\n print \"WARNING: %s\" % e\n \n \n output_ids['nodes'][:] = array(nodesAll)\n output_ids['elements'][:] = array(elementsAll)\n output_ids['sides'][:] = array(sidesAll)\n \n \n self.buildingsAdded = True\n else:\n #create dimensions\n try: self.buildings.createDimension('number_of_buildings',0)\n except Exception, e: print \"WARNING: %s\" % e \n try: self.building_nodes.createDimension('number_of_nodes',0)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_elements.createDimension('number_of_elements',0)\n except Exception, e: print \"WARNING: %s\" % e\n try: self.building_sides.createDimension('number_of_sides',0)\n except Exception, e: print \"WARNING: %s\" % e \n self.buildingsAdded = True", "def addRG(in_files,args):\n #define readgroup header lines by combining the following\n\n \"\"\"\n -\n read group\n ID*\n Unique read group identifier. The value of the ID field is used in the RG tags of alignment records.\n SM*\n Sample (use pool name where a pool is being sequenced)\n LB\n Library\n DS\n Description\n PU\n Platform unit (e.g. 
lane for Illumina or slide for SOLiD); should be a full, unambiguous identifier\n PI\n Predicted median insert size (maybe different from the actual median insert size)\n CN\n Name of sequencing center producing the read.\n DT\n Date the run was produced (ISO 8601 date or date/time).\n PL\n Platform/technology used to produce the read.\"\"\"\n\n with open(args.barcodes,'r') as barcodes:\n sam_out= open(in_files['header'],'a')\n header = barcodes.readline().split('\\t')\n for line in barcodes:\n RG = ['@RG']\n split_line = line.split('\\t')\n if args.species and 'Species' in header:\n if split_line[(header.index('Species'))] != args.species:\n continue\n fc = split_line[(header.index('Flowcell'))]\n lane = split_line[(header.index('Lane'))]\n sample = split_line[(header.index('Sample'))]\n RG.append('ID:%s_%s_%s'%(fc,lane,sample))\n RG.append('SM:%s'%(sample))\n RG.append('LB:%s_%s'%(fc,sample))\n RG.append('PL:ILLUMINA\\n')\n sam_out.write('\\t'.join(RG))\n sam_out.close()\n return in_files", "def _simulation_data_iterator(res, path, data):\n res.append(data)", "def store_data(self, data):\n self.data.append(data)", "def handle_data(self, data):\n if len(self.current_tags) > 0:\n self.current_tags[-1].add_data(data)", "def append_data(self, data):\n self.results.append(data)", "def _add_transform_genes(self):\n pass", "def ingest_many(self, data):\n raise NotImplementedError()", "def add(self, newdata):\n # check whether we have too much data and remove the excess\n if self.maxStored > 0:\n n = len(self.data[0]) - self.maxStored\n if n >= 0:\n for j in range(len(self.data)):\n tmp = self.data[j][n+1:]\n self.data[j] = tmp\n # add the new data\n for j in range(len(self.data)):\n self.data[j].append(newdata[j])", "def append(self, dato):\n\t\tself.insert(self.len, dato)", "def add_gene(self, human_gene, ortholog):\n if human_gene not in self.genes:\n self.genes[human_gene] = list()\n self.genes[human_gene].append(ortholog)", "def add_data(self, data):\n self.data = self.data + data", "def position_append(self, pos, gtid):\n return None", "def add_data(self, data: List[dict]):\n raise NotImplementedError()", "def update (self) :\n for met in self.gene :\n met(self)", "def test_add():\n data = io.create_sample_Dataset()\n tmp = data + data\n assert tmp[\"u\"][0, 0, 0] == 2.0", "def append(self, featureName, featureData):\n self.data = np.concatenate((self.data, np.array([featureData]).T), axis=1)\n self.featureNames = np.append(self.featureNames, featureName)\n return 0", "def insert_good_data():\n get_file_reply(files[0][0], files[0][1])\n get_file_reply(files[1][0], files[1][1])", "def addData(self,data,x,y):\n self.nSamples+=1\n if self.nSamples == 1:\n self.indata.append(data)\n (self.ny,self.nx)=data.shape\n self.ny=int(data.shape[0])\n self.x=x\n self.y=y\n self.lx=self.x[-1:][0]\n self.ly=self.y[-1:][0]\n\n\n else:\n if data.shape == self.indata[0].shape and x.all() == self.x.all() and y.all() == self.y.all():\n self.indata.append(data)\n else:\n logging.warning('Inconsistent data input!')\n logging.warning(\"Check data shape and X,Y sampling!\")", "def test_append(self):\n self.table.append(['Tom', 26])", "def addDataTo(self, other_sim_data):\n #---+----|----+----|----+----|----+----|----+----|----+----|----+----|\n TreeLikelihoodBase.addDataTo(self, other_sim_data)", "def append_data(self, state, action, reward, next_state, finish): \r\n if len(self.data) < self.max_data:\r\n # if the list isn't already full we simply push the result \r\n # in the end of the list\r\n 
self.data.append(tuple((state, action, reward,\r\n next_state, finish)))\r\n else:\r\n # otherwise we replace the data at the position \"pos\" of the list\r\n self.data[self.pos] = tuple((state, action, reward, next_state, finish))\r\n self.pos += 1\r\n self.pos %= self.max_data # when we arrive at the end of the list we restart at the beginning of the list\r\n # like that the old result is deleted from the list.\r", "def append(self, S, action, S_new, reward, done, prob):\n self.d['S'].append(S)\n self.d['action'].append(action)\n self.d['S_new'].append(S_new)\n self.d['reward'].append(reward)\n self.d['done'].append(1 if done else 0) \n self.d['prob'].append(prob)", "def _insertAllSteps(self): \n self.uMics = self.inputCoordinatesTiltedPairs.get().getUntilted().getMicrographs()\n self.tMics = self.inputCoordinatesTiltedPairs.get().getTilted().getMicrographs()\n\n self.inputMics = self._createSetOfParticles('auxMics')\n self.inputMics.copyInfo(self.uMics)\n self.inputMics.setStore(False)\n \n for micU, micT in izip(self.uMics, self.tMics):\n micU.cleanObjId()\n micT.cleanObjId()\n self.inputMics.append(micU)\n self.inputMics.append(micT)\n\n self.samplingInput = self.uMics.getSamplingRate()\n \n\n if self.downsampleType.get() != OTHER:\n # If 'same as picking' or 'original' get sampling rate from input micrographs\n #TODO: Review this when downsampling before picking is possible\n self.samplingFinal = self.samplingInput\n else:\n # If 'other' multiply the input sampling rate by the factor provided\n self.samplingFinal = self.samplingInput*self.downFactor.get()\n \n # Write pos files for each micrograph\n firstStepId = self._insertFunctionStep('writePosFilesStep')\n \n # For each micrograph insert the steps\n #run in parallel\n \n deps = []\n for mic in self.inputMics:\n localDeps = [firstStepId]\n micrographToExtract = mic.getFileName()\n micName = removeBaseExt(mic.getFileName())\n micId = mic.getObjId()\n\n # If downsample type is 'other' perform a downsample\n if self.downsampleType == OTHER:\n fnDownsampled = self._getTmpPath(micName+\"_downsampled.xmp\")\n downFactor = self.downFactor.get()\n args = \"-i %(micrographToExtract)s -o %(fnDownsampled)s --step %(downFactor)f --method fourier\"\n localDeps=[self._insertRunJobStep(\"xmipp_transform_downsample\", args % locals(),prerequisites=localDeps)]\n micrographToExtract = fnDownsampled\n \n # If remove dust \n if self.doRemoveDust:\n fnNoDust = self._getTmpPath(micName+\"_noDust.xmp\")\n \n thresholdDust = self.thresholdDust.get() #TODO: remove this extra variable\n args=\" -i %(micrographToExtract)s -o %(fnNoDust)s --bad_pixels outliers %(thresholdDust)f\"\n localDeps=[self._insertRunJobStep(\"xmipp_transform_filter\", args % locals(),prerequisites=localDeps)]\n micrographToExtract = fnNoDust\n \n #self._insertFunctionStep('getCTF', micId, micName, micrographToExtract)\n micName = removeBaseExt(mic.getFileName())\n \n # Actually extract\n deps.append(self._insertFunctionStep('extractParticlesStep', micId, micName, \n None, micrographToExtract, prerequisites=localDeps))\n # TODO: Delete temporary files\n \n # Insert step to create output objects \n self._insertFunctionStep('createOutputStep', prerequisites=deps)", "def gff3_parsed (gff3_file, sam_dic):\n\n #A special type of dictionary in which the values were saved in a list\n gff_dic = defaultdict(list)\n\n gff3_file = open(arg.gff3_infile)\n gff3_dic = {}\n\n gene_dic = {}\n exon_list = []\n gene_idx = 1\n\n counter_1 = 0\n counter_2 = 0\n counter_3 = 0\n counter_4 = 0\n counter_5 = 
0\n counter_6 = 0\n counter_7 = 0\n idx_pseudogene = 0\n\n #A dictionary\n gene_idexes = {\"gene\": gene_idx, \"exon\": gene_idx,\n \"pseudogene\": \"pseudogene\"}\n\n\n for line in gff3_file:\n if line.startswith(\"##\"):\n pass\n elif line.startswith(\"#!\"):\n pass\n else:\n line_information = line.strip().split()\n\n # Make a dic with the genes present on Gg genome and its anotattion\n if line_information[2] == (\"gene\"):\n # deal with the PREVIOUS gene\n #This peace of code add to the gff3_dic(the main dic of gff3 file)\n #the information of which are the exons of one particular gene\n #Note: this happends at the same time that the gene information\n #were parsed\n if exon_list:\n gff3_dic[gene_idx][\"exon_list\"] = exon_list\n gene_idx += 1\n\n exon_list = []\n #parse the gene information and add this information to a new dic (gff3_dic)\n #with all the information related to the genes present in gff3 file (Cg_Nara5)\n # deal with CURRENT gene\n scaffold = line_information [0]\n gene_beg = line_information[3]\n gene_end = line_information [4]\n gene_loc = [gene_beg, gene_end]\n gene_strand = line_information[6]\n gene_information = line_information [8]\n gene_information = line.strip().split(\";\")\n gene_description = [gene_information[2]]\n gff3_dic[gene_idx] = {\"scaffold\": scaffold,\n \"gene_range\": gene_loc,\n \"description\": gene_description,\n \"exon_list\": None,\n \"strand\": gene_strand}\n\n # Make a list with the exons-genes present on Gg genome and its anotattion\n # If in this line the \"gene\" keyword is not present but the \"exon\"\n #keyword are append the range information to the exon list which\n # will be added to main gff3 dic\n elif line_information[2] == (\"exon\"):\n exon_beg = line_information[3]\n exon_end = line_information [4]\n exon_loc = (exon_beg, exon_end)\n exon_list.append(exon_loc)\n\n exon_information = line_information [8]\n exon_information = line.strip().split()[8].split(\";\")[0]\n gff3_dic[gene_idx][\"exon_reference\"] = exon_information\n #At the same time - regardless the previous code if the line has\n #any of this keywords the information of the gene_range were added\n # to the gff_dic.\n if line_information[2] in [\"gene\", \"exon\", \"pseudogene\"]:\n\n gene_range = (line_information[3], line_information[4])\n\n #Note: this peace of code happends because the gene description\n #of the gene is not the same as the exon description. Therefore,\n #the gene description has to be recovered\n\n if line_information[2] == \"gene\":\n gene_information = line_information [8]\n gene_information = line.strip().split(\";\")\n gene_description = [gene_information[2]]\n\n # Example:\n # gff_dic[scaffold1] = [[1, \"gene\", (82, 1159), description],\n # 1, \"exon\", (82, 603), description],\n # 2, \"gene\", (1440, 4998), description\n # pseudogene_idx, pseudogene, (1999, 3000)]]\n\n #To keep only the information regardless gene_idx (gene index)\n #to the gene or the exons present in this gene. 
When I have\n #pseudogenes, the gene index is replaced for pseudogene\n if line_information[2] in [\"exon\", \"gene\"]:\n idx = gene_idx\n else:\n idx_pseudogene += 1\n idx = \"pseudogene_\"+ str(idx_pseudogene)\n\n #add the previous information in a different format in which\n #the key is the sacffold and the values are the index (to easly\n #acess the information present in gff3 dictionary), the keyword\n #(gene, exon, pseudogene), the range, and the description.\n #All these informations will be used to perfome the SNP range\n # discover only within the true scaffold and not in all the scaffolds\n #present in the gff3 file. Making the code mor efficient and realibel\n gff_dic[line_information[0]].append([idx,\n line_information[2],\n gene_range,\n gene_description])\n\n # Add last exon list to last gene index\\\n else:\n if exon_list:\n gff3_dic[gene_idx][\"exon_list\"] = exon_list\n\n print (\"Step 3a - Parse the .gff3 file -- Done\")\n\n\n for locus, info_dict in sam_dic.items():\n\n # Get all info from current scaffold\n # scaffold_info is a list containing all genes, exons and pseudogenes\n # of the scaffold in sam_dic\n\n scaffold_info = gff_dic[info_dict[\"scaffold\"]]\n #we create two different \"values\" in the sam_dic dictionary with the len\n #of the real snp location in which all the \"values\" begin with \"intergenic\" or None\n #and as we make the check codes this values will be replaced for new\n # values or will be remain like this\n\n info_dict[\"element_type\"] = [\"intergenic\"] * len(info_dict[\"real_snp_localization\"])\n info_dict[\"element_range\"] = [None] * len(info_dict[\"real_snp_localization\"])\n info_dict[\"gene_index\"] = \"intergenic\"\n\n # Check if locus is in any range\n # The enumerate function give the value of the \"value\" as well as the\n #position of the value. Example: l = [\"a\", \"b\", \"c\"]\n #enumerate (l) --- (0, \"a\"); (1, \"b\"); (2, \"c\")\n #pos - the position of the snp in the list\n #snp - is the real snp localization under analyse\n\n # Get the position of the snp in the list. 
This position will\n # be used to create a key for the gene_inf_dic.\n for pos, snp in enumerate(info_dict[\"real_snp_localization\"]):\n # The \"element\" is the several lists present in the gff_dic.\n #Note: all the lists regardless the type has exactly the same length.\n # Example : [10459, \"gene\", (\"18930\", \"23805\"), [\"description=LysM domain-containing protein\"]\n #So for each list we will check if the SNP is in the range\n for element in scaffold_info:\n element_beg = int(element[2][0])\n element_end = int(element[2][1])\n element_range= range(element_beg, element_end)\n\n\n # YAY, one of the SNP matches one element of the scaffold\n if snp in element_range:\n\n info_dict[\"gene_index\"] = element[0]\n\n # ELEMENT KEY:\n # \"exon\": The SNP is in a coding region\n # \"gene\": The SNP is in an intron\n # \"pseudogene\": The SNP is in a pseudogene\n info_dict[\"element_type\"][pos] = element[1]\n\n info_dict[\"element_range\"][pos] = element[2]\n\n info_dict[\"description\"] = element[3]\n\n\n\n #Get the main statistics from our dataset\n\n for locus, locus_info in sam_dic.items():\n\n element_type = locus_info[\"element_type\"]\n\n # Adding information for loci in a intergenic region\n #The set return an object with only 1 \"element\" in that case \"intergenic\"\n #So if the locus has 2 snps 1 in a intergenic region and other in a gene\n # this locus will not count as a intergenic locus, because the set will\n #have two elenets {\"intergenic\", \"gene\"} and not only 1 {\"intergenic\"}.\n #Note: The set works for each element_type present in sam_dic (loop)\n if set(element_type) == {\"intergenic\"}:\n counter_1 += 1\n\n # Adding information for SNPs in intergenic region\n #This counter gives the number of times the intergenic word appears\n counter_2 += element_type.count(\"intergenic\")\n\n # Adding information for loci in pseudogenes\n if \"pseudogene\" in element_type:\n counter_3 += 1\n\n #Adding information for SNPs in pseudogene\n counter_4 += element_type.count(\"pseudogene\")\n\n #Adding information for loci in genes\n #As previously refered the gene information were recorded in two different formats\n #gene- when the SNP were in a gene but not in a exon (aka intron)\n #exon - when the SNP were in a gene and in a specific exon\n #So in order to have the statistics for the gene we need to search\n #booth keywords on the element_type . 
Not in this particular case the set\n #doesn\"t work because the set don\"t has an order (gene, exon) or (exon, gene)\n\n if \"gene\" in element_type or \"exon\" in element_type:\n counter_5 += 1\n\n #Adding information for SNPs in gene\n\n counter_6 += element_type.count(\"exon\") + element_type.count(\"gene\")\n\n #Adding information for SNPs in exons\n\n counter_7 += element_type.count(\"exon\")\n\n\n\n print(\"Data resume:\")\n print(\"Number of loci in a non coding region: {}\".format(counter_1))\n print(\"Number of SNPs in a non coding region: {}\".format(counter_2))\n\n print(\"Number of loci located in pseudogenes:{}\".format(counter_3))\n print(\"Number of SNPs located in pseudogenes:{}\".format(counter_4))\n\n print(\"Number of loci located in genes: {}\".format(counter_5))\n print(\"Number of SNPs located in genes: {}\".format(counter_6))\n print(\"Number of SNPs located in exons: {}\".format(counter_7))\n\n\n\n# print(gff3_dic[6207])\n return (sam_dic, gff3_dic)", "def add(self, data):\n if self._filter(data):\n id = self.db._generate_id(data)\n \n if not id == None:\n if self.db._store:\n self.db.append(id, str(data))\n print id, \"stored to\", self.db._generate_path(id)\n else:\n print id\n print data.show2()", "def add_data(self, df):\n # TODO: improve merging code\n self.data = self.data.append(df, ignore_index=False)\n self.data = self.data[~self.data.index.duplicated(keep='first')]", "def record_data(step, peds, mat):\n for ped in peds:\n record = [\n step,\n ped.id,\n ped.x[1],\n ped.x[0],\n ped.v[1],\n ped.v[0],\n ped.f[1],\n ped.f[0],\n ped.kind\n ]\n mat.append(record)", "def gatherData(data,neat,gen,iter_i,hyp,savePop=False):\n data.gatherData(neat.pop, neat.species)\n\n if savePop is True: # Get a sample pop to play with in notebooks\n global fileName\n pref = output_dir + '/iter_{}'.format(iter_i) + '/gen_' + str(gen).zfill(4)\n import pickle\n with open(pref+'.obj', 'wb') as fp:\n pickle.dump(neat.pop,fp)\n\n return data", "def __generate_genotype(self):\n if len(self.genotype) < self.__individual_genotype_length:\n gene = ''\n \n while len(self.genotype) < self.__individual_genotype_length:\n gene = str(random.randint(0,1))\n \n self.genotype = self.genotype + gene", "def prepare_data_for_g(data, id2motifs, generator, discriminator):\n\n paths = []\n g_s_args = []\n for i in range(data.x.size(0)):\n if np.random.rand() < 1:\n g_s_args.append((i, n_sample, False))\n\n z = generator(data.x, data.total_edge_index)\n\n motifs, paths = sampling(g_s_args, z, data)\n # pdb.set_trace()\n # motifs = [j for i in motifs for j in i]\n \n '''\n row, col = data.total_edge_index\n\n x_j = torch.index_select(z, 0, row)\n x_i = torch.index_select(z, 0, col)\n one_hop = torch.einsum(\"ef,ef->ef\", x_i, x_j)\n\n '''\n\n '''\n motifs=[]\n for neg in negs:\n motifs.extend(neg)\n '''\n rewards = []\n \n rewards.append(reward_d(discriminator, data, motifs).tolist())\n rewards = np.concatenate(rewards)\n \n motifs, reward = shuffle(motifs, rewards)\n return motifs, reward", "def generate_genotype(self):\n genes = []\n for i in range(self.n_genes):\n genes.append(self.Gene(n_bases=self.n_bases))\n self.genes = genes", "def creator(self, q, data, num_sub_proc):\n for d in data:\n idx = d[0]\n q.put((idx, d[1]))\n\n for i in range(0, num_sub_proc):\n q.put('DONE')", "def create_samples(self):\n for s_id in range(len(self.data[\"sample\"])):\n self.samples.add(Sample(s_id, [self.data[key][s_id] for key in self.data.keys() if key not in WRONG_KEYS],\n self.data[\"label\"][s_id]))", "def 
append_to_csv(self):\n appended_data = pd.concat([self.existing_data, self.new_data], axis = 1)\n appended_data.to_csv(filename_main, index = False)\n warnings.warn(\"Add new graphs to .vsz files to show the new data\")", "def push_dynamical_merging_index(self):\n Total_dyn_mergindex = np.zeros((0,), dtype=np.float)\n ParType0_dyn_mergindex = np.zeros((0,), dtype=np.float)\n ParType1_dyn_mergindex = np.zeros((0,), dtype=np.float)\n ParType4_dyn_mergindex = np.zeros((0,), dtype=np.float)\n ParType5_dyn_mergindex = np.zeros((0,), dtype=np.float)\n\n for r in self.cluster.generate_apertures():\n part_dyn_mergindex_aperture = self.cluster.group_dynamical_merging_index(aperture_radius=r, \n out_allPartTypes=True)\n ParType0_dyn_mergindex = np.concatenate((ParType0_dyn_mergindex, [part_dyn_mergindex_aperture[0]]), axis=0)\n ParType1_dyn_mergindex = np.concatenate((ParType1_dyn_mergindex, [part_dyn_mergindex_aperture[1]]), axis=0)\n ParType4_dyn_mergindex = np.concatenate((ParType4_dyn_mergindex, [part_dyn_mergindex_aperture[2]]), axis=0)\n ParType5_dyn_mergindex = np.concatenate((ParType5_dyn_mergindex, [part_dyn_mergindex_aperture[3]]), axis=0)\n\n Total_dyn_mergindex_apertur = self.cluster.group_dynamical_merging_index(aperture_radius=r, \n out_allPartTypes=False)\n Total_dyn_mergindex = np.concatenate((Total_dyn_mergindex, [Total_dyn_mergindex_apertur]), axis=0)\n\n data = {'/Total_dyn_mergindex' : np.array(Total_dyn_mergindex),\n '/ParType0_dyn_mergindex': np.array(ParType0_dyn_mergindex),\n '/ParType1_dyn_mergindex': np.array(ParType1_dyn_mergindex),\n '/ParType4_dyn_mergindex': np.array(ParType4_dyn_mergindex),\n '/ParType5_dyn_mergindex': np.array(ParType5_dyn_mergindex)}\n\n attributes = {'Description': \"\"\"Datasets with the dynamical merging index of the cluster, calculated \n from particles within a specific aperture radius from the Centre of Potential. Individual datasets contain \n merging index information about each particle type separately, as well as one with combined total \n contribution.\n The dynamical merging index is computed according to the equation:\n dynamical_merging_index = || CoM(r) - CoP(r) || / r.\n \n Note: The particle type infomation combines the CoM calculated for every particle type and the \n overall CoP of the whole FoF cluster. I.e., the CoP is not computed in a particle type-wise manner. 
\n If in doubt, use the Total_dynindex dataset, which contains the dynamical merging index computed for \n all particle types within a given aperture.\n \"\"\",\n 'Units': '[None]'}\n\n out = FOFOutput(self.cluster, filename='dynamical_merging_index.hdf5', data=data, attrs=attributes)\n out.makefile()", "def append(self, inp: I):", "def add_reads(self, new_reads): \n if self.sampling:\n self.convert_to_list()\n self.reads.extend(new_reads)", "def add_gifti_data_array(self, dataarr):\n if not isinstance(dataarr, GiftiDataArray):\n raise TypeError(\"Not a valid GiftiDataArray instance\")\n self.darrays.append(dataarr)", "def append(self, value):\n if len(self.data) >= n:\n self.data.pop(0)\n self.data.append(value)", "def add_data(self, data, identifier):\n if self.Lock is False:\n if len(self.index) + 1 > self.max_idx_num:\n print(\n \"\"\"\n WARNING: Reached maximum number of indexed data blocks\n '({0}), cannot add any more data!\n \"\"\".format(\n self.max_idx_num\n )\n )\n return False\n\n if not self.first_header_set:\n self._write_gen_header(Index=True)\n self.first_header_set = True\n else:\n # do we need this?\n self._write_gen_header(Index=False)\n\n self.index[identifier] = self.file_out.tell()\n self._write_data(data)\n return\n else:\n raise Exception(\"Cant add any more data if index is already written\")", "def write_csv(filename, i, q):\n with open(os.path.join(\"Data\", filename), 'a', newline='') as csvfile:\n writ = csv.writer(csvfile)\n j = 0\n k = len(q)\n while j < k:\n l = q.popleft()\n tak = l[0]\n #puts most important/salient points of info for health/phenotype\n #genomes - ident for health genes, weight for phenotype genes -\n #into lists for output\n healthchr_a = []\n healthchr_b = []\n if isinstance(tak.genome, tg.health_genome):\n for a in tak.genome.healthchr_a:\n healthchr_a.append(a.ident)\n for b in tak.genome.healthchr_b:\n healthchr_b.append(b.ident)\n pref = None\n if isinstance(tak.genome, tg.phen_genome):\n pref = [tak.genome.phen_gene_a.weight,\n tak.genome.phen_gene_b.weight,\n tak.pref]\n #first generation has 'str' parents rather than agent parents\n if tak.gen != 0:\n parents0 = tak.parents[0].ident\n parents1 = tak.parents[1].ident\n else:\n parents0 = tak.parents[0]\n parents1 = tak.parents[1]\n writ.writerow([i, l[2], tak.ident, parents0, parents1,\n tak.age, tak.gen, len(tak.children),\n tak.mating_attempts, tak.accum_pain, tak.cod,\n l[1], tak.genome.mut_record, tak.parent_degree,\n tak.parent_genoverlap,\n (tak.genome.disorder_count if \\\n isinstance(tak.genome, tg.health_genome)\\\n else \"\"),\n healthchr_a, healthchr_b, pref])\n j += 1", "def add(self, data):\n if data.shape != self.shape:\n self.shape = data.shape\n if isinstance(self.child, vmedian):\n self.child.add(data)\n if (self.child.index == 0):\n self.buffer[self.index, :] = self.child.get(reshape=False)\n self.index = self.index + 1\n else:\n self.buffer[self.index, :] = np.ravel(data)\n self.index = self.index + 1\n\n if self.index == 3:\n self.index = 0\n self.initialized = True", "def _generate_rows(self):\n logger.debug(\"Generating pre-genealogical coherence data for %s\", self.w1)\n if not self.rows:\n for w2 in self.all_mss:\n if self.w1 == w2:\n continue\n self._add_row(w2)\n\n self._sort()\n logger.debug(\"Generated pre-genealogical coherence data for %s\", self.w1)", "def add_task(self, robot_id, data):\n if isinstance(self.all_buffers[robot_id], np.float): # mesto je prazno\n individual_buffer = np.vstack((self.buffer_array, data))\n self.all_buffers[robot_id] = 
individual_buffer\n else:\n individual_buffer = self.all_buffers[robot_id]\n individual_buffer = np.vstack((individual_buffer, data))\n self.all_buffers[robot_id] = individual_buffer", "def gen_tag_addlist(self) -> tp.List[xml.TagAddList]:\r\n if not self.tag_adds:\r\n robot_config = self.main_config['ros']['robots'][self.robot]\r\n prefix = robot_config['prefix']\r\n model_base = robot_config['model']\r\n model_variant = robot_config.get('model_variant', '')\r\n\r\n if model_variant != '':\r\n model = f\"{model_base}_{model_variant}\"\r\n else:\r\n model = model_base\r\n\r\n desc_cmd = f\"$(find xacro)/xacro $(find {model_base}_description)/urdf/{model}.urdf.xacro\"\r\n for s in self.sizes:\r\n exp_adds = xml.TagAddList()\r\n pos_i = random.randint(0, len(self.positions) - 1)\r\n\r\n exp_adds.append(xml.TagAdd(\".\",\r\n \"master\",\r\n {},\r\n True))\r\n exp_adds.append(xml.TagAdd(\"./master\",\r\n \"group\",\r\n {\r\n 'ns': 'sierra'\r\n },\r\n False))\r\n exp_adds.append(xml.TagAdd(\"./master/group/[@ns='sierra']\",\r\n \"param\",\r\n {\r\n 'name': 'experiment/n_robots',\r\n 'value': str(s)\r\n },\r\n False))\r\n\r\n for i in range(0, s):\r\n\r\n ns = f'{prefix}{i}'\r\n pos = self.positions[pos_i]\r\n pos_i = (pos_i + 1) % len(self.positions)\r\n spawn_cmd_args = f\"-urdf -model {model}_{ns} -x {pos.x} -y {pos.y} -z {pos.z} -param robot_description\"\r\n\r\n exp_adds.append(xml.TagAdd(\"./robot\",\r\n \"group\",\r\n {\r\n 'ns': ns\r\n },\r\n True))\r\n\r\n exp_adds.append(xml.TagAdd(f\"./robot/group/[@ns='{ns}']\",\r\n \"param\",\r\n {\r\n \"name\": \"tf_prefix\",\r\n \"value\": ns\r\n },\r\n True))\r\n\r\n # These two tag adds are OK to use because:\r\n #\r\n # - All robots in Gazebo are created using spawn_model\r\n # initially.\r\n #\r\n # - All robots in Gazebo will provide a robot description\r\n # .urdf.xacro per ROS naming conventions\r\n exp_adds.append(xml.TagAdd(f\"./robot/group/[@ns='{ns}']\",\r\n \"param\",\r\n {\r\n \"name\": \"robot_description\",\r\n \"command\": desc_cmd\r\n },\r\n True))\r\n\r\n exp_adds.append(xml.TagAdd(f\"./robot/group/[@ns='{ns}']\",\r\n \"node\",\r\n {\r\n \"name\": \"spawn_urdf\",\r\n \"pkg\": \"gazebo_ros\",\r\n \"type\": \"spawn_model\",\r\n \"args\": spawn_cmd_args\r\n },\r\n True))\r\n\r\n self.tag_adds.append(exp_adds)\r\n\r\n return self.tag_adds", "def _push(self):\n if len(self._stat_now):\n self._stat_now['epoch_num'] = self.epoch_num\n self._stat_now['global_step'] = self.global_step\n\n self._stats.append(self._stat_now)\n self._stat_now = {}\n self._write_stat()", "def add_to_db(self):\r\n for filename in self.new_data_files:\r\n unique_name = form_unique_name(filename)\r\n extracted_date = extract_date(filename)\r\n if extracted_date is not None:\r\n # If we can parse the date from the filename we parse the file\r\n file_ = File(filename, unique_name, extracted_date)\r\n content = file_.get_content()\r\n for element in content:\r\n # If each of the spectra in the file has data, we\r\n # add it to the data base\r\n if element[1] is not None:\r\n self.add_to_db_single(element)\r\n status_msg('Elements of file {0} added to db'.format(\r\n unique_name), True)\r\n else:\r\n status_msg('File {0} not added, unknown filename format'.\r\n format(unique_name), False)", "def push_exg(self, packet):\n _, exg_data = packet.get_data(self.exg_fs)\n self.exg_outlet.push_chunk(exg_data.T.tolist())", "def append(self, other: Energy) -> None:\n\n for item in self:\n if other == item:\n logger.debug(\n f\"Not appending {other} to the energies - \"\n 
f\"already present. Moving to the end\"\n )\n self.append(self.pop(self.index(item)))\n return\n\n return super().append(other)", "def index_add(all_index, this_index, samples, caller):\n for key, record in this_index.iteritems():\n if key not in all_index:\n all_index[key] = {}\n for sample_id in samples:\n if sample_id not in all_index[key]:\n all_index[key][sample_id] = {caller: []}\n elif caller not in all_index[key][sample_id]:\n all_index[key][sample_id][caller] = []\n # NB: If caller was run twice, will have 2 records here\n all_index[key][sample_id][caller].append(record)", "def insert_values():\n pass", "def buscar_Newgenero(tempomin, tempomax, catalog):\n artistasNoRepetidos = lt.newList('ARRAY_LIST')\n artistasRepetidos = lt.newList('ARRAY_LIST')\n MapGeneros = mp.get(catalog['caraContenido'], 'tempo')\n RBTgenero = me.getValue(MapGeneros)\n lista_listas_musica = om.values(RBTgenero, tempomin, tempomax)\n lista_lista_musica = it.newIterator(lista_listas_musica)\n while it.hasNext(lista_lista_musica): \n lista_musica = it.next(lista_lista_musica)\n musicas = it.newIterator(lista_musica)\n while it.hasNext(musicas):\n musica = it.next(musicas)\n if int(lt.isPresent(artistasNoRepetidos, (musica['artist_id']))) == 0:\n lt.addLast(artistasNoRepetidos, musica['artist_id'])\n if int(lt.isPresent(artistasRepetidos, (musica['created_at'] + musica['user_id'] + musica['track_id']))) == 0:\n lt.addLast(artistasRepetidos, (musica['created_at'] + musica['user_id'] + musica['track_id']))\n else:\n if int(lt.isPresent(artistasRepetidos, (musica['created_at'] + musica['user_id'] + musica['track_id']))) == 0:\n lt.addLast(artistasRepetidos, (musica['created_at'] + musica['user_id'] + musica['track_id']))\n \n return artistasRepetidos, artistasNoRepetidos", "def add_data(self, new_data, *args):\n raise NotImplementedError", "def append_data(self, key, data):\n with self.write():\n try:\n self.handle.append(\n key, data, data_columns=True, complevel=5, complib='blosc')\n except AttributeError:\n self.handle.append(key, data, complevel=5, complib='blosc')", "def insertionGenero (cur, conn, genre_list):\n for genre in genre_list :\n idGenero = genre[0]\n genero=genre[1]\n # print(generoInsert.format(idTitulo,genero))\n # REGISTER DATA IN GENERO TABLE\n cur.execute(generoInsert.format(idGenero,genero))\n conn.commit()", "def prepare_data_for_d(data, id2motifs, generator):\n motifs = []\n labels = []\n g_s_args = []\n poss = []\n negs = []\n for i in range(data.x.size(0)):\n if np.random.rand() < 1:\n pos = random.sample(id2motifs[i], min(len(id2motifs[i]), n_sample))\n poss.append(pos)\n g_s_args.append((i, len(pos), True))\n\n\n z = generator(data.x, data.total_edge_index)\n # row, col = data.total_edge_index\n\n\n # x_j = torch.index_select(z, 0, row)\n # x_i = torch.index_select(z, 0, col)\n # one_hop = torch.einsum(\"ef,ef->ef\", x_i, x_j)\n\n negs, _ = sampling(g_s_args, z, data)\n\n # negs =[]\n # for i in range(data.x.size(0)):\n # neg=[]\n # if(len(poss[i])>0):\n # ps= torch.tensor(poss[i][0]).to(device)\n # # pdb.set_trace()\n # x_j = torch.index_select(one_hop, 0, ps)\n # x_i = torch.index_select(one_hop, 0, ps)\n # two_hop = torch.einsum(\"ef,ef->e\", x_j, x_i)\n # __, target = torch.topk(two_hop, len(poss[i]))\n # for k in range(len(poss[i])):\n # neg.append((i, row[target[k]].item(), col[target[k]].item()))\n # negs.append(neg)\n\n \n for pos, neg in zip(poss, negs):\n if len(pos) != 0 and neg is not None:\n motifs.extend(pos)\n labels.extend([1] * len(pos))\n motifs+=neg\n 
labels.extend([0] * len(neg))\n motifs, labels = shuffle(motifs, labels)\n pdb.set_trace()\n return motifs, labels", "def NewItems(self) -> _n_1_t_7:", "def add_locations(self):\n for _ in range(0, self.num_locations):\n detector_id = self.generate_id()\n detector_direction = self.generate_direction()\n detector_point = self.generate_point()\n self.dataset[detector_id] = (detector_direction, detector_point)\n assert len(self.dataset) == self.num_locations", "def __appendNewData(self, dir_name):\n person_path = os.path.join(self.base_dir, self.images_dir, dir_name)\n new_X, new_y = loadDataFromImagesPath(self.detector, person_path)\n #Easy expend 2 dimension array vertically using vs stack\n self.y, new_y = list(self.y), list(new_y)\n self.y.extend(new_y) #Expand 1d array in pythonic way\n self.X, self.y = np.vstack((self.X, new_X)), np.asarray(self.y)", "def insert_ensemble_info( self, db_files, db, run2id, row2id, col2id ):\n\t\tto_insert = [ self.assemble_ensemble_info( i, run2id, row2id, col2id ) for i in db_files ]\n\t\tensemble_info_collection = db.ensemble_info\n\n\t\t# Check whether documents are already present in the collection before insertion\n\t\tif ensemble_info_collection.count() > 0:\n\t\t\td_f = filter( None, [ self.check4existence( ensemble_info_collection, i, \"run_name\", i[\"run_name\"] ) for i in to_insert ] )\n\t\telse:\n\t\t\td_f = to_insert\n\n\t\tprint \"%s new records to write\" % len( d_f )\n\n\t\tif len(d_f) > 0:\n\t\t\tensemble_info_collection.insert( d_f )\n\n\t\treturn ensemble_info_collection", "def _producer(self) -> None:\n while (gtex_path := self.gtex.pop(0)) is not None and (\n bm_path := self.bm.pop(0)\n ) is not None:\n data = merge_data(gtex_path, bm_path, self.mane)\n self._q.put(data)\n logger.info(f\"Contents of file {gtex_path} added to queue\")\n else:\n self._q.put(None) # Send end signal to consumer\n logger.info(\"All files added. None signal sent. 
Producer returns\")\n return", "def add_data(self, datasheet, freqs):\n self.sheets.append(datasheet)\n self.freqs.append(freqs)", "def test_file_access():\n file = gff.GFFFile()\n entry_scaffold = (\"ab\", \"cd\", 1, 2, None, None, None, {\"Id\":\"foo\"})\n entry = (\"a\",) + entry_scaffold\n file.append(*entry)\n assert file[0] == entry\n file.append(*((\"b\",) + entry_scaffold))\n file.insert(1, *((\"c\",) + entry_scaffold))\n file[1] = (\"d\",) + entry_scaffold\n file.insert(3, *((\"e\",) + entry_scaffold))\n del file[2]\n assert [seqid for seqid, _, _, _, _, _, _, _, _ in file] \\\n == [\"a\", \"d\", \"e\", ]", "def populate_data(self):\r\n # Importing StationData with the standard imports causes a redundancy\r\n # problem, so it is imported here only when it is needed.\r\n from stationData import StationData\r\n # Find data requirements from all plumes.\r\n requirements = describe.PLUMES\r\n # Loop over plumes and define parameters to be used for pulling data.\r\n grib_file = pygrib.open(self.grib_file_path)\r\n for req in requirements:\r\n (plume,data_types,grid_level_type,grid_level,unused) = req\r\n selected = grib_file.select(shortName=data_types,\r\n typeOfLevel=grid_level_type,\r\n level=grid_level)\r\n for i, message in enumerate(selected):\r\n if i % 20 == 0:\r\n print '%s %s/%s Grib messages processed for %s' %\\\r\n (PRETEXT, i + 1, len(selected), req[0])\r\n for sdo in StationData.instances:\r\n if sdo.grib_i is None:\r\n StationData.populate_grid_information(message,\r\n self.config)\r\n sdo.add_data(plume,self.member_name,message)\r\n grib_file.close()\r\n return", "def prepare_data(self, train_data, **kwargs):\n data_len = len(train_data[\"done\"])\n for index in range(data_len):\n if self.multi_step == 1:\n self.buff.add(train_data[\"cur_state\"][index],\n train_data[\"action\"][index],\n train_data[\"reward\"][index],\n train_data[\"next_state\"][index],\n float(train_data[\"done\"][index])) # Add replay buffer", "def __init__(self):\n self._data=[]", "def DoAdd(self,event):\r\n newItem = self.data.add()\r\n if newItem and newItem not in self.items:\r\n self.items = self.data.getItemList()\r\n index = self.items.index(newItem)\r\n self.list.InsertItems([newItem],index)", "def add_sample(self, time_received, current_label, emg_list, accel_1, accel_2, accel_3, gyro_1, gyro_2,\n gyro_3, orient_w, orient_x, orient_y, orient_z):\n\n self.add_data_lock.lock()\n\n self.timestamps.append(time_received)\n self.labels.append(current_label)\n\n for i, emg_channel in enumerate(emg_list):\n self.emg[i].append(emg_channel)\n\n self.accel[0].append(accel_1 / MYOHW_ACCELEROMETER_SCALE)\n self.accel[1].append(accel_2 / MYOHW_ACCELEROMETER_SCALE)\n self.accel[2].append(accel_3 / MYOHW_ACCELEROMETER_SCALE)\n\n self.gyro[0].append(gyro_1 / MYOHW_GYROSCOPE_SCALE)\n self.gyro[1].append(gyro_2 / MYOHW_GYROSCOPE_SCALE)\n self.gyro[2].append(gyro_3 / MYOHW_GYROSCOPE_SCALE)\n\n self.orient[0].append(orient_w / MYOHW_ORIENTATION_SCALE)\n self.orient[1].append(orient_x / MYOHW_ORIENTATION_SCALE)\n self.orient[2].append(orient_y / MYOHW_ORIENTATION_SCALE)\n self.orient[3].append(orient_z / MYOHW_ORIENTATION_SCALE)\n\n self.sync_data(self.is_master)\n\n self.add_data_lock.unlock()", "async def add_data(self,x):\n ss = self.create_filter(x)\n if ss == True: # if there id is new, i.e. 
data is coming from a new source\n try:\n self.filters[x[\"_id_\"]].bulk_load(headings=['index','apples','mangos']).map(analyse).sink(save_data1)\n \n except KeyError as ex:\n print(ex)\n raise KeyError(\"the data must contain an id key in the form '_id_' \")\n\n await self.source.emit(x)", "def _add(self, sample):#obs_t, action, reward, obs_tp1, done):\n\n if self._next_idx >= len(self._buffer): #appends data if max capacity not reached yet\n self._buffer.append(sample)\n else:\n self._buffer[self._next_idx] = sample #drops old entry and appends new data if max capacity\n self._next_idx = (self._next_idx + 1) % self._capacity", "def process_data(base_path, gauge_data_dir, job_name): \n \n # Collect all gauge files and put them into one array\n storm_gauges_files = os.listdir(gauge_data_dir) \n g = numpy.zeros((len(gauges), len(storm_gauges_files))) \n \n #for storm_gauges in storm_gauges_files: \n # data_path = os.join(gauage_data_dir, storm_gauges)\n # for i in range(0, len(gauges)): \n \n for (index, storm_gauges) in enumerate(storm_gauges_file): \n with open(os.path.join(gauge_data_dir ,storm_gauges), 'r') as gauges_data: \n data = numpy.loadtxt(gauges_data, delimiter = ',', skiprows=1) \n g[:, index] = data[:, 1]\n\n return g", "def add_data(self, data):\n for i, row in enumerate(self._grid):\n for j, column in enumerate(row):\n if self._grid[i][j] is None:\n self._grid[i][j] = data\n return True\n return False", "def add_data(self, data: np.ndarray):\n data = np.asarray(data)\n if data.ndim < 2:\n data = np.reshape(data, (-1, 1))\n\n self.create_storage(data)\n\n start = self._count\n finish = start + data.shape[0]\n self._data_store[start:finish, :] = data\n self._count += data.shape[0]", "def _save_data(self):\n super()._save_data()\n if self.data:\n # FIXES [BUG-034].\n WeatherForecastObservation.objects.all().delete()\n self.state['inserted_elements'] = len(WeatherForecastObservation.objects.bulk_create(self.data))\n self.logger.info('Successfully saved %d elements.' 
% self.state['inserted_elements'])\n else:\n self.logger.info('No elements were saved because no elements were available.')\n self.data = None", "def savedata(outfile):\n\n global BTRACK, GSTRUC, NPIX\n \n print('SAVING DATA to '+outfile)\n\n # Back up any existing file\n picklefile = outfile.replace('.fits','.pkl')\n backpicklefile = picklefile+'.backup' \n if os.path.exists(picklefile):\n if os.path.exists(backpicklefile):\n shutil.move(picklefile,backpicklefile)\n \n # Write tracking structures to pickle file\n with open(picklefile, 'wb') as f:\n pickle.dump(BTRACK, f)\n pickle.dump(GSTRUC, f) \n\n # Remove backup file if it exists\n if os.path.exists(backpicklefile):\n os.remove(backpicklefile)\n \n # Construct gstruc output structure\n count = GSTRUC['count']\n ngauss = GSTRUC['ngauss']\n dtype = np.dtype([('x',int),('y',int),('par',float,3),('sigpar',float,3),('rms',float),\n ('noise',float),('lon',float),('lat',float)])\n gstruc = np.zeros(ngauss,dtype=dtype)\n cnt = 0\n for i in range(count):\n tstr1 = GSTRUC['data'][i]\n ngauss1 = len(tstr1['par'])//3\n gstruc1 = np.zeros(ngauss1,dtype=dtype)\n gstruc1['x'] = tstr1['x']\n gstruc1['y'] = tstr1['y']\n gstruc1['lon'] = tstr1['lon']\n gstruc1['lat'] = tstr1['lat'] \n gstruc1['rms'] = tstr1['rms']\n gstruc1['noise'] = tstr1['noise']\n gstruc1['par'] = tstr1['par'].reshape(ngauss1,3)\n gstruc1['sigpar'] = tstr1['sigpar'].reshape(ngauss1,3)\n gstruc[cnt:cnt+ngauss1] = gstruc1\n cnt += ngauss1\n gstruc = Table(gstruc)\n gstruc.write(outfile,overwrite=True)\n print(str(len(gstruc))+' gaussians')\n \n return gstruc", "def add_data(self, data):\n self._data += data" ]
[ "0.6317219", "0.62716526", "0.60660607", "0.5965007", "0.5827419", "0.58169585", "0.58123124", "0.57943267", "0.57887346", "0.5719474", "0.5695228", "0.5687933", "0.5677406", "0.56611586", "0.5614816", "0.5614328", "0.55467606", "0.5543412", "0.55387115", "0.55334795", "0.5531016", "0.5529502", "0.5529201", "0.55071676", "0.5497054", "0.5496848", "0.54738456", "0.5472718", "0.5458702", "0.5454891", "0.5450871", "0.5446637", "0.5442571", "0.5423795", "0.5414935", "0.5412649", "0.5410991", "0.5407744", "0.5397021", "0.539531", "0.5386346", "0.53849906", "0.53808755", "0.5380407", "0.537666", "0.5355692", "0.5354255", "0.53484666", "0.5343824", "0.534107", "0.53372425", "0.53339475", "0.53118765", "0.53113407", "0.5306595", "0.5305747", "0.52985245", "0.527464", "0.5270445", "0.52650875", "0.5256216", "0.5256151", "0.5251991", "0.52507824", "0.52449155", "0.5241939", "0.5234026", "0.5232005", "0.5228133", "0.5227795", "0.5227503", "0.52266073", "0.5221766", "0.52177286", "0.5216644", "0.52122235", "0.5210855", "0.5210187", "0.5208117", "0.5207838", "0.5206552", "0.52013063", "0.5201046", "0.51987517", "0.51950353", "0.5194577", "0.5192163", "0.5188777", "0.51873887", "0.5178368", "0.5175707", "0.5174622", "0.5173453", "0.5172028", "0.5157228", "0.5156989", "0.5156088", "0.5154748", "0.5151346", "0.51432556", "0.5141525" ]
0.0
-1
provide both input file names up to extension and outfile path including name up to extension
def dojoin(ipath1,ipath2,opath): r1 = '%s.map' % ipath1 r2 = '%s.map' % ipath2 if not mapsMatch(r1,r2): print '### maps %s and %s do not match' % (r1,r2) sys.exit(1) outpath = '%s.map' % opath shutil.copyfile(r1,outpath) r1 = '%s.eigenstratgeno' % ipath1 r2 = '%s.eigenstratgeno' % ipath2 outpath = '%s.eigenstratgeno' % opath joinRows(r1,r2,outpath) outpath = '%s.ind' % opath r1 = '%s.ind' % ipath1 r2 = '%s.ind' % ipath2 joinInds(r1,r2,outpath)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ConvertFileName(cls,infile,band):\r\n try:\r\n import os\r\n except:\r\n raise ImportError(\"Can not find module os\")\r\n try:\r\n base = str.split(infile,\"_metadata.xml\")[0]\r\n print base\r\n ext=\"_band\"+str(band)+\".ntf\"\r\n outfile=base+ext\r\n return outfile\r\n except:\r\n raise ImportError(\"Can not covert file names\")", "def get_filename_with_new_ext(original_file_path, new_ext, output_directory):\r\n return path.join(output_directory,\r\n path.splitext(path.split(original_file_path)[1])[0] + new_ext)", "def get_output_name(input_path):\n file_name, file_ext = os.path.splitext(os.path.basename(input_path))\n return os.path.abspath(\"out\" + os.path.sep + file_name + \"_geo\" + file_ext)", "def output_file_name_maker(args):\n log.debug(\"Entering output_file_name_maker()\")\n path = os.getcwd() + '/out_files/'\n if not os.path.isdir(path):\n os.mkdir(path)\n\n if args.output is None:\n out_file_name = path + args.input[:-4] + '_' + args.type + '_' + args.layer\n else:\n out_file_name = path + args.output\n\n log.debug(\"Exiting output_file_name_maker()\")\n return out_file_name", "def outputFilename(name=\"\", ext=\"\", time=True):\n # get the date in the format specifed\n dateTime = datetime.now()\n dateTimeFormat = \"%Y-%m-%d__%H-%M-%S\" if time else \"%Y-%m-%d\"\n fileName = dateTime.strftime(dateTimeFormat)\n\n # construct the filename\n fileName = fileName + \"_\" + name if fileName != \"\" else fileName\n ext = \".\" + ext if ext != \"\" else \"\"\n\n return fileName + ext", "def test_get_filename_with_new_ext(self):\r\n test_paths = [('/from/root/test.xxx', 'test.yyy'),\r\n ('../relative/path/test.xxx', 'test.yyy'),\r\n ('/double/extension/in/filename/test.zzz.xxx',\r\n 'test.zzz.yyy')]\r\n\r\n for input, exp_output in test_paths:\r\n exp_output = join(self.output_dir, exp_output)\r\n\r\n self.assertEquals(\r\n get_filename_with_new_ext(input, '.yyy', self.output_dir),\r\n exp_output)", "def getOutputFilename(self, filename):\n return filename[:-4] + \".txt\"", "def replsuffix(files, suffix):\n\toutfiles = []\n\tif suffix is None: return\n\tif type(files) is type(\"\"):\n\t\tfiles = [files]\n\tfor f in files:\n\t\tfname, ext = os.path.splitext(f)\n\t\tnewfname = fname + suffix\n\t\toutfiles.append(newfname)\n\treturn outfiles", "def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None):\n if not basename:\n msg = \"Unable to generate filename for command %s. \" % self.cmd\n msg += \"basename is not set!\"\n raise ValueError(msg)\n\n if cwd is None:\n cwd = os.getcwd()\n if ext is None:\n ext = Info.output_type_to_ext(self.inputs.outputtype)\n if change_ext:\n suffix = \"\".join((suffix, ext)) if suffix else ext\n\n if suffix is None:\n suffix = \"\"\n fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd)\n return fname", "def prepare_io(filename, input_dataset, output_dataset):\n file_id = filename[1:] if filename.startswith(os.sep) else filename\n file_in = os.path.join(input_dataset.path, 'files', file_id)\n file_out = os.path.join(output_dataset.path, 'files', file_id)\n ensure_path(os.path.dirname(file_out))\n return file_in, file_out", "def output_files(self):\n return [self.input_files()[0].replace(\".lhe.gz\", \".stdhep\").replace(\".lhe\", \".stdhep\")]", "def get_outfilenames(outfile, folders):\n outfilenames = []\n avgname = None\n if outfile:\n if len(folders) == 1:\n # next two lines would put the file by default in the input folder. 
Desired behaviour?\n # if not os.path.dirname(outfile):\n # outfile = os.path.dirname(inputpath) + outfile\n outfilenames.append(outfile)\n else:\n dirname = os.path.dirname(outfile)\n if not dirname: # if only filename without path\n outfilenames = [os.path.join(d, outfile) for d in folders]\n avgname = os.path.join(os.path.dirname(os.path.dirname(folders[0])), outfile) # same name in base directory\n else: # put all files in given folder with numbers to differentiate\n basename = os.path.basename(outfile)\n numbers = [folder.split(os.path.sep)[-2] for folder in folders]\n outfilenames = [os.path.join(dirname, basename + \".\" + i) for i in numbers]\n avgname = os.path.join(dirname, basename + \".avg\")\n else: # no filename given\n basename = \"delta_F\"\n outfilenames = [folder + basename for folder in folders]\n avgname = os.path.join(os.path.dirname(os.path.dirname(folders[0])), \"delta_F\") # same name up one directory\n\n return (outfilenames, avgname)", "def add_filename_suffix(filepath, suffix):\r\n root, extension = splitext(basename(filepath))\r\n return root + suffix + extension", "def _file_name(self, dtype_out_time, extension='nc'):\n out_lbl = utils.io.data_out_label(self.intvl_out, dtype_out_time,\n dtype_vert=self.dtype_out_vert)\n in_lbl = utils.io.data_in_label(self.intvl_in, self.dtype_in_time,\n self.dtype_in_vert)\n ens_lbl = utils.io.ens_label(self.ens_mem)\n yr_lbl = utils.io.yr_label((self.start_date.year, self.end_date.year))\n return '.'.join(\n [self.name, out_lbl, in_lbl, self.model.name,\n self.run.name, ens_lbl, yr_lbl, extension]\n ).replace('..', '.')", "def get_savename(outdir, plot_name, extension):\n save_name = '.'.join([plot_name, extension])\n save_name = '/'.join([outdir, save_name])\n return save_name", "def py_simple_output_filename(filename, tag, ending):\n\n py_simple_output_filename = (py_output_dir(tag, ending) + \"/\"\n + filename + \".\"\n + ending)\n\n return py_simple_output_filename", "def get_file_name_with_extension(file_name: str):\n return file_name + '.txt'", "def py_output_filename(tag, filename, spec, ending):\n\n py_output_filename = (py_output_dir(tag, ending) + \"/\"\n + filename + \"_\" + spec + \".\"\n + ending)\n\n return py_output_filename", "def _app_outfile_path(path, app_name, extension):\n filename = '%s.%s' % (app_name, extension)\n return os.path.join(path, filename)", "def extract_file_name(self, input_file):\n self.file_name_with_ext, self.file_name = extract_file_name(input_file)", "def GetOutputFilename(fname):\n return os.path.join(outdir, fname)", "def bulk_rename_files(input_path, output_path, suffix, new_suffix):\n for dir_path, dir_names, filenames in os.walk(input_path):\n structure = os.path.join(output_path, dir_path[len(input_path) + 1:])\n for file in filenames:\n src = os.path.join(dir_path, file)\n f_name, ext = os.path.splitext(file)\n if not f_name.endswith(suffix):\n file = f_name + new_suffix + ext\n dest = os.path.join(structure, file)\n os.rename(src, dest)", "def output_files(filepath):\n\n infile = open(filepath, 'r')\n lines = infile.readlines()\n\n rel_path = './'\n rel_path += lines[6][lines[6].find(':')+1:].strip()\n rel_path += lines[7][lines[7].find(':')+1:].strip()\n\n filename_I1 = lines[9][lines[9].find(':')+1:].strip()\n filename_I2 = lines[10][lines[10].find(':')+1:].strip()\n filename_IW = lines[12][lines[12].find(':')+1:].strip()\n filename_WE = lines[13][lines[13].find(':')+1:].strip()\n filename_CFLx = lines[15][lines[15].find(':')+1:].strip()\n filename_CFLv = 
lines[16][lines[16].find(':')+1:].strip()\n filename_S = lines[18][lines[18].find(':')+1:].strip()\n\n filepath_I1 = rel_path + filename_I1\n filepath_I2 = rel_path + filename_I2\n filepath_IW = rel_path + filename_IW\n filepath_WE = rel_path + filename_WE\n # filepath_CFLx = rel_path + filename_CFLx\n # filepath_CFLv = rel_path + filename_CFLv\n filepath_S = rel_path + filename_S\n\n outfile_I1 = open(filepath_I1, 'w')\n outfile_I2 = open(filepath_I2, 'w')\n outfile_IW = open(filepath_IW, 'w')\n outfile_WE = open(filepath_WE, 'w')\n # outfile_CFLx = open(filepath_CFLx, 'w')\n # outfile_CFLv = open(filepath_CFLv, 'w')\n outfile_S = open(filepath_S, 'w')\n\n outfiles = dict(I1 = outfile_I1,\n I2 = outfile_I2,\n IW = outfile_IW,\n WE = outfile_WE,\n # CFLx = outfile_CFLx,\n # CFLv = outfile_CFLv,\n S = outfile_S)\n\n return outfiles", "def output_files(filepath):\n\n infile = open(filepath, 'r')\n lines = infile.readlines()\n\n rel_path = './'\n rel_path += lines[6][lines[6].find(':')+1:].strip()\n rel_path += lines[7][lines[7].find(':')+1:].strip()\n\n filename_I1 = lines[9][lines[9].find(':')+1:].strip()\n filename_I2 = lines[10][lines[10].find(':')+1:].strip()\n filename_IW = lines[12][lines[12].find(':')+1:].strip()\n filename_WE = lines[13][lines[13].find(':')+1:].strip()\n filename_CFLx = lines[15][lines[15].find(':')+1:].strip()\n filename_CFLv = lines[16][lines[16].find(':')+1:].strip()\n filename_S = lines[18][lines[18].find(':')+1:].strip()\n\n filepath_I1 = rel_path + filename_I1\n filepath_I2 = rel_path + filename_I2\n filepath_IW = rel_path + filename_IW\n filepath_WE = rel_path + filename_WE\n # filepath_CFLx = rel_path + filename_CFLx\n # filepath_CFLv = rel_path + filename_CFLv\n filepath_S = rel_path + filename_S\n\n outfile_I1 = open(filepath_I1, 'w')\n outfile_I2 = open(filepath_I2, 'w')\n outfile_IW = open(filepath_IW, 'w')\n outfile_WE = open(filepath_WE, 'w')\n # outfile_CFLx = open(filepath_CFLx, 'w')\n # outfile_CFLv = open(filepath_CFLv, 'w')\n outfile_S = open(filepath_S, 'w')\n\n outfiles = dict(I1 = outfile_I1,\n I2 = outfile_I2,\n IW = outfile_IW,\n WE = outfile_WE,\n # CFLx = outfile_CFLx,\n # CFLv = outfile_CFLv,\n S = outfile_S)\n\n return outfiles", "def _get_output_filename(dataset_dir, split_name):\n return '%s/%s*.tfrecord' % (dataset_dir, split_name)", "def get_file_name(x, feature_name, ext='npy'):\n # this is kind-of standard\n name = '.'.join(x.split('.')[:-1])\n filename = '{}.{}.{}'.format(name, feature_name, ext)\n return filename", "def append_to_filename(filepath: str, name_suffix: str, new_ext: Optional[str] = None) -> str:\n ext = new_ext or filepath_ext(filepath)\n name = filepath_name_only(filepath)\n return str(pathlib.Path(filepath).with_name(name+name_suffix).with_suffix(ext))", "def make_file_format(filename, format_extension):\r\n \r\n if filename[len(filename)-len(format_extension):len(filename)] != format_extension:\r\n filename += format_extension\r\n \r\n return(filename)", "def check_file_name_extensions(self, file_name, input_output):\n file_type = FileTypes ()\n extension_types = file_type.get_extension_types ()\n for extension in extension_types:\n if file_name.endswith (extension):\n if input_output == 'input':\n self._input_file = file_type.get_file_type (extension)\n else:\n self._output_file = file_type.get_file_type (extension)\n return True\n print (\"File name must end with:\")\n for extension in extension_types:\n print (extension)\n return False", "def FileNameToFile(files):\n files = files.replace('%20%28ja%29', '.ja')\n 
if files in up_list:\n if files == 'UserManual':\n return \"index.html\"\n elif files == 'UserManual.ja':\n return \"index.ja.html\"\n else:\n return files.lower() + \".html\"\n else: # modules\n sol = files.replace('.py', '').replace('%2F', '_')\n return 'modules/' + sol + '.html'", "def jarvis(input_path, output_path): \n\n if not os.path.exists(f'{output_path}'):\n os.makedirs(f'{output_path}')\n\n file_list = [filename for filename in os.listdir(f'{input_path}') if '.tif' in filename]\n\n for filename in file_list:\n pathname = os.path.join(input_path, filename)\n new_name = f\"{output_path}{filename.replace('.lif - ', '_').replace('_5x-', '_')}\"\n copyfile(pathname, new_name)\n logger.info(f'{new_name}')", "def create_input_file(fpath):\n with open(fpath, 'w') as f:\n f.write(os.path.basename(fpath).split(\"_\")[1])\n f.write(\"\\n\")", "def generate_file_name(old_file_name: str) -> str:\r\n return old_file_name.split(\".\")[0] + '_features' + '.npy'", "def combine_files(output_filename, *passes):\n all_columns = {}\n for x in passes:\n sp = pyvyu.load_opf(x)\n column_list = sp.get_column_list()\n for c in column_list:\n all_columns[c] = sp.get_column(c)\n sp = pyvyu.Spreadsheet()\n sp.name = output_filename\n sp.columns = all_columns\n pyvyu.save_opf(sp, output_filename, True, *all_columns.keys())\n return output_filename", "def output_files(self):\n # Output file for Moller generation\n if 'moller' in self.name:\n return ['moller.stdhep']\n # Output file for beam generation\n return ['beam.stdhep']", "def create_fileters(*exts):\n ret = []\n for e in exts:\n ret += ['{} (*.{})'.format(*e)]\n return ret", "def add_suffix_to_filename(filename, suffix):\n name, ext = os.path.splitext(filename)\n return ''.join([name, suffix, ext])", "def normalized_export_filename(title, extension):\n filename = timezone.localtime().strftime('%Y-%m-%d_%H-%M-%S__') + slugify(title)\n if extension.startswith(os.path.extsep):\n filename += extension\n else:\n filename += os.path.extsep + extension\n return filename", "def _add_file_extension(file_name: str, extension: str) -> str:\n fname = file_name.strip()\n slice_offset = -1 * (len(extension) + 1)\n if fname[slice_offset:] != f\".{extension}\":\n fname = fname + f\".{extension}\"\n return fname", "def get_filename(self, txt, ext=None):\n new_filename = txt.lower().translate(str.maketrans(\"\", \"\", string.punctuation)).replace(\" \",\"\")\n new_filename = (new_filename[:MAX_FILENAME_LEN] + \"..\") \\\n if len(new_filename) > MAX_FILENAME_LEN else new_filename\n if ext is not None:\n new_filename = f\"{new_filename}.{ext}\"\n return new_filename", "def file_name(product, ext='json'):\n return f\"./output/{product}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}_transformed_{version}.{ext}\"", "def treat(input, output):\n files = find(input)\n acc = []\n for file in files:\n fileInfo = extract(file)\n out = makeOutputPath(output, fileInfo[\"path\"], fileInfo[\"filename\"])\n if not out == None:\n fileInfo[\"outPath\"] = out\n acc += [fileInfo]\n return acc", "def get_filename(\n self,\n name,\n ext=\".npz\",\n map_tag=None,\n iter_index=None,\n extra_tag=None,\n bp_opts=False,\n ):\n if self.output_root is None:\n return None\n\n if bp_opts:\n if self.ensemble_mean:\n name = \"{}_mean\".format(name)\n elif self.ensemble_median:\n name = \"{}_median\".format(name)\n elif self.sim_index is not None:\n name = \"{}_sim{:04d}\".format(name, self.sim_index)\n if self.signal_type_sim:\n name = \"{}_{}\".format(name, self.signal_type_sim)\n if 
self.noise_type_sim:\n name = \"{}_{}\".format(name, self.noise_type_sim)\n else:\n if self.data_type != \"raw\":\n name = \"{}_{}\".format(name, self.data_type)\n if getattr(self, \"template_cleaned\", False):\n name = \"{}_clean_{}\".format(name, self.template_type)\n if getattr(self, \"planck_sub\", False):\n name = \"{}_planck_sub\".format(name)\n if self.weighted_bins:\n name = \"{}_wbins\".format(name)\n if getattr(self, \"return_cls\", False):\n name = \"{}_cl\".format(name)\n\n if map_tag is not None:\n name = \"{}_map_{}\".format(name, map_tag)\n if iter_index is not None:\n name = \"{}_iter{:03d}\".format(name, iter_index)\n if extra_tag is not None:\n name = \"{}_{}\".format(name, extra_tag)\n\n tag = \"_{}\".format(self.output_tag) if self.output_tag else \"\"\n if not ext.startswith(\".\"):\n ext = \".{}\".format(ext)\n return os.path.join(self.output_root, \"{}{}{}\".format(name, tag, ext))", "def test_add_filename_suffix(self):\r\n self.assertEqual(add_filename_suffix('/foo/bar/baz.txt', 'z'),\r\n 'bazz.txt')\r\n self.assertEqual(add_filename_suffix('baz.txt', 'z'),\r\n 'bazz.txt')\r\n self.assertEqual(add_filename_suffix('/foo/bar/baz', 'z'),\r\n 'bazz')\r\n self.assertEqual(add_filename_suffix('baz', 'z'),\r\n 'bazz')\r\n self.assertEqual(add_filename_suffix('/baz.fasta.txt', 'z'),\r\n 'baz.fastaz.txt')\r\n self.assertEqual(add_filename_suffix('baz.fasta.txt', 'z'),\r\n 'baz.fastaz.txt')\r\n self.assertEqual(add_filename_suffix('/foo/', 'z'), 'z')", "def get_file_inter_name(self):\n\t\td = DIImportExternal.get_file_inter_name(self)\n\t\td,_ = os.path.split(d)\n\t\tf,_ = os.path.splitext(self.file)\n\t\treturn \tos.path.join(d,f+'.odt')", "def _get_rename_command(self,\r\n out_filenames,\r\n tmp_output_dir,\r\n output_dir):\r\n result = ''\r\n result_filepaths = []\r\n for fn in out_filenames:\r\n tmp_result_filepath = '%s/%s' % (tmp_output_dir, fn)\r\n result_filepath = '%s/%s' % (output_dir, fn)\r\n result += \\\r\n '; mv %s %s' % (tmp_result_filepath, result_filepath)\r\n result_filepaths.append(result_filepath)\r\n return result, result_filepaths", "def generate_filename(self):\n file_pattern = os.path.join(self.path, \"TCGA-*\")\n for f in glob(file_pattern):\n organ = get_organ(f)\n for raw_f in glob(os.path.join(f, \"*.tif\")):\n gt_f = raw_f.replace(\".tif\", \".xml\")\n yield raw_f, gt_f, organ", "def fname( file_, base=None, new_base=None, new_ext=None ):\n if base and new_base:\n file_ = file_.replace(base, new_base, 1)\n if new_ext:\n file_ = os.path.splitext(file_)[0] + new_ext\n return file_", "def convertFilename (pattern, name):\n\tresult = \"\"\n\tj = 0\n\ti = 0\n\twhile j < len (pattern) or i < len(name):\n\t\t# If the format ended \n\t\tif j >= len (pattern):\n\t\t\tbreak\n\t\t# If one charactere must be ignored \n\t\telif pattern [j] == '?':\n\t\t\tif i < len(name):\n\t\t\t\tresult = result + name [i]\n\t\t\t\ti += 1\n\t\t\tif j < len(pattern):\n\t\t\t\tj += 1\n\t\t# If one or more characteres must be ignored \n\t\telif pattern [j] == '*':\n\t\t\tif i < len(name):\n\t\t\t\tresult = result + name [i]\n\t\t\t\ti += 1\n\t\t\telse :\n\t\t\t\tbreak\n\t\telse:\n\t\t\tif i < len(name):\n\t\t\t\ti += 1\n\n\t\t\tif j < len(pattern):\n\t\t\t\tresult = result + pattern [j]\n\t\t\t\tj += 1\n\treturn result", "def handle_file_name(self):\r\n self.tmp_name = (os.path.basename(self.source_file_name)).split('.')[0]\r\n result_name = self.tmp_name + '_result_'\r\n log_name = self.tmp_name + '_log.csv'\r\n \r\n self.result_file_name = os.path.join(self.save_path , 
result_name) \r\n self.log_file_name = os.path.join(self.log_path , log_name)", "def get_filename(out_dir, file_date, extension):\n return path.join(out_dir, f'CrossrefCitations_{file_date}.{extension}')", "def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True,\n ext='.mif'):\n\n if basename == '':\n msg = 'Unable to generate filename for command %s. ' % self.cmd\n msg += 'basename is not set!'\n raise ValueError(msg)\n if cwd is None:\n cwd = os.getcwd()\n if change_ext:\n if suffix:\n suffix = ''.join((suffix, ext))\n else:\n suffix = ext\n if suffix is None:\n suffix = ''\n fname = fname_presuffix(basename, suffix=suffix,\n use_ext=False, newpath=cwd)\n return fname", "def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True,\n ext='.mif'):\n\n if basename == '':\n msg = 'Unable to generate filename for command %s. ' % self.cmd\n msg += 'basename is not set!'\n raise ValueError(msg)\n if cwd is None:\n cwd = os.getcwd()\n if change_ext:\n if suffix:\n suffix = ''.join((suffix, ext))\n else:\n suffix = ext\n if suffix is None:\n suffix = ''\n fname = fname_presuffix(basename, suffix=suffix,\n use_ext=False, newpath=cwd)\n return fname", "def _make_fname(song, ext=None, av=None, subdir=None):\n # pylint: disable=E1103\n # Instance of 'bool' has no 'extension' member (some types not inferable)\n ddir = os.path.join(Config.DDIR.get, subdir) if subdir else Config.DDIR.get\n if not os.path.exists(ddir):\n os.makedirs(ddir)\n\n if ext:\n extension = ext\n\n else:\n stream = streams.select(streams.get(song),\n audio=av == \"audio\", m4a_ok=True)\n extension = stream['ext']\n\n # filename = song.title[:59] + \".\" + extension\n filename = song.title + \".\" + extension\n filename = os.path.join(ddir, mswinfn(filename.replace(\"/\", \"-\")))\n filename = filename.replace('\"', '')\n return filename", "def _gen_basename(param_dict, clargs):\n if param_dict['output_basename'] in ['', 'auto']:\n return clargs.input_fname.lower().split('.json')[0]\n\n else:\n return param_dict['output_basename']", "def WriteFilename(self):\n print(f\"Copying {self.input_file} to {self.output_file}\")\n copyfile(self.input_file, self.output_file)\n # Open r+b to open as binary for writing to\n with open(self.output_file, \"r+b\") as fh:\n seeker = (self.root_directory_offset*self.sector_size)+((self.index_number-1)*self.directory_index_size)\n # Convert to little-endian\n f_array = bytearray()\n print(f\"Reversing {self.filename}\")\n f_array.extend(map(ord, self.filename))\n #f_array.reverse()\n print(f\"f_array is {f_array}\")\n print(f\"Preparing to write {f_array} to {seeker}\")\n fh.seek(seeker)\n fh.write(f_array)\n e_array = bytearray()\n print(f\"Reversing {self.extension}\")\n e_array.extend(map(ord, self.extension))\n #e_array.reverse()\n print(f\"e_array is {e_array}\")\n print(f\"Preparing to write {e_array} to {seeker}\")\n fh.seek(seeker+8)\n fh.write(e_array)\n print(\"Filename and extension written to root directory\")\n return True", "def _out(self, *args):\n suffix = '_'.join(map(str, args))\n return os.path.join(self._out_folder, suffix )", "def get_output_file(path):\n root, _ = os.path.splitext(path)\n return os.path.basename(root) + get_task_number() + \".txt\"", "def build_file_path(dir_name, file_name, ext):\n return os.path.join(dir_name, os.path.extsep.join((file_name, ext)))", "def join_file(first_file):\n first_file_no_numbers = first_file[:-3] # Remove 001 from file name\n output_file_name = first_file[:-4] # Remove .001 from file name\n file_number = 1 # 
Create counter starting at 1\n\n with open(output_file_name, 'wb') as output_file: # Output file loop\n while True: # For ever loop\n try:\n # Open file by pasting 3digit number as extension\n with open(first_file_no_numbers + ('%03d' % file_number), 'rb') as current_input:\n # Read the whole file and write it to output file. (Maybe dangerous if file size > memory)\n output_file.write(current_input.read())\n # Go on to the next file\n file_number += 1\n except FileNotFoundError:\n # End loop when no more 3digit extension files are found\n break", "def output_filename(self, prefix, suffix):\n filename = \"%s%s%s\" % (prefix, _ExecutionWrapper._file_index, suffix)\n _ExecutionWrapper._file_index += 1\n return filename", "def make_img_name(file_ext='.png'):\r\n fn = []\r\n # format seqs and write out to temp file\r\n for i in range(0, 30):\r\n fn.append(choice(ALPHABET))\r\n return ''.join(fn) + file_ext", "def make_files(dir_in, dir_out):\n try:\n listaFisiere = os.listdir(f\"{dir_in}\")\n except Exception as eroare:\n print(\"Path to input file is invalid, exiting...\")\n quit()\n if not os.path.exists(f\"{dir_out}\"):\n os.mkdir(f\"{dir_out}\")\n paths_out = []\n for numeFisier in listaFisiere:\n numeFisierOutput=\"output_\"+numeFisier\n f=open(f\"{dir_out}/\"+numeFisierOutput,\"w\")\n paths_out.append(f\"{dir_out}/\"+numeFisierOutput)\n f.close()\n for i in range(len(listaFisiere)):\n listaFisiere[i] = dir_in + \"/\" + listaFisiere[i]\n return listaFisiere, paths_out", "def convert_01(filename,ext_out=\".json\"):\n summary=[]\n try:\n ext_in = os.path.splitext(filename)[1]\n if (ext_in == ext_out):\n print(\"\\tError input extention matches output extention\")\n else:\n summary=load_result_01(filename)\n except:\n print(\"\\tError loading \"+filename)\n if summary:\n try:\n print(\"\\t saving :\"+os.path.splitext(filename)[0]+ext_out)\n save_data_01(os.path.splitext(filename)[0]+ext_out,summary)\n except:\n print(\"\\tError converting \"+filename+\" to \"+ext_out)\n else:\n print(\"\\tCould not load \"+filename)", "def __add_filename_suffix(filename, suffix):\n return \"{}{}.pdf\".format(filename.split(\".pdf\", 1)[0], suffix)", "def _add_output_files(self):\n self._output_files = []\n base = os.path.join(os.path.dirname(self.in_fpath),\n os.path.splitext(os.path.basename(self.in_fpath))[0])\n\n output_path = f'{base}_out.csv'\n\n suffix = 2\n while os.path.exists(output_path):\n self._output_files.append(output_path)\n if os.path.getsize(output_path) < self.limit_fsize:\n return\n output_path = f'{base}_out_{suffix}.csv'\n suffix += 1\n\n open(output_path, 'a').close()\n self._output_files.append(output_path)", "def supplement_file_name(file: Union[str, pathlib.Path], sup: str) -> pathlib.Path:\n\n file = pathlib.Path(file)\n\n # the `suffix` is incorporated into the file name\n return file.with_name(file.stem + f'_{sup}' + file.suffix)", "def filename(i):\n rand_name = os.path.join(os.getcwd(), \"input-%d.txt\" % i)\n ref_name = os.path.join(os.getcwd(), \"input-%d.ref\" % i)\n return rand_name, ref_name", "def filePathToFileName(path):\n return os.path.splitext(os.path.basename(path))[0]", "def _make_input_file_list(binnedfile, num_files):\n outdir_base = os.path.abspath(os.path.dirname(binnedfile))\n outbasename = os.path.basename(binnedfile)\n filelist = \"\"\n for i in range(num_files):\n split_key = \"%06i\" % i\n output_dir = os.path.join(outdir_base, split_key)\n filepath = os.path.join(output_dir,\n outbasename.replace('.fits', '_%s.fits' % split_key))\n filelist += ' %s' % 
filepath\n return filelist", "def _extr_ext(p: str) -> str:\n file_name = os.path.basename(p)\n _, ext = os.path.splitext(file_name)\n return ext", "def get_file_name(filepath): # need pytest\n filename, extension = os.path.splitext(filepath.split('/')[-1])\n return filename, extension", "def out_filename(self, filetype, dir, format='old'):\n filename = self.filename(filetype=filetype, format=format)\n return Path(dir) / filename", "def add_ext_if_needed(fileName, ext):\n ls = fileName.split(\".\")\n if( ( len(ls)==1) or (not (ls[-1] == ext))):\n return fileName + \".\" + ext\n else:\n return fileName", "def rename_file(source, oldname, newname):\n #source = client_variables.output_folder\n renamefiles = os.listdir(source)\n ext = (\".xlsx\", \".csv\", \".pdf\", \".png\")\n for renamefile in renamefiles:\n if renamefile.endswith(ext):\n renamefile = source + \"/\" + renamefile\n print \"renaming:\", renamefile\n newname = source + \"/\" + newname\n print \"newname:\", newname\n os.rename(renamefile, newname)\n elif renamefile.startswith(oldname):\n renamefile = source + \"/\" + renamefile\n print \"renaming:\", renamefile\n newname = source + \"/\" + newname\n print \"newname:\", newname\n os.rename(renamefile, newname)", "def collate_data(in_dir, extension='.csv', out_dir=None):\n if out_dir is None:\n out_dir = './' + re.search('^\\.(.*)', extension).groups(0)[0]\n\n if not os.path.isdir(out_dir):\n os.mkdir(out_dir)\n\n for p, d, fs in os.walk(in_dir):\n for f in fs:\n if extension in f:\n shutil.copy(p + '/' + f, out_dir + '/' + f)\n return", "def _get_output_filenames(output_path, dpp=None):\n ret = []\n for fname in os.listdir(output_path):\n ext = _ext(dpp)\n if re.match(r\"get[^_]+[_free\\d?]?\" + ext, fname):\n ret.append(fname)\n return ret", "def generate_filename(\r\n filepath,\r\n filestartwith,\r\n fileendwith,\r\n run_date,\r\n filemask):\r\n\r\n filedate = generate_dateformat(run_date, filemask)\r\n if not filedate:\r\n filename = filestartwith\r\n else:\r\n filename = filestartwith + filedate\r\n\r\n if fileendwith:\r\n filename = filename + fileendwith\r\n\r\n if filepath and len(filepath.strip()) > 0:\r\n filename = filepath.strip() + '/' + filename\r\n\r\n return filename", "def force_suffix(fname, suffix):\r\n head, tail = pp.split(fname)\r\n if len(tail) == 0:\r\n return head\r\n if suffix[0] == \".\":\r\n suffix = suffix[1:]\r\n fpart, fext = pp.splitext(tail)\r\n newp = pp.join(head, fpart + \".\" + suffix)\r\n return pp.normpath(newp)", "def gen_file_name(filename, path=UPLOAD_FOLDER):\n\n i = 1\n while os.path.exists(os.path.join(path, filename)):\n name, extension = os.path.splitext(filename)\n filename = '%s_%s%s' % (name, str(i), extension)\n i += 1\n\n return filename", "def binder(folder_name: str, output_name: str = \"output.exe\", verbose=True):\n\n # we get all the files from the given folder\n files: List[str] = os.listdir(folder_name)\n\n if files == []:\n print(\" No file in \", folder_name, \" folder\")\n return\n\n # we sort then by comparing the concatenated number\n files = sorted(files, key=lambda x: int(x.split(\"_\")[0]))\n\n if verbose:\n print(\"encoutered {} files:\".format(len(files)))\n for file in files:\n print(file)\n\n # we open an output stream\n with open(output_name, \"wb+\") as output_stream:\n # And for every gathered files\n for file in files:\n with open(os.path.join(folder_name, file), \"rb\") as input:\n # we add it at the end of the document\n output_stream.write(input.read())\n\n print(\"Done!\")", "def 
handle_filenames(filenames):\n suffixes = [\".mod\", \".dat\", \".run\"]\n if len(filenames) == 1:\n return (filenames[0].with_suffix(suffix) for suffix in suffixes)\n else:\n try:\n return sorted(filenames, key=lambda x: suffixes.index(x.suffix))\n except ValueError:\n click.echo(click.style(f\"Invalid filename.\", fg=\"red\", bold=True))", "def filename_formatter(source, destination, file, order_format):\n\n # Attempt loading taglib.\n try:\n song = taglib.File(file)\n except:\n raise CoreError('taglib.File() failed.')\n\n # Begin formatting the filename.\n new_name = destination\n for i in range(0, len(order_format)):\n # If next item is a tag, this block will execute.\n try:\n sub_name = song.tags[order_format[i]][0]\n # Clear filename of any illegal characters.\n for j in range(0, len(MuzikArkive.illegal_name_characters)):\n if MuzikArkive.illegal_name_characters[j] in sub_name:\n sub_name = sub_name.replace(\n MuzikArkive.illegal_name_characters[j], '_'\n )\n new_name += sub_name\n # If next item is a char or string, this block will execute.\n except:\n new_name += order_format[i]\n\n # Add the files extenstion to the new name formatting.\n new_name += '.' + file.rsplit('.', 1)[1]\n\n song.close()\n return new_name", "def split_ext(filepath):\n\t(fn, ext) = os.path.splitext(filepath)\n\tif ext=='.gz':\n\t\t(fn, ext) = os.path.splitext(fn)\n\t\text += '.gz'\n\treturn (fn, ext)", "def input_name_from_func_name(func_name):\n\treturn os.path.join(INPUTS_DIR, ''.join(func_name.split('make_')[1:])) \\\n\t\t\t+ '.%s' % EXTENSION", "def cat_files(files, output):\n for file in files:\n with open(file, 'r') as fd:\n shutil.copyfileobj(fd, output)", "def cat_files(files, output):\n for file in files:\n with open(file, 'r') as fd:\n shutil.copyfileobj(fd, output)", "def create_filename (self):\n\t\tassert self.__patient_name and self.__location_name, \"New filename could not be determined, one or more needed arguments is empty!\"\n\t\t_patient_name = self.__patient_name.split(' ')\n\t\t_patient_name.reverse()\n\t\t\n\t\treturn os.path.join(os.path.dirname(self.file._path), \"%s MR %s%s\" % (self.__location_name, ', '.join(_patient_name).upper(), self._file.extension))", "def construct_filename(output_dir,\n file_descriptor,\n extension,\n *args,\n **kwargs):\n if len(args) == 0 and len(kwargs) == 0:\n return Path(output_dir,\n '{}{}'.format(file_descriptor, extension))\n elif len(args) == 0:\n return Path(output_dir,\n '{}_{}{}'.format('_'.join([f'{k}{v}' for k, v in kwargs.items()\n if v is not None]),\n file_descriptor,\n extension))\n elif len(kwargs) == 0:\n return Path(output_dir,\n '{}_{}{}'.format('_'.join([ar for ar in args if ar is not None]),\n file_descriptor,\n extension))\n else:\n return Path(output_dir,\n '{}_{}_{}{}'.format('_'.join([ar for ar in args if ar is not None]),\n '_'.join([f'{k}{v}' for k, v in kwargs.items()\n if v is not None]),\n file_descriptor,\n extension))", "def get_csv_file_name(output_dir, file_prefix, file_suffix):\n\tcsv_filename = \"\".join([file_prefix, '_', file_suffix, '.csv'])\n\treturn os.path.join(output_dir, csv_filename)", "def format_filename(prefix, suffix, seq_len, uncased):\n seq_str = \"seq-{}\".format(seq_len)\n if uncased:\n case_str = \"uncased\"\n else:\n case_str = \"cased\"\n\n file_name = \"{}.{}.{}.{}\".format(prefix, seq_str, case_str, suffix)\n\n return file_name", "def get_output_raw_name(journal_file_name, output_type='txt'):\n dot_pos = journal_file_name.rfind('.')\n if dot_pos != -1:\n output_file_name = journal_file_name[0: 
dot_pos]\n else:\n output_file_name = journal_file_name\n num_of_output = 1\n if output_type == 'txt':\n while True:\n output_file = '%s_%d.txt'%(output_file_name,num_of_output)\n if not os.path.exists(output_file):\n break\n else:\n num_of_output += 1\n else:\n output_file = '%s.%s'%(output_file_name,output_type)\n return output_file", "def new_filename(fname=None,ndigits=3):\n if fname is None:\n ext = (\"%%.%ii\" % ndigits) % 1\n fname = \"%s.%s\" % (random_string(6), ext)\n \n if os.path.exists(fname): \n fname = increment_filename(fname,ndigits=ndigits)\n\n return fname", "def out_filename(self, filetype, format='old', dir=Location.OUT_DIR):\n filename = self.filename(filetype=filetype, format=format)\n #return Path(dir) / filename\n return filename", "def get_fixed_filename(filename):\n new_name = \"\"\n for i, char in enumerate(filename):\n if i + 1 != len(filename):\n previous_character = filename[i - 1]\n next_character = filename[i + 1]\n if char.islower() and next_character.isupper():\n new_name += char + \"_\"\n elif previous_character == \".\":\n new_name += char\n elif char.islower() and not previous_character.isalpha():\n new_name += char.upper()\n else:\n new_name += char\n else:\n new_name += char\n new_name = new_name.replace(\" \", \"_\").replace(\".TXT\", \".txt\")\n return new_name", "def get_output_filepaths(output_dir,\r\n fasta_fp,\r\n qual_fp):\r\n\r\n if not output_dir.endswith('/'):\r\n output_dir += '/'\r\n\r\n fasta_out_fp = output_dir + basename(fasta_fp).split('.')[0] +\\\r\n \"_filtered.fasta\"\r\n\r\n qual_out_fp = output_dir + basename(qual_fp).split('.')[0] +\\\r\n \"_filtered.qual\"\r\n\r\n return fasta_out_fp, qual_out_fp", "def data_filename_create(movie_filename):\n path, filename = os.path.split(movie_filename)\n filename_stub, ext = os.path.splitext(filename)\n if os.path.splitext(movie_filename)[1] in ['.png','.jpg','.tiff','.JPG']: \n data_filename = os.path.join(path, ''.join([letter for letter in filename_stub if letter.isalpha()]) + '.hdf5')\n else:\n data_filename = os.path.join(path, filename_stub + '.hdf5')\n return data_filename", "def extract_file_with_ext(self, ext, newname=None):\n for filename in self.project.namelist():\n if filename.endswith('.' 
+ ext):\n self.project.extract(filename)\n if newname is not None:\n if os.path.exists(newname):\n os.remove(newname)\n copyfile(filename, newname)\n os.remove(filename)\n return", "def assemble_files():\r\n path = os.path.expanduser(sys.argv[1])\r\n if os.path.isdir(path):\r\n file_root = path + \"/\"\r\n for file in os.listdir(path):\r\n filename = os.path.splitext(file)\r\n if filename[1] == \".asm\":\r\n hack_file_name = file_root + filename[0] + \".hack\"\r\n assemble_file(file_root + file, hack_file_name)\r\n else:\r\n filename = os.path.splitext(path)\r\n hack_file_name = filename[0] + \".hack\"\r\n assemble_file(path, hack_file_name)", "def create_final_name(fname, date, fc_id, sample_name):\n \n # Split the file name according to CASAVA convention\n m = re.match(r'(\\S+?)_(?:[ACGTN\\-]+|NoIndex|Undetermined)_L0*(\\d+)_R(\\d)_\\d+\\.fastq(.*)', fname)\n if m is not None:\n lane = m.group(2)\n read = m.group(3)\n ext = m.group(4)\n else:\n # Split the file name according to bcbb convention\n m = re.match(r'(\\d+)_(\\d+)_([^_]+)_(\\d+)_(?:nophix_)?(\\d+)_fastq.txt(.*)', fname)\n if m is None:\n raise ValueError(\"Could not parse file name {:s} correctly!\".format(fname))\n lane = m.group(1)\n read = m.group(5)\n ext = m.group(6)\n \n dest_file_name = \"{:s}.fastq{:s}\".format(\"_\".join([lane,\n date,\n fc_id,\n sample_name,\n read]),\n ext.replace('..','.'))\n return dest_file_name", "def generate_filename(extension, with_path=True, base_folder=None):\n name = get_md5(str(uuid4()))\n # if not extension:\n # extension = get_file_extension()\n if base_folder is not None:\n base_folder = \"%s/\" % base_folder.rstrip(\"/\")\n else:\n base_folder = \"\"\n\n if with_path:\n return \"%s%s/%s/%s/%s.%s\" % (base_folder, name[0], name[1], name[2], name, extension)\n else:\n return \"%s%s.%s\" % (base_folder, name, extension)" ]
[ "0.66169494", "0.6289752", "0.62640893", "0.62422895", "0.6218007", "0.62115467", "0.61658305", "0.61075085", "0.6085868", "0.60749346", "0.60616755", "0.60532296", "0.60232437", "0.5990847", "0.5986506", "0.59429944", "0.5934633", "0.59320587", "0.5926914", "0.59200156", "0.5913195", "0.59061915", "0.58967596", "0.58967596", "0.5884681", "0.5870656", "0.58545", "0.5847096", "0.58267003", "0.5776346", "0.5748421", "0.5746539", "0.5744306", "0.5733245", "0.5727256", "0.57207364", "0.57069486", "0.5698577", "0.56976146", "0.56874514", "0.56806356", "0.56736946", "0.5672197", "0.56713104", "0.5655762", "0.56551063", "0.56548667", "0.56511825", "0.5649188", "0.5648664", "0.5646996", "0.5624065", "0.5624065", "0.56214064", "0.56152236", "0.56126255", "0.5612133", "0.5608955", "0.5600184", "0.5595946", "0.55886495", "0.5583875", "0.55814385", "0.55720717", "0.55711097", "0.55581886", "0.5555707", "0.5553036", "0.55504066", "0.55490744", "0.55488956", "0.55459034", "0.55380744", "0.5537478", "0.55333847", "0.5530478", "0.5507369", "0.5505945", "0.55039203", "0.55038536", "0.55029804", "0.5496823", "0.54926413", "0.54923975", "0.5490417", "0.5487431", "0.5487431", "0.5481491", "0.547925", "0.5475782", "0.5466882", "0.5464235", "0.5461853", "0.54542387", "0.54499865", "0.54434717", "0.54412675", "0.5440951", "0.54345036", "0.5431419", "0.54313207" ]
0.0
-1
Initialize your data structure here.
def __init__(self):
    self.queue = []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _init_empty(self):\n self._data = []", "def __init__(self):\n self._data = []", "def __init__(self):\n self._data = []", "def __init__(self):\n self._data = []", "def __init__(self):\n self._data = []", "def __init__(self):\n self._data = []", "def __init__(self):\n self._data = []", "def __init__(self):\n self.data = []\n self.record = {}", "def initialize(self):\n self.data = None\n self.errors = []", "def initialize(self):\n\t\tpass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\r\n pass", "def initialize(self):\r\n pass", "def __init__(self):\n self.structure = {}", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def __init__(self):\n\n # initialise the empty mappings dictionary\n self.data = {\n 'loan_id': None,\n 'product': None,\n 'origination_date': None,\n 'reversion_date': None,\n 'rate_term': None,\n 'loan_amount': None,\n 'initial_rate': None,\n 'reversion_rate': None,\n 'term': None,\n 'interest_only_amount': None,\n 'upfront_fees': None,\n 'upfront_costs': None,\n 'entity_eir': None\n }", "def __init__(self):\n self._data = [] # non-public underlying Python list as storage", "def __init__(self):\n self._distance_data = []\n self._location_data = []\n self._package_data = []", "def __init__(self):\n\t\tsuper().__init__()\n\t\t\n\t\t# Typically a list of data here\n\t\t# Typically a dict of header keys and values here", "def __init__(self):\n self.data = []", "def __init__(self):\n self.data = []", "def __init__(self):\n self.data = []\n self.idx = {}", "def __init__(self):\n self._dict = {}\n self._array = []", "def init(self) -> None:", "def __init__(self):\n self.relation = ''\n self.attributes = []\n self.attribute_types = dict()\n self.attribute_data = dict()\n self.comment = []\n self.data = []\n pass", "def _init(self):\n pass", "def initialize(self):\n return", "def initialize(self):\n self.muondEdx = []\n self.muondNdx = []\n self.muonmomentum = []\n self.piondEdx = []\n self.piondNdx = []\n self.pionmomentum = []\n self.kaondEdx = []\n self.kaondNdx = []\n self.kaonmomentum = []\n self.protdEdx = []\n self.protdNdx = []\n self.protmomentum = []\n self.elecdEdx = []\n self.elecdNdx = []\n self.elecmomentum = []", "def initialize(self) -> None:\n pass", "def initialize(self):\n pass # pragma: no cover", "def __init__(self):\n self.d = {}\n self.l = []", "def _initialize(self):\n pass", "def _initialize(self):\n pass", "def _initialize(self):\n pass", "def initialize(self):\n raise NotImplementedError", "def initialize(self):\n raise NotImplementedError", "def initialize(self):\n raise NotImplementedError", "def initialise(self):", "def __init__(self):\n self.l = {}\n self.s = {}", "def __init__(self):\n self._data=[]", "def __init__(self, initial_data=[]):\n hdict.__init__(self)\n\n for elt in initial_data:\n self.add(elt)", "def initialize(self):", "def initialize(self):", "def initialize(self):", "def initialize(self):", "def init(self):\n raise NotImplementedError", "def init(self):\n raise NotImplementedError", "def init(self) -> None:\n ...", "def initialize(self):\n\n db = dict()\n\n db['meta'] = Meta(None)\n db['race'] = Race(None, None, None, None, None)\n db['track'] = Track(None, None)\n db['classes'] = set([])\n db['teams'] = set([])\n db['drivers'] = 
set([])\n\n self.db = db", "def __init__(self):\n self.keys = []\n self.values = []", "def __init__(self):\n self.d = {}\n self.h = []", "def memb_init(self):\n self.initialize()", "def __init__(self):\n self.dic={}\n self.data=[]", "def _init_data(self) -> None:\n self.dtype = dict()\n self.shape = dict()\n self.size = dict()\n self.attrs = dict()\n self.data_ptr = dict()\n\n if self.mode == 'r':\n for k in self.fp.keys():\n self.dtype[k] = self.fp[k].dtype\n self.shape[k] = self.fp[k].shape\n self.size[k] = self.fp[k].shape[0]\n self.data_ptr[k] = 0", "def __init__(self):\n dict.__init__(self)\n self.datatype = None", "def __init__(self, data={}):\n self._update_(data)", "def _init_data(self, data):\n assert type(data) is dict, \"dict expected: %r\" % type(data)\n assert len(data) is 1, \"size of dict should be 1: %r\" % len(data)\n self._name = data.keys()[0]\n self._data = np.asarray(data[self._name])\n self._set = True", "def __init__(self):\n self._data = PositionalList() # list of _Item instances", "def __init__(self):\n self.d = {}", "def __init__(self):\n self.d = {}", "def __init__(self):\n self.d = {}", "def __init__(self):\n self.d = {}", "def _init(self):\n raise NotImplementedError", "def __init__(self):\r\n self.indices = {}\r\n self.data = []\r\n self.len = 0", "def __init__(self):\n self.metadata = {}\n self.geometry = {'array': None, \n 'geom': None, \n 'wkt': None}", "def __init__(self, data):\n self.data = data\n return", "def __init__(self):\n self.root = [None, dict(), False] # val, sons, end-able", "def _initialize_data(self):\n self.unique_id = 123\n\n self.gas_valve_open = False\n self.buffer_valve_open = False\n self.pump_valve_open = False\n\n self.operatingmode = 0\n\n self.sample_pressure_high_limit = 100\n self.sample_pressure_low_limit = 10\n self.sample_pressure = 0\n\n self.error = 0\n\n self.buffer_pressure_high = True", "def __init__(self):\n self.data = {}\n self.refresh()", "def initialize(self):\n self.voteskips = []\n self.response = {}\n self.route = {}\n self.userlist = []\n self.poll = []\n self.media = []\n self.init = False\n self.question = None\n self.jumble = None\n self.imgur = None", "def _initialize_data(self):\n self.reset_count = 0\n self._idn_no_firmware = \"KEPCO,BOP 50-20,E1234,\"\n self._firmware = 2.6\n self._init_data()", "def init(self):", "def init(self):", "def __init__(self):\n self.x = {}\n self.len = 0\n self.annotations = {}", "def __init__(self):\n # Dict of minecraft object in form of \"dict[id] = name\"\n self.data_values = dict()\n self.parser = self.setup_parser()", "def initialize(self):\n self.keys = [None] * BUCKET_SIZE\n self.values = [None] * BUCKET_SIZE", "def __init__(self, data: dict = {}):\n pass", "def initialize(self): \r\n pass", "def __init__(self):\n self._data = PositionalList() # list of Item instances", "def __init__(self):\n self.table = {}\n self.ls = []", "def initialize(self):\r\n self.bucket_array.initialize()", "def initialise(self):\r\n return", "def initialise(self):\r\n return", "def __init__(self, data):\n self.data = data", "def __init__(self, data):\n self.data = data", "def __init__(self, data):\n self.data = data", "def __init__(self, data):\n self.data = data", "def __init__(self, data=None):\n self.data = data", "def __init__(self):\n self._data = set()", "def __init__(self):\n self.key_dict = {}\n self.value_dict = {}\n self.head, self.last = None, None" ]
[ "0.7765608", "0.7645274", "0.7645274", "0.7645274", "0.7645274", "0.7645274", "0.7645274", "0.7595176", "0.75853467", "0.7558298", "0.7530608", "0.7530608", "0.7530608", "0.7530608", "0.7530608", "0.74971247", "0.74971247", "0.7478105", "0.7477832", "0.7477832", "0.7477832", "0.7477832", "0.7477832", "0.7477832", "0.7477832", "0.7477832", "0.744441", "0.7426435", "0.74157697", "0.74143684", "0.73898417", "0.73898417", "0.7389144", "0.7387738", "0.7383786", "0.7324126", "0.731669", "0.73065454", "0.729799", "0.7287291", "0.7271846", "0.725931", "0.72522944", "0.72522944", "0.72522944", "0.72494334", "0.72494334", "0.72494334", "0.7243696", "0.7239823", "0.72368526", "0.7208368", "0.72016877", "0.72016877", "0.72016877", "0.72016877", "0.71985286", "0.71985286", "0.7195241", "0.71885264", "0.71857035", "0.7176733", "0.7160906", "0.7159325", "0.7149614", "0.71474445", "0.7135992", "0.7128525", "0.7123646", "0.71142536", "0.71142536", "0.71142536", "0.71142536", "0.71109176", "0.71011794", "0.7099338", "0.708543", "0.70676583", "0.70648897", "0.70618606", "0.70606047", "0.7059818", "0.7039291", "0.7039291", "0.7035077", "0.70237756", "0.70142615", "0.6999669", "0.69952625", "0.6994778", "0.6987417", "0.6981039", "0.6976582", "0.6976582", "0.6976431", "0.6976431", "0.6976431", "0.6976431", "0.69684774", "0.69561034", "0.69411176" ]
0.0
-1
Push element x to the back of queue.
def enqueue(self, x):
    self.queue.append(x)
    return self.queue
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def push(self, x): # time O(n)\n self.queue.append(x)\n for _ in range(len(self.queue)-1):\n self.queue.append(self.queue.popleft())", "def push(self, x: int) -> None:\n self.queue.append(x)\n for _ in range(len(self.queue)-1):\n self.queue.append(self.queue.popleft())", "def push(self, x: int) -> None:\n self.q.append(x)\n for _ in range(len(self.q) - 1):\n self.q.append(self.q.popleft())", "def push(self, x: int) -> None:\n self.q.append(x)\n for _ in range(len(self.q) - 1):\n self.q.append(self.q.pop(0))", "def push(self, x):\n self.queue.insert(len(self.queue), x)", "def push(self, x: int) -> None:\n self.queue.put(x)\n for _ in range(self.queue.qsize()-1):\n tmp = self.queue.get()\n self.queue.put(tmp)", "def push(self, x):\r\n self.queue.append(x)", "def push(self, x):\n self.queue.insert(0, x)", "def push(self, x):\n self.queue.insert(0, x)", "def push(self, x):\r\n self.queue.append(x)\r\n self.topele = x\r\n self.num+=1", "def push(self, x):\n self.queue1.append(x)", "def push(self, x: int) -> None:\n #例如:q = [1,2,3,4,5,x],反转后q为[x,1,2,3,4,5],即将X添加为栈顶元素\n self.q.append(x)\n q_length = len(self.q)\n while q_length > 1:\n self.q.append(self.q.pop(0)) #反转前n-1个元素,栈顶元素始终保留在队首\n q_length -= 1", "def push(self, x):\r\n # 队列的push操作\r\n self.stack.append(x)", "def push(self, x: int) -> None:\n self.que.append(x)", "def push(self, x: int) -> None:\n self.queue.append(x)", "def push(self, x: int) -> None:\n self.top += 1\n self.q1.append(x)", "def push(self, x):\n self.queue[self.tag].put(x)", "def push(self, x):\n self.values.append(x)\n if len(self.values) == 1:\n self.front = x", "def push(self, x: int) -> None:\n self.q1.append(x)\n self.size += 1", "def push(self, x: int) -> None:\n self.data.put(x)\n for i in range(self.data.qsize() - 1):\n self.data.put(self.data.get())", "def push(self, x: int) -> None:\n _deque_len = len(self._deque)\n self._deque.append(x)\n for i in range(_deque_len):\n self._deque.append(self._deque.pop(0))", "def push(self, x: int) -> None:\n self.q1.append(x)\n self.topEle = x\n self.n += 1", "def push(self, x):\n # enqueue new element to q2\n self.q2.enqueue(x)\n # dequeue from q1 and enqueue in q2\n\n while self.q1.qlist:\n self.q2.enqueue(self.q1.dequeue())\n # swap q1 and q2\n temp = self.q1.qlist\n self.q1.qlist = self.q2.qlist\n self.q2.qlist = temp", "def push(self, x: int) -> None:\n self.queue1.append(x)", "def push(self, x: int) -> None:\n self.push_queue.append(x)", "def push(self, x: int) -> None:\n self.q1.append(x)", "def push(self, x):\n self.a.append(x)\n print(self.a)\n # 将末尾元素移至 first\n self.a[0], self.a[1:] = self.a[-1], self.a[:-1]\n print(self.a)", "def push(self, x: int) -> None:\n self.queue.append(x)\n print(self.queue)", "def push(self, x: int) -> None:\n queue = Stack()\n queue.push(x)\n\n # reverse the order of stack\n reversed = Stack()\n while not self._stack.empty():\n reversed.push(self._stack.pop())\n\n # push reversed order of stack to another stack\n while not reversed.empty():\n queue.push(reversed.pop())\n\n self._stack = queue", "def push(self, x):\n if len(self.stack1) == 0:\n self.front = x\n self.stack1.append(x)", "def push(self, x: int) -> None:\n if not self.q1:\n self.qpush(self.q1,x)\n else:\n q2=[]\n while self.q1:\n self.qpush(q2,self.qpop(self.q1))\n self.qpush(self.q1,x)\n while q2:\n self.qpush(self.q1,self.qpop(q2))\n print(self.q1)", "def push(self, x):", "def push(self, x):\n otherStateIndex = 1 - self.activeStackIndex\n while len(self.stacks[self.activeStackIndex]) > 0:\n lastElement = 
self.stacks[self.activeStackIndex].pop()\n self.stacks[otherStateIndex].append(lastElement)\n self.stacks[otherStateIndex].append(x)\n while len(self.stacks[otherStateIndex]) > 0:\n lastElement = self.stacks[otherStateIndex].pop()\n self.stacks[self.activeStackIndex].append(lastElement)", "def push(self, x):\n self.elements.append(x)\n self._heapify()", "def push(self, x):\n self.mystack1.append(x)\n # print(self.mystack1)", "def push(self, x):\r\n self.pushStack.append(x)", "def push(self, x: 'int') -> 'None':\n if self.empty():\n self.que_one.append(x)\n return\n \n if self.que_one:\n self.que_one.append(x)\n \n if self.que_two:\n self.que_two.append(x)", "def enqueue(self, x):\n self.s1.push(x)", "def push(self, x):\n self.a.append(x)", "def push(self, x):\n self.a.append(x)", "def push(self, x):\r\n if self.a:\r\n self.a.append(x)\r\n else:\r\n self.b.append(x)", "def push(self, x):\n self.stack.append(x)", "def push(self, x):\n self.stack.append(x)", "def push(self, x):\n if self.top == self.size - 1:\n print(\"Stack Overflow\")\n else:\n self.top += 1\n self.arr[self.top] = x", "def push(self, x):\n assert self._data is not None\n if len(self._data) < self._n:\n heapq.heappush(self._data, x)\n else:\n heapq.heappushpop(self._data, x)", "def put(self, x):\n if self._num_serial > 0 or len(self._threads) == 0:\n self._num_serial -= 1\n out = self._parallizable.forward_backward(x)\n self._out_queue.put(out)\n else:\n self._in_queue.put(x)", "def push(self, x: int) -> None:\n if len(self.a) != 0:\n self.a.append(x)\n else:\n self.b.append(x)\n self.topvalue = x", "def push(self, x):\n while self.s1:\n self.s2.append(self.s1.pop())\n self.s1.append(x)\n while self.s2:\n self.s1.append(self.s2.pop())", "def push(self, x):\n heapq.heappush(self.array, x)", "def push(self, x):\n \n self._s.append(x)", "def push(self, x):\n self.value.append(x)", "def push(self, x):\n self.stack1.append(x)", "def push(self, x):\n assert x not in self.rank\n i = len(self.heap)\n self.heap.append(x) # add a new leaf\n self.rank[x] = i\n self.up(i) # maintain heap order", "def push(self, x):\n assert x not in self.rank\n i = len(self.heap)\n self.heap.append(x) # add a new leaf\n self.rank[x] = i\n self.up(i) # maintain heap order", "def push(self, x):\n self.list_x.append(x)", "def push(self, x):\r\n if self.point_to_head.chi == None:\r\n self.point_to_head.chi = MyQueueNode(x)\r\n self.point_to_tail.chi = self.point_to_head.chi\r\n else:\r\n self.point_to_tail.chi.chi = MyQueueNode(x)\r\n self.point_to_tail.chi = self.point_to_tail.chi.chi", "def push(self, x: int) -> None:\n \n self.elements.append(x)", "def push(self, x: int) -> None:\n self.sk1.push(x)", "def push(self, x):\r\n self.s1.append(x)", "def push(self, x: int) -> None:\r\n self.items.append(x)", "def push(self, x: int) -> None:\n self.stack.append(x)", "def push(self, x: int) -> None:\n if not self.stack:\n self.head = x\n self.stack.append(x)", "def push(self, x):\n self.input_stack.append(x)", "def enqueue(Q, x):\n # Q.append(x)\n Q.put_nowait(x)\n if debug: \n print(\"enqueue\", x, \":\", end=\" \")\n show_queue(Q)\n return Q", "def push(self, x: int) -> None:\n self.stack.insert(0,x)", "def push(self, x: int) -> None:\n self.stackIn.append(x)", "def push(self, x):\n self.List_store.append(x)", "def push(self, x: int) -> None:\n self.stack.append(x)\n return self.stack", "def push(self, x: int) -> None:\n if not self.s1:\n self.first = x\n self.s1.append(x)", "def push(self, x: int) -> None:\n self.inStack.append(x)", "def push(self, x: int) -> 
None:\n self.data.append(x)", "def push(self, x: int) -> None:\n self.input_stack.append(x)", "def push(self, x: int) -> None:\n self.input_stack.append(x)", "def push(self, x: int) -> None:\n self.sk1[self.sk1_len] = x\n self.sk1_len += 1", "def push(self, x: int) -> None:\n self._input_stack.append(x)", "def push(self, x: int) -> None:\n self.data.push(x)", "def push(self, x: int) -> None:\n\t\twhile self.s1:\n\t\t\tself.s2.append(self.s1.pop())\n\t\tm = self.s1.append(x)\n\t\twhile self.s2:\n\t\t\tself.s1.append(self.s2.pop())\n\t\treturn m", "def push_back(self, e):\n if(self.size_ >= self.capacity_):#If our Deque is full we need to resize it first\n self.resize_back()\n self.back_+=1\n self.data_[self.back_]= e\n self.size_+=1\n #print(\"case 1\")\n elif (self.front_ == -1 and self.size_==0):#If the Deque is intially empty then when we add the first item that will be both the front and the back \n self.front_= 0\n self.back_=0\n self.data_[self.back_]= e\n self.size_+=1\n else:#The Back is not at the first index(possibly somewhere in between) and if we push back it we have to go up by one to move to the new back\n self.back_+=1\n self.data_[self.back_] =e \n self.size_+=1", "def push(self, val):\r\n return self.deque.append(val)", "def move(self):\n active_item = self.stack.pop()\n self.backlog.put(active_item)", "def push(self, elt):\n if len(self._queue) == 0: self._queue.append(elt); return\n for i in range(len(self._queue)):\n if self._queue[i].priority < elt.priority:\n self._queue.insert(i, elt)\n return\n #if we get here, elt is lower than all the other procs in the queue, so\n #just append it\n self._queue.append(elt)", "def push(self, item):\n self.stack.append(item)\n\n if not self.max or item >= self.max[-1]: # add if empty or if greater\n self.max.append(item)", "def push(self, element):\n if not self.full():\n heapq.heappush(self.queue, element)\n self.size += 1\n return True\n else:\n if element >= self.queue[0]:\n heapq.heapreplace(self.queue, element)\n return True\n else:\n return False", "def push(self, value):\n self.last = self.current\n self.current = np.array(value)", "def enqueue(self, val):\n self.stack1.push(val)", "def push(self,element):\n self.stack.append(element)\n \n if self.maxx == []:\n self.maxx.append(element)\n else:\n #LessThan or equalTo caters for a repetition of maximum element.\n #This would ensure that the maximum element is always retrieved\n if self.maxx[-1] <= element:\n self.maxx.append(element)", "def enqueue(self, item):\n while len(self._stack1) > 0:\n self._stack2.push(self._stack1.pop())\n self._stack2.push(item)", "def push(self, x: int) -> None:\n new_min = x\n if self.minStack:\n last_min = self.minStack[-1]\n new_min = min(last_min, x)\n \n self.minStack.append(new_min)\n self.stack.append(x)", "def push(self, item):\n\t\tself.top+=1;\n\t\tself.arr.insert(self.top, item);", "def push(num):\r\n i = len(stack) - 1\r\n while i:\r\n stack[i] = stack[i - 1]\r\n i -= 1\r\n stack[0] = float(num)", "def push(self, item: tuple):\n self.__heap.append(item)\n self.__sift_up(self.__len__() - 1)", "def bypass_queue(self, name):\n # self.queue = [name] + self.queue\n # self.queue.insert(0, name)\n\n # self.lst = [name] + self.lst # This person is brought to the front of the queue\n self.lst.insert(0, name) #Not constant time as the pointer is moved for all the members of the queue, 0(n)\n print(f\"{name} has bypassed the queue\")", "def push(self, item):\n self.heap.append(self.m * item)\n self._sift_up()", "def AdvanceQueue(self):\r\n 
self.data.pop(0)\r\n return", "def push(self, value):\n if self.please_stop and not self.allow_add_after_close:\n Log.error(\"Do not push to closed queue\")\n\n with self.lock:\n self._wait_for_queue_space()\n if not self.please_stop:\n self.queue.appendleft(value)\n return self", "def push(self, element):\n self.the_stack.append(element)", "def put(self, element):\n self.heap.append(element)\n # sift up the element append before\n self.sift_up(self.size() - 1)", "def push_back(self, e):\n # initialize new node with data e\n newNode = Node(e)\n # if deque is empty\n if self.size == 0:\n # set both front and back to new node\n self.front = self.back = newNode\n # if deque is not empty\n else:\n # set new node's next as the previous back, set previous back's\n # prior to the new node, and set the back of the deque to new node\n newNode.next = self.back\n self.back.prior = newNode\n self.back = newNode\n # increment deque size\n self.size += 1", "def push(self, item):\r\n self.stack.insert(0, item)", "def push(self, element):\n self.__stack.append(element)\n\n if len(self.__stack) == 1:\n self.__max_values.append(element)\n return\n\n if element > self.__max_values[-1]:\n self.__max_values.append(element)\n else:\n self.__max_values.append(self.__max_values[-1])" ]
[ "0.8549472", "0.83887035", "0.8322536", "0.8316233", "0.8296685", "0.8249803", "0.82164633", "0.81946427", "0.81946427", "0.8059892", "0.79438967", "0.7901358", "0.7855948", "0.78556633", "0.78169864", "0.77913237", "0.77355105", "0.76975566", "0.767749", "0.76648164", "0.7645645", "0.76412225", "0.76337427", "0.7621006", "0.76166123", "0.7593764", "0.75858164", "0.75630456", "0.75073016", "0.7483144", "0.7471622", "0.73952186", "0.738198", "0.738082", "0.73702794", "0.7334488", "0.7329917", "0.7327278", "0.7327092", "0.7327092", "0.7318719", "0.7270392", "0.7270392", "0.7262178", "0.72535866", "0.7241293", "0.7215877", "0.7180181", "0.7179436", "0.7174942", "0.7173351", "0.7151509", "0.71267927", "0.71267927", "0.71220046", "0.71171737", "0.71170014", "0.7116122", "0.7065785", "0.7054267", "0.70378476", "0.69980085", "0.6978364", "0.69747704", "0.6963676", "0.6949234", "0.69377387", "0.69319206", "0.69234455", "0.68903655", "0.68758386", "0.68550295", "0.68550295", "0.68291897", "0.6804329", "0.68037033", "0.67595303", "0.67591727", "0.67428035", "0.67291635", "0.67285174", "0.6712488", "0.6684668", "0.6590031", "0.65889347", "0.65855664", "0.65464777", "0.6525118", "0.644089", "0.64334315", "0.6425943", "0.638634", "0.6385419", "0.6318784", "0.6311439", "0.6309498", "0.63006836", "0.6297228", "0.6291015", "0.62809277" ]
0.7416999
31
Removes the element from in front of queue and returns that element.
def dequeue(self):
    return self.queue.pop(0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pop(self):\n self.queue.insert(len(self.queue), self.queue[0])\n self.queue.remove(self.queue[0])\n return self.queue.pop()", "def front(self):\n return self.queue[0] if not self.empty() else None", "def dequeue(self):\n if not self.front:\n raise AttributeError(\"Can't dequeue from an empty queue\")\n\n removed = self.front\n self.front = self.front.next\n return removed.value\n # try:\n # removed = self.front\n # self.front = self.front.next\n # return removed.value\n # except AttributeError:\n # return \"Can't dequeue from an empty queue\"", "def pop(self): # O(1)\n if not self.queue:\n return None\n return self.queue.popleft()", "def getFront(self):\n\t\tfront = self.queue[self.front]\n\t\treturn front\n\t\tpass", "def pop(self):\r\n return self.queue.pop(0)", "def pop(self):\n if self._size > 0:\n elem = self.first.data\n self.first = self.first.next\n self._size = self._size - 1\n return elem\n \n raise IndexError('The queue is empty! ')", "def remove_front(self):\n\n if self.items:\n return self.items.pop(0)\n return None", "def remove(self):\n return self.queue.popleft()", "def pop(self):\n return self.queue.pop(0)", "def dequeue(self):\n return self.the_queue.pop(0)", "def pop(self):\n while not self.queue[self.tag].empty():\n temp = self.queue[self.tag].get()\n if not self.queue[self.tag].empty():\n self.queue[1 - self.tag].put(temp)\n else:\n self.tag = 1 - self.tag\n return temp", "def dequeue(self):\n return self.queue.popleft()", "def front(queue):\n if empty_queue(queue):\n raise IndexError(\"Queue is empty!\")\n else:\n return queue.front.value", "def pop(self):\n if not self.empty():\n self.size -= 1\n return heapq.heappop(self.queue)\n else:\n return None", "def remove(self) -> T:\n if not self.is_empty():\n return self._queue.pop()", "def dequeue_front(self):\n try:\n return self._items.pop(0)\n except:\n raise IndexError('The deque is empty')", "def back(queue):\n if empty_queue(queue):\n raise IndexError(\"Queue is empty!\")\n else:\n return queue.back.value", "def dequeue(self):\n return self.queue.pop(0)", "def pop(self) -> int:\n cur = None\n if(not self.empty()):\n cur = self.queue[0] \n self.queue = self.queue[1:] \n return cur", "def dequeue(self):\n try:\n temp = self.front\n self.front = self.front.next\n temp.next = None\n return temp.value\n except Exception:\n return \"the queue is empty\"", "def remove(self):\n if len(self.line) == 0:\n raise ValueError, \"No more elements in the queue to remove.\"\n\n frontOfLine = self.line[-1]\n self.line = self.line[:-1]\n return frontOfLine", "def peek(self):\n if self.isEmpty(): \n raise Exception(\"Queue underflow\")\n return self._q[self._first]", "def _dequeue(self):\n return self._queue.popleft()", "def pop(self):\n if not self.empty():\n return self.queue.pop()\n return None", "def dequeue(self):\n\n # del self._queue[0]\n return self._queue.pop(0)", "def dequeue(queue):\n item = front(queue)\n queue.front = queue.front.next\n if empty_queue(queue):\n queue.back = None\n\n queue.size = queue.size - 1\n\n return item", "def pop(self):\n if self.isEmpty():\n raise KeyError(\"The queue is empty.\")\n oldItem = self._front.data\n self._front = self._front.next\n if self._front is None:\n self._rear = None\n self._size -= 1\n return oldItem", "def pop(self):\n if len(self.priority_queue.values()):\n nextkey = 0\n while nextkey not in self.priority_queue:\n nextkey += 1\n up_next = self.priority_queue[nextkey][0]\n self.priority_queue[nextkey] = self.priority_queue[nextkey][1:]\n return up_next\n else:\n raise 
IndexError(\"There's nothing in your queue\")", "def remove_front(self):\n\n if self.front.next is None:\n temp = self.front\n self.front = None\n return temp.data\n\n temp = self.front\n self.front = self.front.next\n return temp.data", "def dequeue(self):\n try:\n return self._container.pop()\n except IndexError:\n raise IndexError(\"Cannot dequeue from empty queue.\")", "def peek(self):\n if not self.empty():\n return self.queue[-1]\n return None", "def peek(self):\n if self.is_empty():\n raise ValueError('Queue underflow')\n return self.first.item", "def dequeue(self):\n\n item = self.__items__.pop(0)\n return item", "def Front(self):\r\n if (len(self.queue) >= 1):\r\n return self.queue[0]\r\n else:\r\n return -1", "def pop_front(self):\n if self.is_empty():\n return None\n val = self.head.value\n # Update head and size\n self.head = self.head.next_node\n self.size -= 1\n # If the only node was removed, also need to update tail\n if self.is_empty():\n self.tail = None\n return val", "def removeFront(self):\n if self._size == 0:\n raise AttributeError(\"Cannot removeFront from an empty Deque\")\n \n temp = self._front\n self._front = self._front.getPrevious()\n if self._size == 1:\n # removing only item which is the rear as well as the front item\n self._rear = None\n else:\n self._front.setNext(None)\n self._size -= 1\n \n return temp.getData()", "def dequeue(self):\n temp = self.front\n self.front = self.front.getPtr()\n return temp.getData()", "def top(self): # O(1)\n if not self.queue:\n return None\n return self.queue[0]", "def pop(self):\n return heappop(self.priority_queue)[1]", "def Front(self):\n if self.isEmpty():\n return -1\n else:\n return self.queue[0]", "def Front(self):\n if self.isEmpty():\n return -1\n else:\n return self.queue[0]", "def peek(self):\r\n if self.size():\r\n return self.queue[0]\r\n else:\r\n return None", "def dequeue(self):\n if self.items:\n return self.items.pop()\n return None", "def pop(self):\r\n try:\r\n return self.pop_from_deque()\r\n except IndexError:\r\n return None", "def dequeue(self): # total O(1)\n topItem = self._queue[self._start] #O(1)\n self._queue[self._start] = None #O(1)\n self._start = (self._start+1)% self._capacity #O(1)\n self._size -= 1 #O(1)\n return topItem #O(1)", "def pop(self):\n self.move()\n return self.queue2.pop()", "def dequeue(self):\n if len(self) == 1:\n self.tail = None\n return self.pop()", "def pop(self):\n return super().remove_item_from_front()", "def dequeue(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n answer = self._head._element\n self._head = self._head._next\n self._size -= 1\n if self.is_empty(): # special case as queue is empty\n self._tail = None # removed head had been the tail\n return answer", "def peek(self):\r\n return self.queue[0]", "def peek(self):\r\n return self.queue[0]", "def dequeue(self):\n if self.is_empty():\n raise Empty(\"Queue underflow.\")\n element = self._head._element\n self._head = self._head._next\n self._size -= 1\n if self.is_empty():\n self._tail = None\n return element", "def remove(self, index):\n if index < 0 or index >= len(self):\n raise AttributeError(\"i must be >= 0 and < size of queue\")\n if index == 0:\n oldItem = self._front.data\n self._front = self._front.next\n else:\n probe = self._front\n while index > 1:\n probe = probe.next\n index -= 1\n oldItem = probe.next.data\n probe.next = probe.next.next\n self._size -= 1\n if self.isEmpty():\n self._rear = None\n return oldItem", "def pop_front(self):\n # set temp to deque's front for return\n 
temp = self.front\n # if deque is empty\n if self.size == 0:\n # raise IndexError\n raise IndexError()\n # if deque has one element\n elif self.size == 1:\n # empty the deque completely\n self.back = None\n self.front = None\n self.size -= 1\n # if the deque has more than one element\n else:\n # set front to front's prior node, set that node's next to\n # none, and decrement deque's size by 1\n self.front = self.front.prior\n self.front.next = None\n self.size -= 1\n # return previous front node's data\n return temp.data", "def first(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n return self._head._element # front aligned with head of list", "def dequeue(self):\n if self.is_empty():\n raise ValueError('stack is empty')\n else:\n val = self.list.head.data\n self.list.delete(val)\n return val", "def dequeue(self):\n return self.__queue.pop()", "def pop(self):\n if len(self) == 0:\n if self.none_for_empty:\n return None\n raise ValueError(\"Buffer is empty\")\n pt = self.buf[self.front]\n if self.rear == self.front:\n self.rear = None\n else:\n self.front = self.length - 1 if self.front == 0 else self.front - 1\n return pt", "def dequeue(self):\n if self.is_empty():\n raise Empty(\"Queue underflow.\")\n head = self._tail._next\n element = head._element\n self._size -= 1\n if self.is_empty():\n self._tail = None\n else:\n self._tail._next = head._next\n return element", "def peek(self):\n return self.the_queue[0]", "def dequeue(self):\n\t\tif self.is_empty():\n\t\t\traise Empty('Queue is empty')\n\t\tanswer = self._head._element\n\t\tself._head = self._head._next\n\t\tself._size -= 1\n\t\tif self.is_empty():\n\t\t\tself._tail = None\n\t\treturn answer", "def Front(self):\n if self.isEmpty():\n return -1\n else:\n return self.queue[self.front]", "def pop_front(self):\n if (self._size == 0):\n return None\n\n output_value = self._head.value\n\n self._head = self._head.next\n self._head.prev = None\n self._size -= 1\n\n # Edge case, list is now empty\n if (self._size == 0):\n self._tail = None\n\n return output_value", "def dequeue(self):\n if self.size() < 1:\n raise ValueError('Priority queue is empty and has no front item')\n else:\n # TODO: Remove and return min item from heap, if any\n ...", "def front(self):\n if self.is_empty():\n raise Exception(\"Queue is empty !!! 
Please add data to the Queue :) \")\n else:\n return self.data[0]", "def peek(self):\n if not self.front:\n raise AttributeError(\"Can't peek from an empty queue\")\n return self.front.value\n\n # try:\n # return self.front.value\n # except AttributeError:\n # return \"Can't peek front from an empty queue\"", "def dequeue(self):\n\n temp = self.front\n self.front = self.front.next\n return temp.data", "def peek(self):\n return self.queue[0]", "def pop(self):\n return self.q1.dequeue()", "def first(self):\n\t\tif self.is_empty():\n\t\t\traise Empty('Queue is empty')\n\t\treturn self._head._element", "def dequeue(self):\r\n if self.is_empty():\r\n raise Empty(\"Queue is empty\")\r\n answer = self._head._element\r\n self._head = self._head._next\r\n self._size -= 1\r\n if self.is_empty():\r\n self._tail = None\r\n return answer", "def Front(self):\n if self.count == 0:\n return -1\n return self.queue[self.headIndex]", "def top(self):\n while not self.queue[self.tag].empty():\n temp = self.queue[self.tag].get()\n self.queue[1 - self.tag].put(temp)\n self.tag = 1 - self.tag\n return temp", "def first(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n return self._data[self._front]", "def dequeue(self):\n if self.is_empty():\n raise Empty(\"Queue is empty\")\n answer = self._data[self._front]\n self._data[self._front]\n self._data = (self._front+1)%len(self._data)\n self._size-=1\n return answer", "def pop(self) -> int:\n last = self.queue.popleft()\n while self.queue:\n self.aux_queue.append(last)\n last = self.queue.popleft()\n self.queue, self.aux_queue = self.aux_queue, self.queue\n return last", "def popleft(self, timeout=None):\n item = super(ExclusiveQueue, self).popleft(timeout)\n try:\n self.remove(item)\n except ValueError:\n pass\n return item", "def first(self):\n if self.is_empty():\n raise Empty(\"Queue undeflow.\")\n return self._head._element", "def first(self):\r\n if self.is_empty():\r\n raise Empty(\"Queue is empty\")\r\n return self._head._element", "def pop(self) -> T:\n while self.priority_queue:\n _, _, (item,) = heapq.heappop(self.priority_queue)\n if item is not None:\n del self.entry_finder[item] # type: ignore\n return cast(T, item)\n raise KeyError('pop from an empty priority queue')", "def dequeue(self):\n\t\treturn self.items.pop()", "def dequeue(self): ##################### <-\n value = self.lst[0]\n self.lst = self.lst[1:]\n return value", "def dequeue(self):\n if self.is_empty():\n raise ValueError('Queue underflow')\n\n item = self.first.item\n self.first = self.first.next_node\n self.N -= 1\n\n if self.is_empty():\n self.last = None # To avoid loitering\n\n return item", "def pop(self):\n\t\tif self.heap:\n\t\t\treturn heapq.heappop(self.heap)[1]\n\t\telse:\n\t\t\traise Exception('Trying to pop from empty PriorityQueue.')", "def top(self):\n if len(self) == 0:\n raise IndexError('top from empty queue')\n return self.lst[self.head]", "def dequeue(self) -> object:\n return self._data.pop(0)", "def dequeue(self) -> object:\n return self._data.pop(0)", "def _dequeue(self):\n node = self.head.next\n self._remove_node(node)\n return node", "def dequeue(self):\n return self.items.pop()", "def dequeue(self):\n return self.items.pop()", "def front(self):\n if self.size() < 1:\n return None\n else:\n # TODO: Return min item from heap, if any\n ...", "def Pop(self):\n # Alternativly use built-in pop()\n #return self.list.pop()\n top = self.list[len(self.list) - 1]\n self.list.remove(top)\n return top", "def pop(self):\n self._raise_if_empty()\n item = 
self._top.data\n self._top = self._top.next\n return item", "def pop(self):\n popped_node = self.top[0]\n self.top = self.top[1:]\n return popped_node", "def pop(self) -> int:\n # remove each element except for the last one in first queue\n while(len(self.q1) > 1):\n self.q2.append(self.q1.pop(0))\n\n pop_element = self.q1.pop(0) # element we're gonna return\n # switch queue1 and queue2\n self.q1, self.q2 = self.q2, self.q1\n return pop_element", "def pop(self):\n if self.size is 0:\n print \"Heap is empty\"\n return\n # Swap the top most element with the last one\n self._swap(0, self.size - 1)\n poppedKey = self.queue[self.size - 1]\n # Reduce the size of the queue\n self.size -= 1\n # Rebalance\n self._heapify(0)\n return poppedKey", "def pop(self) -> int:\n \n temp = deque()\n while self.elements:\n temp.append(self.elements.pop())\n \n front_el = temp.pop()\n \n while temp:\n self.elements.append(temp.pop())\n \n return front_el", "def top(self):\n self.move()\n return self.queue2[-1]", "def pop_front(self):\n if self.head is None:\n raise IndexError('pop_front from empty list')\n node = self.head \n if node.next_node is None:\n self.tail = None \n else: \n node.next_node.prev_node = None \n self.head = node.next_node\n return node.value" ]
[ "0.80894065", "0.7844948", "0.7837598", "0.7796653", "0.779448", "0.7678269", "0.76743865", "0.7673315", "0.7659757", "0.7606835", "0.75895596", "0.7558154", "0.7557646", "0.7546004", "0.7543471", "0.75360584", "0.7531314", "0.75289166", "0.752424", "0.7517075", "0.74940664", "0.74725115", "0.7451167", "0.7449855", "0.74477494", "0.74348253", "0.7429877", "0.74290013", "0.74018097", "0.73898137", "0.7378244", "0.7373218", "0.7353203", "0.73427486", "0.7338113", "0.7332868", "0.7325759", "0.73162013", "0.73104006", "0.73086196", "0.7301456", "0.7301456", "0.7298208", "0.72919524", "0.72891027", "0.72759646", "0.72743464", "0.72741973", "0.72615325", "0.7249343", "0.7238589", "0.7238589", "0.7234747", "0.7228341", "0.7226442", "0.7216501", "0.72116035", "0.72108084", "0.72075224", "0.7201053", "0.7200428", "0.7196795", "0.71957576", "0.7195025", "0.7180904", "0.7175542", "0.71653813", "0.71560353", "0.71545917", "0.7152496", "0.7148595", "0.71465445", "0.7137427", "0.71199906", "0.71093214", "0.71021736", "0.70986587", "0.70985025", "0.7090758", "0.70892054", "0.70749253", "0.70717907", "0.7067194", "0.70388085", "0.7036801", "0.7033137", "0.70307475", "0.70307475", "0.7024874", "0.7016978", "0.7016978", "0.70167994", "0.7014167", "0.7008134", "0.7004337", "0.6987719", "0.6986681", "0.6985809", "0.6984771", "0.69843525" ]
0.7609435
9
Get the front element.
def peek(self):
    return self.queue[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getFront(self):\n\t\tfront = self.queue[self.front]\n\t\treturn front\n\t\tpass", "def getFront(self):\n if not self.isEmpty():\n return self._data[0]\n else:\n return -1", "def front(self):\n return self.queue[0] if not self.empty() else None", "def peek_front(self):\n\n if self.items:\n return self.items[0]\n return None", "def front(self):\n return self.sentinel.next.item", "def peek(self):\n return self.front", "def firstElement(self):\n return self.top()", "def Front(self):\n if self.isEmpty():\n return -1\n else:\n return self.queue[self.front]", "def peek_front(self):\n\n if (self._size == 0):\n return None\n\n return self._head.value", "def Front(self):\n if self.isEmpty():\n return -1\n else:\n return self.queue[0]", "def Front(self):\n if self.isEmpty():\n return -1\n else:\n return self.queue[0]", "def peek_front(self):\n if ((self.is_empty()) or self.data_[self.front_]== None): #If we trip this if block we raise an error since we know the deque should be empty \n raise IndexError\n return self.data_[self.front_]", "def getFront(self) -> int:\n if not self.isEmpty():\n return self.q[self.move_forward(self.front)]\n else:\n return -1", "def Front(self):\r\n if (len(self.queue) >= 1):\r\n return self.queue[0]\r\n else:\r\n return -1", "def first(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n return self._head._element # front aligned with head of list", "def getFront(self) -> int:\n if self.isEmpty():\n return -1\n\n return self.arr[self.front]", "def Front(self):\n if self.count == 0:\n return -1\n return self.head.value", "def front_node(self):\n return self.sentinel.next if self.N != 0 else None", "def Front(self):\n return -1 if self.isEmpty() else self.queue[self.start]", "def front(queue):\n if empty_queue(queue):\n raise IndexError(\"Queue is empty!\")\n else:\n return queue.front.value", "def front(self):\n if self.size() < 1:\n return None\n else:\n # TODO: Return min item from heap, if any\n ...", "def Front(self):\n if self.count == 0:\n return -1\n return self.queue[self.headIndex]", "def peek_front(self):\n # if the deque is empty\n if self.is_empty():\n # call an IndexError\n raise IndexError()\n # if deque is not empty, return front's data\n return self.front.data", "def front_value(self):\n if self.is_empty():\n return None\n return self.head.value", "def get_front(self) -> int:\r\n if self.size == 0:\r\n return -1\r\n if self.frontIndex == self.capacity - 1:\r\n return self.deque[0]\r\n return self.deque[self.frontIndex + 1]", "def peekFront(self):\n if self._size == 0:\n raise AttributeError(\"Cannot peekFront from an empty Deque\")\n return self._front.getData()", "def first(self):\n if self.is_empty():\n raise Empty('list is empty')\n return self._head._element # front aligned with head of list", "def Front(self):\n return -1 if self.isEmpty() else self.__buffer[self.__start]", "def front(self):\n heap = self.heap\n if len(heap) == 0:\n return None\n item = heap[0]\n to_node = item[self.TO_NODE]\n from_node = item[self.FROM_NODE]\n value = item[self.VALUE]\n return from_node, to_node, value", "def front(self):\n if self.is_empty():\n raise Exception(\"Queue is empty !!! 
Please add data to the Queue :) \")\n else:\n return self.data[0]", "def getFront(self) -> int:\n return self._deque[(self._front - 1)%self._k] if not self.isEmpty() else -1", "def front(self):\n if self.isEmpty():\n return None\n else:\n return self.__head.getPayload()", "def front(self):\n return _osgAnimation.VertexList_front(self)", "def first(self):\n return self.deque[0]", "def front(self):\n return self._values.tail", "def first(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n return self._data[self._front]", "def peek(self):\n pop = self.list_x[0]\n return pop", "def top(self):\n\t\tif self.is_empty():\n\t\t\traise Empty('Stack is empty')\n\t\treturn self._head._element", "def peek(self):\n if not self.front:\n raise AttributeError(\"Can't peek from an empty queue\")\n return self.front.value\n\n # try:\n # return self.front.value\n # except AttributeError:\n # return \"Can't peek front from an empty queue\"", "def peek(self) -> int:\n\n temp = deque()\n while self.elements:\n temp.append(self.elements.pop())\n \n front_el = temp[-1]\n \n while temp:\n self.elements.append(temp.pop())\n return front_el", "def first(self):\n\t\tif self.is_empty():\n\t\t\traise Empty('Queue is empty')\n\t\treturn self._head._element", "def peek(self):\n return self.top()", "def first(self):\n if self.is_empty():\n raise Empty('list is empty')\n return self._head._next._element # front aligned with head of list", "def peek(self):\n return self.list.head", "def first(self):\n if self.is_empty():\n raise Empty(\"Queue undeflow.\")\n return self._head._element", "def peek(self):\n try:\n return self._container.head.value\n except AttributeError:\n return None", "def front(self) -> str:\n if not self._fully_loaded:\n self._load()\n return self._front", "def peek(self):\n return self.first", "def peek(self):\n\n if self.top is None:\n raise IndexError(\"peek from empty\")\n else:\n return self.top.value", "def front(self):\n if self.empty():\n return \"Linked List is Empty\"\n return self.head.data", "def front(self):\n return _uhd_swig.device_addr_vector_t_front(self)", "def first(self):\r\n if self.is_empty():\r\n raise Empty(\"Queue is empty\")\r\n return self._head._element", "def Front(self) -> int:\n return self.circular[self.front] if self.size else -1\n # if self.size:\n # return self.cicular[self.front]\n # else:\n # return -1", "def front(self):\n return _osgAnimation.vectorFloatKeyframe_front(self)", "def front(self):\n return _osgAnimation.vectorVec3Keyframe_front(self)", "def peek(self):\n if self.is_empty():\n return None\n\n return self.container[-1]", "def Front(self) -> int:\n if self.count == 0:\n return -1\n return self.queue[self.headIndex]", "def first(self):\n if self.is_empty():\n raise Empty('La cola está vacía')\n return self._head._element # frente alineado con la cabeza de la lista", "def Front(self) -> int:\n if self.count == 0:\n return -1\n return self.head.value", "def top(self):\n return self[0]", "def first(self):\n if self.is_empty():\n raise ValueError('Queue is empty!')\n return self.root().element().value()", "def first(self):\n if self.is_empty():\n raise Empty(\"Queue is empty.\")\n head = self._tail._next\n return head._element", "def peek(self):\n return self.the_queue[0]", "def first(self):\n if self.is_empty():\n raise Empty(\"Deque está vacío\")\n return self._header._next._element # un artículo real justo después de la cabecera", "def top(self):\n if self.is_empty():\n raise Empty('Stack is empty')\n return self._head._element # top of stack is at 
head of list", "def first(self):\n return self.__head", "def peek(self):\n return self.iterator[0]", "def Front(self) -> int:\n q, k, front, rear, empty = self.q, self.k, self.front, self.rear, self.empty\n if self.isEmpty():\n return -1\n return q[front]", "def peek(self):\n return self.queue[0]", "def first(self):\r\n return self.__head", "def peek(self):\n # TODO: Return top item, if any\n print('self.is_empty()', self.is_empty())\n if self.is_empty():\n return None\n print('self.top', self.list.head.data)\n return self.list.head.data", "def peek(self):\n\n if self.is_empty():\n return None\n\n return self._list[-1]", "def first(self) -> Element:\n return typing.cast(Element, self[0])", "def top_ele(self):\n if not self.is_empty():\n return self.arr[self.top]\n return -1", "def front(self):\n return _osgAnimation.vectorVec2Keyframe_front(self)", "def peek(self):\n if len(self.stack2):\n top = self.stack2.pop()\n self.stack2.append(top)\n return top\n\n return self.front", "def peek(self):\n if self.heap:\n return self.heap[0]\n else:\n raise IndexError(\"there is no root\")", "def peek(self):\n if self.is_empty():\n return None\n list_length = len(self.list) - 1\n return self.list[list_length]", "def remove_front(self):\n\n if self.items:\n return self.items.pop(0)\n return None", "def front(self):\n return _uhd_swig.range_vector_t_front(self)", "def peek(self):\n lastElementInex = len(self.stacks[self.activeStackIndex]) - 1\n return self.stacks[self.activeStackIndex][lastElementInex]", "def front(self):\n return _osgAnimation.vectorMatrixKeyframe_front(self)", "def peek(self):\n\n return self._queue[0]", "def peek(self):\n if self.is_empty():\n return None\n return self.list.head.data", "def peek_back(self):\n if ((self.is_empty()) or self.data_[self.back_]== None):#If we trip this if block we raise an error since we know the deque should be empty \n raise IndexError\n return self.data_[self.back_]", "def peek(self):\n return self.stack[0]", "def top(self):\n if self.is_empty():\n raise Empty('Stack is empty!')\n last = (self._front + self._size - 1) % len(self._data)\n return self._data[last]", "def get_focus(self):\n return self._get_at(self._current)", "def peek(self):\n return self.top.value", "def front(self):\n return _osgAnimation.vectorVec4Keyframe_front(self)", "def peek(self):\n if self.is_empty():\n raise ValueError('Queue underflow')\n return self.first.item", "def peek(self):\n if self.isEmpty():\n raise KeyError(\"The queue is empty.\")\n return self._front.data", "def peek(self):\n assert len(self.heap) > 0, \"ERROR: Heap is empty.\"\n return self.heap[0]", "def peek(self):\n if self.isEmpty():\n raise Exception(\"Stack underflow\") # Nothing to peek at\n return self.first.Item # most recently added item", "def peek_first(self):\n if len(self._heap) == 0:\n return None\n else:\n return self._heap[0]", "def peek(self):\r\n if self.size():\r\n return self.queue[0]\r\n else:\r\n return None", "def peek(self):\n return self.a[-1]", "def peek(self):\n if len(self.items) > 0:\n return self.items[-1]\n else:\n return None", "def peek(self):\n return self.list.head.data" ]
[ "0.82009244", "0.79261875", "0.7852642", "0.7791182", "0.77886623", "0.75723046", "0.752337", "0.7510604", "0.7478953", "0.7379256", "0.7379256", "0.734284", "0.7323573", "0.7320296", "0.7291721", "0.72772557", "0.7269801", "0.72657627", "0.7253295", "0.72355044", "0.72213495", "0.721701", "0.7172111", "0.71702486", "0.7128405", "0.7115387", "0.70875424", "0.70799214", "0.70768297", "0.7048858", "0.7029289", "0.6988492", "0.6977587", "0.69611204", "0.69603276", "0.69588816", "0.695754", "0.69507664", "0.6894067", "0.6880089", "0.6832054", "0.6826359", "0.6823448", "0.68121237", "0.6794244", "0.67631525", "0.6757173", "0.67457724", "0.6741961", "0.67395526", "0.6723454", "0.6722964", "0.6711899", "0.6704623", "0.66996056", "0.6691709", "0.66839993", "0.6647072", "0.6637371", "0.662254", "0.6615963", "0.6597008", "0.6594808", "0.6588543", "0.6578934", "0.6575146", "0.6571657", "0.6552622", "0.654919", "0.6547857", "0.65347576", "0.6533558", "0.65073824", "0.65012014", "0.6474211", "0.6472695", "0.64631075", "0.64520144", "0.64516985", "0.6424049", "0.6418471", "0.6409006", "0.64079547", "0.6401981", "0.63808656", "0.6372947", "0.6369692", "0.63513863", "0.63319856", "0.6331002", "0.63300645", "0.6326027", "0.6318459", "0.6317603", "0.63066554", "0.6299589", "0.6291726", "0.62882775", "0.62867796" ]
0.66313344
60
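For readers skimming the raw rows: the positive document in the record above is the one-line `peek` that returns `self.queue[0]`, and most of the mined negatives are variations on the same front/peek pattern. A minimal, self-contained sketch of that pattern follows; the class and method names are illustrative assumptions, not taken from any dataset row.

```python
class ArrayQueue:
    """List-backed FIFO queue, shown only to illustrate the peek/front pattern."""

    def __init__(self):
        self._items = []

    def enqueue(self, value):
        self._items.append(value)

    def peek(self):
        # Same idea as `return self.queue[0]`, but with an explicit empty-queue guard.
        if not self._items:
            raise IndexError("peek from an empty queue")
        return self._items[0]


q = ArrayQueue()
q.enqueue("a")
q.enqueue("b")
assert q.peek() == "a"  # inspects the front element without removing it
```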
Returns whether the queue is empty.
def empty(self): return self.queue == []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_empty(self):\n return len(self.the_queue) == 0", "def is_empty(self):\n return len(self.__queue) > 0", "def is_empty(self):\n return len(self.queue) == 0", "def is_empty(self):\n return len(self.queue) == 0", "def is_empty(self):\n return self.queue == []", "def is_empty(self):\n\n return not self._queue", "def is_empty(self):\n return not self._queue", "def empty(self) -> bool:\n if self.queue.empty():\n return True\n else:\n return False", "def is_empty(self):\n return self._queue_items == []", "def empty(self):\n return len(self.queue) == 0", "def isEmpty(self):\n return 0 == len(self.queue)", "def empty(self) -> bool:\n return self._queue.empty()", "def empty(self):\n return self.queue == []", "def empty(self) -> bool:\n return not self.queue", "def empty(self) -> bool:\n return not self.queue", "def isEmpty(self):\r\n if (len(self.queue) >= 1):\r\n return False\r\n else:\r\n return True", "def empty(self):\n return not self.queue", "def is_empty(self):\n return len(self.priority_queue) == 0", "def is_empty(self):\n\n # If the queue is an empty list, self._data would return False\n # So if the queue is empty we want to return true\n # modify with not self._data\n return not self._data", "def empty(self) -> bool: \n if(self.queue is not None and len(self.queue) > 0):\n print(\"len > 0\" )\n return False\n else:\n print(\"len = 0\" )\n return True", "def empty(self) -> bool:\n if self.push_queue or self.pop_queue:\n return False\n return True", "def empty(self) -> bool:\n return len(self.queue1) == 0", "def isEmpty(self):\n\t\tself.logger.debug('Check if queue job is empty')\n\t\tisEmpty = self.queue.empty()\n\t\tself.logger.debug('Queue job is empty ?: %s'%(isEmpty))\n\t\treturn isEmpty", "def empty(self):\n return self.queue[0].empty() and self.queue[1].empty()", "def empty(self): # O(1)\n return not self.queue", "def empty(self):\n return not self.queue1", "def empty(self) -> bool:\n return self.qsize() == 0", "def is_empty(self):\n return len(self.deque) == 0", "def empty(self) -> bool:\n return len(self.q) == 0", "def empty(self) -> bool:\n return len(self.q) == 0", "def queue_empty(self, queue_name):\n return self.queue_message_count(queue_name) == 0", "def is_empty(self):\n return self.heap.is_empty()", "def empty_queue(queue):\n return queue.front is None", "def isFull(self):\n return len(self.queue) == self.size", "def empty(self) -> bool:\n return self.que == []", "def is_empty(self) -> bool:\n return self.heap.length() == 0", "def is_empty(self) -> bool:\n return self.heap.length() == 0", "def empty(self) -> bool:\n return len(self._deque) == 0", "def is_empty(self):\n return self.url_queue.empty()", "def empty(self): \n return self.qsize() == 0", "def is_empty(self):\n\t\treturn (self._size == 0)", "def is_empty(self):\n return len(self.__heap) == 0", "def is_empty(self):\n return self.heap_size <= 0", "def is_empty(self) -> bool:\r\n return self.size == 0", "def empty(self):\n return self.q_size.current_value == 0", "def is_empty(self):\n\t\treturn self._size == 0", "def empty(self) -> bool:\n return not bool(self.q)", "def is_empty(self):\r\n\r\n return self._size == 0", "def is_empty(self) -> bool:\n return self.peek(1) == []", "def has_queue(self):\n return (os.path.exists(self._queue_path) and\n os.path.getsize(self._queue_path) > 0)", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return 
self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def isFull(self):\r\n if (len(self.queue) == self.maxlen):\r\n return True\r\n else:\r\n return False", "def is_empty(self):\r\n return self._size == 0", "def is_empty(self):\r\n return self._size == 0", "def is_empty(self):\n\n if self.size() == 0:\n return True\n else:\n return False", "def is_empty(self):\n if self._size == 0:\n return True\n return False", "def is_empty(self):\n if self.size() == 0:\n return True\n else:\n return False", "def is_empty(self):\n return self.size() == 0", "def is_empty(self):\n return self.size() == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def empty(self):\n return self.queue.empty()", "def is_empty(self):\n return self._size == 0", "def isEmpty(self):\n return self.qSize == 0", "def empty(self) -> bool:\n return self.size == 0", "def is_empty(self):\n return not bool(self._heap)", "def _is_empty(self):\n return self.size == 0", "def is_empty(self):\r\n if self.size == 0:\r\n return True\r\n return False", "def any(self) -> bool:\n return len(self.queue) > 0", "def is_Empty(self):\n return self.size == 0", "def empty(self) -> bool:\n\n return (self._size == 0)", "def is_empty(self):\n return self.size == []", "def is_empty(self):\n return self.list_size == 0", "def is_empty(self):\n return self.__size == 0", "def empty(self):\n return self._size is 0", "def is_empty(self) -> bool:\n return self.size_bytes == 0", "def is_empty(self):\n return self.list.is_empty()", "def is_empty(self):\n return self.__len__() == 0", "def is_empty(self):\r\n return self.buff==[]", "def is_empty(self) -> bool:\n return len(self._items) == 0", "def isEmpty(self):\n return self._size == 0", "def is_empty(self) -> bool:\n return self._items == []" ]
[ "0.9270249", "0.92506766", "0.923635", "0.923635", "0.9155843", "0.9098441", "0.9095006", "0.90922606", "0.90731245", "0.9053733", "0.89205354", "0.89137155", "0.8877591", "0.88436323", "0.88436323", "0.88038146", "0.8758203", "0.87577534", "0.87310195", "0.86462456", "0.8628709", "0.85990715", "0.8504052", "0.84871966", "0.84274644", "0.84245306", "0.84162986", "0.8396675", "0.8374641", "0.8374641", "0.83431935", "0.8305679", "0.82668346", "0.8255768", "0.82457167", "0.8239745", "0.8239745", "0.8230912", "0.82216316", "0.81727064", "0.80984956", "0.8093921", "0.80882764", "0.80775905", "0.80715466", "0.807064", "0.8064401", "0.80314356", "0.8020934", "0.8004644", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.80035424", "0.7994702", "0.7988454", "0.7988454", "0.79823923", "0.7977268", "0.79677725", "0.7966174", "0.7966174", "0.79639655", "0.79639655", "0.79639655", "0.79639655", "0.79639655", "0.7933118", "0.7926898", "0.7924976", "0.7921839", "0.79146785", "0.7899344", "0.7892503", "0.78909135", "0.7889288", "0.78655005", "0.7857252", "0.7841281", "0.7840879", "0.78313005", "0.7817387", "0.77962303", "0.7795335", "0.7787568", "0.7786147", "0.77842236", "0.77832806" ]
0.8835949
15
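The positive document in this record checks emptiness with `self.queue == []`; an equivalent and slightly cheaper check relies on the truthiness of the backing list, as many of the mined negatives do. A small illustrative sketch (again, the names are assumptions, not dataset content):

```python
class ArrayQueue:
    """List-backed queue used only to illustrate the emptiness check."""

    def __init__(self):
        self._items = []

    def enqueue(self, value):
        self._items.append(value)

    def empty(self):
        # Equivalent to `self.queue == []` without constructing a throwaway list.
        return not self._items


q = ArrayQueue()
assert q.empty()
q.enqueue(42)
assert not q.empty()
```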
Define the class balanced cross entropy loss to train the network
def class_balanced_cross_entropy_loss(output, label): labels = tf.cast(tf.greater(label, 0.5), tf.float32) num_labels_pos = tf.reduce_sum(labels) num_labels_neg = tf.reduce_sum(1.0 - labels) num_total = num_labels_pos + num_labels_neg output_gt_zero = tf.cast(tf.greater_equal(output, 0), tf.float32) loss_val = tf.multiply(output, (labels - output_gt_zero)) - tf.log( 1 + tf.exp(output - 2 * tf.multiply(output, output_gt_zero))) loss_pos = tf.reduce_sum(-tf.multiply(labels, loss_val)) loss_neg = tf.reduce_sum(-tf.multiply(1.0 - labels, loss_val)) final_loss = num_labels_neg / num_total * loss_pos + num_labels_pos / num_total * loss_neg return final_loss
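The TensorFlow 1.x graph above is a class-balanced sigmoid cross-entropy of the kind commonly used for heavily imbalanced binary masks: the per-pixel losses are summed separately over positive and negative pixels, and each sum is re-weighted by the prevalence of the opposite class, i.e. final_loss = (N_neg/N) * L_pos + (N_pos/N) * L_neg. Below is a NumPy sketch of the same arithmetic, intended only as a readable cross-check of the graph; the function name and dtype choices are assumptions.

```python
import numpy as np

def class_balanced_bce_reference(logits, labels):
    """NumPy reference for the class-balanced sigmoid cross-entropy above.

    `logits` are raw network outputs, `labels` a {0, 1} (or near-binary) mask.
    Intended as a readable cross-check of the TF graph, not a replacement.
    """
    y = (labels > 0.5).astype(np.float64)
    n_pos = y.sum()
    n_neg = (1.0 - y).sum()
    n_total = n_pos + n_neg

    # Numerically stable per-element sigmoid cross-entropy:
    #   CE(x, y) = max(x, 0) - x * y + log(1 + exp(-|x|))
    x = np.asarray(logits, dtype=np.float64)
    ce = np.maximum(x, 0.0) - x * y + np.log1p(np.exp(-np.abs(x)))

    loss_pos = (y * ce).sum()          # total loss on positive pixels
    loss_neg = ((1.0 - y) * ce).sum()  # total loss on negative pixels

    # Weight each class's loss by the prevalence of the *other* class.
    return (n_neg / n_total) * loss_pos + (n_pos / n_total) * loss_neg
```

On a random logits/mask pair this should agree with the TF value up to floating-point tolerance; the cross-weighting is what keeps the loss from being dominated by the usually far more numerous background pixels.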
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_loss(self):\n self.loss = nn.CrossEntropyLoss(weight = self.to_device(self.datasetManager.class_weights))\n #self.loss = nn.CrossEntropyLoss()", "def cross_entropy_loss():\n return nn.CrossEntropyLoss()", "def _classification_loss(self, logits, labels, num_classes):\n labels = tf.to_int64(labels)\n onehot_labels = tf.one_hot(labels, num_classes)\n with tf.name_scope('finetuning_loss'):\n cross_entropy = tf.losses.softmax_cross_entropy(\n onehot_labels=onehot_labels, logits=logits)\n cross_entropy = tf.reduce_mean(cross_entropy)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy + self.weight_decay * regularization\n return loss", "def classification_loss(self, logit, target):\n return F.cross_entropy(logit, target)", "def _loss(self):\n\n cross_entropy = tf.reduce_mean(-tf.log(self.probability + epsilon) * self.y)\n self.loss = cross_entropy\n\n self.accuracy = tf.reduce_mean(\n tf.cast(tf.equal(tf.argmax(self.y, 1), self.prediction), tf.float32))", "def classification_loss(self, classes_true, classes_logits):\n entropy_fn = tf.nn.sparse_softmax_cross_entropy_with_logits\n\n classes_max = tf.argmax(classes_true, axis=1)\n class_loss = entropy_fn(\n labels=classes_max,\n logits=classes_logits)\n return class_loss", "def frcnn_cls_loss(*args):\n y_true, y_pred = args if len(args) == 2 else args[0]\n lf = tf.losses.CategoricalCrossentropy()\n return lf(y_true, y_pred)", "def rpn_cls_loss(*args):\n y_true, y_pred = args if len(args) == 2 else args[0]\n indices = tf.where(tf.not_equal(y_true, -1))\n target = tf.gather_nd(y_true, indices)\n output = tf.gather_nd(y_pred, indices)\n lf = tf.losses.BinaryCrossentropy()\n return lf(target, output)", "def crossentropy_loss(y_true, y_pred):\n ce = tf.keras.losses.categorical_crossentropy(y_true, y_pred, from_logits=True) \n return ce", "def _init_loss(self) -> None:\n labels_one_hot = tf.one_hot(self.labels, depth=Data.num_classes)\n cross_entropy_loss = tf.losses.softmax_cross_entropy(labels_one_hot, self.logits)\n # cross_entropy_loss is a scalar\n tf.add_to_collection(tf.GraphKeys.LOSSES, cross_entropy_loss)\n self.loss = tf.add_n(tf.get_collection(tf.GraphKeys.LOSSES))\n self.logger_factory.add_scalar('loss', self.loss, log_frequency=10)\n self.logger_factory.add_scalar('cross_entropy_loss', cross_entropy_loss, log_frequency=25)", "def _rpn_loss_cls(y_true, y_pred):\n y_true = y_true[0][0]\n cls_keep = tf.where(tf.not_equal(y_true, -1))[:, 0]\n cls_true = tf.gather(y_true, cls_keep)\n cls_pred = tf.gather(y_pred[0], cls_keep)\n cls_true = tf.cast(cls_true, 'int64')\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=cls_true, logits=cls_pred)\n return K.switch(tf.size(loss) > 0, K.clip(K.mean(loss), 0, 10), K.constant(0.0))", "def compute_loss(self):\n self.test_logits = self.compute_logits()\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=self.data.test_labels, logits=self.test_logits)\n cross_entropy_loss = tf.reduce_mean(loss)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy_loss + self.weight_decay * regularization\n return loss", "def Weighted_Cross_Entropy(y_true, y_pred, eps = 1e-10):\n y_pred = tf.cast(y_pred, 'float64')\n y_true = tf.cast(y_true, 'float64')\n # deduce weights based on true pixel value\n class_weights = weights * y_true\n # compute your (unweighted) softmax cross entropy loss\n unweighted_losses = y_true*tf.math.log(y_pred + eps)\n 
##print(unweighted_losses.dtype, weights.dtype)\n weighted_losses = unweighted_losses * class_weights\n # reduce the result to get your final loss\n loss = -tf.reduce_sum(weighted_losses)\n return loss", "def add_loss_op(self, pred):\n ### YOUR CODE HERE\n loss = cross_entropy_loss(self.labels_placeholder,pred)\n ### END YOUR CODE\n return loss", "def class_balanced_cross_entropy(this,pred, label, name='cross_entropy_loss'):\n with tf.name_scope('class_balanced_cross_entropy'):\n z = this.batch_flatten(pred)\n y = tf.cast(this.batch_flatten(label), tf.float32)\n\n count_neg = tf.reduce_sum(1. - y)\n count_pos = tf.reduce_sum(y)\n beta = count_neg / ((count_neg + count_pos)+this.EPS)\n\n eps = 1e-12\n loss_pos = -beta * tf.reduce_mean(y * tf.log(z + eps))\n loss_neg = (1. - beta) * tf.reduce_mean((1. - y) * tf.log(1. - z + eps))\n cost = tf.subtract(loss_pos, loss_neg, name=name)\n return cost", "def class_balanced_cross_entropy(this,pred, label, name='cross_entropy_loss'):\n with tf.name_scope('class_balanced_cross_entropy'):\n z = this.batch_flatten(pred)\n y = tf.cast(this.batch_flatten(label), tf.float32)\n\n count_neg = tf.reduce_sum(1. - y)\n count_pos = tf.reduce_sum(y)\n beta = count_neg / ((count_neg + count_pos)+this.EPS)\n\n eps = 1e-12\n loss_pos = -beta * tf.reduce_mean(y * tf.log(z + eps))\n loss_neg = (1. - beta) * tf.reduce_mean((1. - y) * tf.log(1. - z + eps))\n cost = tf.subtract(loss_pos, loss_neg, name=name)\n return cost", "def setup_loss(self):\n with vs.variable_scope(\"loss\"):\n self.loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.label_placeholder, logits=self.label_predictions))", "def loss(self, X, y):\n\n # Initialize the loss to zero.\n loss = 0.0\n num_classes = self.W.shape[0] # C = num_classes\n num_train = X.shape[0]\n \n exp_a = np.zeros((num_classes,num_train))\n # ================================================================ #\n # YOUR CODE HERE:\n # Calculate the normalized softmax loss. Store it as the variable loss.\n # (That is, calculate the sum of the losses of all the training \n # set margins, and then normalize the loss by the number of \n # training examples.)\n # ================================================================ #\n \n \n for i in np.arange(num_train):\n \n Loss = 0.0\n\n class_scores = np.dot(self.W,X[i,:].T) # calculating class scores (C x 1 vector)\n class_scores -= np.max(class_scores) # considering the possible issue for numerical instability and account for it\n\n exp_a[:,i] = np.exp(class_scores) # turning class scores to probabilities (C x 1 vector), without normalization\n\n Loss -= np.log(exp_a[y[i],i]/np.sum(exp_a[:,i]))\n \n\n #p[:,i] = exp_a[:,i]/np.sum(exp_a[:,i]) # p now is a valid probability matrix\n #print(p[:,i])\n\n loss += Loss \n #print(Loss,i) \n \n pass\n loss /= num_train\n # ================================================================ #\n # END YOUR CODE HERE\n # ================================================================ #\n\n return loss", "def set_loss_function(self):\r\n self.criterion = nn.CrossEntropyLoss().cuda()", "def class_balanced_cross_entropy_no_norm(this,pred, label, name='cross_entropy_loss_no_norm'):\n z = this.batch_flatten(pred)\n y = tf.cast(this.batch_flatten(label), tf.float32)\n\n count_neg = tf.reduce_sum(1. - y)\n count_pos = tf.reduce_sum(y)\n beta = count_neg / (count_neg + count_pos+this.EPS)\n\n eps = 1e-12\n loss_pos = -beta * tf.reduce_sum(y * tf.log(z + eps))\n loss_neg = (1. - beta) * tf.reduce_sum((1. - y) * tf.log(1. 
- z + eps))\n cost = tf.subtract(loss_pos, loss_neg, name=name) / (tf.cast(tf.shape(pred)[0], tf.float32)+this.EPS)\n return cost", "def class_balanced_cross_entropy_no_norm(this,pred, label, name='cross_entropy_loss_no_norm'):\n z = this.batch_flatten(pred)\n y = tf.cast(this.batch_flatten(label), tf.float32)\n\n count_neg = tf.reduce_sum(1. - y)\n count_pos = tf.reduce_sum(y)\n beta = count_neg / (count_neg + count_pos+this.EPS)\n\n eps = 1e-12\n loss_pos = -beta * tf.reduce_sum(y * tf.log(z + eps))\n loss_neg = (1. - beta) * tf.reduce_sum((1. - y) * tf.log(1. - z + eps))\n cost = tf.subtract(loss_pos, loss_neg, name=name) / (tf.cast(tf.shape(pred)[0], tf.float32)+this.EPS)\n return cost", "def loss_fn(y_true,y_pred): \n loss = tf.nn.softmax_cross_entropy_with_logits_v2(y_true,\n y_pred,\n axis=-1,\n )\n loss = tf.reduce_mean(loss,name=\"loss\")\n return loss", "def _create_loss(self):\n with tf.device('/cpu:0'):\n with tf.name_scope('loss'):\n self.loss = tf.reduce_mean(\n tf.nn.softmax_cross_entropy_with_logits(\n labels=self.labels_placeholder, \n logits=self.logits, name='loss'))", "def rpn_cls_loss(num_anchors):\n def rpn_cls_loss_helper(y_true, y_pred):\n \"\"\"\n Parameters:\n y_true: (batch, feature_map_width, feature_map_height, 2 * num_anchors)\n y_pred: (batch, feature_map_width, feature_map_height, num_anchors)\n Returns:\n loss\n \"\"\"\n loss = K.binary_crossentropy(y_true[:, :, :, num_anchors:], y_pred)\n loss = y_true[:, :, :, : num_anchors] * loss\n return K.sum(loss) / K.sum(y_true[:, :, :, : num_anchors] + 1e-6)\n return rpn_cls_loss_helper", "def loss(y, y_pred):\n # assert_is_binary(y)\n # assert_is_stochastic(y_pred)\n is_binary(y)\n is_stochastic(y_pred)\n\n # prevent taking the log of 0\n eps = np.finfo(float).eps\n\n # each example is associated with a single class; sum the negative log\n # probability of the correct label over all samples in the batch.\n # observe that we are taking advantage of the fact that y is one-hot\n # encoded!\n cross_entropy = -np.sum(y * np.log(y_pred + eps))\n return cross_entropy", "def _compute_loss(self, predictions, targets, **params):\n pass", "def compute_loss(self):\n self.prototypes = self.compute_prototypes()\n self.test_logits = self.compute_logits()\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=self.episode.test_labels, logits=self.test_logits)\n cross_entropy_loss = tf.reduce_mean(loss)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy_loss + self.weight_decay * regularization\n return loss", "def compute_loss(self):", "def mrcnn_class_loss_graph(target_class_ids, pred_class_logits, active_class_ids):\n target_class_ids = tf.cast(target_class_ids, 'int64')\n\n # Loss\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=target_class_ids, logits=pred_class_logits)\n\n # Computer loss mean for 2 classes:\n loss = K.switch(tf.size(loss) > 0, K.mean(loss), tf.constant(0.0))\n return loss", "def loss(self, X_batch, y_batch, reg):\n pass", "def loss_(self, batch):\n raise NotImplementedError", "def _loss(self, preds, labels):\n if self.sigmoid_loss:\n assert preds.shape == labels.shape\n return torch.nn.BCEWithLogitsLoss()(preds, labels) * preds.shape[1]\n else:\n if len(labels.shape) == 2: # flatten to 1D\n labels = torch.max(labels, axis=1)[1] # this can handle both bool and float types\n return torch.nn.CrossEntropyLoss()(preds, labels)", "def loss(self, prediction_dict, groundtruth_lists):\r\n pass", "def 
cross_entropoy_loss_naive(W, X, y, reg):\n # pylint: disable=too-many-locals\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n ############################################################################\n # TODO: Compute the cross-entropy loss and its gradient using explicit #\n # loops. Store the loss in loss and the gradient in dW. If you are not #\n # careful here, it is easy to run into numeric instability. Don't forget #\n # the regularization! #\n ############################################################################\n num_train_sample = X.shape[0] #row of train data\n num_class = W.shape[1] #column of weight, plane,horse..\n for i in range(num_train_sample):\n p_score = X[i].dot(W) #a row of score corresponding to each class\n p_score -= np.max(p_score) #normalize, highest is 1\n\n ###compute softmax loss\n # sum of scores corresponding to different classes of a sample \n sum_score = np.sum(np.exp(p_score)) \n # each class's score over sum_score of a sample \n score_i = lambda k: np.exp(p_score[k]) / sum_score\n # for the correct label in each sample, find softmax loss over sum\n # iteration make loss sum up all samples\n loss = loss - np.log(score_i(y[i]))\n\n for k in range(num_class):\n p_k = score_i(k)\n # gradient of softmax\n dW[:, k] += (p_k - (k == y[i])) * X[i]\n\n loss /= num_train_sample\n loss += 0.5 * reg * np.sum(W * W)\n dW /= num_train_sample\n dW += reg*W\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n return loss, dW", "def compute_class_loss(y_true, raw_prediction, object_mask, batch_size):\n true_class_probabilities = y_true[..., 5:]\n predicted_class_probabilities = raw_prediction[..., 5:]\n\n cross_entropy = K.binary_crossentropy(target=true_class_probabilities, output=predicted_class_probabilities,\n from_logits=True)\n class_loss = object_mask * cross_entropy\n class_loss = K.sum(class_loss) / batch_size\n return class_loss", "def cross_entropy_loss(self, logits, labels):\n return F.cross_entropy(logits, labels)", "def cross_entropy(y_observed, p):\n\n pass", "def loss(self, X, y=None):\r\n mode = 'test' if y is None else 'train'\r\n\r\n if self.dropout_param is not None:\r\n self.dropout_param['mode'] = mode\r\n if self.use_batchnorm:\r\n for bn_param in self.bn_params:\r\n bn_param[mode] = mode\r\n\r\n\r\n W1, b1 = self.params['W1'], self.params['b1']\r\n W2, b2 = self.params['W2'], self.params['b2']\r\n W3, b3 = self.params['W3'], self.params['b3']\r\n gamma1, beta1 = self.params['gamma1'], self.params['beta1']\r\n gamma2, beta2 = self.params['gamma2'], self.params['beta2']\r\n # pass conv_param to the forward pass for the convolutional layer\r\n filter_size = W1.shape[2]\r\n conv_param = {'stride': 1, 'pad': int((filter_size - 1) / 2)}\r\n\r\n # pass pool_param to the forward pass for the max-pooling layer\r\n pool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2}\r\n\r\n scores = None\r\n ############################################################################\r\n # TODO: Implement the forward pass for the three-layer convolutional net, #\r\n # computing the class scores for X and storing them in the scores #\r\n # variable. 
#\r\n ############################################################################\r\n alpha = 0.1\r\n csrp1, csrp1_cache = conv_sbn_lrelu_pool_forward(X, W1, b1, gamma1, beta1, self.bn_params[0], conv_param, pool_param, alpha)\r\n abr1, abr1_cache = affine_bn_lrelu_forward(csrp1, W2, b2, gamma2, beta2, self.bn_params[1], alpha)\r\n scores, out_cache = affine_forward(abr1, W3, b3)\r\n ############################################################################\r\n # END OF YOUR CODE #\r\n ############################################################################\r\n\r\n if y is None:\r\n return scores\r\n\r\n loss, grads = 0, {}\r\n ############################################################################\r\n # TODO: Implement the backward pass for the three-layer convolutional net, #\r\n # storing the loss and gradients in the loss and grads variables. Compute #\r\n # data loss using softmax, and make sure that grads[k] holds the gradients #\r\n # for self.params[k]. Don't forget to add L2 regularization! #\r\n ############################################################################\r\n loss, dp = softmax_loss(scores, y)\r\n loss += 0.5 * self.reg * np.sum(\r\n np.sum(W1 ** 2) + np.sum(W2 ** 2) + np.sum(W3 ** 2)\r\n )\r\n dp, dw3, db3 = affine_backward(dp, out_cache)\r\n dp, dw2, db2, dgamma2, dbeta2 = affine_bn_lrelu_backward(dp, abr1_cache)\r\n dp, dw1, db1, dgamma1, dbeta1 = conv_sbn_lrelu_pool_backward(dp, csrp1_cache)\r\n grads['W1'] = dw1 + self.reg * W1\r\n grads['W2'] = dw2 + self.reg * W2\r\n grads['W3'] = dw3 + self.reg * W3\r\n grads['b1'] = db1\r\n grads['b2'] = db2\r\n grads['b3'] = db3\r\n grads['gamma2'] = dgamma2\r\n grads['gamma1'] = dgamma1\r\n grads['beta2'] = dbeta2\r\n grads['beta1'] = dbeta1\r\n \r\n ############################################################################\r\n # END OF YOUR CODE #\r\n ############################################################################\r\n\r\n return loss, grads", "def add_loss_op(self, preds):\n ### YOUR CODE HERE (~2-4 lines)\n trans = tf.get_variable('trans',\n shape=[Config.n_classes, Config.n_classes],\n initializer=tf.contrib.layers.xavier_initializer())\n log_likelihood, _ = crf_log_likelihood(preds,\n self.labels_placeholder,\n self.length_placeholder,\n trans)\n #log_likelihood = tf.boolean_mask(log_likelihood, self.mask_placeholder)\n loss = tf.reduce_mean(-1.0 * log_likelihood)\n \n ### END YOUR CODE\n return trans, loss", "def __init__(self):\n # Call the parent constructor\n super(GBLoss, self).__init__()\n self.ce = nn.CrossEntropyLoss() # Cross entropy loss", "def _initLoss(self):\n\n return torch.nn.CrossEntropyLoss()", "def _define_loss(self):\n\n cost = []\n unit_cost = []\n for nn in range(len(self.ffnet_out)):\n data_out = self.data_out_batch[nn]\n if self.filter_data:\n # this will zero out predictions where there is no data,\n # matching Robs here\n pred = tf.multiply(\n self.networks[self.ffnet_out[nn]].layers[-1].outputs,\n self.data_filter_batch[nn])\n else:\n pred = self.networks[self.ffnet_out[nn]].layers[-1].outputs\n\n nt = tf.cast(tf.shape(pred)[0], tf.float32)\n # define cost function\n if self.noise_dist == 'gaussian':\n with tf.name_scope('gaussian_loss'):\n cost.append(tf.nn.l2_loss(data_out - pred) / nt)\n unit_cost.append(tf.reduce_mean(tf.square(data_out-pred), axis=0))\n\n elif self.noise_dist == 'poisson':\n with tf.name_scope('poisson_loss'):\n\n if self.poisson_unit_norm is not None:\n # normalize based on rate * time (number of spikes)\n cost_norm = 
tf.multiply(self.poisson_unit_norm[nn], nt)\n else:\n cost_norm = nt\n\n cost.append(-tf.reduce_sum(tf.divide(\n tf.multiply(data_out, tf.log(self._log_min + pred)) - pred,\n cost_norm)))\n\n unit_cost.append(-tf.divide(\n tf.reduce_sum(\n tf.multiply(\n data_out, tf.log(self._log_min + pred)) - pred, axis=0),\n cost_norm))\n\n elif self.noise_dist == 'bernoulli':\n with tf.name_scope('bernoulli_loss'):\n # Check per-cell normalization with cross-entropy\n # cost_norm = tf.maximum(\n # tf.reduce_sum(data_out, axis=0), 1)\n cost.append(tf.reduce_mean(\n tf.nn.sigmoid_cross_entropy_with_logits(\n labels=data_out, logits=pred)))\n unit_cost.append(tf.reduce_mean(\n tf.nn.sigmoid_cross_entropy_with_logits(\n labels=data_out, logits=pred), axis=0))\n else:\n TypeError('Cost function not supported.')\n\n self.cost = tf.add_n(cost)\n self.unit_cost = unit_cost\n\n # Add regularization penalties\n reg_costs = []\n with tf.name_scope('regularization'):\n for nn in range(self.num_networks):\n reg_costs.append(self.networks[nn].define_regularization_loss())\n self.cost_reg = tf.add_n(reg_costs)\n\n self.cost_penalized = tf.add(self.cost, self.cost_reg)\n\n # save summary of cost\n # with tf.variable_scope('summaries'):\n tf.summary.scalar('cost', self.cost)\n tf.summary.scalar('cost_penalized', self.cost_penalized)\n tf.summary.scalar('reg_pen', self.cost_reg)", "def loss(self, logits, labels):\r\n return tf.reduce_mean(tf.keras.losses.binary_crossentropy(labels,logits))", "def loss(self, **kwargs):\n pass", "def setup_loss(logits, labels):\n predictions = tf.nn.softmax(logits)\n cost = tf.losses.softmax_cross_entropy(onehot_labels=labels,\n logits=logits,\n )\n return predictions, cost", "def my_loss(y_pred,y_true,n_outputs):\n y_true = tf.one_hot(tf.cast(y_true,tf.int64), n_outputs, dtype=tf.float32)\n return tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(y_true,y_pred))", "def loss(self, X, y):\n pass", "def cross_entropy(y_pred,y):\n \n epsilon = 0.001 # To prevent overflow and ensure numerical stability\n return sum(-y*np.log(y_pred+epsilon))", "def loss(self, X, y=None, reg=0.0):\n\n self.layers = []\n layers = self.layers\n layers.append(X)\n\n # Unpack variables from the params dictionary\n W1, b1 = self.params['W1'], self.params['b1']\n W2, b2 = self.params['W2'], self.params['b2']\n N, D = X.shape\n H, C = W2.shape\n\n # Compute the forward pass\n scores = None\n #############################################################################\n # TODO: Perform the forward pass, computing the class scores for the input. #\n # Store the result in the scores variable, which should be an array of #\n # shape (N, C). #\n #############################################################################\n mid = np.maximum(0, X.dot(W1) + b1.reshape(1, -1)) # activation\n scores = mid.dot(W2) + b2.reshape(1, -1)\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n # If the targets are not given then jump out, we're done\n if y is None:\n return scores\n\n # Compute the loss\n loss = None\n #############################################################################\n # TODO: Finish the forward pass, and compute the loss. This should include #\n # both the data loss and L2 regularization for W1 and W2. Store the result #\n # in the variable loss, which should be a scalar. Use the Softmax #\n # classifier loss. 
So that your results match ours, multiply the #\n # regularization loss by 0.5 #\n #############################################################################\n exp_score = np.exp(scores)\n exp_score_sum = exp_score.sum(axis=1)\n correct_score = exp_score[np.arange(N), y]\n probability = (correct_score / exp_score_sum).reshape(-1, 1)\n loss = -np.log(probability).sum()\n\n loss /= N\n loss += 0.5 * reg * (np.sum(W1 * W1) + np.sum(W2 * W2))\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n # Backward pass: compute gradients\n grads = {}\n #############################################################################\n # TODO: Compute the backward pass, computing the derivatives of the weights #\n # and biases. Store the results in the grads dictionary. For example, #\n # grads['W1'] should store the gradient on W1, and be a matrix of same size #\n #############################################################################\n des = np.tile((-correct_score / np.square(exp_score_sum)).reshape(-1, 1), (1, C))\n des[np.arange(N), y] += 1.0 / exp_score_sum\n dsoftmax = des * (-np.ones((mid.shape[0], 1)) / probability) * np.exp(scores)\n\n # W2\n grads['W2'] = mid.T.dot(dsoftmax)\n grads['W2'] /= N\n grads['W2'] += reg * W2\n\n # b2\n grads['b2'] = np.ones_like(b2.reshape(1, -1)) * dsoftmax\n grads['b2'] = np.mean(grads['b2'], axis=0).reshape(-1)\n\n # W1\n binary = np.zeros_like(mid)\n binary[mid > 0] = 1\n grads['W1'] = X.T.dot(binary * dsoftmax.dot(W2.T)) # chain rule, compute dmid/dW1 * dscore/dmid * dsoftmax\n grads['W1'] /= N\n grads['W1'] += reg * W1\n\n # b1\n grads['b1'] = np.ones_like(b1.reshape(1, -1)) * binary * dsoftmax.dot(W2.T)\n grads['b1'] = np.mean(grads['b1'], axis=0).reshape(-1)\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, grads", "def cross_entropy_loss(batch_out, batch_gt):\r\n criterion = torch.nn.CrossEntropyLoss()\r\n target = torch.argmax(batch_gt, 1)\r\n loss = criterion(batch_out, target)\r\n\r\n return loss", "def loss(self, class_weights):\n losses = self.model_args.get('loss', 'categorical_crossentropy')\n\n if type(losses) is str:\n multi_loss = False\n losses = {losses: 1.0}\n elif type(losses) is dict:\n multi_loss = True\n\n if class_weights is not None:\n class_weights = tf.convert_to_tensor(class_weights, dtype=tf.float32)\n\n # custom 'ordinal' loss option\n if 'ordinal_squared_error' in losses.keys():\n k = float(self.data.num_classes)\n a = tf.expand_dims(tf.range(0, k, dtype=tf.float32), axis=-1)\n k_factor = tf.constant((k+1)/k, shape=[1,1], name='k_factor')\n min_regr = tf.constant(-0.5, shape=[1,1], name='min_regression_value')\n\n def ordinal_loss(y_true, y_pred):\n y_estimate = tf.tensordot(y_pred, a, [[-1], [0]])\n y_estimate = k_factor * y_estimate + min_regr # scale to range [-0.5, k+0.5]\n y_values = tf.cast(tf.argmax(y_true, -1), dtype=y_estimate.dtype)\n\n min_class = tf.convert_to_tensor(0.0, dtype=y_estimate.dtype)\n max_class = tf.convert_to_tensor( k, dtype=y_estimate.dtype)\n sqr_error = tf.square(y_values - tf.squeeze(tf.clip_by_value(y_estimate, min_class, max_class)))\n\n if class_weights is not None:\n weight_vec = tf.gather(class_weights, tf.argmax(y_true, -1))\n sqr_error *= weight_vec\n\n return tf.reduce_mean(sqr_error)\n\n if not 
multi_loss:\n return ordinal_loss\n\n if 'categorical_crossentropy' in losses.keys():\n # TODO: option for clipping?\n def categorical_loss(y_true, y_pred):\n epsilon_ = tf.convert_to_tensor(1e-5, dtype=y_pred.dtype)\n y_pred = tf.clip_by_value(y_pred, epsilon_, 1. - epsilon_)\n\n cross_entropy = -tf.reduce_sum(y_true * tf.math.log(y_pred), axis=-1)\n\n if class_weights is not None:\n weight_vec = tf.gather(class_weights, tf.argmax(y_true, -1))\n cross_entropy *= weight_vec\n\n return cross_entropy\n\n if not multi_loss:\n return categorical_loss\n\n # weighted multi-loss option\n if multi_loss:\n def weighted_loss(y_true, y_pred):\n ord_weight = tf.constant(losses['ordinal_squared_error'], shape=[1,1])\n cat_weight = tf.constant(losses['categorical_crossentropy'], shape=[1,1])\n loss = ord_weight * ordinal_loss(y_true, y_pred) \\\n + cat_weight * categorical_loss(y_true, y_pred)\n return loss\n return weighted_loss", "def cross_entropy_loss(outputs, labels): \n# works properly\n \n m = labels.shape[0]\n p = outputs\n log_likelihood = -1*torch.log(p[range(m),labels])\n loss = torch.sum(log_likelihood) / m\n return loss.item()", "def class_loss_graph(target_class_ids, pred_class_logits):\n # During model building, Keras calls this function with\n # target_class_ids of type float32. Unclear why. Cast it\n # to int to get around it.\n target_class_ids = tf.cast(target_class_ids, 'int64')\n\n # Find predictions of classes that are not in the dataset.\n # pred_class_ids = tf.argmax(pred_class_logits, axis=2)\n\n # Loss\n print(pred_class_logits)\n y = tf.layers.flatten(pred_class_logits)\n loss = tf.nn.softmax_cross_entropy_with_logits(\n labels=target_class_ids, logits=y)\n\n\n # Computer loss mean. Use only predictions that contribute\n # to the loss to get a correct mean.\n loss = tf.reduce_mean(loss)\n return loss", "def cross_entropoy_loss_naive(W, X, y, reg):\n # pylint: disable=too-many-locals\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n \n\n C = W.shape[1]\n# print(\"no. of classes {}\".format(C))\n N,D = X.shape\n# print(\"no. of data {} and dimension {}\".format(N,D))\n for i in range(N):\n xi = X[i,:]\n# print(\"one record shape: {}\".format(xi.shape))\n scores = np.zeros(C)\n for c in range(C):\n w = W[:,c]\n# print(\"weight for one record {}\".format(w.shape))\n scores[c] = xi.dot(w)\n scores -= np.max(scores)\n actual_y = y[i]\n total_score = np.sum(np.exp(scores)) \n loss_i = -scores[actual_y] + np.log(total_score)\n# print('naive score : {}'.format(scores[actual_y]))\n loss += loss_i\n \n #gradient\n probability = np.exp(scores)/total_score\n for j in range(C):\n dW[:,j] += probability[j]*xi\n \n dW[:,actual_y] -= xi\n loss = loss/N\n reg_loss = 0.5*reg*np.sum(W*W)\n loss = loss + reg_loss\n print(\"loss : {}\".format(loss))\n dW = dW/N\n dW += reg*W\n \n \n \n \n\n ############################################################################\n # TODO: Compute the cross-entropy loss and its gradient using explicit #\n # loops. Store the loss in loss and the gradient in dW. If you are not #\n # careful here, it is easy to run into numeric instability. Don't forget #\n # the regularization! 
#\n ############################################################################\n\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n return loss, dW", "def train_step(self, X_batch: np.ndarray, Y_batch: np.ndarray):\n\n # Almost the same as previous task, calculates the cross entropy loss for multiple classes using the softmax loss equation provided in the assignment.\n targets = Y_batch\n outputs = self.model.forward(X_batch)\n self.model.backward(X_batch, outputs, targets)\n \n self.model.w += -self.learning_rate*self.model.grad\n \n loss = cross_entropy_loss(targets, outputs)\n return loss", "def cross_entropy(y_pred, y_true, normalize=True, eps=1e-15):\n if type(y_pred) != np.ndarray:\n raise TypeError(\"Require np.ndarray type,{} checked\".format(type(y_pred)))\n if type(y_true) != np.ndarray:\n raise TypeError(\"Require np.ndarray type,{} checked\".format(type(y_true)))\n # clip = np.vectorize(lambda x: max(eps, min(1 - eps, x)))\n # y_pred = clip(y_pred)\n y_pred = np.array(list(map(lambda x: max(eps, min(1 - eps, x)), y_pred)))\n l = np.multiply(y_true, np.log(y_pred)) + np.multiply(1 - y_true, np.log(1 - y_pred))\n loss = -1 * np.sum(l).item()\n if normalize:\n loss = loss / len(y_pred)\n return loss", "def cross_entropy(y_prob,y):\n from numpy import log, sum\n m = y.shape[0]\n p = y_prob\n log_likelihood = -log(p[range(m),y])\n loss = sum(log_likelihood) / m\n return loss", "def __init__(self, dense_weight=1.0, cls_weight = 1.0, mixup_active=True, smoothing=0.1,\n classes = 1000):\n super(RelabelPooledCrossEntropy, self).__init__()\n\n\n self.CE = SoftTargetCrossEntropy()\n\n self.dense_weight = dense_weight\n self.smoothing = smoothing\n self.mixup_active = mixup_active\n self.classes = classes\n self.cls_weight = cls_weight\n assert dense_weight+cls_weight>0", "def multiclass_log_loss(y_true, y_pred, eps=1e-15):\r\n predictions = np.clip(y_pred, eps, 1 - eps)\r\n\r\n # normalize row sums to 1\r\n predictions /= predictions.sum(axis=1)[:, np.newaxis]\r\n\r\n actual = np.zeros(y_pred.shape)\r\n n_samples = actual.shape[0]\r\n actual[np.arange(n_samples), y_true.astype(int)] = 1\r\n vectsum = np.sum(actual * np.log(predictions))\r\n loss = -1.0 / n_samples * vectsum\r\n return loss", "def construct_loss(self, logits, labels):\n model_loss = tf.reduce_mean(\n tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels,\n logits=logits))\n return model_loss", "def loss_fn(self, recons, inputs, mu, log_var, **kwargs):\n# kld_weight = kwargs['M_N'] # Account for the minibatch samples from the dataset\n recons_loss = F.mse_loss(recons, inputs)\n# recons_loss = F.binary_cross_entropy(recons, inputs)\n KLD = torch.mean(-0.5 * torch.sum(1 + log_var - mu**2 - log_var.exp(), dim=1), dim=0)\n loss = recons_loss - KLD\n return loss, recons_loss, KLD", "def get_loss(self, criterion, target, preds, epoch=0):\n assert isinstance(target, dict)\n assert isinstance(preds, dict)\n assert isinstance(criterion, dict)\n\n loss = {\"total\": 0, \"all_class\": 0}\n\n for key in target[\"class\"].keys():\n labels = target[\"class\"][key]\n batch_size = target[\"class\"][key].shape[0]\n loss[key] = criterion[\"crossentropy\"](preds[key], labels)\n loss[\"all_class\"] += loss[key]\n\n loss[\"total\"] += loss[\"all_class\"]\n\n if self.use_attention and not self.cfg.model.attention.use_fixed:\n if self.training and epoch + 1 < self.cfg.model.attention.decay_step:\n 
prior_multiplier = 0\n contrast_multiplier = 0\n entropy_multiplier = 0\n else:\n prior_multiplier = self.cfg.model.attention.wt_decay\n contrast_multiplier = self.cfg.model.attention.contrast_decay\n entropy_multiplier = self.cfg.model.attention.entropy_decay\n\n wts = preds[\"weights\"].squeeze(1)\n\n if self.cfg.model.attention.use_prior:\n b, n, _, _ = target[\"weights\"].shape\n assert wts.shape[0] == b * n\n prior = target[\"weights\"].reshape(b * n, -1)\n if self.cfg.model.attention.wt_loss == \"kl\":\n wts = torch.log(wts + 1e-7)\n loss[\"prior\"] = criterion[\"prior\"](wts, prior)\n loss[\"total\"] += prior_multiplier * loss[\"prior\"]\n if self.cfg.model.attention.use_contrast:\n loss[\"contrast\"] = criterion[\"contrast\"](wts)\n loss[\"total\"] += contrast_multiplier * loss[\"contrast\"]\n if self.cfg.model.attention.use_entropy:\n loss[\"entropy\"] = Categorical(probs=wts + 1e-6).entropy().mean()\n # if the loss minimization goes below threshold, stop training with entropy loss\n if (\n self.training\n and entropy_multiplier > 0\n and loss[\"entropy\"] < self.cfg.model.attention.entropy_thresh\n ):\n entropy_multiplier = 0\n loss[\"total\"] += entropy_multiplier * loss[\"entropy\"]\n\n return loss, batch_size", "def compute_C_loss(data):\n c_pred = net(data[\"B\"])\n c_real = torch.argmax(data[\"B_class\"], dim=1)\n\n from torch.autograd import Variable\n loss = nn.CrossEntropyLoss()\n\n loss = loss(c_pred, c_real)\n loss = Variable(loss, requires_grad=True)\n return loss", "def loss(self, logits, labels):\n\n ########################\n # PUT YOUR CODE HERE #\n #######################\n loss = tf.nn.softmax_cross_entropy_with_logits(\n labels=labels,\n logits=logits,\n name='softmax_cross_entropy_loss'\n )\n loss = tf.reduce_mean(loss, name='mean_softmax_cross_entropy_loss')\n\n tf.summary.scalar('mean cross entropy loss', loss)\n\n complexity_cost = self._complexity_cost()\n if complexity_cost is not None:\n loss = tf.add(loss, complexity_cost, name='total_loss')\n tf.summary.scalar('total loss', loss)\n\n ########################\n # END OF YOUR CODE #\n #######################\n\n return loss", "def loss_fn(self, targets, outputs, model):", "def class_balanced_sigmoid_cross_entropy(this,logits, label, name='cross_entropy_loss'):\n with tf.name_scope('class_balanced_sigmoid_cross_entropy'):\n y = tf.cast(label, tf.float32)\n\n count_neg = tf.reduce_sum(1. - y)\n count_pos = tf.reduce_sum(y)\n beta = count_neg / ((count_neg + count_pos)+this.EPS)\n\n pos_weight = beta / ((1 - beta)+this.EPS)\n cost = tf.nn.weighted_cross_entropy_with_logits(logits=logits, targets=y, pos_weight=pos_weight)\n cost = tf.reduce_mean(cost * (1 - beta))\n zero = tf.equal(count_pos, 0.0)\n return tf.where(zero, 0.0, cost, name=name)", "def class_balanced_sigmoid_cross_entropy(this,logits, label, name='cross_entropy_loss'):\n with tf.name_scope('class_balanced_sigmoid_cross_entropy'):\n y = tf.cast(label, tf.float32)\n\n count_neg = tf.reduce_sum(1. 
- y)\n count_pos = tf.reduce_sum(y)\n beta = count_neg / ((count_neg + count_pos)+this.EPS)\n\n pos_weight = beta / ((1 - beta)+this.EPS)\n cost = tf.nn.weighted_cross_entropy_with_logits(logits=logits, targets=y, pos_weight=pos_weight)\n cost = tf.reduce_mean(cost * (1 - beta))\n zero = tf.equal(count_pos, 0.0)\n return tf.where(zero, 0.0, cost, name=name)", "def genLoss(self, *data):\r\n _, (x_unlab, _) = data\r\n z = self.getInputNoise(self.hypers['ul_BS'])\r\n fake_logits = self.D(self.G(z))\r\n g_losses = -1*logOneMinusSoftmax(fake_logits)[:,self.D.numClasses-1]\r\n return torch.mean(g_losses)", "def loss(logits, labels):\n labels = tf.to_int64(labels)\n# labels = tf.to_float(labels)\n cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits, name='xentropy')\n# y_conv = tf.nn.softmax(logits)\n# cross_entropy = -tf.reduce_sum(labels*tf.log(y_conv))\n return tf.reduce_mean(cross_entropy, name='xentropy_mean')", "def loss_fn(outputs, labels):\n return nn.CrossEntropyLoss()(outputs, labels)", "def calculate_loss(self, train_x, train_y):\n self.log.info(\"Calculating average categorical crossentropy loss...\")\n\n num_words = np.sum([len(y) for y in train_y])\n return self.calculate_total_loss(train_x, train_y)/float(num_words)", "def loss_softmax_cross_entropy(self, unet, predict, ground_truth):\n\n loss = -F.mean(F.log(predict+1e-16) * ground_truth)\n\n chainer.report({\"loss\":loss}, unet)#mistery\n return loss", "def loss(y_true, y_pred):\n ce = tf.keras.losses.categorical_crossentropy(y_true, y_pred, from_logits=True)\n l2_loss = l2_regularization_loss(model, weight_decay)\n return ce + l2_loss", "def loss(params: hk.Params, batch, label) -> jnp.ndarray:\r\n logits = net.apply(params, batch)\r\n labels = jax.nn.one_hot(label, n_classes)\r\n\r\n # Cross Entropy Loss\r\n softmax_xent = -jnp.sum(labels * jax.nn.log_softmax(logits))\r\n softmax_xent /= labels.shape[0]\r\n return softmax_xent", "def loss(self, labels, input_data):\n\n pred, out = self.inference(input_data)\n loss = tf.reduce_mean(tf.losses.sparse_softmax_cross_entropy(labels, out), name=\"loss\") + \\\n tf.losses.get_regularization_loss()\n return loss, pred", "def loss(self, batch: base.Batch, key: base.RngKey) -> base.Array:", "def class_balanced_sigmoid_cross_entropy_no_norm(this,logits, label, name='cross_entropy_loss_no_norm'):\n\n y = tf.cast(label, tf.float32)\n\n count_neg = tf.reduce_sum(1. - y) # the number of 0 in y\n count_pos = tf.reduce_sum(y) # the number of 1 in y (less than count_neg)\n beta = count_neg / ((count_neg + count_pos)+this.EPS);\n\n pos_weight = beta / ((1 - beta)+this.EPS)\n cost = tf.nn.weighted_cross_entropy_with_logits(logits=logits, targets=y, pos_weight=pos_weight)\n\n cost = tf.reduce_sum(cost * (1 - beta), name=name) / (tf.cast(tf.shape(logits)[0], tf.float32)+this.EPS)\n return cost", "def class_balanced_sigmoid_cross_entropy_no_norm(this,logits, label, name='cross_entropy_loss_no_norm'):\n\n y = tf.cast(label, tf.float32)\n\n count_neg = tf.reduce_sum(1. 
- y) # the number of 0 in y\n count_pos = tf.reduce_sum(y) # the number of 1 in y (less than count_neg)\n beta = count_neg / ((count_neg + count_pos)+this.EPS);\n\n pos_weight = beta / ((1 - beta)+this.EPS)\n cost = tf.nn.weighted_cross_entropy_with_logits(logits=logits, targets=y, pos_weight=pos_weight)\n\n cost = tf.reduce_sum(cost * (1 - beta), name=name) / (tf.cast(tf.shape(logits)[0], tf.float32)+this.EPS)\n return cost", "def loss(self, X, labels):\n features = self.get_conv_features(X)\n loss = blah\n return loss", "def get_loss(self, xs, y):\n \"*** YOUR CODE HERE ***\"\n predictedY = self.run(xs)\n return nn.SoftmaxLoss(predictedY, y)\n # return nn.SquareLoss(predictedY, y)", "def loss_op(logits, labels):\n cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, labels, name='xentropy_eval')\n\n loss = math_ops.reduce_mean(cross_entropy)\n return loss", "def calculate_loss(self, pred, gold, smoothing=False):\n gold = gold.contiguous().view(-1)\n if smoothing:\n epsilon = 0.1\n n_class = pred.size(1)\n one_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)\n one_hot = one_hot * (1 - epsilon) + \\\n (1 - one_hot) * epsilon / (n_class - 1)\n\n log_prb = F.log_softmax(pred, dim=1)\n # create non-padding mask with torch.ne()\n non_pad_mask = gold.ne(self.constants.PAD)\n loss = -(one_hot * log_prb).sum(dim=1)\n # losses are averaged later\n loss = loss.masked_select(non_pad_mask).sum()\n else:\n loss = F.cross_entropy(\n pred, gold, ignore_index=self.constants.PAD, reduction='sum')\n return loss", "def train(self, X, y):\n h1_input, h1_output, h2_input, h2_output, final_output = self.forwardpass_train(\n X\n )\n # calculate average loss per one data\n train_loss = self.cross_entropy_loss(y, final_output)\n dW1, db1, dW2, db2, dW3, db3 = self.backpropagation(\n X, y, h1_input, h1_output, h2_input, h2_output, final_output\n )\n self.update_weights(dW1, db1, dW2, db2, dW3, db3)\n return train_loss", "def get_loss(self, xs, y):\n \"*** YOUR CODE HERE ***\"\n y_pred = self.run(xs)\n return nn.SoftmaxLoss(y_pred,y)", "def loss(self, X, y=None):\n\t\tmode = 'test' if y is None else 'train'\n\t\tif self.dropout_param is not None:\n\t\t\tself.dropout_param['mode'] = mode\n\t\tif self.use_batchnorm:\n\t\t\tfor bn_param in self.bn_params:\n\t\t\t\tbn_param[mode] = mode\n\n\t\tW1, b1 = self.params['W1'], self.params['b1']\n\t\tW2, b2 = self.params['W2'], self.params['b2']\n\t\tW3, b3 = self.params['W3'], self.params['b3']\n\t\tW5, b5 = self.params['W5'], self.params['b5']\n\t\t\n\t\tgamma1, beta1 = self.params['gamma1'], self.params['beta1']\n\t\tgamma2, beta2 = self.params['gamma2'], self.params['beta2']\n\t\tgamma3, beta3 = self.params['gamma3'], self.params['beta3']\t\n\n\t\t# pass conv_param to the forward pass for the convolutional layer\n\t\tfilter_size1 = W1.shape[2]\n\t\tconv_param1 = {'stride': 1, 'pad': (filter_size1 - 1) / 2}\n\t\tfilter_size2 = W2.shape[2]\n\t\tconv_param2 = {'stride': 1, 'pad': (filter_size2 - 1) / 2}\n\t\t\n\t\t# pass pool_param to the forward pass for the max-pooling layer\n\t\tpool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2}\n\t\t\n\t\tscores = None\n\t\n\t\t# Convolutional layers\t\n\t\tz1, cache1 = conv_relu_forward(X, W1, b1, conv_param1)\n\t\tz2, cache2 = conv_relu_pool_forward(z1, W2, b2, conv_param2, pool_param)\n\t\tz3, cache3 = spatial_batchnorm_forward(z2, gamma1, beta1, self.bn_params[1])\n\n\t\t# Fully Connected layers\n\t\tz4, cache4 = affine_relu_bn_forward(z3, W3, b3, gamma2, beta2, self.bn_params[2])\n\t\tz4, 
cache9 = dropout_forward(z4, self.dropout_params)\n\n\t\t# Output layer\n\t\tz6, cache6 = affine_forward(z4, W5, b5)\n\t\tz7, cache7 = batchnorm_forward(z6, gamma3, beta3, self.bn_params[3])\n\t\t#z8, cache8 = dropout_forward(z7, self.dropout_params)\n\t\tscores = z7\n\t\t\n\t\tif y is None:\n\t\t\treturn scores\n\t\t\n\t\tloss, grads = 0, {}\n\t\tloss, dout = softmax_loss(scores, y)\n\t\tloss += self.reg * 0.5 * (np.power(self.params['W1'], 2).sum() +\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnp.power(self.params['W2'], 2).sum() +\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnp.power(self.params['W5'], 2).sum() +\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnp.power(self.params['W3'], 2).sum())\n\t\t\n\t\t#dx8 = dropout_backward(dout, cache8)\n\t\tdx7, grads['gamma3'], grads['beta3'] = batchnorm_backward(dout, cache7)\n\t\tdx6, grads['W5'], grads['b5'] = affine_backward(dx7, cache6)\n\t\tdx6 = dropout_backward(dx6, cache9)\n\t\tdx4, grads['W3'], grads['b3'], grads['gamma2'], grads['beta2'] = affine_relu_bn_backward(dx6, cache4)\n\t\t\n\t\tdx3, grads['gamma1'], grads['beta1'] = spatial_batchnorm_backward(dx4, cache3)\n\t\tdx2, grads['W2'], grads['b2'] = conv_relu_pool_backward(dx3, cache2)\n\t\tdx1, grads['W1'], grads['b1'] = conv_relu_backward(dx2, cache1)\n\t\t\n\t\treturn loss, grads", "def loss_fn(outputs, labels, wts):\n\n # reshape labels to give a flat vector of length batch_size*seq_len\n loss_noreduce = nn.BCEWithLogitsLoss(reduce=False)\n loss = torch.mean(loss_noreduce(outputs, labels)*wts)\n\t\n # compute cross entropy loss for all tokens\n return loss", "def loss_fn(gr_truth, pred):\n return 100 * dice_loss(pred, gr_truth) + softmax_weighted_loss(pred, gr_truth)", "def __init__(self, grid_size=7, num_bboxes=2, num_classes=20):\r\n super(Loss, self).__init__()\r\n self.S = grid_size\r\n self.B = num_bboxes\r\n self.C = num_classes", "def custom_loss(y_true, y_pred):\n #print(K.int_shape(y_pred))\n sr = y_pred[:,:,:,0]\n sr_clear = y_pred[:,:,:,1]\n hr = y_true[:,:,:,0]\n hr_clear = y_true[:,:,:,1]\n dim = 384 #K.int_shape(y_pred)[1]\n diff = hr - sr\n denominateur = K.sum(hr_clear, axis=(1,2))\n\n b = K.sum( diff * hr_clear, axis=(1,2))/denominateur #batchsize dim\n #print(K.int_shape(y_pred), K.int_shape(y_pred[:,:,:]))\n b = K.expand_dims(b, axis=-1)\n b = K.expand_dims(b, axis=-1)\n b = K.repeat_elements(b, dim, axis=1 )\n b = K.repeat_elements(b, dim, axis=-1 )\n \n cMSE = K.sum(np.square( (diff-b)*hr_clear), axis=(1,2))/denominateur\n\n #cMSE = K.sum(np.square( (diff)*hr_clear), axis=(1,2))/denominateur\n cPSNR = -10*K.log(cMSE)/K.log(10.0)\n loss1 = 46.5 / cPSNR\n ce = binary_crossentropy(K.reshape(hr_clear, (-1, dim*dim)), K.reshape(sr_clear, (-1, dim*dim)))\n \n #print(K.int_shape(loss1), K.int_shape(ce))\n return loss1 + 0.5*ce", "def __init__(self, model=None, reduction: str = \"mean\"):\n super().__init__()\n self.loss_fun = CrossEntropyLoss(reduction=reduction)", "def classweighted_mortality_loss(class_weights):\n\n def tmp_mortality_loss(y_true, y_pred):\n sample_weights = (1 - y_true) * class_weights[0] + y_true * class_weights[1]\n bce = K.binary_crossentropy(y_true, y_pred)\n return K.mean(sample_weights * bce, axis=-1)\n\n return tmp_mortality_loss", "def balancing_loss(h):\n n_batch = h.data.shape[0]\n n_units = h.data.shape[1]\n\n mean = F.sum(h, axis=1) / n_units\n loss = F.sum((mean - 0.5) ** 2) / n_batch\n return loss", "def loss(self, X, labels):\n features = self.get_conv_feats(X)\n loss = blah\n return loss", "def cross_entropy(X, y, using_onehot=True):\n\tM = y.shape[0]\n\tif 
using_onehot :\n\t\tlog_likelihood = -np.log(np.max(X * y, -1))\n\telse:\n\t\tlog_likelihood = -np.log(X[range(M), y]) # 找到y对应的那个类别所对应的logit\n\tloss = np.sum(log_likelihood) / M\n\treturn loss", "def calculate_loss(self, output, target, redmode = 'mean'):\n\n loss = F.cross_entropy(output, target, reduction = redmode)\n return loss", "def class_balanced_cross_entropy_loss(output, label, size_average=True, batch_average=True, void_pixels=None):\r\n assert(output.size() == label.size())\r\n labels = torch.ge(label, 0.5).float()\r\n num_labels_pos = torch.sum(labels)\r\n num_labels_neg = torch.sum(1.0 - labels)\r\n num_total = num_labels_pos + num_labels_neg\r\n\r\n output_gt_zero = torch.ge(output, 0).float()\r\n loss_val = torch.mul(output, (labels - output_gt_zero)) - torch.log(\r\n 1 + torch.exp(output - 2 * torch.mul(output, output_gt_zero)))\r\n\r\n loss_pos_pix = -torch.mul(labels, loss_val)\r\n loss_neg_pix = -torch.mul(1.0 - labels, loss_val)\r\n\r\n if void_pixels is not None:\r\n w_void = torch.le(void_pixels, 0.5).float()\r\n loss_pos_pix = torch.mul(w_void, loss_pos_pix)\r\n loss_neg_pix = torch.mul(w_void, loss_neg_pix)\r\n num_total = num_total - torch.ge(void_pixels, 0.5).float().sum()\r\n\r\n loss_pos = torch.sum(loss_pos_pix)\r\n loss_neg = torch.sum(loss_neg_pix)\r\n\r\n final_loss = num_labels_neg / num_total * loss_pos + num_labels_pos / num_total * loss_neg\r\n\r\n if size_average:\r\n final_loss /= np.prod(label.size())\r\n elif batch_average:\r\n final_loss /= label.size()[0]\r\n\r\n return final_loss", "def loss(self, prediction_dict, groundtruth_lists):\r\n eyeFace_logits = prediction_dict['eyeFace_logits']\r\n eyeFace_logits = tf.nn.softmax(eyeFace_logits)\r\n logits = eyeFace_logits\r\n #softmax只是一个分类器\r\n slim.losses.sparse_softmax_cross_entropy(\r\n logits=logits, labels=groundtruth_lists)\r\n loss = slim.losses.get_total_loss()\r\n loss_dict = {'loss': loss}\r\n return loss_dict", "def loss(self, predictions, labels, labels_2, inputs, raw_inp):\n next_word = labels\n curr_label = tf.cast(labels_2, tf.float32)\n\n \n prediction_word = predictions[0]\n prediction_label = predictions[1]\n\n #initialising variables\n cross_entropy_next = tf.constant(0)\n cross_entropy_label = tf.constant(0)\n cross_entropy_label_similarity = tf.constant(0)\n cross_entropy_emb = tf.constant(0)\n \n self.prec_label, self.prec_label_op = tf.constant(1), tf.constant(1)\n self.recall_label, self.recall_label_op = tf.constant(1), tf.constant(1)\n self.label_sigmoid = tf.constant(0)\n\n \n if self.config.solver._next_node_loss:\n #<EOS> and <UNK> get encoded as 1 and 0 respectively\n #Count loss only for actual nodes\n \n raw_inp1 = tf.greater(tf.slice(raw_inp, [0,0],[-1, 1]), -1) #Make first column all True\n raw_inp2 = tf.greater(tf.slice(raw_inp, [0,1],[-1, -1]), 1) #Make only non (<EOS>,<UNK>) True\n raw_inp = tf.concat(1, [raw_inp1, raw_inp2]) #concatenate back to original shape\n raw_inp = tf.transpose(raw_inp) #Transpose raw_inp from batch*step to step*batch\n mask = [tf.reshape(tf.cast(raw_inp, tf.float32), [-1])] #Convert from bool to float and flatten array\n\n\n #<EOS> and <UNK> get encoded as 1 and 0 respectively\n #Transpose raw_inp from batch*step to shape*batch\n #Count loss only for actual nodes\n #Convert from bool to float and flatten array\n #mask = [tf.reshape(tf.cast(tf.greater(tf.transpose(raw_inp), 0), tf.float32), [-1])]\n\n #Vector to weigh different word losses\n #all_ones = [tf.ones([self.config.batch_size * self.config.num_steps])]\n\n #cross entropy loss for 
next word prediction\n cross_entropy_next = sequence_loss([prediction_word],[tf.reshape(next_word, [-1])], mask, self.config.data_sets._len_vocab)\n tf.add_to_collection('total_loss', cross_entropy_next)\n\n if self.config.solver._curr_label_loss:\n #Get the slice of tensor representing label '0' for all batch.seq\n #'0' label is assigned for <EOS> and the nodes whose labels are not known\n #Valid errors are only those which don't have '0' label\n valid = tf.cast(tf.less(tf.slice(curr_label, [0,0,0], [self.config.num_steps, self.config.batch_size, 1]), tf.constant(0.5)), tf.float32)\n #replicate along 3rd axis\n valid = tf.tile(valid, tf.pack([1,1,tf.shape(curr_label)[2]]))\n \n #Sigmoid activation\n self.label_sigmoid = tf.sigmoid(prediction_label)\n #binary cross entropy for labels\n cross_loss = tf.add(tf.log(1e-10 + self.label_sigmoid)*curr_label,\n tf.log(1e-10 + (1-self.label_sigmoid))*(1-curr_label))\n #only consider the loss for valid label predictions\n #[TODO] mean of all or mean of only valid ???\n cross_entropy_label = -1*tf.reduce_mean(tf.reduce_sum(cross_loss*valid,2))\n tf.add_to_collection('total_loss', cross_entropy_label)\n\n\n if self.config.solver._label_similarity_loss: \n #Label similarity loss \n label_sigmoid = tf.sigmoid(pred_label_reshaped)\n part1 = tf.slice(label_sigmoid, [0,0,0], [self.config.num_steps-1, self.config.batch_size, self.config.data_sets._len_labels])\n part2 = tf.slice(label_sigmoid, [1,0,0], [self.config.num_steps-1, self.config.batch_size, self.config.data_sets._len_labels])\n\n #Exponential weightage -> [r**(n-1), r**(n-2), ... , r**2. r**1]\n label_diffusion = tf.constant([self.config.data_sets._diffusion_rate**i for i in range(self.config.num_steps-1,0,-1)])\n cross_loss_sim = tf.add(tf.log(1e-10 + part1)*part2, tf.log(1e-10 + (1-part1))*(1-part2))\n #prediction is 3 dimensional (seq x batch x label_len), reduce along axis of label_len\n #Sum over each label error -> take mean over the batch -> sum for the sequence\n cross_entropy_label_similarity = tf.reduce_sum(tf.reduce_mean(-tf.reduce_sum(cross_loss_sim, 2),1) * label_diffusion)\n tf.add_to_collection('total_loss', cross_entropy_label_similarity)\n\n \n if self.config.solver._embedding_loss:\n #embedding similarity loss\n #Matching First input's embeddings with embeddings of other inputs\n #[TODO] reverse feed of input AND reverse diffusion rate\n \n emb_part1 = tf.slice(inputs, [self.config.num_steps-2,0,0], [1, self.config.batch_size, self.config.mRNN._embed_size])\n emb_part2 = tf.slice(inputs, [0,0,0], [self.config.num_steps-1, self.config.batch_size, self.config.mRNN._embed_size])\n\n #Exponential weightage -> [r**(n-1), r**(n-2), ... , r**2. 
r**1]\n label_diffusion = tf.constant([self.config.data_sets._diffusion_rate**i for i in range(self.config.num_steps-1,0,-1)])\n #Broadcastive Subtraction\n mse_emb = tf.reduce_mean(tf.square(emb_part2 - emb_part1),2)\n cross_entropy_emb = tf.reduce_sum(tf.reduce_mean(mse_emb,1) * label_diffusion) * self.config.data_sets._emb_factor\n tf.add_to_collection('total_loss', cross_entropy_emb)\n\n if self.config.solver._L2loss:\n vars = tf.trainable_variables() \n lossL2 = tf.add_n([tf.nn.l2_loss(v) for v in vars])*0.00001\n tf.add_to_collection('total_loss', lossL2)\n\n loss = tf.add_n(tf.get_collection('total_loss'))\n grads, = tf.gradients(loss, [self.embedding]) \n\n tf.summary.scalar('next_node_loss', cross_entropy_next)\n tf.summary.scalar('curr_label_loss', cross_entropy_label)\n tf.summary.scalar('label_similarity_loss', cross_entropy_label_similarity )\n tf.summary.scalar('emb_loss', cross_entropy_emb)\n tf.summary.scalar('total_loss', tf.reduce_sum(loss))\n \n return [loss, cross_entropy_next, cross_entropy_label, cross_entropy_label_similarity, cross_entropy_emb, grads]", "def loss(self, X, y=None):\n\n # In dev testing, the loss fnc stops at \"scores\" , unfollowed by \"softmax\" probability prediction.\n # In real testing, \"self.predict()\" needs to be implemented in Solver() class.\n \n if y is None:\n for bn_param in self.bn_params:\n bn_param[\"mode\"] = \"test\"\n\n\n W1, b1 = self.params['W1'], self.params['b1']\n gamma1, beta1 = self.params[\"sbnGamma1\"], self.params[\"sbnBeta1\"]\n bn_param1 = self.bn_params[0]\n\n W2, b2 = self.params['W2'], self.params['b2']\n gamma2, beta2 = self.params[\"sbnGamma2\"], self.params[\"sbnBeta2\"]\n bn_param2 = self.bn_params[1]\n\n W3, b3 = self.params['W3'], self.params['b3']\n gamma3, beta3 = self.params[\"bnGamma3\"], self.params[\"bnBeta3\"]\n bn_param3 = self.bn_params[2]\n\n W4, b4 = self.params['W4'], self.params['b4']\n \n # pass conv_param to the forward pass for the convolutional layer\n conv_param = self.conv_param\n\n # pass pool_param to the forward pass for the max-pooling layer\n pool_param = self.maxpool_params\n\n ############################################################################\n # TODO: Implement the forward pass for the three-layer convolutional net, #\n # computing the class scores for X and storing them in the scores #\n # variable. 
#\n ############################################################################\n \n scores = None \n cache = {}\n # def conv_sbn_relu_forward(x, w, b, gamma, beta, conv_param, bn_param): return out, cache;\n out, cache[\"layer1\"] = layer_utils.conv_sbn_relu_forward(X, W1, b1, gamma1, beta1, conv_param, bn_param1) \n out, cache[\"layer2\"] = layer_utils.conv_sbn_relu_forward(out, W2, b2, gamma2, beta2, conv_param, bn_param2)\n\n # def max_pool_forward_fast(x, pool_param): return out, cache;\n out, cache[\"maxpool\"] = fast_layers.max_pool_forward_fast(out, pool_param)\n\n # def affine_bn_relu_forward(x, w, b, gamma, beta, bn_param): return out, cache;\n \n out, cache[\"layer3\"] = layer_utils.affine_bn_relu_forward(out, W3, b3, gamma3, beta3, bn_param3)\n\n # def affine_forward(x, w, b): return out, cache;\n scores, cache[\"layer4\"] = layers.affine_forward(out, W4, b4)\n\n\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n \n if y is None:\n return scores\n \n ############################################################################\n # TODO: Implement the backward pass for the three-layer convolutional net, #\n # storing the loss and gradients in the loss and grads variables. Compute #\n # data loss using softmax, and make sure that grads[k] holds the gradients #\n # for self.params[k]. Don't forget to add L2 regularization! #\n ############################################################################\n \n loss, grads = 0, {}\n\n # def softmax_loss(x, y): return loss, dscore;\n loss, dscores = layers.softmax_loss(scores, y)\n loss += 0.5 * self.reg * (np.sum(W1 * W1) + np.sum(W2 * W2) + np.sum(W3 * W3) + np.sum(W4 * W4))\n\n # def affine_backward(dout, cache): return dx, dw, db;\n dout, dW4, db4 = layers.affine_backward(dscores, cache[\"layer4\"]) \n\n # def affine_bn_relu_backward(dout, cache): return dx, dw, db, dgamma, dbeta;\n dout, dW3, db3, dgamma3, dbeta3 = layer_utils.affine_bn_relu_backward(dout, cache[\"layer3\"])\n\n # print cache[\"layer3\"]\n\n # def max_pool_backward_fast(dout, cache): return max_pool_backward_im2col(dout, real_cache);\n # def max_pool_backward_im2col(dout, cache): return dx;\n dout = fast_layers.max_pool_backward_fast(dout, cache[\"maxpool\"])\n\n # def conv_sbn_relu_backward(dout, cache): return dx, dw, db, dgamma, dbeta;\n dout, dW2, db2, dgamma2, dbeta2 = layer_utils.conv_sbn_relu_backward(dout, cache[\"layer2\"])\n _, dW1, db1, dgamma1, dbeta1 = layer_utils.conv_sbn_relu_backward(dout, cache[\"layer1\"])\n\n # reg\n grads['W4'], grads['b4'] = dW4 + self.reg * W4, db4\n \n grads['W3'], grads['b3'] = dW3 + self.reg * W3, db3\n grads[\"bnGamma3\"], grads[\"bnBeta3\"] = dgamma3, dbeta3\n\n grads['W2'], grads['b2'] = dW2 + self.reg * W2, db2\n grads[\"sbnGamma2\"], grads[\"sbnBeta2\"] = dgamma2, dbeta2\n\n grads['W1'], grads['b1'] = dW1 + self.reg * W1, db1\n grads[\"sbnGamma1\"], grads[\"sbnBeta1\"] = dgamma1, dbeta1\n\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n \n return loss, grads", "def get_loss(self, xs, y):\n \"*** YOUR CODE HERE question 4 ***\"\n return nn.SoftmaxLoss(self.run(xs), y)" ]
[ "0.7614255", "0.72014856", "0.7150281", "0.7139225", "0.7041426", "0.69943374", "0.69878465", "0.6987561", "0.69841796", "0.69580585", "0.69393665", "0.6922004", "0.6916779", "0.69158214", "0.691163", "0.691163", "0.6885535", "0.67836154", "0.6734367", "0.6716514", "0.6716514", "0.66966033", "0.6688523", "0.66861326", "0.66765404", "0.66245586", "0.6618933", "0.6601016", "0.6599499", "0.6597173", "0.6566706", "0.6532219", "0.65219325", "0.6521887", "0.6489266", "0.6485533", "0.6483446", "0.6481844", "0.64796466", "0.64772135", "0.64722764", "0.646938", "0.6456198", "0.6446881", "0.64429855", "0.64190704", "0.6412616", "0.6410022", "0.64016503", "0.6391534", "0.63874865", "0.6383763", "0.637991", "0.63789517", "0.63675284", "0.6356364", "0.63528925", "0.6351244", "0.63503784", "0.6349311", "0.6345331", "0.63333607", "0.63302857", "0.63225603", "0.6319365", "0.6309768", "0.6309768", "0.63072044", "0.6306261", "0.6287944", "0.6285986", "0.62841034", "0.62784755", "0.6277072", "0.62752324", "0.6263703", "0.6253968", "0.6253968", "0.6241085", "0.6240468", "0.62330204", "0.6230955", "0.62304646", "0.62304044", "0.6227251", "0.62253916", "0.6224296", "0.62206006", "0.621606", "0.6214118", "0.62087697", "0.62070924", "0.6206064", "0.6201123", "0.61977094", "0.6194868", "0.61903673", "0.6186404", "0.6185354", "0.61850923" ]
0.6909369
16
Add a Pseudocode Operation to the currently active buffer.
def AddPseudoCode(self, pcode):
    self.buffers[self.buffergrade].append(pcode)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_code(self, code):\n self.code += code", "def add_operation(self):\n arg1 = self.memory[self.memory[self._cursor + 1]]\n arg2 = self.memory[self.memory[self._cursor + 2]]\n arg3 = self.memory[self._cursor + 3]\n self.memory[arg3] = arg1 + arg2\n # print(f'Cursor: {self._cursor}\\tAssigning position {position} with value {n1 + n2}')\n self._cursor += 4\n return", "def add_operation(self):\n n1 = self.memory[self.memory[self._cursor + 1]]\n n2 = self.memory[self.memory[self._cursor + 2]]\n position = self.memory[self._cursor + 3]\n self.memory[position] = n1 + n2\n # print(f'Cursor: {self._cursor}\\tAssigning position {position} with value {n1} + {n2} = {n1 + n2}')\n return", "def _add_to_buffer(self, data):\n for byte in data:\n self.next_fn(byte) \n self._parse_cmds()", "def addOp(self, op):\n self.operations << op", "def _putCode(self, code):\n assert(type(code) == int)\n self.code[self.codeptr] = code\n self.codeptr += 1", "def append(self, char):\n self.sequence += char", "def addop(self, mask, target, args):\n\n self.set_user(args)\n yield \"Added operator.\"", "def append(self,instr):\n self.instructions.append(instr)", "def add(text):\n orig = dispb[\"text\"]\n new = orig + text\n ops = [\"+\",\"-\",\"*\",\"/\"]\n # conditions\n # length 21\n if len(new) > 21:\n dispb[\"text\"] = orig\n return 0\n \n # one calc at a time\n if len(orig) > 0:\n if (orig[-1] in ops) & (text in ops):\n dispb[\"text\"] = orig\n return 0\n\n dispb[\"text\"] = new\n return 0", "def start(self, pos = 0, lib_call = False) -> None:\n from utils.instructions import instructions\n self.scope_push()\n self.pos = pos\n while self.pos < len(self.code.instructions):\n self.pos += 1 + instructions[self.code.get_instruction(self.pos)].run(self, self.code, self.pos + 1)", "def add_char(self, char):\n if self.pos >= self.line_length():\n self.buffer.append_char(char, self.line)\n else:\n self.buffer.insert_char(char, self.line, self.pos)\n \n self.pos += 1\n self.has_changes = True", "def add_code(self, s):\n self.code += ' ' * self.indent + s + '\\n'", "def make_codes(self):\n\t\troot = heapq.heappop(self.heap)#obtenemos la raiz del arbol\n\t\tcurrent_code = \"\"\n\t\tself.make_codes_helper(root, current_code)", "def _insert_op(self, op):", "def iadd(state: State) -> State:\n cell = state.array[state.index] or 0\n return state._replace(acc=state.acc + cell)", "def add_op(self, expr):\n from cascada.bitvector import operation\n assert isinstance(expr, operation.Operation)\n assert not self.contain_op(expr)\n name = \"{}{}\".format(self.id_prefix, self.counter)\n self.counter += 1\n identifier = core.Variable(name, expr.width)\n self.table[identifier] = expr\n\n return identifier", "def AddOperation(self, op):\n self._operations.append(op)", "def add_op(self, op):\n self._operations.append(op)", "def asm(self, text):\n self.text.append(text)", "def open_pseudocode(self, *args):\n return _ida_hexrays.Hexrays_Hooks_open_pseudocode(self, *args)", "def add(self, state, action, reward, new_state, done):\n experience = (state, action, reward, new_state, done)\n self.buffer.append(experience)", "def advance(self):\n self.pos += 1\n if self.pos > len(self.syntax) - 1:\n self.current_char = None\n else:\n self.current_char = self.syntax[self.pos]", "def add(\n self,\n state: np.ndarray,\n action: np.ndarray,\n reward: np.float64,\n next_state: np.ndarray,\n done: float,\n ):\n data = (state, action, reward, next_state, done)\n\n if len(self.buffer) == self.buffer_size:\n self.buffer[self.idx] = data\n self.idx = 
(self.idx + 1) % self.buffer_size\n else:\n self.buffer.append(data)", "def render(self, code_proxy):\n code_proxy.bytecode.add(opcode.opmap[self.opname])", "def operate_cipher(self):", "def process(opcode):\n opcode.process()", "def operator_c(buf, input_line, pos1, pos2, overwrite=False):\n operator_d(buf, input_line, pos1, pos2, overwrite)\n set_mode(\"INSERT\")", "def advance(self):\n if self.instr is not None:\n self.instr.opcode = self.instr.binary[25:]\n if opcode_decode[self.instr.opcode] == 'R-type':\n self.decode_rtype()\n elif opcode_decode[self.instr.opcode] == 'I-type' or opcode_decode[self.instr.opcode] == 'Load':\n self.decode_itype()\n else:\n raise SyntaxError(\"Invalid opcode\")", "def incr_operand(self):\n pass", "def add(self, newSym):\n if newSym != \"?\":\n self.n = self.n + 1\n self.has[newSym] = 1 + self.has.get(newSym, 0)\n if self.has[newSym] > self.most:\n self.most = self.has[newSym]\n self.mode = newSym", "def addChar(self, char):\n self.guessedChars.append(char)", "def add(self, state, action, reward, next_state, done):\n self.replay_buffer.append([state, action, reward, next_state, done])", "def asm(self, name, ucode, flags=None):\n\n print \"Adding assembly word %s\" % name\n\n self.create(name, flags)\n self.space.write(ucode)\n self.space.write(assemble(SET, PC, self.asmwords[\"next\"]))", "def key_a(buf, input_line, cur, count):\n set_cur(buf, input_line, cur + 1, False)\n set_mode(\"INSERT\")", "def RespAddCode(builder, code):\n return AddCode(builder, code)", "def switch_pseudocode(self, *args):\n return _ida_hexrays.Hexrays_Hooks_switch_pseudocode(self, *args)", "def code():", "def add_experience(self, action, state, reward, terminal):\n self.replay_buffer.add_experience(action, state, reward, terminal)", "def forward_character():\r\n set_point(point()+1)", "def append_lexeme(self, char: str):\r\n\r\n self._lexeme_buffer += char", "def pre_code_block(self):\n return self.new_code_block(content=self.precode.strip('\\n'),\n IO='input')", "def add_content(self, addition):\n self.content = self.content + addition", "def insert(self, position, insert):\n assert all(new in self.ALPHABET for new in insert)\n if position < 1 or position - 1 > len(self.sequence):\n raise ValueError(f\"Insertion position {position} out of bonds for given sequence.\")\n self.sequence = f\"{self.sequence[: position - 1]}{insert}{self.sequence[position:]}\"\n if \"mutations\" in self.metadata.keys():\n self.metadata[\"mutations\"] += f\" ins{position}{insert}\"\n else:\n self.metadata[\"mutations\"] = f\"ins{position}{insert}\"", "def _append_operator(self, operator):", "def advance(self):\n if self.instr is not None:\n self.simulator.registers[int(self.instr.binary[20:25], 2)][1] = self.instr.result", "def InsertNextPoint(self, ):\n ...", "def append(self, state):\n self._buffer[:-1] = self._buffer[1:]\n self._buffer[-1] = state", "def _add_cmd(self, cmd):\n if cmd.gate == Allocate:\n self._active_qubits += 1\n elif cmd.gate == Deallocate:\n self._active_qubits -= 1\n elif cmd.gate == Measure:\n for qureg in cmd.qubits:\n for qubit in qureg:\n self.main_engine.set_measurement_result(qubit, 0)\n elif self._is_rotation(cmd):\n self._num_rotations += 1\n self._rotations.append(self._decompose_rotation(cmd))", "def _add_token(self, token_type: TokenType, literal: Any = None):\n text = self.source[self.start : self.current]\n\n self.tokens.append(\n Token(token_type=token_type, lexeme=text, literal=literal, line=self.line)\n )", "def emit(self, op):\n assert self._curblock, \"Builder 
is not positioned!\"\n\n if op.result is None:\n op.result = self.func.temp()\n\n if self._lastop == 'head' and self._curblock.ops.head:\n op.insert_before(self._curblock.ops.head)\n elif self._lastop in ('head', 'tail'):\n self._curblock.append(op)\n else:\n op.insert_after(self._lastop)\n self._lastop = op", "async def add_line(self, data: str) -> t.Optional[Result]:\n self.buf += data\n try:\n # remove the last newline\n astob = ast_compile_interactive(self.buf[:-1])\n except Exception:\n self.buf = \"\"\n raise\n else:\n if astob is not None:\n self.buf = \"\"\n return (await self.eval_single(astob))\n return None", "def add_program(self, new_instruction):\n if self._programs is None:\n self._programs = new_instruction\n else:\n self._programs += self._separator + new_instruction", "def add(self, state, action, reward, next_state, done, priority):\n e = self.experience(state, action, reward, next_state, float(np.uint8(done)))\n if isinstance(priority, complex): raise ValueError('In ReplayMem::add, Priority is complex ', priority)\n if isinstance(self.alpha, complex): raise ValueError('In ReplayMem::add, self.alpha is complex ', self.alpha)\n if isinstance(priority**self.alpha, complex): \n raise ValueError('In ReplayMem::add, priority {} ** self.alpha {} = {} is complex '.format(priority,\n self.alpha, priority**self.alpha))\n \n self.tree.add(e, float(priority**self.alpha))", "def process_next_char(self): \n self.current_position += 1\n if self.current_position >= len(self.code_input):\n '''End of file since the position is equal to or greater than the input's position'''\n self.current_char = '\\0' #EOF\n print('end of line')\n self.current_char = self.code_input[self.current_position]", "def add(self, proc: ImageProcessor):\n self.chain.append(proc)\n return self", "def add_to_disassembly(\n self, operation: int, *args: Union[int, Tuple[int, ...]]\n ) -> None:\n\n try:\n self.disassembly[self.current_address] = self.opcodes[operation].format(\n *args\n )\n except KeyError as key_error:\n click.secho(\n f\"\\nThe key '{hex(key_error.args[0])}' is not \"\n \"part of the opcode dictionary.\\n\",\n fg=\"red\",\n bold=True,\n )", "def __iadd__(self, term):\n self.add(term)\n return self", "def next_operation(self):\n raise NotImplementedError", "def preprocess_program(self, input_code=None):\n if (input_code == None):\n return;\n map(self.program.append, filter((lambda (char): char in self.instructions), input_code))\n #self.program.append(False) # Causes interpretation to stop after the program has finished.\n self.build_jump_map(self.program)", "def add_operator(self, operator: Callable) -> None:\n self.operators.append(operator)", "def key_I(buf, input_line, cur, count):\n pos, _, _ = motion_carret(input_line, cur, 0)\n set_cur(buf, input_line, pos)\n set_mode(\"INSERT\")", "def addChar (self, c) :\r\n # Notice the \\n so we can notice when new lines begin\r\n if (c=='\\n') :\r\n self.lineNumber_ += 1\r\n self.charNumber_ = 0\r\n \r\n # Keep the last 1024 or so characters\r\n if (self.data_.full()) :\r\n self.data_.get()\r\n self.data_.put(c)\r\n self.charNumber_ += 1", "def precmd(self, statement):\n return statement", "def output_op(self, op):\n self.output['text'] += ' ' + op + ' ' \n self.seen_op = True", "def _insChar(self, char, pos, color):\n char, vertices, glyph = self._extractGlyph(char, glm.vec4(color))\n if not self.text:\n off, kern = self._updateMetric(pos, char)\n if char in self.NO_GLYPH_CHARS:\n self.colors.insert(pos, [char, None])\n else:\n vertices['vtx'] += off + 
glyph['offset']\n self.allVertices = np.hstack(vertices)\n self.allIndices = self._baseInd\n self.colors.insert(pos, [char, color])\n self.text += char\n else:\n self.logger.debug(\"Inserting %r at %d\" % (char, pos))\n nonGlyph = countInSet(self.text[:pos], self.NO_GLYPH_CHARS)\n # Arrange vertices\n if pos < len(self.text):\n self.allVertices = self.allVertices[:(pos - nonGlyph) * 4]\n self.allIndices = self.allIndices[:pos - nonGlyph]\n\n # Set the metric\n off, kern = self._updateMetric(pos, char)\n if char in self.NO_GLYPH_CHARS:\n color = None\n else:\n vertices['vtx'] += off + kern + glyph['offset']\n if self.allVertices is None:\n self.allVertices = np.hstack(vertices)\n else:\n self.allVertices = np.append(self.allVertices, vertices)\n if self.allIndices is None:\n self.allIndices = self._baseInd\n else:\n self.allIndices = np.vstack((self.allIndices,\n self._baseInd + (pos - nonGlyph) * 4))\n\n self.colors.insert(pos, [char, color])\n if pos < len(self.text):\n self.text = self.text[:pos] + char + self.text[pos:]\n self._updateGlyphs(pos, char)\n else:\n self.text += char", "def addch(self, stdscr, y, x, text):\n stdscr.addch(y, x, text, curses.color_pair(self.i))", "def cb_plus(event):\n delta_alpha = pm_rate\n # Increase Alpha \n sAlpha0.set_val( np.clip(sAlpha0.val + delta_alpha, alpha_min[0], alpha_max[0]) )\n sAlpha1.set_val( np.clip(sAlpha1.val + delta_alpha, alpha_min[1], alpha_max[1]) )\n sAlpha2.set_val( np.clip(sAlpha2.val + delta_alpha, alpha_min[2], alpha_max[2]) )\n print(\"+++\")", "def InsertText(self, pos, text):\n self.stc.InsertText(pos, text)\n if self.IsInsertMode():\n self.buffer += text", "def get_pseudocode(self, *args):\n return _ida_hexrays.cfuncptr_t_get_pseudocode(self, *args)", "def append_operator(cls, operator):\n for context in cls._active_contexts:\n context._append_operator(operator) # pylint: disable=protected-access", "def get_pseudocode(self, *args):\n return _ida_hexrays.cfunc_t_get_pseudocode(self, *args)", "def add(self, exp):\n self.batch.append(exp)", "def update(self):\n self.__token += self.__lines[self.__i]\n self.__i += 1", "def add(self, state, action, reward, next_state, done):\n e = self.transition(state, action, reward, next_state, done)\n self.buffer.append(e)", "def command(self, arr):\n self.bitmap(arr, 0)", "def characters(self, content):\n if self.in_source: self.chars += content", "def add_token(self, amount):\n self.M += amount", "def add_block(self, cxnode, code, **magic_vars):\n ast = cparse(code)\n # ast.show()\n generator = MagicCGenerator(cxnode, magic_vars)\n generator.indent_level = self.indent_level\n hdr = '\\n%s// %s\\n' % (' ' * self.indent_level,\n cxnode.__class__.__name__)\n self.code += hdr + generator.visit(ast)", "def open_pseudocode(*args):\n return _ida_hexrays.open_pseudocode(*args)", "def key_A(buf, input_line, cur, count):\n set_cur(buf, input_line, len(input_line), False)\n set_mode(\"INSERT\")", "def draw_instruction():\r\n arcade.draw_text(\r\n \"This is a game of Santa, Reindeer, Snowman\", 0, 50, arcade.color.WHITE, 15\r\n )\r\n arcade.draw_text(\r\n \"Santa beats snowman, snowman beats reindeer, reindeer beats santa\",\r\n 0,\r\n 30,\r\n arcade.color.WHITE,\r\n 13,\r\n )\r\n arcade.draw_text(\r\n \"Press button 1 for santa, 2 for reindeer, and 3 for snowman\",\r\n 0,\r\n 10,\r\n arcade.color.WHITE,\r\n 15,\r\n )\r\n arcade.draw_text(\r\n \"User Choice\", WINDOW_WIDTH - 175, WINDOW_HEIGHT - 60, arcade.color.WHITE, 15\r\n )\r\n arcade.draw_text(\"CPU Choice\", 75, WINDOW_HEIGHT - 60, 
arcade.color.WHITE, 15)", "def test_overwrite_codeword(self):\n self.assertEqual(self.sess.query(db.CodeWord).count(), 0)\n self.alice.add_codeword('muppet', 'ranged')\n self.assertEqual(self.alice.translate_codeword('muppet'), 'ranged')\n self.assertEqual(self.sess.query(db.CodeWord).count(), 1)\n self.alice.add_codeword('muppet', 'infantry')\n self.assertEqual(self.alice.translate_codeword('muppet'), 'infantry')\n self.assertEqual(self.sess.query(db.CodeWord).count(), 1)", "def apply_gate_operation(self, operation:[str, cirq.Gate], qubit_expr, params:Optional[Dict]=None):\n gate_ops = self.get_gate_operation(operation, qubit_expr, params)\n strategy = self.insert_strategy\n self.append(gate_ops, strategy=strategy)", "def processingInstruction(self, target, data):\n pass", "def _ProcessKey(self, key_code):\n char = unichr(key_code)\n if self.IsNormalMode() or self.IsVisualMode():\n self.buffer += char\n if ed_vim.Parse(self.buffer, self.commander):\n # command was handled (or invalid) so clear buffer\n self.buffer = u''\n\n if self.IsVisualMode():\n self.commander.ExtendSelection()\n\n elif self.IsInsertMode():\n self.buffer += char", "def increment_instr(self):\n self.instruction_count += 1", "def flag(code):\n\tOFFSET = ord('🇦') - ord('A')\n\tif not code:\n\t\treturn u''\n\tpoints = list(map(lambda x: ord(x) + OFFSET, code.upper()))\n\ttry:\n\t\treturn chr(points[0]) + chr(points[1])\n\texcept ValueError:\n\t\treturn ('\\\\U%08x\\\\U%08x' % tuple(points)).decode('unicode-escape')", "def insert(self,\n position,\n add_value=1):\n self.array[position] += add_value\n ibloc = int(position/self.bloc_size)\n self.blocsum[ibloc] += add_value", "def add_operation(self, op):\n\n self.operations[op.name] = op", "def add(self, experience):\n self.buffer.append(experience)", "def store(self, code):\n key = self.stash_key % str(self.count)\n self.stash[key] = code\n self.count += 1\n return STX + key + ETX", "def addch(self, posy, posx, character, color_pair):\r\n if posy < 0 or posy > self.height - 1:\r\n return\r\n if posx < 0 or posx > self.width - 1:\r\n return\r\n if posx == self.width - 1 and posy == self.height - 1:\r\n return\r\n self.win.addch(posy, posx, character, color_pair)", "def alpha(self, state, pctx):\n raise NotImplementedError", "def add_command(self, expr, name, priority=0):\n add_expr = Group(expr).setResultsName(name)\n for i, (p, _) in enumerate(self.commands):\n if priority >= p:\n self.commands.insert(i, (priority, add_expr))\n break\n else:\n self.commands.append((priority, add_expr))\n self.reinit_exprs()", "def _sequence(self, phrase):\n for index in range(len(phrase)-1):\n self.markov_chain.add(\n phrase[index], phrase[index+1])\n\n # Add last as a dead-end state\n self.markov_chain.add(phrase[index+1], phrase[index+1])", "def operationAt(self, op, n, at):\n self._changed = True\n\n bpTokenStart = 0 # in bp (base pairs)\n # iToken and tokenLength are used ouside of loop\n for iStartToken,tokenLength in enumerate(t[0] for t in self._tokens):\n if bpTokenStart + tokenLength > at: break\n bpTokenStart += tokenLength\n rem = (n, op)\n opAt = at - bpTokenStart\n\n out = self._tokens[0:iStartToken]\n for i in range(iStartToken, len(self._tokens)):\n t, rem = CIGAR._mutateToken(self._tokens[i], opAt, rem)\n # Replace the current token with the output t\n out.extend(t)\n if rem == (): \n # We're done applying the operation to the CIGAR string\n out.extend(self._tokens[i+1:])\n break\n else: \n # Apply remaining operation at start of next token\n opAt = 0 \n\n # If an operation 
remains after all tokens have been dealt with\n if rem != (): \n if(rem[1] == 'I'):\n out.append(rem)\n else:\n raise ValueError((\"The operation {} at {}bp \"\n +\"exceeds the end of the string (and is no insert)\")\n .format((n, op), at))\n self._tokens = out", "def add(self, state, action, reward, next_state):\n exp = self.experience(state.detach().numpy(), action, reward, next_state.detach().numpy())\n self.memory.append(exp)", "def add_char(self, coord, char, modify=False):\n if modify:\n range_y, range_x = self._map_dims\n new_coord = [coord[0]+range_y[0]-1, coord[1]+range_x[0]-1]\n self._screen.addch(new_coord[0], new_coord[1], char)\n self._screen.refresh()\n return new_coord\n else:\n self._screen.addch(coord[0], coord[1], char)\n self._screen.refresh()\n return coord", "def push(self, op):\n self.top += 1\n self.stack.append(op)" ]
[ "0.5870351", "0.5767791", "0.57653064", "0.5667283", "0.5466743", "0.54478973", "0.52387804", "0.52152646", "0.5208209", "0.5186463", "0.51665425", "0.51497793", "0.51468796", "0.5108849", "0.5103262", "0.50743014", "0.5067011", "0.50647295", "0.5024439", "0.50223887", "0.49839905", "0.49773774", "0.49730378", "0.49691683", "0.4950089", "0.4927656", "0.49242634", "0.49180722", "0.49170488", "0.48808628", "0.48772925", "0.48742586", "0.48737797", "0.48590717", "0.48472977", "0.4845546", "0.48418736", "0.48396322", "0.48128814", "0.48086298", "0.4793467", "0.47868565", "0.47798464", "0.47768855", "0.47713625", "0.47711918", "0.47617954", "0.47509566", "0.47491333", "0.47478592", "0.47463506", "0.4745082", "0.47393468", "0.4726641", "0.4724848", "0.47105917", "0.47050604", "0.47048992", "0.47021043", "0.4667289", "0.46660745", "0.4665804", "0.46615452", "0.46610895", "0.46570238", "0.46492362", "0.46460134", "0.46406683", "0.46392468", "0.46384728", "0.46352133", "0.46294177", "0.46192044", "0.46118835", "0.46083897", "0.46062636", "0.4601907", "0.46006408", "0.4590799", "0.45838618", "0.45813426", "0.45771897", "0.4576301", "0.45756057", "0.45747203", "0.45737034", "0.4566728", "0.45638907", "0.4562491", "0.4554513", "0.4553198", "0.4549705", "0.454962", "0.45490193", "0.45483246", "0.45478967", "0.45431218", "0.45381337", "0.45368764", "0.45368698" ]
0.7316713
0
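For context, the documents of this record and the next one (AddPseudoCode, IndentBuffer), together with the DeIndentBuffer and RefBuffer snippets that appear among the negatives further below, describe a small buffer-of-buffers helper. A minimal runnable sketch, assuming a hypothetical class name and __init__ layout that are not part of the dataset:

class PseudoCodeBuffers:
    def __init__(self):
        self.buffergrade = 0    # index of the currently active buffer (assumed)
        self.buffers = {0: []}  # grade -> list of pseudocode operations (assumed)

    def AddPseudoCode(self, pcode):
        # Append an operation to the buffer at the current grade.
        self.buffers[self.buffergrade].append(pcode)

    def IndentBuffer(self):
        # Move one grade deeper and start a fresh, empty buffer there.
        self.buffergrade += 1
        self.buffers[self.buffergrade] = []

    def DeIndentBuffer(self):
        # Drop back one grade and return the buffer that was just closed.
        if self.buffergrade == 0:
            raise Exception("You can't deindent more.")
        self.buffergrade -= 1
        tmp = self.buffers[self.buffergrade + 1]
        del self.buffers[self.buffergrade + 1]
        return tmp

    def RefBuffer(self):
        # The buffer currently being filled.
        return self.buffers[self.buffergrade]

# Usage (hypothetical): indent for a nested block, then close it.
buf = PseudoCodeBuffers()
buf.AddPseudoCode("LOAD x")
buf.IndentBuffer()
buf.AddPseudoCode("ADD 1")
inner = buf.DeIndentBuffer()  # inner == ["ADD 1"]; active buffer is back to ["LOAD x"]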
Increment the BufferGrade and initialize a new empty buffer.
def IndentBuffer(self):
    self.buffergrade += 1
    self.buffers[self.buffergrade] = []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fillBuffer():\n buff[bufferCounter].next = dataIn", "def __init__(self, capacity):\n self.experiences = RingBuf(capacity)", "def init_buffer(self):\n \n self.shape.buf = [pi3d.Buffer(self.shape, self.verts, self.texcoords, self.inds, self.norms)]\n self.shape.set_draw_details(self.shader, [self.spritesheet.img])", "def _initialize_buffers(self) -> None:", "def re_init_buffer(self):\n #~ print(self.verts)\n #~ print(self.texcoords)\n #~ print(self.inds)\n self.shape.buf[0].re_init(pts=np.array(self.verts, 'f'),texcoords=np.array(self.texcoords, 'f'))", "def _refresh_buffers(self) -> None:", "def fill_buffer(self):\n num_of_smp = 0\n while num_of_smp < self.buf_size:\n c, t = self.inlet.pull_chunk(timeout=0.0)\n new_c = []\n new_t = []\n while c:\n new_c += c\n new_t += t\n c, t = self.inlet.pull_chunk(timeout=0.0)\n\n # add samples to buffer\n if any(new_c):\n # add samples\n num_of_smp += len(new_c)\n data_v = [item for sublist in new_c for item in sublist]\n self.gbuffer = np.roll(self.gbuffer, -len(data_v))\n self.gbuffer[-len(data_v):] = data_v\n # add timestamps\n if new_t:\n self.gtimes = np.roll(self.gtimes, -len(new_t))\n self.gtimes[-len(new_t):] = new_t", "def initBuffer(self, env):\n cnt = 0\n while len(self.memory) < self.memory.capacity:\n cnt += 1\n print(\"\\rWarmup Buffer [{:d}]\".format(cnt), end=\"\")\n s = env.reset()\n actionIdx, actionIdxTuple = self.select_action(s, explore=True)\n s_, r, done, info = env.step(actionIdxTuple)\n self.store_transition(s, actionIdx, r, s_, info)\n print(\"\\n => Warmup Buffer Ends\")", "def reset(self):\n self._buffer.fill(0)", "def reset(self):\r\n self.buffer = np.zeros(self.nBins)\r\n self.counter = 0", "def __init__(self, buffer_size):\n self.num_experiences = 0\n self.buffer = deque(maxlen=buffer_size)", "def __init__(self, capacity, alpha, beta_i, beta_f, beta_anneal,\n weight_offset):\n self.weight_offset = weight_offset\n self.alpha = alpha\n\n assert beta_i < beta_f, \"Beta update assumes beta_i < beta_f\"\n self.beta = beta_i\n self.beta_f = beta_f\n self.beta_update = (beta_f - beta_i) / beta_anneal\n\n self.experiences = WeightedRingBuf(capacity)\n # ids of experiences that haven't been used for training yet.\n self.unplayed_experiences = deque(maxlen=capacity)", "def updateGACount(self):\n self.ga_count += 1", "def _fill_buffer(self, in_data, *args, **kwargs):\n self._buff.put(in_data)\n return None, pyaudio.paContinue", "def _fill_buffer(self, in_data, *args, **kwargs):\r\n self._buff.put(in_data)\r\n return None, pyaudio.paContinue", "def bufferCnt():\n if(reset == 1):\n bufferCounter.next = 0\n else:\n if(decimationRatio > 0):\n if(bufferCounter == (decimationRatio-1)):\n bufferCounter.next = 0\n else:\n bufferCounter.next = bufferCounter + 1", "def create_buffers(self):", "def next_buffer(self):\n selected_window = self.selected_window()\n selected_window.set_buffer(self._find_next_buffer(selected_window.buffer()))", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def _fill_buffer(buff, in_data, frame_count, time_info, status_flags):\n buff.put(in_data)\n return None, pyaudio.paContinue", "def __init__(self, buffer_size, random_seed=None):\n self.buffer_size = buffer_size\n self.count = 0\n self.oldPos = 0\n self.currPos = 0\n self.full = False\n self.buffer = []\n self.featCount = 3\n random.seed(random_seed)\n self.useSubBuffer = False", "def grow(self):\n self.capacity = self.capacity * 2\n self.rehash()", "def add(self, 
grad_shard_id):\n self.timeline.start(\"add\")\n self.timeline.start(\"get_buffers\")\n oid = ray.pyarrow.plasma.ObjectID(grad_shard_id)\n grads = ray.worker.global_worker.plasma_client.get(oid)\n self.timeline.end(\"get_buffers\")\n self.accumulated += grads\n self.acc_counter += 1\n self.timeline.end(\"add\")", "def _init_buffers(self, v, n, _):\n super()._init_buffers(v, n, _)\n\n self.vbos.append(gl.glGenBuffers(1))\n\n # init VBO 2 - dynamic color data\n gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.vbos[3])\n loc = self.get_attribute_location(\"carried\")\n gl.glEnableVertexAttribArray(loc)\n gl.glVertexAttribPointer(loc, 1, gl.GL_FLOAT, gl.GL_FALSE, 0, ctypes.c_void_p(0))\n gl.glVertexAttribDivisor(loc, 1)\n gl.glBufferData(gl.GL_ARRAY_BUFFER, 0, np.array([], dtype=np.float32), gl.GL_DYNAMIC_DRAW)", "def reset_output_buffer(self):\n self._main_buffer = BufferUtils.create_buffer()\n self._secondary_buffer = BufferUtils.create_buffer()", "def _fill_buffer(self, in_data, frame_count, time_info, status_flags):\n self._buff.put(in_data)\n return None, pyaudio.paContinue", "def update(self):\n self.clear()\n self.score += 1\n self.write(f\"Score : {self.score}\",\n align=\"center\", font=(\"Arial Black\", 20))", "def DeIndentBuffer(self):\n if self.buffergrade == 0:\n raise Exception(\"You can't deindent more.\")\n self.buffergrade -= 1\n tmp = self.buffers[self.buffergrade + 1]\n del self.buffers[self.buffergrade + 1]\n return tmp", "def _create_ring_buffer(self, bufferSize, blocks, key, numa_node):\n # always clear buffer first. Allow fail here\n yield command_watcher(\"dada_db -d -k {key}\".format(key=key), allow_fail=True)\n\n cmd = \"numactl --cpubind={numa_node} --membind={numa_node} dada_db -k {key} -n {blocks} -b {bufferSize} -p -l\".format(key=key, blocks=blocks, bufferSize=bufferSize, numa_node=numa_node)\n log.debug(\"Running command: {0}\".format(cmd))\n yield command_watcher(cmd)\n\n M = DbMonitor(key, self._buffer_status_handle)\n M.start()\n self._dada_buffers.append({'key': key, 'monitor': M})", "def inc_gains_of_free_cells(self):\r\n for cell in self.cells:\r\n if not cell.locked:\r\n cell.gain += 1\r\n cell.yank()", "def append_buffer(self, buffer):\n\n first_data_idx = self.data[0][-1] + 1 if self.__len__() > 0 else 0\n\n d0 = [first_data_idx + i for i, _ in enumerate(buffer.memory)] # indexes\n d1 = [b[0] for b in buffer.memory] # actions\n d2 = [b[1][0] for b in buffer.memory] # speeds\n d3 = [b[1][3] for b in buffer.memory] # images\n d4 = [b[3] or b[4] for b in buffer.memory] # eoes\n d5 = [b[2] for b in buffer.memory] # rewards\n d6 = [b[5] for b in buffer.memory] # infos\n d7 = [b[1][1] for b in buffer.memory] # gears\n d8 = [b[1][2] for b in buffer.memory] # rpms\n d9 = [b[3] for b in buffer.memory] # terminated\n d10 = [b[4] for b in buffer.memory] # truncated\n\n if self.__len__() > 0:\n self.data[0] += d0\n self.data[1] += d1\n self.data[2] += d2\n self.data[3] += d3\n self.data[4] += d4\n self.data[5] += d5\n self.data[6] += d6\n self.data[7] += d7\n self.data[8] += d8\n self.data[9] += d9\n self.data[10] += d10\n else:\n self.data.append(d0)\n self.data.append(d1)\n self.data.append(d2)\n self.data.append(d3)\n self.data.append(d4)\n self.data.append(d5)\n self.data.append(d6)\n self.data.append(d7)\n self.data.append(d8)\n self.data.append(d9)\n self.data.append(d10)\n\n to_trim = self.__len__() - self.memory_size\n if to_trim > 0:\n self.data[0] = self.data[0][to_trim:]\n self.data[1] = self.data[1][to_trim:]\n self.data[2] = self.data[2][to_trim:]\n 
self.data[3] = self.data[3][to_trim:]\n self.data[4] = self.data[4][to_trim:]\n self.data[5] = self.data[5][to_trim:]\n self.data[6] = self.data[6][to_trim:]\n self.data[7] = self.data[7][to_trim:]\n self.data[8] = self.data[8][to_trim:]\n self.data[9] = self.data[9][to_trim:]\n self.data[10] = self.data[10][to_trim:]\n\n return self", "def __init__(self):\n self.block = 1000\n self._map = [] \n self.len = 0 \n self.incr()", "def use_buffer(self, buffer_size):\n self.__buffer_size = buffer_size\n if self.__buffer is None:\n self.__buffer = []", "def append_buffer(self, buffer):\n\n first_data_idx = self.data[0][-1] + 1 if self.__len__() > 0 else 0\n\n d0 = [first_data_idx + i for i, _ in enumerate(buffer.memory)] # indexes\n d1 = [b[0] for b in buffer.memory] # actions\n d2 = [b[1][0] for b in buffer.memory] # speeds\n d3 = [b[1][2] for b in buffer.memory] # lidar\n d4 = [b[3] or b[4] for b in buffer.memory] # eoes\n d5 = [b[2] for b in buffer.memory] # rewards\n d6 = [b[5] for b in buffer.memory] # infos\n d7 = [b[1][1] for b in buffer.memory] # progress\n d8 = [b[3] for b in buffer.memory] # terminated\n d9 = [b[4] for b in buffer.memory] # truncated\n\n if self.__len__() > 0:\n self.data[0] += d0\n self.data[1] += d1\n self.data[2] += d2\n self.data[3] += d3\n self.data[4] += d4\n self.data[5] += d5\n self.data[6] += d6\n self.data[7] += d7\n self.data[8] += d8\n self.data[9] += d9\n else:\n self.data.append(d0)\n self.data.append(d1)\n self.data.append(d2)\n self.data.append(d3)\n self.data.append(d4)\n self.data.append(d5)\n self.data.append(d6)\n self.data.append(d7)\n self.data.append(d8)\n self.data.append(d9)\n\n to_trim = self.__len__() - self.memory_size\n if to_trim > 0:\n self.data[0] = self.data[0][to_trim:]\n self.data[1] = self.data[1][to_trim:]\n self.data[2] = self.data[2][to_trim:]\n self.data[3] = self.data[3][to_trim:]\n self.data[4] = self.data[4][to_trim:]\n self.data[5] = self.data[5][to_trim:]\n self.data[6] = self.data[6][to_trim:]\n self.data[7] = self.data[7][to_trim:]\n self.data[8] = self.data[8][to_trim:]\n self.data[9] = self.data[9][to_trim:]\n\n return self", "def _set_and_fill_buffer(self):\n # Set the buffer size for the nodal solution steps data. Existing nodal\n # solution step data may be lost.\n buffer_size = self.GetMinimumBufferSize()\n self.mesh_model_part.SetBufferSize(buffer_size)\n # Cycle the buffer. This sets all historical nodal solution step data to\n # the current value and initializes the time stepping in the process info.\n delta_time = self.mesh_model_part.ProcessInfo[KratosMultiphysics.DELTA_TIME]\n time = self.mesh_model_part.ProcessInfo[KratosMultiphysics.TIME]\n step =-buffer_size\n time = time - delta_time * buffer_size\n self.mesh_model_part.ProcessInfo.SetValue(KratosMultiphysics.TIME, time)\n for i in range(0, buffer_size):\n step = step + 1\n time = time + delta_time\n self.mesh_model_part.ProcessInfo.SetValue(KratosMultiphysics.STEP, step)\n self.mesh_model_part.CloneTimeStep(time)\n self.mesh_model_part.ProcessInfo[KratosMultiphysics.IS_RESTARTED] = False", "def buf_init(self):\n self.buffer = []\n for _ in range(1000):\n hash_str = '{}{}'.format(self.salt, self.forward_idx).encode()\n self.buffer.append(md5(hash_str).hexdigest())\n self.forward_idx += 1", "def _set_and_fill_buffer(self):\n # Set the buffer size for the nodal solution steps data. 
Existing nodal\n # solution step data may be lost.\n required_buffer_size = self.settings[\"buffer_size\"].GetInt()\n if required_buffer_size < self.GetMinimumBufferSize():\n required_buffer_size = self.GetMinimumBufferSize()\n current_buffer_size = self.main_model_part.GetBufferSize()\n buffer_size = max(current_buffer_size, required_buffer_size)\n self.main_model_part.SetBufferSize(buffer_size)\n # Cycle the buffer. This sets all historical nodal solution step data to\n # the current value and initializes the time stepping in the process info.\n delta_time = self.main_model_part.ProcessInfo[KratosMultiphysics.DELTA_TIME]\n time = self.main_model_part.ProcessInfo[KratosMultiphysics.TIME]\n step =-buffer_size\n time = time - delta_time * buffer_size\n self.main_model_part.ProcessInfo.SetValue(KratosMultiphysics.TIME, time)\n for i in range(0, buffer_size):\n step = step + 1\n time = time + delta_time\n self.main_model_part.ProcessInfo.SetValue(KratosMultiphysics.STEP, step)\n self.main_model_part.CloneTimeStep(time)", "def __init__(self, buffer_size, seed=5,\n compute_weights=True, alpha=0.5, beta=0.5):\n self.buffer_size = buffer_size\n self.alpha = alpha\n self.alpha_decay_rate = 0.99\n self.epsilon = 1e-11\n self.beta = beta\n self.beta_growth_rate = 1.001\n random.seed(seed)\n self.compute_weights = compute_weights\n self.experience_count = 0\n self.current_size = 0\n self.experience = [[0, 0, 0, 0, 0] for i in range(self.buffer_size)]\n self.data = [[0, 0, 0, 0] for i in range(self.buffer_size)]\n self.priorities_sum_alpha = 0\n self.priorities_max = 1 + self.epsilon\n self.weights_max = 1", "def new_sample(self):\n\n self.u_seq.append([])\n self.r_exp.append(0)", "def _add_grade_to_row(self, component, score):\r\n component_index = self.components.setdefault(component, len(self.components))\r\n self._current_row[component_index] = score", "def _add_buffer(self, p_buffer_element:PyTorchIOElement):\r\n\r\n self._buffer.add_element(p_buffer_element)", "def _initialize_fill_alpha_if_not_initialized(self) -> None:\r\n if hasattr(self, '_fill_alpha'):\r\n return\r\n self._fill_alpha = Number(1.0)", "def _fill_buffer(self, in_data, frame_count, time_info, status_flags):\n frames = in_data\n self._data_frame.append(frames)\n self._buff.put(in_data)\n return None, pyaudio.paContinue", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)\n self.last_recent_batch = 0", "def RefBuffer(self):\n return self.buffers[self.buffergrade]", "def refresh(self) -> None:\n if self._is_buffer_full():\n self.stream.close()\n self._open_stream() # re-initial self.stream\n self._buffer = bytearray()\n self._buffer_pointer = -1", "def assign_grade(github, title, grade):\n QUERY = \"\"\"\n INSERT INTO Grades VALUES (?, ?, ?)\n \"\"\"\n\n db_cursor.execute(QUERY, (github, title, grade))\n db_connection.commit()\n\n print \"Successfully graded %s with a %s on %s\" % (github, grade, title)", "def _buffer_all(self):\n self._buffer()", "def set(self, i, buf):\n self.buf = buf\n self.buf_i = i\n self.avg = None if len(buf) == 0 else (sum(buf) / len(buf))", "def flush(self) -> None:\n super().put(self.buffer)\n self.buffer = np.ndarray((0, 1), dtype=np.int16)", "def append(self, experience):\n self.buffer.append(experience)\n self.number += 1", "def _create_buffer(self, name, uri, byte_length):\n new_buffer = self._build_buffer(uri=uri, byte_length=byte_length)\n\n self.buffers.append(new_buffer)\n\n if name:\n 
self.buffers_map[name] = self._last_index(self.buffers)\n\n return self._last_index(self.buffers)", "def assign_grade(github, title, grade):\n QUERY = \"\"\"INSERT INTO Grades VALUES(?,?,?)\"\"\"\n db_cursor.execute(QUERY, (github, title, grade))\n db_connection.commit()\n print \"Success! %s received a grade of %s on the %s project!\" % (github, grade, title)", "def collectGem(self):\n self.gems += 1", "def __init__(self, verts=None, frags=None, geoms=None, count=0):\n\n GLObject.__init__(self)\n self._count = count\n self._buffer = None\n\n # Make sure shaders are shaders\n self._verts = self._get_shaders(verts, VertexShader)\n self._frags = self._get_shaders(frags, FragmentShader)\n self._geoms = self._get_shaders(geoms, GeometryShader)\n\n self._uniforms = {}\n self._attributes = {}\n\n # Build hooks, uniforms and attributes\n self._build_hooks()\n self._build_uniforms()\n self._build_attributes()\n\n # Build associated structured vertex buffer if count is given\n if self._count > 0:\n dtype = []\n for attribute in self._attributes.values():\n dtype.append(attribute.dtype)\n self._buffer = np.zeros(self._count, dtype=dtype).view(VertexBuffer)\n self.bind(self._buffer)", "def assignment(kind, grade, weight=1):\n global running_total\n global total_weight\n global total_grade\n\n if kind not in running_total:\n running_total[kind] = grade\n total_grade[kind] = 0\n total_weight[kind] = 0\n if weight > 1:\n grade *= weight\n total_grade[kind] += grade\n total_weight[kind] += weight\n running_total[kind] = total_grade[kind] / total_weight[kind]", "def new(self):\n self._digits.clear()\n self._appended = False\n self._digits.append(self.Symbols.ZERO.value)", "def fill_tank(self):\r\n self.fuel_level = self.fuel_capacity", "def refresh(self):\n self.reward = 0\n self.score = 0", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def __init__(self, buffer_size, random_seed=123):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def _select(self):\r\n opengles.glBindBuffer(GL_ARRAY_BUFFER, self.vbuf)\r\n opengles.glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.ebuf)", "def initialize_supply(self):\n unit_count = 0\n for i in range(self.start_allocation[0 ] -1, self.start_allocation[1]):\n for j in range(len(self.capacity_list[i][1])):\n self.capacity_list[i][1][j] = 1\n unit_count += 1\n self.total_supply -= unit_count", "def set_buffer(self, i, data, append = False):\n if i not in range(1, self.NUM_BUFFERS + 1):\n raise IndexError(\"Error: Could not set buffer %d. 
Must be \"\n \"between 1 and 9\" % i)\n if type(data) == types.UnicodeType:\n data = data.encode('ascii', 'xmlcharrefreplace')\n if append:\n self.logger.debug(\"set_buffer(%d), appending: %s\" % (i,data))\n self.m_param[i] += data\n else:\n self.logger.debug(\"set_buffer(%d): %s\" % (i,data))\n self.m_param[i] = data\n return", "def __init__(self):\n super(RobinBoundary, self).__init__()\n self.value = RobinBoundary.value\n RobinBoundary.value += 1\n self.update(param=\"1\")", "def reset(self):\n self.tot = 0\n self.cnt = [0.0 for _ in range( self.alpha.getLen() )]", "def prepareUniformBuffers(self):\n # Vertex shader uniform buffer block\n uboVSSize = sum([glm.sizeof(ubo) for ubo in self.uboVS.values()])\n bufferInfo = vk.VkBufferCreateInfo(\n sType = vk.VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,\n size = uboVSSize,\n # This buffer will be used as a uniform buffer\n usage = vk.VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT\n )\n # Create a new buffer\n self.uniformBufferVS['buffer'] = vk.vkCreateBuffer(self.device, bufferInfo, None)\n # Get memory requirements including size, alignment and memory type\n memReqs = vk.vkGetBufferMemoryRequirements(self.device, self.uniformBufferVS['buffer'])\n # Get the memory type index that supports host visibile memory access\n # Most implementations offer multiple memory types and selecting the correct one to allocate memory from is crucial\n # We also want the buffer to be host coherent so we don't have to flush (or sync after every update.\n #Note: This may affect performance so you might not want to do this in a real world application that updates buffers on a regular base\n allocInfo = vk.VkMemoryAllocateInfo(\n sType = vk.VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,\n pNext = None,\n allocationSize = memReqs.size,\n memoryTypeIndex = self.vulkanDevice.getMemoryType(memReqs.memoryTypeBits, vk.VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | vk.VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)\n )\n # Allocate memory for the uniform buffer\n self.uniformBufferVS['memory'] = vk.vkAllocateMemory(self.device, allocInfo, None)\n # Bind memory to buffer\n vk.vkBindBufferMemory(self.device, self.uniformBufferVS['buffer'], self.uniformBufferVS['memory'], 0)\n # Store information in the uniform's descriptor that is used by the descriptor set\n self.uniformBufferVS['descriptor'] = vk.VkDescriptorBufferInfo(\n buffer = self.uniformBufferVS['buffer'],\n offset = 0,\n range = uboVSSize\n )\n\n self.updateUniformBuffers()", "def bufferManager(self):\r\n # intialize index to current processor time\r\n index = int(time.time()%self.nBins) \r\n # open arduino connection\r\n while(True):\r\n time.sleep(0.1)\r\n curr_time = int(time.time()%self.nBins)\r\n # if new bin is entered\r\n if (not(index == curr_time)): \r\n # fill count of last second in buffer\r\n self.wpm += self.counter - self.buffer[index]\r\n self.buffer[index] = self.counter\r\n # increment index to next bin\r\n index = curr_time \r\n # reset counter\r\n self.counter = 0\r\n # push to arduino\r\n start_new_thread(self.arduPusherWPM,())", "def __init__(self, buffer_size, random_seed=0):\n self.buffer_size = buffer_size\n self.count = 0\n self.buffer = deque()\n random.seed(random_seed)", "def initialize(self):\r\n for cell in self.free_cell_list:\r\n cell.unlock()\r\n self.add_cell(cell)\r\n self.free_cell_list.clear()", "def allocate(self, val):\n self.at_options.allocate = 1 if val else 0", "def fill(self, r, g, b, start=0, end=0):\n\t\tif start < 0: raise NameError(\"Start invalid:\" + str(start))\n\t\tif end == 0: end = self.leds\n\t\tif end > 
self.leds: raise NameError(\"End invalid: \" + str(end))\n\n\t\tfor led in range(start, end):\n\t\t\tself.buffer[led][0] = self.gamma[g]\n\t\t\tself.buffer[led][1] = self.gamma[r]\n\t\t\tself.buffer[led][2] = self.gamma[b]\n\n\t\tself.update()", "def allocate_room_space(self):\n if self._is_full == True:\n return -1\n else:\n self.allocated_spaces += 1\n\n self.unallocated_spaces = self.capacity - self.allocated_spaces\n return \"Room Allocated\"", "def populate_texture(self, texture):\n texture.blit_buffer(self._cbuffer, colorfmt='bgr', bufferfmt='ubyte')", "def __init__(self, buffer_size, random_seed=123):\n\t\tself.buffer_size = buffer_size\n\t\tself.count = 0\n\t\tself.buffer = deque()\n\t\trandom.seed(random_seed)", "def inc_ring_setting(self):\n self._rng_offset = self._change_offset(self._rng_offset, 1)", "def clearBuffer(self):\r\n self.__buffer =[[Pixel() for i in range(self.__width)] for i in range(self.__height)]", "def add_student(self, name: str, grade: int) -> None:\n school_grade = self.students.setdefault(grade, [])\n school_grade.append(name)\n school_grade.sort()", "def add_grade(self, student, grade):\n try:\n self.grades[student.id].append(grade)\n except KeyError:\n raise ValueError('Student not in Grade Book.')", "def addGrade(self, student, grade):\n try:\n self.grades[student.getIDNumber()].append(grade)\n except KeyError:\n raise ValueError(\"Student not in Gradebook\")", "def __init__(self, *args, **kwargs):\n super(HSG4SL1024AFR98, self).__init__(guard_count=4, *args, **kwargs)\n\n self._guard_size = self._second_named_alloc // 2", "def increment(self):\n self.data[self.pointer] += 1\n self.data[self.pointer] %= 256", "def fill_buffer_mono(self, block):\n\n if block.size < self.block_size:\n # print('Fill up last block')\n block = np.concatenate((block, np.zeros((1, (self.block_size - block.size)))), 1)\n\n if self.processCounter == 0:\n # insert first block to buffer\n self.buffer[self.block_size:self.block_size * 2] = block\n\n else:\n # shift buffer\n self.buffer = np.roll(self.buffer, -self.block_size)\n # insert new block to buffer\n self.buffer[self.block_size:self.block_size * 2] = block\n # shift FDLs\n self.FDL_left = np.roll(self.FDL_left, self.block_size + 1)\n self.FDL_right = np.roll(self.FDL_right, self.block_size + 1)\n\n # transform buffer into freq domain and copy to FDLs\n self.FDL_left[0:self.block_size + 1] = self.FDL_right[0:self.block_size + 1] = self.bufferFftPlan(\n self.buffer)", "def startGrading(event):\n window.unbind(\"<s>\", bind_id)\n cur.execute(\n \"SELECT comment1, comment2, comment3, comment4, comment5 FROM cannedComments WHERE user_id =%s and moduleCode = %s and assignmentNo = %s\",\n (userID, assignmentModuleCode, assignmentNo))\n fetchedComments = cur.fetchone()\n\n cur.execute(\n \"SELECT valueKeyA, commentA, valueKeyB, commentB, valueKeyC, commentC, valueKeyD, commentD, total FROM keysComments WHERE user_id =%s and moduleCode = %s and assignmentNo = %s\",\n (userID, assignmentModuleCode, assignmentNo))\n fetchedKeyValues = cur.fetchone()\n\n cur.execute(\n \"SELECT categoryA, categoryB, categoryC, categoryD, categoryE FROM gradingCategories WHERE user_id =%s and moduleCode = %s and assignmentNo = %s\",\n (userID, assignmentModuleCode, assignmentNo))\n fetchedCategories = cur.fetchone()\n conn.commit()\n\n global total1\n total1 = fetchedKeyValues[8]\n valueKeyA = fetchedKeyValues[0]\n commentA = fetchedKeyValues[1]\n valueKeyB = fetchedKeyValues[2]\n commentB = fetchedKeyValues[3]\n valueKeyC = fetchedKeyValues[4]\n 
commentC = fetchedKeyValues[5]\n valueKeyD = fetchedKeyValues[6]\n commentD = fetchedKeyValues[7]\n a = total1\n\n the_queue.put(\"Grading has started - Total marks: \" + str(total1))\n\n def keyA(event):\n global total1\n total1 += valueKeyA\n the_queue.put(\"Awarded \" + str(valueKeyA) + \" marks\\n\" + str(total1) + \" marks - \" + commentA)\n keystrokeGrading.delete('1.0', tk.END)\n\n def keyB(event):\n global total1\n total1 += valueKeyB\n the_queue.put(\"Awarded \" + str(valueKeyB) + \" marks\\n\" + str(total1) + \" marks - \" + commentB)\n keystrokeGrading.delete('1.0', tk.END)\n\n def keyC(event):\n global total1\n total1 += valueKeyC\n the_queue.put(\"Awarded \" + str(valueKeyC) + \" marks\\n\" + str(total1) + \" marks - \" + commentC)\n keystrokeGrading.delete('1.0', tk.END)\n\n def keyD(event):\n global total1\n total1 += valueKeyD\n the_queue.put(\"Awarded \" + str(valueKeyD) + \" marks\\n\" + str(total1) + \" marks - \" + commentD)\n keystrokeGrading.delete('1.0', tk.END)\n\n def keyE(event):\n global total1\n global final\n the_queue.put(\"Final Grade: \" + str(total1) + \" marks\")\n keystrokeGrading.delete('1.0', tk.END)\n # sets the total to the initial value again\n final = total1\n total1 = a\n the_queue.empty()\n\n def cannedComment1(event):\n try:\n comment1 = fetchedComments[0]\n the_queue.put(\"Comment 1: \" + str(comment1))\n except TypeError:\n the_queue.put(\"You have not added a comment for Key 1\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def cannedComment2(event):\n try:\n comment2 = fetchedComments[1]\n the_queue.put(\"Comment 2: \" + str(comment2))\n except TypeError:\n the_queue.put(\"You have not added a comment for Key 2\")\n\n keystrokeGrading.delete('1.0', tk.END)\n\n def cannedComment3(event):\n try:\n comment3 = fetchedComments[2]\n\n the_queue.put(\"Comment 3: \" + str(comment3))\n except TypeError:\n the_queue.put(\"You have not added a comment for Key 3\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def cannedComment4(event):\n try:\n comment4 = fetchedComments[3]\n the_queue.put(\"Comment 4: \" + str(comment4))\n except TypeError:\n the_queue.put(\"You have not added a comment for Key 4\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def cannedComment5(event):\n try:\n comment5 = fetchedComments[4]\n the_queue.put(\"Comment 5: \" + str(comment5))\n except TypeError:\n the_queue.put(\"You have not added a comment for Key 5\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def gradingCategoryA(event):\n try:\n categoryA = fetchedCategories[0]\n the_queue.put(\"Category: \" + str(categoryA))\n except TypeError:\n the_queue.put(\"You have not added category A\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def gradingCategoryB(event):\n try:\n categoryB = fetchedCategories[1]\n the_queue.put(\"Category: \" + str(categoryB))\n except TypeError:\n the_queue.put(\"You have not added category B\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def gradingCategoryC(event):\n try:\n categoryC = fetchedCategories[2]\n the_queue.put(\"Category: \" + str(categoryC))\n except TypeError:\n the_queue.put(\"You have not added category C\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def gradingCategoryD(event):\n try:\n categoryD = fetchedCategories[3]\n the_queue.put(\"Category: \" + str(categoryD))\n except TypeError:\n the_queue.put(\"You have not added category D\")\n keystrokeGrading.delete('1.0', tk.END)\n\n def gradingCategoryE(event):\n try:\n categoryE = fetchedCategories[4]\n the_queue.put(\"Category: \" + str(categoryE))\n except TypeError:\n 
the_queue.put(\"You have not added category E\")\n keystrokeGrading.delete('1.0', tk.END)\n\n # Bind functions to keys\n keystrokeGrading.bind('a', keyA) and keystrokeGrading.bind(\"<A>\", keyA)\n keystrokeGrading.bind('b', keyB) and keystrokeGrading.bind(\"<B>\", keyB)\n keystrokeGrading.bind('c', keyC) and keystrokeGrading.bind(\"<C>\", keyC)\n keystrokeGrading.bind('d', keyD) and keystrokeGrading.bind(\"<D>\", keyD)\n keystrokeGrading.bind('e', keyE) and keystrokeGrading.bind(\"<E>\", keyE)\n keystrokeGrading.bind('1', cannedComment1)\n keystrokeGrading.bind('2', cannedComment2)\n keystrokeGrading.bind('3', cannedComment3)\n keystrokeGrading.bind('4', cannedComment4)\n keystrokeGrading.bind('5', cannedComment5)\n\n # Bind functions to control + key\n keystrokeGrading.bind('<Control-a>', gradingCategoryA) and keystrokeGrading.bind('<Control-A>',\n gradingCategoryA)\n keystrokeGrading.bind('<Control-b>', gradingCategoryB) and keystrokeGrading.bind('<Control-B>',\n gradingCategoryB)\n keystrokeGrading.bind('<Control-c>', gradingCategoryC) and keystrokeGrading.bind('<Control-C>',\n gradingCategoryC)\n keystrokeGrading.bind('<Control-d>', gradingCategoryD) and keystrokeGrading.bind('<Control-D>',\n gradingCategoryD)\n keystrokeGrading.bind('<Control-e>', gradingCategoryE) and keystrokeGrading.bind('<Control-E>',\n gradingCategoryE)", "def test_grade_with_string_min_count(self):\n weighted_grader = graders.grader_from_conf([\n {\n 'type': \"Homework\",\n 'min_count': '12',\n 'drop_count': 2,\n 'short_label': \"HW\",\n 'weight': 0.25,\n },\n {\n 'type': \"Lab\",\n 'min_count': '7',\n 'drop_count': 3,\n 'category': \"Labs\",\n 'weight': 0.25\n },\n {\n 'type': \"Midterm\",\n 'min_count': '0',\n 'drop_count': 0,\n 'name': \"Midterm Exam\",\n 'short_label': \"Midterm\",\n 'weight': 0.5,\n },\n ])\n\n graded = weighted_grader.grade(self.test_gradesheet)\n assert round(graded['percent'] - 0.50812499999999994, 7) >= 0\n assert len(graded['section_breakdown']) == (((12 + 1) + (7 + 1)) + 1)\n assert len(graded['grade_breakdown']) == 3", "def clear_buffers(self):\n self.m_param = [\"\" for x in range(self.NUM_BUFFERS + 1)]\n return", "def append_buffer(self, buffer):\n\n first_data_idx = self.data[0][-1] + 1 if self.__len__() > 0 else 0\n\n d0 = [first_data_idx + i for i, _ in enumerate(buffer.memory)] # indexes\n d1 = [b[0] for b in buffer.memory] # actions\n d2 = [b[1][0] for b in buffer.memory] # speeds\n d3 = [b[1][1] for b in buffer.memory] # lidar\n d4 = [b[3] or b[4] for b in buffer.memory] # eoes (terminated or truncated)\n d5 = [b[2] for b in buffer.memory] # rewards\n d6 = [b[5] for b in buffer.memory] # infos\n d7 = [b[3] for b in buffer.memory] # terminated\n d8 = [b[4] for b in buffer.memory] # truncated\n\n if self.__len__() > 0:\n self.data[0] += d0\n self.data[1] += d1\n self.data[2] += d2\n self.data[3] += d3\n self.data[4] += d4\n self.data[5] += d5\n self.data[6] += d6\n self.data[7] += d7\n self.data[8] += d8\n else:\n self.data.append(d0)\n self.data.append(d1)\n self.data.append(d2)\n self.data.append(d3)\n self.data.append(d4)\n self.data.append(d5)\n self.data.append(d6)\n self.data.append(d7)\n self.data.append(d8)\n\n to_trim = self.__len__() - self.memory_size\n if to_trim > 0:\n self.data[0] = self.data[0][to_trim:]\n self.data[1] = self.data[1][to_trim:]\n self.data[2] = self.data[2][to_trim:]\n self.data[3] = self.data[3][to_trim:]\n self.data[4] = self.data[4][to_trim:]\n self.data[5] = self.data[5][to_trim:]\n self.data[6] = self.data[6][to_trim:]\n self.data[7] = 
self.data[7][to_trim:]\n self.data[8] = self.data[8][to_trim:]\n\n return self", "def update_carried(self, data):\n self.use()\n gpu_data = np.array(data, dtype=np.float32)\n gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.vbos[3])\n gl.glBufferData(gl.GL_ARRAY_BUFFER, gpu_data.nbytes, gpu_data, gl.GL_DYNAMIC_DRAW)", "def OpenBuffer(self, buffer, filetype, progress): # real signature unknown; restored from __doc__\n pass", "def inc_size(self):\r\n self.__length += 1", "def rating(grade_count):\r\n if grade_count == 0 :\r\n grade = 7\r\n else:\r\n grade = 2\r\n \r\n return grade", "def swap_buffers(self):\n raise NotImplementedError()", "def grow(self):\r\n\r\n old = self._data\r\n self._capacity = 2 * self._capacity\r\n self._data = [0] * self._capacity\r\n\r\n for i in range(self._size):\r\n\r\n self._data[i] = old[i]", "def add_course_grade(self, course, grade):\n course_grade_tuple = (course, grade)\n self.courses_grades.append(course_grade_tuple)" ]
[ "0.58674264", "0.5741333", "0.5509345", "0.54187745", "0.537456", "0.53408396", "0.5315984", "0.5291879", "0.52720207", "0.5246771", "0.5140495", "0.51366794", "0.51127", "0.5102545", "0.5096625", "0.5072083", "0.5029141", "0.5018385", "0.4994677", "0.4994677", "0.4994677", "0.49749547", "0.49570173", "0.49432805", "0.49428412", "0.4932598", "0.49317503", "0.49314246", "0.49311882", "0.49127075", "0.4911213", "0.489584", "0.48850343", "0.48678294", "0.4859613", "0.48439172", "0.48436385", "0.48283407", "0.48230717", "0.48206827", "0.4812205", "0.47921604", "0.47900885", "0.477822", "0.47733262", "0.4763202", "0.4762195", "0.4754895", "0.47547626", "0.47528562", "0.47410327", "0.4738358", "0.47319734", "0.47305194", "0.47238734", "0.47189876", "0.47135773", "0.4711962", "0.47088775", "0.4706025", "0.47004226", "0.46919358", "0.46919358", "0.46919358", "0.46919358", "0.46919358", "0.46919358", "0.46890932", "0.46833557", "0.4682207", "0.46807393", "0.46793878", "0.467876", "0.46640307", "0.4662706", "0.46536303", "0.46504", "0.46458146", "0.4641985", "0.4641632", "0.46391416", "0.46389857", "0.46325108", "0.46295977", "0.4626938", "0.462123", "0.46164468", "0.46134746", "0.4611331", "0.46004692", "0.4589637", "0.45873478", "0.4578962", "0.45767444", "0.4576363", "0.4575857", "0.45696864", "0.45659795", "0.45611203", "0.45597523" ]
0.730155
0
Decrement the BufferGrade and pop out the buffer active before.
def DeIndentBuffer(self):
    if self.buffergrade == 0:
        raise Exception("You can't deindent more.")
    self.buffergrade -= 1
    tmp = self.buffers[self.buffergrade + 1]
    del self.buffers[self.buffergrade + 1]
    return tmp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decrease(self):\n self.score -= self.score", "def decrement(self):\n self.data[self.pointer] -= 1\n self.data[self.pointer] %= 256", "def IndentBuffer(self):\n self.buffergrade += 1\n self.buffers[self.buffergrade] = []", "def RemoveGrade(self, grade):\n if not self.__data['g'].HasKey(grade.ID):\n raise NonExistentItemIDError(\"Grade does not exist.\")\n self.__data['g'].RemoveItems([grade.ID])\n self.__undo_list.append(['g'])\n self.__redo_list.clear()", "def back(self):\n self.position -= 1", "def goBackInTime(self):\n if (len(self.history) == 0):\n return\n notBusy, notVisible = self.history.pop()\n for cell in notVisible:\n for item in cell[0] + cell[1]:\n self.canvas.delete(item)\n for x, y in notBusy:\n self.gridBusy[x][y] = 0\n self.onBoard -= 1\n self.refreshScore()", "def popBuffer(self):\n return self.ecg_buffer.get()", "def decrement_frame(self, increment=1, freeze_cursor=False):\n if self.current_frame > 0 or self.selected_index < self.frame_size - increment:\n self.current_frame -= increment\n\n process_result = self.__process_selected_change(True, freeze_cursor)\n if process_result:\n self.current_frame += increment", "def pop(self):\n while self.number > self.maxlength:\n self.buffer.popleft()\n self.number -= 1", "def release(self):\n if self.points > 0 and self.waiting:\n self.points = self.points - 1\n d = self.waiting.pop(0)\n d.callback(self)", "def dec_gains_of_free_cells(self):\r\n for cell in self.cells:\r\n if not cell.locked:\r\n cell.gain -= 1\r\n cell.yank()", "def decrement_max_gain(self):\r\n while self.max_gain > -self.pmax:\r\n self.max_gain -= 1\r\n if len(self[self.max_gain]) != 0:\r\n break", "def dec_greediness(self):\n self._greediness -= 1", "def decrement(self, x, y):\n self.field.add(x, y, -1)\n self.depth += 1", "def dec(self, by=1):\n assert by > 0\n self.counter -= by\n if self.counter <= 0:\n # Don't leave self.counter < 0, that will screw things up in\n # future calls.\n self.counter = 0\n # Transitioning from nonzero to 0 means wait() need no longer wait.\n self.event.send()", "def bass_decrease():\n request_command(tv_command=TVCommand.bassDecrease)", "def cb_minus(event):\n delta_alpha = pm_rate\n # Decrease Alpha \n sAlpha0.set_val( np.clip(sAlpha0.val - delta_alpha, alpha_min[0], alpha_max[0]) )\n sAlpha1.set_val( np.clip(sAlpha1.val - delta_alpha, alpha_min[1], alpha_max[1]) )\n sAlpha2.set_val( np.clip(sAlpha2.val - delta_alpha, alpha_min[2], alpha_max[2]) )\n print(\"---\")", "def dec( self ):\n if self.count > 0: self.count -= 1", "def backspace(self) -> None:\n if self.index:\n self.buffer.delete(self.index - 1)\n self.index -= 1", "def pop_focus(self):\n self._focus.pop()", "def decrement(self, stats, sample_rate=1):\n self.update_stats(stats, -1, sample_rate=sample_rate)", "def decrement_misses_remaining(self):\n self.misses_remaining -=1", "def dec(self):\n self._numBooksOut -= 1", "def cancel(self):\n self.blackened = self.blackened_history[-1]\n self.blackened_history.pop()\n if self.victory:\n self.victory = False\n self.blackened_history_size -= 1", "def remove_cell(self, cell: Cell):\r\n assert isinstance(cell, Cell)\r\n cell.bucket().remove(cell)\r\n if self[self.max_gain] == cell.bucket() and len(cell.bucket()) == 0:\r\n self.decrement_max_gain()\r\n cell.bucket_num = None", "def pop(self):\r\n return self.buff.pop(-1)", "def pop_current_line(self):\n self.current_line.pop()", "def pop_from_deque(self):", "def bkg_subtract(self, analyte, bkg, ind=None):\n\n if 'bkgsub' not in self.data.keys():\n self.data['bkgsub'] = 
{}\n\n self.data['bkgsub'][analyte] = self.focus[analyte] - bkg\n\n if ind is not None:\n self.data['bkgsub'][analyte][ind] = np.nan\n\n return", "def rePop(self):\n nScan = 0\n pScan = self.num-1\n while not self.isFull():\n while self.genepool[0][pScan] == None:\n pScan = (pScan-1)%self.num\n while self.genepool[0][nScan] != None:\n nScan = (nScan+1)%self.num\n self.genepool[0][nScan] = self.genepool[0][pScan].mutate()\n self.genepool[1][nScan] = self.genepool[1][pScan].mutate()\n nScan = (nScan+1)%self.num\n pScan = (pScan-1)%self.num", "def rePop(self):\n nScan = 0\n pScan = self.num-1\n while not self.isFull():\n while self.genepool[0][pScan] == None:\n pScan = (pScan-1)%self.num\n while self.genepool[0][nScan] != None:\n nScan = (nScan+1)%self.num\n self.genepool[0][nScan] = self.genepool[0][pScan].mutate()\n self.genepool[1][nScan] = self.genepool[1][pScan].mutate()\n nScan = (nScan+1)%self.num\n pScan = (pScan-1)%self.num", "def _remove_buffer(self):\n if self._buffer is not None:\n self._engine.remove_window(self._buffer)\n self._buffer = None\n self._region = None", "def ramp_down(self):\n value = self.current_event[\"ramp_down\"][\"value\"]\n self.current_value.append(self.current_value[-1] - value)", "def decrement(self):\r\n return self.add(-1)", "def decrement_depth(self):\r\n self.depth = self.depth - 1", "def ace_degrade(player_cards, player_score):\n if '11' in player_cards and player_score > 21:\n for card in player_cards:\n position_11 = player_cards.index('11')\n player_cards[position_11] = 1", "def decrement_notice(self):\n\t\tassert not any(self.feat_time_left==0)\n\t\tself.feat_time_left[self.feat_time_left>0]-=1\n\t\tbool_feats_to_remove=(self.feat_time_left==0)\n\t\tself.remove_feats(bool_feats_to_remove)", "def pop_item(self, index):\n ix, obj = self.items\n if index < len(ix):\n self.d_buffer.pop(ix[index])\n else:\n raise IndexError('Buffer does not have {0} elements'.format(index))", "def pop(self, char):\n assert self.chars[char] > 0\n self.chars[char] -= 1", "def decrease_priority(self):\n self._priority += 1", "def pop(self):\n pass", "def pop(self):\n pass", "def dec(self, key):\n if key not in self.keyCountMap:\n return\n self._updateCount(key, -1)", "def discard(self):\r\n self.pushes.pop()", "def undo(self):\n if self.history:\n xy0, xy1, data_size = self.history.pop()\n x0, y0 = xy0\n x1, y1 = xy1\n self._used[y1][x1] -= data_size\n self._used[y0][x0] = data_size\n if self.goal == xy1:\n self.goal = xy0", "def __remove_brick(self, g_object):\n if type(g_object) == GRect:\n self.__window.remove(g_object)\n self.__bricks_total -= 1\n self.__score += 1\n self.__set_record_board()", "def free(self, gpu=None):\n if self.gpus:\n self.gpu_running_procs[gpu] -= 1\n self.running_procs -= 1", "def pop(self):\n value = self.buffer[self.end - 1]\n self.buffer[self.end - 1] = None\n self.end = (self.end - 1) % len(self.buffer)\n return value", "def atender(self):\n\n if self.enfila>0:\n \n self.enfila-=1\n self.fila.pop(0)", "def back( self ):\n self._has_change = True\n print( \"Back\" )", "def backward(self, g, lx):\n if isinstance(g, sequence._Seq):\n self._Matr__c_elem().backward(g._Seq__c_elem(),lx._Lexique__c_elem())\n elif isinstance(g, _Matr):\n self._Matr__c_elem().backward(g._Matr__c_elem(),lx._Lexique__c_elem())\n self._Matr__maj()", "def pop(self):", "def pop(self):", "def clean(self, ref):\n # NOTE: This currently only works on the top-most frame\n f1 = self.frames[0]\n f2 = ref.frames[0]\n f1.subtract(f2)", "def _decrement_file_counter(self):\n 
self._add_to_file_counter(-1)", "def deallocate_room_space(self):\n if self._is_empty:\n return - 1\n else:\n self.allocated_spaces = self.allocated_spaces - 1\n self.unallocated_spaces = self.capacity - self.allocated_spaces", "def RefBuffer(self):\n return self.buffers[self.buffergrade]", "def pop(self):\n self._a_to_b()\n r = self.b[-1]\n self.b.pop()\n return r", "def __exit__(self, *_) -> None:\n self.__cursor.top.pop()", "def eat_coin(self):\r\n self.app.coins.remove(self.grid_pos)\r\n self.current_score += 1", "def clearBuffer(self):\r\n self.__buffer =[[Pixel() for i in range(self.__width)] for i in range(self.__height)]", "def decrease_newly(self, quantitiy):\n self._newly = self._newly - quantitiy", "def dec(self, key):\n if key not in self.key_dict:\n return\n self.decrease(key)", "def popitem(self):\n pass", "def down(self):\n if self.bottom == self.current:\n return\n else:\n self.current -= 1", "def discard(self,):\n self.stack.pop()", "def draw(self):\n\n return self.deck.popleft()", "def decrease_key(self, old_item, new_item):", "def update(self):\n self.count -= 1\n if not self.count:\n self.kill()", "def _backwards(self, letter):\n\t\tl = letter\n\t\tfor i in range(self.n_rotors):\n\t\t\tl = self._rotor_left2right(self.rotors[i], l, self.offsets[i],\n\t\t\t\t\t\t\t\t\tself.rings[i])\n\t\treturn l", "def backspace(self):\n if self.current_index > 0:\n self.current_index -= 1\n self.line[self.current_index] = gamestate.PlayerPeg.empty", "def back(self):\n self.cursor.back()", "def unconfigure_acquisition(self):\r\n self._buffer = None\r\n logger.debug(\"buffer UNREFERENCED\")", "def previous_buffer(self):\n selected_window = self.selected_window()\n selected_window.set_buffer(self._find_previous_buffer(selected_window.buffer()))", "def pop_memory(self, **kwarg):\n for name, obs in kwarg.items():\n self.buffers[name] = obs[-self.memory_size:]\n return self", "def pop(self) -> int:\n return self._deque.pop(0)", "def dec_ring_setting(self):\n self._rng_offset = self._change_offset(self._rng_offset, -1)", "def AdvanceQueue(self):\r\n self.data.pop(0)\r\n return", "def UnlockSeqBuf(self,number):\r\n r = CALL('UnlockSeqBuf',self,INT(number),self.image)\r\n return self.CheckForSuccessError(r)", "def delSplitValue(self, split):\n self.balance -= split.value", "def clear_buffer(self):\n for i, value in enumerate(self.buffer):\n self.buffer[i] = 0", "def drop_curr_piece(self):\n if self.over: return\n delta = (0, 0) # now make this as big as possible\n while True:\n new_delta = tuple_add(delta, (0, 1))\n if self.can_move_curr_piece(new_delta):\n delta = new_delta\n else:\n break\n self.increment_score(delta[1])\n self.move_curr_piece(delta)\n self.lock_curr_piece()\n self.queue_draw()", "def swap_discard(renderer):\n\n now = 0\n render_start = None\n render_done = None\n buf_count = 0\n buffers = [(-1, 0)]\n while True:\n vblank = yield buffers[0]\n\n while True:\n if render_done is None:\n if len(buffers) >= 3:\n # Discard oldest waiting buffer.\n del buffers[1]\n render_start = now\n render_done = render_start + renderer(now)\n if render_done > vblank:\n break\n else:\n buffers.append((buf_count, render_start))\n buf_count += 1\n now = render_done\n render_done = None\n\n if len(buffers) > 1:\n del buffers[0]", "def pop(self):\n b = self.a[-1]\n del self.a[-1]\n return b", "def pop():", "def removeJob(self, job_number):\n job = self.retrieveJob(job_number)\n job_type = job.getType() - 1\n del (self.assigned_jobs[job_number])\n self.span -= job.getLength()\n self.types[job_type] = 
self.types[job_type] - 1\n self.types_sums[job_type] = self.types_sums[job_type] - job.length\n job.in_machine = -1", "def pop(self):\n return self.new_dll.shift()", "def pop(self) -> int:\n self.move()\n return self.outStack.pop()", "def deactivate(self):\n self._glir.command('FRAMEBUFFER', self._id, False)", "def back_patch(self, *args, **kwargs):\n self.pb[self.ss_i(0)] = \"JPF\", _m(self.ss_i(1)), _m(self.pc)\n self.pop(2)", "def deQueue(self):\n if self.isEmpty():\n return False\n self.__start = (self.__start+1) % len(self.__buffer)\n self.__size -= 1\n return True", "def delete_ball_sequence(self, start, end):\n self.music_queue.append('score_up')\n amount = end - start + 1\n scored = 0\n for i in range(amount):\n if self.balls[start + i].status != 3:\n scored += 1\n self.balls[start + i].status = 3\n if len(self.balls) - 1 != end and start != 0:\n self.come_back.append(start)\n self.score += scored * (50 + 10 * (scored - 3))", "def pop_counters(self, *_, **__): # pylint: disable=arguments-differ\n pass", "def leave_group(self):\n\t\tself.sendMessage(ID_CTRL + \"LEAVE\", True)\n\t\tself.joinstate = 0\n\t\tself.createstate = 0\n\t\tself.__key = None", "def backward(self, grad, index):\n pass", "def pop_bubble(self):\n i = self.cursor_bubble_collide()\n if i != -1:\n bubble = self.all_bubbles.sprites()[i]\n bubble.bubblekill()\n self.increase_score(bubble.get_value() * Settings.points_multiplier)", "def _clearstamp(self, stampid):\n if stampid in self.stampItems:\n if isinstance(stampid, tuple):\n for subitem in stampid:\n self.screen._delete(subitem)\n else:\n self.screen._delete(stampid)\n self.stampItems.remove(stampid)\n # Delete stampitem from undobuffer if necessary\n # if clearstamp is called directly.\n item = (\"stamp\", stampid)\n buf = self.undobuffer\n if item not in buf.buffer:\n return\n index = buf.buffer.index(item)\n buf.buffer.remove(item)\n if index <= buf.ptr:\n buf.ptr = (buf.ptr - 1) % buf.bufsize\n buf.buffer.insert((buf.ptr+1)%buf.bufsize, [None])", "def backspace(self):\n if self._is_zero():\n return\n\n is_one_digit_pos = len(self._digits) == 1 and\\\n (self._digits[0] != self.Symbols.ZERO.value)\n\n is_one_digit_neg = len(self._digits) == 2 and\\\n (self._digits[0] == self.Symbols.NEGATIVE.value)\n\n if is_one_digit_pos or is_one_digit_neg:\n self.new()\n return\n\n self._digits.pop()", "def discart(self):\n self.queue.clear()\n self.fetchable = 0", "def __del__(self):\n if self._alloc:\n _pychidg.f90wrap_graphics_bc_t_finalise(this=self._handle)" ]
[ "0.62998307", "0.6187696", "0.6057455", "0.5972686", "0.58809793", "0.578995", "0.5724787", "0.57013", "0.5689314", "0.5660951", "0.5610543", "0.5608327", "0.55659264", "0.55524766", "0.5540197", "0.55076224", "0.5456718", "0.5449712", "0.5409593", "0.54038453", "0.53746724", "0.5361388", "0.5355655", "0.53549105", "0.5351609", "0.52295595", "0.52095085", "0.52076095", "0.5201225", "0.5176964", "0.5176964", "0.517421", "0.51589525", "0.5136176", "0.51355195", "0.5113943", "0.5113156", "0.5107907", "0.51014346", "0.5099955", "0.5068606", "0.5068606", "0.5067939", "0.5059949", "0.503056", "0.5019851", "0.5018515", "0.50161654", "0.50109255", "0.49889278", "0.49772254", "0.49695688", "0.49695688", "0.4968564", "0.49682188", "0.4962261", "0.49581948", "0.49505922", "0.49490315", "0.49239868", "0.49199998", "0.4903829", "0.48930132", "0.4882085", "0.48707935", "0.48572335", "0.4853811", "0.48439705", "0.483706", "0.48303518", "0.482905", "0.4827932", "0.48266354", "0.4814016", "0.4812712", "0.48122072", "0.48121884", "0.47982255", "0.47872177", "0.47845498", "0.47829783", "0.478042", "0.47712478", "0.47632882", "0.47619337", "0.47608995", "0.47606894", "0.47579458", "0.4755344", "0.4754902", "0.47546238", "0.47530925", "0.4745101", "0.4743625", "0.47375304", "0.47308633", "0.4722575", "0.47213647", "0.47190994", "0.47111" ]
0.71859
0
Will return the shared buffer of all the self subclasses.
def GetMainBuffer(self):
    tmp = self.buffers[0]
    self.buffers[0] = []
    return tmp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAll(self):\n return self.dataBuffer", "def read_shared(self):\n return self._read_all()", "def read_shared(self):\n return self._read_all()", "def common(self):\n return self._common", "def get_bases(self):\n # TODO: subclassing\n return (self.py_class,)", "def create_buffers(self):", "def buffers_with_matches(self):\n _set = set(self.misc.buffers()) & set(self.matches.keys())\n _set.add(self.curr_buf.number)\n return list(_set)", "def _buffer_all(self):\n self._buffer()", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def RefBuffer(self):\n return self.buffers[self.buffergrade]", "def BufferList(self) -> _n_2_t_0[_n_0_t_11[_n_0_t_6]]:", "def get_bases(self):\n return self.py_class.__bases__", "def shared(self):\n return self._shared", "def buffer(self):\n return self._buffer", "def getBuffer(self):\n return self.buffer", "def getBufferChildren(self, buffertop):\n children = [v for k, v in self.GS_arcs if k == buffertop]\n return children", "def common(self):", "def getBufferedData(self):\n if not self.ringBuffer: # first time when buffer is empty\n return np.zeros((1, self.windowLength, self.sensorChannels)) \n return np.array(self.ringBuffer)", "def buffer(self) -> np.ndarray:\n return np.array(self._image_data, copy=False)", "def buffer_backend(cls, *args, **kwargs):\n return cls._buffer_context", "def modules_base_ring(self):\n return self._modules_base_ring", "def __getstate__(self):\n state = self.__dict__.copy()\n del state['_DoubleBufferedSharedNumpyArray__shared1']\n del state['_DoubleBufferedSharedNumpyArray__shared2']\n return state", "def _get_shared(self, array):\n\n dtype = self.NUMPY_TO_C_DTYPE[array.dtype.type]\n\n shape = array.shape\n shared = RawArray(dtype, array.reshape(-1))\n return np.frombuffer(shared, dtype).reshape(shape)", "def _get_shared(self, array, dtype=c_float):\n\n shape = array.shape\n shared = RawArray(dtype, array.reshape(-1))\n return np.frombuffer(shared, dtype).reshape(shape)", "def _make_buffers_maps(self):\n self.buffer_names = ['actions', 'states', 'states_next', 'rewards',\n 'terminals']\n\n self.buffer_types = ['1-dim', 'state_space', 'state_space',\n '1-dim', '1-dim']\n # TODO: make this more general buffer type will cause the image crash\n self.buffer_dtype = [np.int32, np.float32, np.float32,\n np.float32, np.bool]\n\n buffers = {}\n for i, name in enumerate(self.buffer_names):\n if self.buffer_types[i] != '1-dim':\n try:\n buffers[name] = ((eval('self.{}.n'.format(\n self.buffer_types[i])),),\n self.buffer_dtype[i])\n except AttributeError:\n buffers[name] = (eval('self.{}.shape'.format(\n self.buffer_types[i])), self.buffer_dtype[i])\n else:\n buffers[name] = ((1,), self.buffer_dtype[i])\n\n return buffers", "def _cast_buffers(self,\n dtype: Optional[torch.dtype] = None,\n memo: Optional[Set] = None) -> None:\n if memo is None:\n memo = set()\n for module in self.modules():\n if module is not self and isinstance(module, XlaFullyShardedDataParallel):\n # Allow any child FSDP instances to handle their own buffers.\n module._cast_buffers(dtype=dtype, memo=memo)\n elif module not in memo:\n memo.add(module)\n for name, buf in module.named_buffers(recurse=False):\n if buf is None:\n continue\n if torch.is_floating_point(buf):\n orig_dtype = buf.dtype\n cast_dtype = dtype or self.buffer_dtype\n if orig_dtype != cast_dtype:\n buf = buf.to(cast_dtype)\n buf._orig_dtype = orig_dtype\n if buf.device != self.xla_device:\n buf = buf.to(self.xla_device)\n 
setattr(module, name, buf)", "def _get_buffer(self):\n return memoryview(self._write_buffer)[: self._buffer_seek]", "def get_all_instance(self):\n\t\tself.batch_h = Variable(torch.from_numpy(self.config.batch_h)).cuda()\n\t\tself.batch_t = Variable(torch.from_numpy(self.config.batch_t)).cuda()\n\t\tself.batch_r = Variable(torch.from_numpy(self.config.batch_r)).cuda()\n\t\treturn self.batch_h, self.batch_t, self.batch_r", "def pull_buffer(self, last_shared_index):\n buffer = []\n explicit_buffer = []\n\n # report_implicit_count = 0\n # report_last_shared_time = self.ledger[last_shared_index][\"time\"]\n # report_now_last_shared_time = rospy.get_rostime()\n # report_duration = report_now_last_shared_time - report_last_shared_time\n\n meas_dict = self._get_shareable_meas_dict(last_shared_index)\n print(\"PULLING BUFFER: current index {}\".format(len(self.ledger)))\n for msg_id in meas_dict:\n times = meas_dict[msg_id][\"times\"] # Should be sorted\n explicit = meas_dict[msg_id][\"explicit\"]\n bursts = self._get_bursts(times)\n # print(\"Delta: {} | Msg id: {} | Num Explicit: {}\".format(self.delta_multiplier, msg_id, len(explicit)))\n # print(\"size(times): {}\".format(len(times)))\n # print(\"size(explicit): {}\".format(len(explicit)))\n # print(\"bursts: {}\".format(bursts))\n\n if len(bursts) > 1:\n print(\"ERROR MULTIPLE BURSTS DETECTED\")\n print(bursts)\n\n b = bursts[-1] # Only use last burst\n b_numpy = np.array(b)\n start_time = b[0]\n # print(\"Constructing msg: {}\".format(msg_id))\n if len(b) > 1:\n cumdiff = b_numpy[1:] - b_numpy[:-1] # Get the adjacent difference\n latencies = [lat.to_sec() for lat in cumdiff]\n mean_lat = np.mean(latencies)\n # print(\"Avg latency: {}\".format(mean_lat))\n else:\n mean_lat = 0\n # print(\"Num msgs: {}\".format(len(b)))\n burst_msg = self._make_burst_msg(msg_id, len(b), start_time, mean_lat)\n buffer.append( burst_msg )\n explicit_buffer.extend( explicit )\n # report_implicit_count += (len(b) - len(explicit))\n \n meas_sort = lambda x : x.stamp\n explicit_buffer.sort(key=meas_sort, reverse=True)\n buffer.extend(explicit_buffer)\n\n # REPORT\n # print(\"******* BUFFER SHARING REPORT FOR {} w/ Delta {}*******\".format(self.my_name, self.delta_multiplier))\n # print(\"Last shared time: {}\".format(report_last_shared_time.to_sec()))\n # print(\"Sharing duration: {}\".format(report_duration.to_sec()))\n # print(\"Sharing time now: {}\".format(report_now_last_shared_time.to_sec()))\n # print(\"Implicit cnt: {}\".format(report_implicit_count))\n # print(\"Explicit cnt: {}\".format(len(explicit_buffer)))\n\n return buffer # Delta-Tiering\n # return explicit_buffer # N-most recent", "def all(self):\n return self._clone()", "def objects():\n subclasses = StorableObject.descendants()\n return {subclass.__name__: subclass for subclass in subclasses\n if not subclass.__module__.startswith(\n 'openpathsampling.experimental.storage'\n )}", "def hbObjects(self):\r\n return self.__hbObjs", "def __init__(self, buffer_size=3000):\n self.buffer = []\n self.buffer_size = buffer_size", "def __init__(self, buffer_size=3000):\n self.buffer = []\n self.buffer_size = buffer_size", "def _cast_buffers(\n self,\n device: Optional[torch.device] = None,\n dtype: Optional[Dict[str, torch.dtype]] = None,\n memo: Optional[Set] = None,\n recurse: bool = True,\n ) -> None:\n if memo is None:\n memo = set()\n for module in self.modules():\n if module is not self and isinstance(module, FullyShardedDataParallel) and recurse:\n # Allow any child FSDP instances to handle their own 
buffers.\n module._cast_buffers(device=device, dtype=dtype, memo=memo, recurse=recurse)\n elif module not in memo:\n memo.add(module)\n for name, buf in module.named_buffers(recurse=False):\n if buf is None:\n continue\n buf = buf.to(device=device or self.compute_device)\n if name not in self._buffer_name_to_orig_dtype:\n self._buffer_name_to_orig_dtype[name] = buf.dtype\n # If given, cast buffer to the given dtype. This is used to\n # suppport mixed precision for buffers\n # (given by self.mixed_precision.buffer_dtype) and also used\n # to restore the buffer dtype to the original precision for\n # state_dict() calls.\n # Note that non-floating point buffers are not casted.\n if torch.is_floating_point(buf):\n # We are restoring the original buffer type in\n # preparation for checkpoint.\n if dtype:\n buf = buf.to(dtype=dtype[name])\n # Note that we don't pass in self.mixed_precision.buffer_dtype\n # recursively into _cast_buffers, as we want to respect\n # mp config for child FSDP instances.\n elif self._mixed_precision_enabled_for_buffers():\n buf = buf.to(self.mixed_precision.buffer_dtype)\n\n setattr(module, name, buf)", "def __getattr__(self, name):\n if name == 'buffer':\n return self.__buffer\n raise AttributeError", "def get(self):\n assert self.ptr == self.max_size\n self.ptr, self.path_start_idx = 0, 0\n return [self.obs_buf, self.act_buf, self.adv_buf, self.ret_buf, self.val_buf]", "def get_all_rcv(self) -> \"list[tuple[float, bytes]]\":\n\n return self._rcv_queue", "def _get_all(cls):\r\n # BaseProvider does so have __subclassess__. pylint: disable-msg=no-member\r\n return {klass.NAME: klass for klass in BaseProvider.__subclasses__()}", "def get_buffer(self):\n \n # Get sources for documents which are in elasticsearch\n # and they are not in local buffer\n if self.doc_to_get: self.update_sources()\n \n ES_buffer = deepcopy(self.action_buffer)\n self.action_buffer = []\n self.sources = {}\n return ES_buffer", "def bufferView( self ):\n if not self.buffer:\n raise AttributeError( 'No buffer currently' )\n buffer = self.buffer.buffer\n # okay, now slice-and-dice it...\n # TODO: watch for cases where the buffer is something \n # other than the native-size? 
Shouldn't be possible given \n # the typed nature of the buffer property.\n if self.bufferKey:\n # pull out just the first items (this property) from the \n # buffer, will *likely* want to collapse with .view() as well...\n return buffer[self.bufferKey][:,0].view( '%sf'%( self.size,))\n shape = buffer.shape\n offset = self.offset//buffer.itemsize\n stride = self.stride//buffer.itemsize\n # okay, are we a multi-dimensional buffer?\n if len(shape) == 2:\n if stride%shape[-1]:\n # is not evenly divisble...\n raise ValueError( \n \"\"\"Stride %s is not evenly divisible into matrix shape %s\"\"\"%(\n stride, shape\n ) \n )\n else:\n step = stride//shape[-1]\n # TODO: support higher-order shapes\n if step > 1:\n return buffer[::step,offset:offset+self.size]\n else:\n return buffer[:,offset:offset+self.size]\n elif len(shape) == 1:\n # we're a ravelled array...\n buffer = reshape( buffer, (-1,stride))\n return buffer[:,offset:offset+self.size]\n else:\n raise NotImplemented( \n \"\"\"Haven't implemented view support for N dimensional arrays\"\"\"\n )", "def base_ring(self):\n return self.domain().base_ring()", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def get_common_food(cls):\n objs = cls.objects\n return objs", "def getBlobs( self ):\n return self.__blobs;", "def get_ring(self):\n return self", "def get_batch(self):\n result = self.buffer_.get()\n self._async_next()\n return result", "def get_batch(self):\n result = self.buffer_.get()\n self._async_next()\n return result", "def __getstate__(self):\n self._bottom_sub = [base.Blob() for _ in range(self._group)]\n self._top_sub = [base.Blob() for _ in range(self._group)]\n return self.__dict__", "def __iter__(self):\n\n return [self]", "def __iter__(self):\n\n return [self]", "def sharedVertices(self):\n return self._sharedVertices", "def objects(self, cls):\n for name, info in direct_fields(self.__class__).items():\n if issubclass(cls, info.sub_fields[0].type_):\n return getattr(self, name)\n raise TypeError(cls)", "def classes(self):\n raise NotImplementedError(\"Please implement this yourself.\")", "def get_merged_buffers(ptr):\n\n\thdata = weechat.hdata_get(\"buffer\")\n\tbuffers = weechat.hdata_get_list(hdata, \"gui_buffers\")\n\tbuffer = weechat.hdata_search(hdata, buffers, \"${buffer.number} == %i\" % weechat.hdata_integer(hdata, ptr, \"number\"), 1)\n\tnbuffer = weechat.hdata_move(hdata, buffer, 1)\n\n\tret = []\n\twhile buffer:\n\t\tret.append(weechat.hdata_string(hdata, buffer, \"full_name\"))\n\n\t\tif (weechat.hdata_integer(hdata, buffer, \"number\") == weechat.hdata_integer(hdata, nbuffer, \"number\")):\n\t\t\tbuffer = nbuffer\n\t\t\tnbuffer = weechat.hdata_move(hdata, nbuffer, 1)\n\t\telse:\n\t\t\tbuffer = None\n\n\treturn ret", "def get_non_inheriting_objects(self):\n return get_non_inheriting_objects(self)", "def alltoall_recvbuffer(\n self, obj: torch.Tensor\n ) -> List[Union[MPI.memory, Tuple[int, int], MPI.Datatype]]:\n mpi_type, _ = self.__mpi_type_mappings[obj.dtype], torch.numel(obj)\n\n nproc = self.size\n shape = obj.shape[1:]\n strides = [1] * len(shape)\n strides[0] = obj.stride()[-1]\n strides = strides[::-1]\n offsets = 
[obj.element_size() * stride for stride in obj.stride()[:-1]]\n\n # Step 1: Wrap along axes > 0 (all axes except recv_axis)\n for i in range(len(shape) - 1, -1, -1):\n mpi_type = mpi_type.Create_vector(shape[i], 1, strides[i]).Create_resized(0, offsets[i])\n mpi_type.Commit()\n\n # Step 2: Receive blocks along the recv axis\n # Prepare recvcount, senddispls and sendtypes for alltoallw\n recvcount = np.full((nproc,), obj.shape[0] // nproc)\n recvcount[: obj.shape[0] % nproc] += 1\n # size/extent of mpitype = offsets[0]\n tmp_displs = [0] * nproc\n tmp_displs[1:] = np.cumsum(recvcount[:-1])\n recvdispls = [offsets[0] * d for d in tmp_displs]\n recvtypes = [mpi_type] * nproc\n\n return self.as_mpi_memory(obj), (recvcount, recvdispls), recvtypes", "def _classes_(cls):\n for base_cls in cls.__bases__:\n # Avoid infinite loop\n if base_cls == Sandbox:\n continue\n\n yield base_cls", "def get_all_bw_counters(self, instances):\n bw = []\n return bw", "def __call__(self) -> buffer.Buffer:\n processed_buffer = self.output_queue.get()\n\n return processed_buffer", "def get_classes(self):\n return", "def _next_base_class(self) -> type:\n\n self._next_worker = self._next_from_pool\n return self.base_class", "def get(self):\r\n\t\treturn list(self)", "def getSharedVariables(self):\n return self.sharedVariables.copy()", "def __copy__(self):\n return Bag(self.items)", "def get_all_bw_counters(self, instances):\n\n bw = []\n return bw", "def queues(self):\r\n return queues.Queues(self)", "def binary_bases(cls):\n return cls._BINARY_BASES", "def get_all_object_classes(cls) -> Dict[str, Type[objects.BaseObject]]:\n cls._refresh_registry()\n return copy.deepcopy(cls.objects_dict)", "def get_buffer_range(self):\n\n return (self._buffer_top, self._buffer_bottom)", "def buffer_data(self):\n # create a ctypes pointer to the buffer\n buffer_ptr = cast(self.data.buffer, POINTER(c_ubyte * self.data.bufferSize))\n\n # contents always returns a copy\n return buffer_ptr.contents", "def getSharedDict(self):\n return self._sharedDict", "def get_batch(self, batch_size):\n n, _ = self.contexts.shape\n if self.buffer_s == -1:\n # use all the data\n ind = np.random.choice(range(n), batch_size)\n else:\n # use only buffer (last buffer_s observations)\n ind = np.random.choice(range(max(0, n - self.buffer_s), n), batch_size)\n return self.contexts[ind, :], self.rewards[ind, :]", "def get_subclasses(self, class_name):\n return class_name.__subclasses__()", "def _get_raw_data(self):\n raw_data_bytes = self.nbytes + self._heapsize\n base = self\n while hasattr(base, \"base\") and base.base is not None:\n base = base.base\n # Variable-length-arrays: should take into account the case of\n # empty arrays\n if hasattr(base, \"_heapoffset\"):\n if hasattr(base, \"nbytes\") and base.nbytes > raw_data_bytes:\n return base\n # non variable-length-arrays\n else:\n if hasattr(base, \"nbytes\") and base.nbytes >= raw_data_bytes:\n return base", "def getAllAndClearBuffer(self):\n data = self.dataBuffer\n\n self.dataBuffer = []\n self.timestamps = []\n\n return data", "def __copy__(self):\n return self.parent()([gg for gg in self._g],\n check=False,\n mutable=self._mutable)", "def all(self):\n return list(self)", "def all(self):\n return self[:]", "def all(self):\n return FileStorage.__objects", "def all(self):\n return FileStorage.__objects", "def all(self):\n return FileStorage.__objects", "def exts(self):\n return type(self).class_ext()", "def get_declared_queues(self):\n return self.queues.copy()", "def get_buf(self, data_type = 
\"void\"):\n if self.buf is not None:\n return ffi.cast(data_type + \"*\", self.buf)\n else:\n raise RuntimeError(\"Buffer not created.\")", "def get_shared_type(self):\n\n\t\treturn self.__shared_type", "def blobs(self):\n if not self._blobs:\n workspace = self.attributes.workspace\n # Instantiates a google client, & get all blobs in bucket\n storage_client = storage.Client(project=self._user_project)\n bucket = storage_client.bucket(workspace['bucketName'], user_project=self._user_project)\n # get subset of data\n _blobs = {}\n try:\n for b in bucket.list_blobs(fields='items(size, etag, crc32c, name, timeCreated),nextPageToken'):\n name = f\"gs://{workspace['bucketName']}/{b.name}\"\n # cache.put(name, {'size': b.size, 'etag': b.etag, 'crc32c': b.crc32c, 'time_created': b.time_created, 'name': name})\n _blobs[name] = AttrDict({'size': b.size, 'etag': b.etag, 'crc32c': b.crc32c, 'time_created': b.time_created, 'name': name})\n self._blobs = _blobs\n except Exception as e:\n print(f\"{self.id} {workspace['bucketName']} {e}\")\n self._blobs = _blobs\n return self._blobs", "def current_buffer_app(self):\n return self.session.current_buffer", "def shared_scope(self):\n return self._shared_scope", "def shared_scope(self):\n return self._shared_scope" ]
[ "0.58079183", "0.5729144", "0.5729144", "0.5667983", "0.5634361", "0.5608551", "0.5481564", "0.54492664", "0.5438543", "0.5438543", "0.5438543", "0.5422512", "0.54199225", "0.5405491", "0.539883", "0.53901875", "0.53715754", "0.53574383", "0.53372616", "0.5331279", "0.5319714", "0.5305637", "0.5302012", "0.52857244", "0.52732503", "0.5261373", "0.52495295", "0.5237778", "0.52294904", "0.51798135", "0.51509655", "0.5109341", "0.50852764", "0.5082498", "0.5054614", "0.5054614", "0.5054584", "0.5053043", "0.5052833", "0.5045352", "0.50301355", "0.5028617", "0.5018211", "0.5016574", "0.4988238", "0.4988238", "0.4988238", "0.4988238", "0.4988238", "0.4988238", "0.4988238", "0.4988238", "0.4988238", "0.49869612", "0.49861875", "0.49839646", "0.4982921", "0.4982921", "0.49690896", "0.49649543", "0.49649543", "0.49644858", "0.49595344", "0.4959438", "0.4931429", "0.49299747", "0.49297178", "0.4929051", "0.49163", "0.49140945", "0.4912665", "0.490219", "0.49018154", "0.48883486", "0.48677102", "0.48547277", "0.48486498", "0.48454306", "0.48445842", "0.48429626", "0.48400322", "0.4837951", "0.48343977", "0.48277497", "0.48196626", "0.48194835", "0.48158675", "0.48129734", "0.48118618", "0.479754", "0.479754", "0.479754", "0.47905952", "0.47884256", "0.47858146", "0.47704273", "0.47695202", "0.47660133", "0.47599548", "0.47599548" ]
0.5415913
13
Get a reference to the actual buffer activated.
def RefBuffer(self):
    return self.buffers[self.buffergrade]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def current_buffer(self):\n return self.layout.current_buffer", "def getBuffer(self):\n return self.buffer", "def buffer_backend(cls, *args, **kwargs):\n return cls._buffer_context", "def current_buffer_app(self):\n return self.session.current_buffer", "def buffer(self):\n return self._buffer", "def reward_buffer(self):\n return self._reward_buffer", "def __getattr__(self, name):\n if name == 'buffer':\n return self.__buffer\n raise AttributeError", "def current_buffer(self, no_minibuffer=False):\n return \\\n self._mini_buffer \\\n if self.mini_buffer_state and not no_minibuffer else \\\n self.selected_window().buffer()", "def get_buffername(self):\n return self.__buffername", "def GetMainBuffer(self):\n tmp = self.buffers[0]\n self.buffers[0] = []\n return tmp", "def _get_buffer(self):\n return memoryview(self._write_buffer)[: self._buffer_seek]", "def _determine_context_buffer(self,s):\n try: return self.buffers[inspect.stack()[2][3]]\n except KeyError: return self.buffers['default']", "def getPixelsBuffer(self):\n\t\treturn self.leds", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def Buffer(self) -> _n_0_t_7[_n_0_t_6]:", "def grab_frame(self):\n with self._buflock:\n if self._buffer is None:\n return None\n buf = self._buffer.tostring()\n return buf", "def buffer_get():\n buffer = weechat.buffer_search('python', SCRIPT_NAME)\n\n if not buffer:\n buffer = weechat.buffer_new(SCRIPT_NAME, 'buffer_input', '', '', '')\n weechat.buffer_set(buffer, 'time_for_each_line', '0')\n weechat.buffer_set(buffer, 'nicklist', '0')\n weechat.buffer_set(buffer, 'title', 'Google Calendar')\n weechat.buffer_set(buffer, 'localvar_set_no_log', '1')\n\n return buffer", "def _get_activate(self):\n return self.__activate", "def create_buffers(self):", "def _get_input_buffer(self):\n return ConsoleWidget._get_input_buffer(self)", "def getBufferedData(self):\n if not self.ringBuffer: # first time when buffer is empty\n return np.zeros((1, self.windowLength, self.sensorChannels)) \n return np.array(self.ringBuffer)", "def get_buffer(self, i):\n\n if i not in range(1, self.NUM_BUFFERS + 1):\n raise IndexError(\"Error: Could not get buffer %d. 
Must be \"\n \"between 1 and 9\" % i)\n return self.m_param[i]", "def activate(self):\n # Send command\n self._glir.command('FRAMEBUFFER', self._id, True)\n # Associate canvas now\n canvas = get_current_canvas()\n if canvas is not None:\n canvas.context.glir.associate(self.glir)", "def _is_buffered(self):\n return self.buffered or type(self)._buffer_context", "def popBuffer(self):\n return self.ecg_buffer.get()", "def GetBitmapFocus(self):\n\n return self.bmpFocus", "def buffer(self) -> np.ndarray:\n return np.array(self._image_data, copy=False)", "def get_focus(self):\n return self._get_at(self._current)", "def get_signalBufferHostPointer(self):\n return self.GPU_bufSignalTime_cpu_handle", "def getSendMessageBuffer(self):\n return self.SendMessageBuffer", "def next_buffer(self):\n selected_window = self.selected_window()\n selected_window.set_buffer(self._find_next_buffer(selected_window.buffer()))", "def _select(self):\r\n opengles.glBindBuffer(GL_ARRAY_BUFFER, self.vbuf)\r\n opengles.glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.ebuf)", "def get_bluetooth(self):\n return self._bluetooth", "def getFIFO(self):\n return self._buf", "def get_beam_current(self):\n raise NotImplementedError", "def __call__(self) -> buffer.Buffer:\n processed_buffer = self.output_queue.get()\n\n return processed_buffer", "def vbo( self, mode ):\n uploaded = mode.cache.getData( self, 'buffer' )\n if uploaded is None:\n uploaded = vbo.VBO( \n self.buffer, \n usage=self.gl_usage(), \n target=self.gl_target(),\n ) # TODO: stream type\n holder = mode.cache.holder( self, uploaded, 'buffer' )\n holder.depend( self, 'buffer' )\n return uploaded", "def get_buffer(number):\n\n buffers = [buffer for buffer in vim.buffers if buffer.number == number]\n assert len(buffers) == 1\n return buffers[0]", "def get_cycle_buffer(self, cycle: int):\n state = self.get_cycle_state(cycle)\n return state.get_metadata_by_key(\"buffer\")", "def feedback_activation(self):\n return self._feedback_activation", "def active_brdch(self):\n return self._faux._active_brdch.copy()", "def get_buffer( self ):\n last_byte = self.current_bits if (self.bits_remaining < 8) else None\n\n result = self.output\n if last_byte is not None:\n result = bytearray( result )\n result.append( last_byte )\n\n if self.bytes_reverse:\n return bytes( reversed( result ) )\n else:\n return bytes( result )", "def reward_buffer(self) -> Deque[float]:\n return self._reward_buffer", "def rb_attached(self):\r\n return self._rb", "def current_document(self):\n return self.current_buffer.document", "def cb_vi_buffer(data, item, window):\n return vi_buffer", "def backend_is_buffered(cls):\n return bool(cls._buffer_context)", "def info(self):\n buffer, byte_length, byte_offset = self.bufferView.info(\n byte_offset=self.byteOffset\n )\n return (\n buffer,\n self.bufferView,\n byte_length,\n byte_offset,\n self.componentType,\n ACCESSOR_TYPE[self.type],\n self.count,\n )", "def _get_active(self):\n return self.__active", "def _get_active(self):\n return self.__active", "def _get_active(self):\n return self.__active", "def _get_active(self):\n return self.__active", "def get_buf(self, data_type = \"void\"):\n if self.buf is not None:\n return ffi.cast(data_type + \"*\", self.buf)\n else:\n raise RuntimeError(\"Buffer not created.\")", "def get_ring(self):\n return self", "def get_buffer(self):\n \n # Get sources for documents which are in elasticsearch\n # and they are not in local buffer\n if self.doc_to_get: self.update_sources()\n \n ES_buffer = deepcopy(self.action_buffer)\n 
self.action_buffer = []\n self.sources = {}\n return ES_buffer", "def _BitmapFromBufferAlpha(*args, **kwargs):\n return _gdi_._BitmapFromBufferAlpha(*args, **kwargs)", "def create_or_get_buffer(self, name):\n for b in self.nvim.buffers:\n bname = path.basename(b.name)\n if bname == name:\n return b\n\n # Create new buffer\n self.nvim.command('set splitbelow')\n self.nvim.command('new')\n self.nvim.command('setlocal buftype=nofile noswapfile ro')\n self.nvim.command('res 2')\n\n b = self.nvim.current.buffer\n b.name = name\n\n return b", "def __enter__(self):\n gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.fbo)", "def grab(self) -> KeyboardGrab:\n return KeyboardGrab(self)", "def GetDataBuffer(self):\n pass", "def function(self):\n return self.generator.module.bounce_back", "def init_buffer(self):\n \n self.shape.buf = [pi3d.Buffer(self.shape, self.verts, self.texcoords, self.inds, self.norms)]\n self.shape.set_draw_details(self.shader, [self.spritesheet.img])", "def buffered(self):\n # type: () -> int\n return self._buffered", "def get_color(self, _pos):\n return self.__framebuffer[_pos]", "def read_buffer(self):\n message=self._message\n return message", "def bufferValue(self):\n return self.bufferSpinBox.value()", "def _getForBinding (self):\n return self.__forBinding", "def getBufferStatus(self):\n cmd_string = '?10'\n data = self.sendRcv(cmd_string)\n return int(data)", "def get_character_reference(self):\n\n return self.current_character", "def value(self):\n return self._buffer", "def _refresh_buffers(self) -> None:", "def get_buffer_range(self):\n\n return (self._buffer_top, self._buffer_bottom)", "def _create_bufferview(self, name, buffer, byte_length, byte_offset, byte_stride, target=None):\n new_buffer_view = self._build_bufferview(buffer=self._resolve_mapping(inp=buffer, mapping=self.buffers_map),\n target=target,\n byte_length=byte_length,\n byte_offset=byte_offset,\n byte_stride=byte_stride)\n\n self.bufferViews.append(new_buffer_view)\n\n if name:\n self.bufferViews_map[name] = self._last_index(self.bufferViews)\n\n return self._last_index(self.bufferViews)", "def bufferView( self ):\n if not self.buffer:\n raise AttributeError( 'No buffer currently' )\n buffer = self.buffer.buffer\n # okay, now slice-and-dice it...\n # TODO: watch for cases where the buffer is something \n # other than the native-size? 
Shouldn't be possible given \n # the typed nature of the buffer property.\n if self.bufferKey:\n # pull out just the first items (this property) from the \n # buffer, will *likely* want to collapse with .view() as well...\n return buffer[self.bufferKey][:,0].view( '%sf'%( self.size,))\n shape = buffer.shape\n offset = self.offset//buffer.itemsize\n stride = self.stride//buffer.itemsize\n # okay, are we a multi-dimensional buffer?\n if len(shape) == 2:\n if stride%shape[-1]:\n # is not evenly divisble...\n raise ValueError( \n \"\"\"Stride %s is not evenly divisible into matrix shape %s\"\"\"%(\n stride, shape\n ) \n )\n else:\n step = stride//shape[-1]\n # TODO: support higher-order shapes\n if step > 1:\n return buffer[::step,offset:offset+self.size]\n else:\n return buffer[:,offset:offset+self.size]\n elif len(shape) == 1:\n # we're a ravelled array...\n buffer = reshape( buffer, (-1,stride))\n return buffer[:,offset:offset+self.size]\n else:\n raise NotImplemented( \n \"\"\"Haven't implemented view support for N dimensional arrays\"\"\"\n )", "def ref(self):\n return self._ref", "def b(self):\r\n return self.__b", "def bind( self, mode ):\n vbo = self.vbo(mode)\n vbo.bind()\n return vbo", "def activation(self):\n return self.__activation", "def current_bytes(self):\n return self._current_bytes", "def getBackref(self):\n return self._backref", "def glGetBufferPointerv( baseOperation, target, pname, params=None ):\n if params is None:\n size = glGetBufferParameteriv( target, GL_BUFFER_SIZE )\n data = arrays.ArrayDatatype.zeros( (size,), GL_UNSIGNED_BYTE )\n baseOperation( target, pname, ctypes.byref( data ) )\n return data\n else:\n return baseOperation( target, pname, params )", "def X(self):\n return None if self.pX is None else self.pX.buf", "def getPDFBuffer():\n buf, n = dislin.pdfbuf(0)\n buf, n = dislin.pdfbuf(n)\n return buf", "def current(self):\n with driver.get_active_context() as ac:\n devnum = ac.devnum\n if devnum is not None:\n return self[devnum]", "def previous_buffer(self):\n selected_window = self.selected_window()\n selected_window.set_buffer(self._find_previous_buffer(selected_window.buffer()))", "def OpenBuffer(self, buffer, filetype, progress): # real signature unknown; restored from __doc__\n pass", "async def _retrieve_frame(self, mode: BufferRetrieveMode) -> RawArray:", "def latch(self):\n return self._latch", "def _add_buffer(self, p_buffer_element:PyTorchIOElement):\r\n\r\n self._buffer.add_element(p_buffer_element)", "def getCommandBuffer(self, begin):\n cmdBufAllocateInfo = vk.VkCommandBufferAllocateInfo(\n sType = vk.VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,\n commandPool = self.cmdPool,\n level = vk.VK_COMMAND_BUFFER_LEVEL_PRIMARY,\n commandBufferCount = 1\n )\n cmdBuffer = vk.vkAllocateCommandBuffers(self.device, cmdBufAllocateInfo)[0]\n\n if begin:\n cmdBufInfo = vk.VkCommandBufferBeginInfo(sType = vk.VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO)\n vk.vkBeginCommandBuffer(cmdBuffer, cmdBufInfo)\n return cmdBuffer", "def get(self):\n assert self.ptr == self.max_size\n self.ptr, self.path_start_idx = 0, 0\n return [self.obs_buf, self.act_buf, self.adv_buf, self.ret_buf, self.val_buf]", "def C(self):\n return None if self.pC is None else self.pC.buf", "def _initialize_buffers(self) -> None:", "def __init__(self):\n self.buffer = bytearray()", "def GetUseFocusIndicator(self):\n\n return self.useFocusInd", "def _make_buffer(self, width, height):\n fb_prop = p3d.FrameBufferProperties(p3d.FrameBufferProperties.get_default())\n 
fb_prop.set_multisamples(self._multisamples)\n fb_prop.set_srgb_color(self._srgb_color)\n\n self._buffer = self._engine.make_output(\n self._pipe, name=\"offscreen\", sort=0,\n fb_prop=p3d.FrameBufferProperties.get_default(),\n win_prop=p3d.WindowProperties(size=(width, height)),\n flags=p3d.GraphicsPipe.BFRefuseWindow)\n\n self._region = self._buffer.make_display_region()\n\n self._depth_tex = p3d.Texture()\n self._depth_tex.setFormat(p3d.Texture.FDepthComponent)\n self._buffer.addRenderTexture(\n self._depth_tex, p3d.GraphicsOutput.RTMCopyRam, p3d.GraphicsOutput.RTPDepth)\n\n self._color_tex = p3d.Texture()\n self._color_tex.setFormat(p3d.Texture.FRgba8)\n self._buffer.addRenderTexture(\n self._color_tex, p3d.GraphicsOutput.RTMCopyRam, p3d.GraphicsOutput.RTPColor)", "def get_focus(self):\n\n self.activateWindow()\n self.setFocus()", "def __getitem__(self, key):\n return self.__buffer[key]", "def rtt_get_buf_descriptor(self, buffer_index, up):\n desc = structs.JLinkRTTerminalBufDesc()\n desc.BufferIndex = buffer_index\n desc.Direction = 0 if up else 1\n self.rtt_control(enums.JLinkRTTCommand.GETDESC, desc)\n return desc" ]
[ "0.70962286", "0.69201726", "0.67237633", "0.6671908", "0.6613722", "0.62484485", "0.61964005", "0.61714095", "0.61454296", "0.61443", "0.6099056", "0.60289884", "0.5986562", "0.5955822", "0.5955822", "0.5955822", "0.59430313", "0.58911574", "0.5882561", "0.5830065", "0.58167636", "0.57994443", "0.5758549", "0.5744728", "0.5742756", "0.57116264", "0.5705924", "0.5680586", "0.5651987", "0.56310374", "0.56224096", "0.5615884", "0.5606402", "0.55863976", "0.5577218", "0.55655956", "0.556122", "0.5558596", "0.55583125", "0.5535508", "0.55342287", "0.55330575", "0.5527275", "0.55048686", "0.54879653", "0.5478901", "0.5434787", "0.5421178", "0.5420057", "0.5412072", "0.5412072", "0.5412072", "0.5412072", "0.54109466", "0.54085046", "0.5407927", "0.54041445", "0.5403223", "0.5398549", "0.5392055", "0.5375054", "0.5370737", "0.53698367", "0.5368097", "0.53543204", "0.53220963", "0.53186333", "0.5313605", "0.5312246", "0.53099114", "0.53037363", "0.52842754", "0.5278944", "0.52646494", "0.52625895", "0.52614975", "0.52571267", "0.52511966", "0.5246487", "0.523614", "0.5231214", "0.52300495", "0.5218898", "0.52084666", "0.5203731", "0.5193957", "0.51828986", "0.51569563", "0.51562715", "0.5147727", "0.5144232", "0.51432025", "0.51339525", "0.51300544", "0.51280457", "0.5121927", "0.5118485", "0.5115862", "0.5113", "0.51069516" ]
0.7871913
0
Track a code indentation index for later use.
def TrackIfIndex(self, index): self.indentindex.append(index)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def increase_code_indent(self) -> None:\n self._parent_node.increase_code_indent()", "def _increaseindentation(self):\n self._indentlist.append(self._curindent)\n if not self._equalsigns[-1]:\n self._curindent = self._curindent + self._indent", "def getIndentationLevel(self, code_line):\n print(\"the code line : \", code_line)\n return len(code_line) - len(code_line.lstrip(\" \"))", "def indentation(self, pad, linepad, lang='c++', *args):\n pad.edit_separator()\n if lang == 'c++':\n curr = pad.get('1.0', GUI.INSERT)\n till_end = pad.get('1.0', GUI.END)\n indent = max(curr.count(\"{\") - curr.count('}'), 0)\n diff = till_end.count('{') - till_end.count('}')\n pad.insert(GUI.INSERT, ' ' * indent)\n cordinate = map(int, pad.index(GUI.INSERT).split('.'))\n if diff > 0:\n pad.insert(GUI.INSERT, '\\n' + ' ' * 4 * max(indent - 1, 0) + '}')\n pad.mark_set(GUI.INSERT, '%d.%d' % (cordinate[0], cordinate[1]))\n if lang == 'py':\n coordinates1 = map(int, pad.index(GUI.INSERT).split('.'))\n if coordinates1[0] != 1:\n coordinates = str(coordinates1[0] - 1) + '.0'\n r = pad.get(coordinates, coordinates + 'lineend')\n letters = list(str(r))\n cnt = 0\n # find indentation level\n for i in letters:\n if i == ' ':\n cnt += 1\n else:\n break\n cnt = cnt / 4\n # check if indentation increasing keywords present\n f = 0\n for i in keywords['py']['loops']:\n if i in r:\n f = 1\n break\n\n if f:\n pad.insert(GUI.INSERT, (' ' * (cnt + 1) * 4))\n else:\n pad.insert(GUI.INSERT, (' ' * (cnt) * 4))\n self.linenumber(pad, linepad)", "def indent(self):\n self.indent_level += self.INDENT_STEP", "def indent(self):\n self.indent_level += self.INDENT_STEP", "def indent_code(self, code):\n\n if isinstance(code, string_types):\n code_lines = self.indent_code(code.splitlines(True))\n return ''.join(code_lines)\n\n tab = \" \"\n inc_token = ('{', '(', '{\\n', '(\\n')\n dec_token = ('}', ')')\n\n code = [ line.lstrip(' \\t') for line in code ]\n\n increase = [ int(any(map(line.endswith, inc_token))) for line in code ]\n decrease = [ int(any(map(line.startswith, dec_token)))\n for line in code ]\n\n pretty = []\n level = 0\n for n, line in enumerate(code):\n if line == '' or line == '\\n':\n pretty.append(line)\n continue\n level -= decrease[n]\n pretty.append(\"%s%s\" % (tab*level, line))\n level += increase[n]\n return pretty", "def determine_indentation(self):\n # Ensuring NEWLINE tokens are actually specified as such\n if self.current.tokenum != NEWLINE and self.current.value == \"\\n\":\n self.current.tokenum = NEWLINE\n\n # I want to change dedents into indents, because they seem to screw nesting up\n if self.current.tokenum == DEDENT:\n self.current.tokenum, self.current.value = self.convert_dedent()\n\n if (\n self.after_space\n and not self.is_space\n and (not self.in_container or self.just_started_container)\n ):\n # Record current indentation level\n if not self.indent_amounts or self.current.scol > self.indent_amounts[-1]:\n self.indent_amounts.append(self.current.scol)\n\n # Adjust indent as necessary\n while self.adjust_indent_at:\n self.result[self.adjust_indent_at.pop()] = (\n INDENT,\n self.indent_type * (self.current.scol - self.groups.level),\n )\n\n # Roll back groups as necessary\n if not self.is_space and not self.in_container:\n while not self.groups.root and self.groups.level >= self.current.scol:\n self.finish_hanging()\n self.groups = self.groups.parent\n\n # Reset indentation to deal with nesting\n if self.current.tokenum == INDENT and not self.groups.root:\n self.current.value = 
self.current.value[self.groups.level :]", "def addIndentationLevel(self, original_line, trace_call):\n # apply same level of indentation\n number_spaces = self.getIndentationLevel(original_line)\n print(\"step 3 spaces : \", number_spaces)\n \n # copy the original trace_call in the new_trace_call using\n # the correct number of spaces\n new_trace_call = []\n index_new_trace_call = 0\n for trace_line in trace_call:\n # calculate new size of the trace_line\n added_space_length = len(trace_line) + number_spaces\n # append spaces at the beginning of the line\n new_trace_call.append(trace_line.rjust(added_space_length)) \n index_new_trace_call = index_new_trace_call + 1\n return new_trace_call", "def indent(self, n):\n self._ind = max(0, self._ind + n)", "def enter(self):\n self.indent += 1", "def addIndents(self, prevLevel=0):\n for num in range(len(self)):\n nextLevel = 0\n if num + 1 < len(self):\n nextLevel = self[num + 1].level\n prevLevel = self[num].addIndents(prevLevel, nextLevel)", "def linenum(self):\n return self.source_frame_stack.linenum()", "def get_function_indent(line: str) -> int:\n first_function_entrance = line.index('def')\n indents = line[:first_function_entrance]\n indents_space_count = len(indents)\n return indents_space_count", "def addIndents(self, prevLevel, nextLevel):\n for num in range(self.level - prevLevel):\n self.textLines[0] = u'<div>%s' % self.textLines[0]\n for num in range(self.level - nextLevel):\n self.textLines[-1] = u'%s</div>' % self.textLines[-1]\n return self.level", "def __enter__():\n IndentedLogger._indent_level += 1\n return IndentedLogger", "def indentation(self, indent: str) -> None:\n self._indent = indent\n self._update()", "def indent_level(self):\n return len(self._tagstack) - 1", "def menu_indentation(self, event=None):\n self.parentPanel.indentation_guides(event)", "def processindentation( lexer, blanks ):\r\n indentsize = blanks and len( blanks ) or 0\r\n \r\n indentlevel = len(lexer.levels)\r\n if ( indentsize > lexer.levels[-1] ):\r\n lexer.levels.append( indentsize )\r\n lexer.pendingtokens.append( create_indent( indentlevel ) )\r\n else:\r\n while ( indentsize < lexer.levels[-1] ):\r\n lexer.levels.pop()\r\n lexer.pendingtokens.append( create_dedent( indentlevel ) )", "def highlight_source(linenumber, index, lines, offset=None):\n # The following if statements are left-over diagnostic\n # from the hack to integrate into Idle.\n # they are harmless tests which could potentially be useful.\n if lines is None:\n return \"\", \"\"\n if index is None:\n print(\"problem in highlight_source(): index is None\")\n index = 0\n\n # The weird index arithmetic below is based on the information returned\n # by Python's inspect.getinnerframes()\n\n new_lines = []\n problem_line = \"\"\n nb_digits = len(str(linenumber + index))\n no_mark = \" {:%d}: \" % nb_digits\n with_mark = \" -->{:%d}: \" % nb_digits\n if offset is not None:\n offset_mark = \" \" * (8 + nb_digits + offset) + \"^\"\n i = linenumber - index\n\n for line in lines:\n if i == linenumber:\n num = with_mark.format(i)\n problem_line = line\n new_lines.append(num + line.rstrip())\n if offset is not None:\n new_lines.append(offset_mark)\n break\n else:\n num = no_mark.format(i)\n new_lines.append(num + line.rstrip())\n i += 1\n return \"\\n\".join(new_lines), problem_line", "def test_reset_limit_on_indent(self):\n indenter = indent.Indenter()\n indenter.indentation = -2\n self.assertRaises(ValueError, indenter.indent)\n indenter.indentation = -1\n self.assertRaises(ValueError, 
indenter.indent)\n indenter.indentation = 0\n indenter.indent()\n indenter.indentation = +1\n indenter.indent()\n indenter.indentation = +2\n indenter.indent()", "def indent(self):\n self.x_pos += 10", "def insert_indent(event):\n env = XSH.env\n event.cli.current_buffer.insert_text(env.get(\"INDENT\"))", "def addIndent( self, increment=0 ):\n self.context.append( self.context[-1] )\n self.log_indent.debug( \"addIndent {!s}: {!r}\".format(self.lastIndent, self.context) )", "def indent(self, increment=1):\n # increase the indentation level\n self._level += increment\n # and adjust the margin filler\n self.leader = self._indenter * self._level\n # all done\n return self", "def AutoIndent(self):\n cpos = self.GetCurrentPos()\n\n # Check if a special purpose indenter has been registered\n if self._code['indenter'] is not None:\n self.BeginUndoAction()\n self._code['indenter'](self, cpos, self.GetIndentChar())\n self.EndUndoAction()\n else:\n # Default Indenter\n line = self.GetCurrentLine()\n text = self.GetTextRange(self.PositionFromLine(line), cpos)\n if text.strip() == u'':\n self.AddText(self.GetEOLChar() + text)\n self.EnsureCaretVisible()\n return\n indent = self.GetLineIndentation(line)\n i_space = indent / self.GetTabWidth()\n ndent = self.GetEOLChar() + self.GetIndentChar() * i_space\n txt = ndent + ((indent - (self.GetTabWidth() * i_space)) * u' ')\n self.AddText(txt)\n\n self.EnsureCaretVisible()", "def __editIndent(self):\n self.activeWindow().indentLineOrSelection()", "def delta_indent(self, delta=1):\n self.manual_push += delta", "def fix_indents(self):\n indent_map = list(map(self._get_indent, self.config_lines_str))\n fixed_indent_map = []\n for i in range(len(indent_map)):\n if i == 0:\n ### Assume the first line is not indented\n fixed_indent_map.append(0)\n continue\n if indent_map[i] == 0:\n fixed_indent_map.append(0)\n continue\n # If indent is same preceding line, copy its indent\n if indent_map[i] == indent_map[i-1]:\n fixed_indent_map.append(fixed_indent_map[-1])\n # If indent is higher that preceding line, increase by one\n elif indent_map[i] > indent_map[i-1]:\n fixed_indent_map.append(fixed_indent_map[-1]+1)\n # If indent is lower that preceding l\n elif indent_map[i] < indent_map[i-1]:\n fixed_indent_map.append(fixed_indent_map[-1]-1)\n for i, val in enumerate(fixed_indent_map):\n self.config_lines_str[i] = \" \"*val + self.config_lines_str[i].strip()\n #print(val, \"'{}'\".format(self.config_lines_str[i]))", "def trace_control_flow(self):\n already_instrumented = set()\n\n offset = self._cfg[0].instructions[0].offset\n total_size, to_insert = self._generate_trace_branch_invocation(\n self._cfg[0].instructions[0].lineno, offset)\n self._adjust(offset, total_size)\n self._cfg[0].instructions = to_insert + self._cfg[0].instructions\n\n for basic_block in self._cfg.values():\n if len(basic_block.edges) == 2:\n for edge in basic_block.edges:\n bb = self._cfg[edge]\n\n if bb.id not in already_instrumented:\n already_instrumented.add(bb.id)\n source_instr = []\n offset = bb.instructions[0].offset\n\n for source_bb in self._cfg.values():\n if bb.id in source_bb.edges and source_bb.instructions[\n -1].reference == offset:\n source_instr.append(source_bb.instructions[-1])\n\n total_size, to_insert = self._generate_trace_branch_invocation(\n bb.instructions[0].lineno, offset)\n\n self._adjust(offset, total_size, *source_instr)\n\n bb.instructions = to_insert + bb.instructions\n\n self._handle_size_changes()", "def position(self):\n return len(self) #, len(self.indentation)", 
"def indent(self):\n cursor = self.parent.textCursor()\n # Check if something is selected\n if cursor.hasSelection():\n # get the line/block nr\n temp = cursor.blockNumber()\n # Move to last line of the selection\n cursor.setPosition(cursor.selectionEnd())\n # calculate range of selection\n diff = cursor.blockNumber() - temp\n # Go over all the selected lines\n for n in range(diff + 1):\n cursor.movePosition(QTextCursor.StartOfLine)\n # insert tab\n cursor.insertText(\"\\t\")\n # move back up\n cursor.movePosition(QTextCursor.Up)\n else:\n # There is no selection, simply insert a TAB\n cursor.movePosition(QTextCursor.StartOfLine)\n cursor.insertText(\"\\t\")", "def indent(fptr, i):\n\n # Return indentation level\n return len(fptr[i]) - len(fptr[i].lstrip())", "def adjust_line_numbers(self, code):\n store_lines = set()\n make_function_lines = set()\n for opcode in itertools.chain.from_iterable(_collect_bytecode(code)):\n if opcode.name.startswith(\"STORE_\"):\n store_lines.add(opcode.line)\n elif opcode.name == \"MAKE_FUNCTION\":\n make_function_lines.add(opcode.line)\n\n def adjust(line, allowed_lines, min_line=1):\n adjusted_line = line\n while adjusted_line not in allowed_lines and adjusted_line >= min_line:\n adjusted_line -= 1\n return adjusted_line if adjusted_line >= min_line else None\n\n # Process type comments.\n for type_comment_set in self._type_comments:\n for line, comment in sorted(type_comment_set.type_comments.items()):\n adjusted_line = adjust(line, store_lines, type_comment_set.start_line)\n if not adjusted_line:\n # vm._FindIgnoredTypeComments will take care of error reporting.\n continue\n if line != type_comment_set.end_line:\n self._errorlog.ignored_type_comment(self._filename, line, comment)\n del type_comment_set.type_comments[line]\n elif adjusted_line != line:\n type_comment_set.type_comments[adjusted_line] = comment\n del type_comment_set.type_comments[line]\n\n # Process decorators.\n for line in sorted(self._decorators):\n adjusted_line = adjust(line, make_function_lines)\n if not adjusted_line:\n log.error(\n \"No MAKE_FUNCTION opcode found for decorator on line %d\", line)\n elif adjusted_line != line:\n self._decorators.add(adjusted_line)\n self._decorators.remove(line)\n\n # Process variable annotations.\n for line, annot in sorted(self._variable_annotations.items()):\n adjusted_line = adjust(line, store_lines)\n if not adjusted_line:\n log.error(\n \"No STORE_* opcode found for annotation %r on line %d\", annot, line)\n del self._variable_annotations[line]\n elif adjusted_line != line:\n self._variable_annotations[adjusted_line] = annot\n del self._variable_annotations[line]", "def check_indent_allowed(self) -> bool:\n return True", "def code_indent_string(self) -> str:\n return \" \" * self._parent_node.code_indent_level", "def indent(self):\r\n editor = self.get_current_editor()\r\n if editor is not None:\r\n editor.indent()", "def indentation_guides(self,event):\n for child in self.app.children:\n child.source.SetIndentationGuides(event.IsChecked())\n self.set('IndentationGuides',event.IsChecked())", "def list_code(self, ofile=sys.stdout):\r\n for i, line in enumerate(self.code().split('\\n')):\r\n print >> ofile, ('%4i' % (i + 1)), line\r\n ofile.flush()", "def log(line):\n try:\n terms = line[:-1].split(' ')\n size[0] += int(terms[-1])\n code = int(terms[-2])\n if code in codes:\n codes[code] += 1\n except:\n pass", "def lineno():\n return str(' - Statement - line number: '+str(inspect.currentframe().f_back.f_lineno))", "def validate_indentation(code: 
str, path: pathlib.Path = None):\n if not isinstance(code, str):\n raise TypeError('code must be string but {} given'.format(type(code)))\n assert path is None or isinstance(path, pathlib.Path), type(path)\n\n lines = code.splitlines(keepends=True)\n whitespace = r'[ \\t]*'\n mixed_indent = r'( {0}\\t{0})|(\\t{0} {0})'.format(whitespace)\n indent_by_spaces = r'[ ]+'\n indent_by_tabs = r'[\\t]+'\n indented_with_spaces = None # type: t.Optional[bool]\n for i, line in enumerate(lines):\n # check if indentation is not mixed\n if re.match(mixed_indent, line) is not None:\n raise ValueError('{}:{} mixed indentation found in {}'.format(\n '<string>' if path is None else path, i, repr(line)))\n\n # check if indentation type is consistent\n if indented_with_spaces is None:\n if re.match(indent_by_spaces, line) is not None:\n indented_with_spaces = True\n elif re.match(indent_by_tabs, line) is not None:\n indented_with_spaces = False\n elif indented_with_spaces:\n if re.match(indent_by_tabs, line) is not None:\n raise ValueError(\n '{}:{} after space indent in previous lines, tab indent found in {}'\n .format('<string>' if path is None else path, i, repr(line)))\n else:\n if re.match(indent_by_spaces, line) is not None:\n raise ValueError(\n '{}:{} after tab indent in previous lines, space indent found in {}'\n .format('<string>' if path is None else path, i, repr(line)))", "def lineno():\n return \"line \" + str(inspect.currentframe().f_back.f_lineno) + \": \"", "def codeBlock( self, text ):\n indent= self.context[-1]\n lines= text.split( '\\n' )\n if len(lines) == 1: # Fragment with no newline.\n self.write('{!s}{!s}'.format(self.lastIndent*' ', lines[0]) )\n self.lastIndent= 0\n self.fragment= True\n else:\n first, rest= lines[:1], lines[1:]\n self.write('{!s}{!s}\\n'.format(self.lastIndent*' ', first[0]) )\n for l in rest[:-1]:\n self.write( '{!s}{!s}\\n'.format(indent*' ', l) )\n if rest[-1]:\n self.write( '{!s}{!s}'.format(indent*' ', rest[-1]) )\n self.lastIndent= 0\n self.fragment= True\n else:\n # Buffer a next indent\n self.lastIndent= len(rest[-1]) + indent\n self.fragment= False", "def OldStartingIndex(self) -> int:", "def _check_indentation(self, indentation):\n if self._expected_indentation is None:\n self._expected_indentation = indentation\n return\n if indentation != self._expected_indentation:\n self.tokenizer.syntax_error(\"Inconsistent indentation.\")", "def IndentBuffer(self):\n self.buffergrade += 1\n self.buffers[self.buffergrade] = []", "def set_indent_level(self, indent_level):\n self.indent_level = indent_level", "def check_indent_allowed(self) -> bool:\n return False", "def check_indent_allowed(self) -> bool:\n return False", "def lineno():\r\n\treturn inspect.currentframe().f_back.f_lineno", "def indent(self):\n cursor = self.editor.textCursor()\n assert isinstance(cursor, QtGui.QTextCursor)\n if cursor.hasSelection():\n self.indent_selection(cursor)\n else:\n # simply insert indentation at the cursor position\n tab_len = self.editor.tab_length\n cursor.beginEditBlock()\n if self.editor.use_spaces_instead_of_tabs:\n nb_space_to_add = tab_len - cursor.positionInBlock() % tab_len\n cursor.insertText(nb_space_to_add * \" \")\n else:\n cursor.insertText('\\t')\n cursor.endEditBlock()", "def get_linenumber():\n\n # inspect.stack()[0][2] returns line number in this function\n lineno = str(inspect.stack()[1][2])\n\n return lineno", "def currentLineno():\n cf = inspect.currentframe()\n return cf.f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", 
"def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def depth_from_indentation(function):\n def wrap(start, values):\n #print 'Depth %d | %d %s' %(self._depth, start, values)\n #self._depth = start\n self._current_node = function(values)\n #print self._current_node\n return ''\n\n return wrap", "def GetIfIndex(self):\n return self.indentindex[-1]", "def write_code(self, code):\n self.buffer.scope_line(code.lstrip(' \\t'))", "def _code_indices(self) -> Tuple[int, ...]:\n return tuple(idx for idx, seg in enumerate(self.segments) if seg.is_code)", "def get_debug_index(name=None):\r\n # Set group and index for each debug view\r\n breakpoint_group = get_value(S.KEY_BREAKPOINT_GROUP, -1)\r\n breakpoint_index = get_value(S.KEY_BREAKPOINT_INDEX, 0)\r\n context_group = get_value(S.KEY_CONTEXT_GROUP, -1)\r\n context_index = get_value(S.KEY_CONTEXT_INDEX, 0)\r\n stack_group = get_value(S.KEY_STACK_GROUP, -1)\r\n stack_index = get_value(S.KEY_STACK_INDEX, 0)\r\n watch_group = get_value(S.KEY_WATCH_GROUP, -1)\r\n watch_index = get_value(S.KEY_WATCH_INDEX, 0)\r\n\r\n # Create list with all debug views and sort by group/index\r\n debug_list = []\r\n debug_list.append((breakpoint_group, breakpoint_index, TITLE_WINDOW_BREAKPOINT))\r\n debug_list.append((context_group, context_index, TITLE_WINDOW_CONTEXT))\r\n debug_list.append((stack_group, stack_index, TITLE_WINDOW_STACK))\r\n debug_list.append((watch_group, watch_index, TITLE_WINDOW_WATCH))\r\n debug_list.sort(key=operator.itemgetter(0,1))\r\n\r\n # Recalculate group/index position within boundaries of active window\r\n window = sublime.active_window()\r\n group_limit = window.num_groups()-1\r\n sorted_list = []\r\n last_group = None\r\n last_index = 0\r\n for debug in debug_list:\r\n group, index, title = debug\r\n # Set group position\r\n if group > group_limit:\r\n group = group_limit\r\n # Set index position\r\n if group == last_group:\r\n last_index += 1\r\n else:\r\n index_limit = len(window.views_in_group(group))\r\n if index > index_limit:\r\n index = index_limit\r\n last_group = group\r\n last_index = index\r\n # Add debug view with new group/index\r\n sorted_list.append((group, last_index, title))\r\n # Sort recalculated list by group/index\r\n sorted_list.sort(key=operator.itemgetter(0,1))\r\n\r\n # Find specified view by name/title of debug view\r\n if name is not None:\r\n try:\r\n return [view[2] for view in sorted_list].index(name)\r\n except ValueError:\r\n return None\r\n\r\n # List with all debug views\r\n return sorted_list", "def visit(self, token: tokenize.TokenInfo) -> None:\n self._check_extra_indentation(token)", "def reset_indentation(self, amount):\n while self.result and self.result[-1][0] == INDENT:\n self.result.pop()\n self.result.append((INDENT, amount))", "def lines_of_code(project: Project) -> int:\n ret = sh.cloc(\"--quiet\", \"--include-lang=Python\", \"--yaml\", str(project.root))\n ret_obj = list(yaml.safe_load_all(str(ret)))\n return ret_obj[0][\"Python\"][\"code\"]", "def computeIndentationLevel(indentChars):\n # DOC {{{\n # }}}\n\n # CODE {{{\n # initialize the indentation level\n indentLevel = 0\n\n # compute the indentation level (expand tabs) {{{\n for char in indentChars:\n if (char == '\\t'):\n indentLevel += SimplePythonTagsParser.TABSIZE\n 
else:\n indentLevel += 1\n # }}}\n\n # return the computed indentation level\n return indentLevel\n # }}}", "def GetLineno():\n return inspect.currentframe().f_back.f_lineno", "def get_code(self, indent=0):\n indent = indent * ' '\n return '\\n'.join([indent + line for line in self._lines])", "def lineno():\n\treturn inspect.currentframe().f_back.f_lineno", "def indentation(self, text):\n\n tab = text.rfind(' '*4)\n\n if tab != -1: \n if tab%4 == 0:\n if tab//4 + 1 == self.indent:\n return True\n\n else:\n self.indent = tab//4 + 1\n return False\n \n else:\n return True\n\n else:\n return True", "def indentation(self):\n return self.options.indentation_char * sum(self._indentation_levels)", "def increment_instr(self):\n self.instruction_count += 1", "def test_incorrect_indent(self, x=1, y=2): # noqa: D207, D213, D407", "def NewStartingIndex(self) -> int:", "def test_with_custom_indent(self):\n self.assertEqual(indent('foo', 3), ' foo')", "def set_visual_indent(self, indent):\n self._visual_indent = indent", "def _mk_index(self):\n index = defaultdict(list)\n for line_no, line in enumerate(self._stripped_lines):\n if line:\n index[line].append(line_no)\n return index", "def outerLineno():\n cf = inspect.currentframe()\n return cf.f_back.f_back.f_lineno", "def go_py_enumerate_start():\n for i,k in enumerate(list(range(1,5)), 5):\n print(i, k)", "def __enter__(self) -> T:\r\n from apysc.type import revert_interface\r\n self._snapshot_name = \\\r\n revert_interface.make_snapshots_of_each_scope_vars(\r\n locals_=self._locals, globals_=self._globals)\r\n i_or_key: Union[Int, String]\r\n if isinstance(self._arr_or_dict, Array):\r\n i_or_key = Int(0)\r\n self._append_arr_enter_expression(i=i_or_key)\r\n else:\r\n i_or_key = String('')\r\n self._append_dict_enter_expression(key=i_or_key)\r\n self._indent.__enter__()\r\n return i_or_key # type: ignore\r", "def lineno():\n\n return inspect.currentframe().f_back.f_lineno", "def test_back_to_indentation(self):\n before_b = \"\"\"\\\n first line\n line 1\n line a\n line b\n line c\n last line\n \"\"\"\n after_b = \"\"\"\\\n first line\n line 1\n line a\n line b\n line c\n last line\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"4.13\", \"4.13\"),\n after_sel=(\"4.8\", \"4.8\"),\n command_name=\"back-to-indentation\",\n )", "def retab(code):\n tabs, tabbed_code = 0, \"\"\n for line in code.split(\"\\n\"):\n if line.strip() == \"}\":\n tabs -= 1\n\n tabbed_code += tabs * \"\\t\" + line + \"\\n\"\n if line.strip().endswith(\"{\"):\n tabs+=1\n\n return tabbed_code", "def tab_insert_indent():\n before_cursor = get_app().current_buffer.document.current_line_before_cursor\n\n return bool(before_cursor.isspace())", "def GetIndentSize(self):\r\n \r\n return 0", "def tabing_tool(code):\n for i, line in enumerate(code):\n code[i] = ' '*4 + line\n return code", "def indent(text, *args):\n _, module_name, line_no, *_ = inspect.stack()[1]\n module_info = _get_module_info(module_name)\n module_source, template_source = module_info.code, module_info.source\n\n source_map = ModuleInfo.get_module_source_metadata(\n module_source,\n full_line_map=True\n )\n\n line_map = source_map['full_line_map']\n template_ln_no = line_map[line_no - 1]\n template_line = template_source.split('\\n')[template_ln_no - 1]\n\n indent = re.match('[ \\t]*', template_line).group(0)\n return indent.join(x for x in text.splitlines(keepends=True))", "def outerLineno2():\n cf = inspect.currentframe()\n return cf.f_back.f_back.f_back.f_lineno", "def 
line_indentation(line):\n line = line.replace(\"\\t\", \" \" * 8)\n return len(line) - len(line.lstrip())", "def analyzePythonCode(self, sourceFile):\n numLines = 0 # Number of lines of code\n numDocStr = 0 # Number of doc strings in code\n numComments = 0 # Number of comments in the code\n numDefs = 0 # Number of functions\n numClasses = 0 # Number of classes\n f=self.openFile(sourceFile)\n for line in f:\n numLines += 1;\n loc = 0\n while (loc != -1): #count the # of times the '#' characters appears\n loc = line.find(\"#\", loc)\n if (loc != -1):\n loc += 1\n numComments += 1\n loc = 0\n while (loc != -1):\n loc = line.find('\"#', loc) #discount the # of times the '#' char appears as the 1st char in double quotes (skip hex constants)\n if (loc != -1):\n loc += 1\n numComments -= 1\n loc = 0\n while (loc != -1):\n loc = line.find(\"'#\", loc) #discount the # of times the '#' char appears as the 1st char in single quotes (skip hex constants)\n if (loc != -1):\n loc += 1\n numComments -= 1\n loc = 0\n while (loc != -1): #count the # of ''' found\n loc = line.find(\"'''\", loc)\n if (loc != -1):\n loc += 1\n numDocStr += 1\n loc = 0\n while (loc != -1): #count the # of \"\"\" found\n loc = line.find('\"\"\"', loc)\n if (loc != -1):\n loc += 1\n numDocStr += 1\n\n if line.strip(AutoGrader.Const.PYTHON_WHITE_SPACES) != '':\n if line.strip(AutoGrader.Const.PYTHON_WHITE_SPACES).split()[0] == 'def': #count # of defs\n numDefs += 1\n if line.strip(AutoGrader.Const.PYTHON_WHITE_SPACES).split()[0] == 'class': #count # of classes\n numClasses += 1\n \n f.close()\n numDocStr /= 2 #assume that the \"\"\" and ''' chars appear in pairs \n return numLines, numDocStr, numComments, numDefs, numClasses", "def _check_brackets(line_index, input_line):\n global _total_lines_of_code\n if input_line.endswith('{') or input_line.endswith('}'):\n _code_lines.append(line_index)\n _total_lines_of_code += 1", "def fixIndentation(code, newIndent, governingLine=0):\n\tcodeLines = [line for line in code.split(\"\\n\")]\n\treserved, codeLines = codeLines[:governingLine], codeLines[governingLine:]\n\twhile codeLines:\n\t\tif codeLines[0].strip():\n\t\t\tfirstIndent = re.match(\"^\\s*\", codeLines[0]).group()\n\t\t\tbreak\n\t\telse:\n\t\t\treserved.append(codeLines.pop(0))\n\tif codeLines:\n\t\tfixedLines = []\n\t\tfor line in codeLines:\n\t\t\tif not line.strip():\n\t\t\t\tfixedLines.append(newIndent)\n\t\t\telse:\n\t\t\t\tif line[:len(firstIndent)]!=firstIndent:\n\t\t\t\t\traise Error(\"Bad indent in line %s\"%repr(line))\n\t\t\t\tfixedLines.append(newIndent+line[len(firstIndent):])\n\telse:\n\t\tfixedLines = codeLines\n\treserved = [newIndent+l.lstrip() for l in reserved]\n\treturn \"\\n\".join(reserved+fixedLines)", "def segment_counter(self, _):\n raise NotImplementedError(\n \"We do not support externally altering the segment counter\")", "def change_indent_class(self, delta=1):\n handlers = self.handlers\n if len(handlers) > 0:\n formatter = handlers[-1].formatter\n if isinstance(formatter, IndentFormatter):\n formatter.delta_indent(delta)", "def initial_indentation(self):\n if self._indent_first_line[-1] is None:\n return self.indentation\n else:\n return self._indent_first_line[-1]", "def indentation(self) -> str:\n return self._indent" ]
[ "0.61810654", "0.60863245", "0.5817759", "0.5709027", "0.5614261", "0.5614261", "0.5520226", "0.55129415", "0.54851073", "0.54282254", "0.53754544", "0.53624535", "0.532262", "0.52931166", "0.5262821", "0.52572817", "0.5244937", "0.52138895", "0.5192445", "0.5161051", "0.51534206", "0.5143496", "0.5142681", "0.5134809", "0.51189923", "0.5105761", "0.5100565", "0.5091507", "0.50707287", "0.50539064", "0.4995846", "0.49858242", "0.49855903", "0.49808475", "0.49735138", "0.49589375", "0.4958566", "0.49576384", "0.49451044", "0.49448788", "0.49341252", "0.4929665", "0.4926276", "0.49191678", "0.49140555", "0.48937544", "0.48883882", "0.4866934", "0.48590243", "0.4858238", "0.4858238", "0.48463863", "0.48313448", "0.48274112", "0.48248747", "0.4822265", "0.4822265", "0.4822265", "0.4822265", "0.4822265", "0.4822265", "0.481904", "0.47991616", "0.4791882", "0.4786208", "0.47810692", "0.47809482", "0.4779615", "0.4777962", "0.4777014", "0.47757173", "0.47744775", "0.47731042", "0.47706124", "0.47694176", "0.47655708", "0.47590554", "0.4758674", "0.4757131", "0.47478876", "0.47303745", "0.47168455", "0.47150558", "0.47140777", "0.4703775", "0.46973002", "0.46947664", "0.46758246", "0.46681008", "0.46589306", "0.46574467", "0.46535122", "0.46490806", "0.4645168", "0.46449414", "0.46442273", "0.46381527", "0.46328014", "0.46283922", "0.46177417" ]
0.70099694
0
Get the last tracked code indentation index for reference.
def GetIfIndex(self): return self.indentindex[-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def last_index(self) -> int:\n return self._last_index", "def indentation_level(self):\n return self._indentation_levels[-1]", "def last_sequence_ind(self,):\n return self.last_sequence_ind_", "def _get_last_code_line():\n return max(_code_lines) + 2", "def get_last_index(self):\n return len(self.chain) - 1", "def linenum(self):\n return self.source_frame_stack.linenum()", "def current_index(self) -> int:\n return self._current_index", "def GetLineno():\n return inspect.currentframe().f_back.f_lineno", "def last_position(self):\n return self.visited_positions[-1]", "def last_pos(self):\n return self.locs[self.indices[-1], 2:4]", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\n return inspect.currentframe().f_back.f_lineno", "def lineno():\r\n\treturn inspect.currentframe().f_back.f_lineno", "def lineno():\n\treturn inspect.currentframe().f_back.f_lineno", "def lineno():\n\n return inspect.currentframe().f_back.f_lineno", "def current_index(self):\n return self._current_index", "def get_last_step(self):\n return self.get_step_by_index(-1)", "def outerLineno2():\n cf = inspect.currentframe()\n return cf.f_back.f_back.f_back.f_lineno", "def currentLineno():\n cf = inspect.currentframe()\n return cf.f_back.f_lineno", "def get_current_line(self, document):\r\n return document.get_iter_at_mark(document.get_insert()).get_line() + 1", "def get_linenumber():\n\n # inspect.stack()[0][2] returns line number in this function\n lineno = str(inspect.stack()[1][2])\n\n return lineno", "def depth(self):\n return self.loc.index(-1) -1", "def _get_end_index(self):\n return max(self.index + self.source_window,\n self._get_target_index() + self.target_window)", "def get_end_loc(self) -> Tuple[int, int]:\n assert self.pos_marker\n return self.pos_marker.working_loc_after(\n self.raw,\n )", "def outerLineno():\n cf = inspect.currentframe()\n return cf.f_back.f_back.f_lineno", "def get_current(self) -> int:\n return self._current", "def getLastBlock(self):\n if (len(self.chain) == 0):\n return -1\n else:\n return self.chain[len(self.chain)-1]", "def lineno(self):\n return str('line: ' + str(inspect.currentframe().f_back.f_lineno))", "def get_curpos(self):\n for i in range(len(self.tree)):\n if self.path == self.tree[i][2]:\n return i\n else:\n return -1", "def get_node_loc(node):\n lineno = node.lineno\n end_lineno = get_last_deep_child(node).lineno\n return end_lineno - lineno", "def get_ct_last_index_connection(self):\n return self.m_connection.ct_last_index", "def last(self):\n return self._make_position(self._trailer._prev)", "def last_block(self):\n return self.chain[-1]", "def last_block(self):\n return self.chain[-1]", "def get_parent_index(self):\n return (self.index - 1) // 2", "def lineno():\n return \"line \" + str(inspect.currentframe().f_back.f_lineno) + \": \"", "def lastline(node):\n return max( [ node.lineno if hasattr(node,'lineno') else -1 , ]\n +[lastline(child) for child in ast.iter_child_nodes(node)] )", "def get_current_token(self):\n with self._lock:\n if self._unfinished_ids:\n return self._unfinished_ids[0] - self._step\n\n return self._current", "def last_index(self, item):\n return _(self.size()._ - 1 - self.reverse().index(item)._)", "def get_current_index(self):\n assert(self.is_started())\n 
return self.currIndex", "def dcurrent(self):\n return self.dirlist[-1]", "def position_last(self):\n return self._position_last", "def get_last_tab_id():\n return list(get_tabs())[-1]", "def editor_line(self) -> int:\n return self.raw_line # raw_line is already one-indexed.", "def current_index(self):\n job = self.client.query(\"SELECT MAX(ID) FROM {}.{};\".format(self.database_name, self.table_name))\n for row in job.result():\n if row[0] == None:\n return 1\n current_index = row[0] + 1\n return current_index", "def PopIfIndex(self):\n return self.indentindex.pop()", "def get_last(self):\n return self.get_block(len(self.chain)-1)", "def get_current_index(self):\r\n return self.contents_widget.currentRow()", "def indent_level(self):\n return len(self._tagstack) - 1", "def lineno(self):\n return str(' Line: ' + str(inspect.currentframe().f_back.f_lineno))", "def index(self):\n return self._ll_tree.get_index()", "def get_latest_locant(self):\n return self._next_locant - 1", "def get_line_no(obj):\n try:\n lineno = getsourcelines(obj)[1]\n except:\n # no code found\n lineno = None\n return lineno", "def get_lineno(self):\n return self.lexer.get_lineno()", "def last_block(self):\n return self.chain[len(self.chain) - 1]", "def get_last_revision(self):\n return self.index.get_index_revision(self.name)", "def peek(self) -> int:\n return self.stack[len(self.stack)-1]", "def _previous(self):\n return self.token_list[self._current - 1]", "def i (self):\n\n return self.end - 1", "def target_location(self):\n lst = self.cell_list()\n return lst[-1]", "def position(self):\n return len(self) #, len(self.indentation)", "def getLastPlotIndexKey(self):\n return self._lineIndex-1", "def result(self):\n # most pythonic way to get last in last is -1\n return self.history[-1]", "def getCurr(self):\r\n if self.currLoc < 0 or self.currLoc > len(self.history):\r\n raise ValueError(\"History is empty or invalid\")\r\n return self.history[self.currLoc]", "def lineage(self) -> 'lngmod.Level':\n return self._parent", "def final_instr(self):\n\n return self.instr_instances[-1]", "def _get_yacc_lookahead_token(self):\n return self.lexer.last_token", "def last(self, trace):\n return trace[-1]", "def current(cls):\n return stackless.getcurrent()", "def get_end_brace(self):\n # Find the code to run\n\n brack_num, found_first = 0, False\n for iline, line in enumerate(self.file_ltxt[self.line_num:]):\n if '{' in line: brack_num += 1\n if '}' in line: brack_num -= 1\n\n if not found_first:\n if brack_num > 0: found_first = True\n else: continue\n\n if brack_num == 0: break\n\n else:\n self.print_error(\"Can't find the closing brace\")\n\n end_line = self.line_num + iline\n return end_line", "def get_oldest_index(self):\n for k in self.go_forward(start=self.write_pos):\n if self.log[k] != None:\n break\n return k", "def _last_node(self):\n if self.trail[-1][1] is None or self.trail[-1][1].group():\n return self.trail[-1][0]\n else:\n return self.trail[-2][0]", "def end(self):\n if len(self._trace) == 0:\n return 0\n return self._trace.keys()[-1]", "def get_last_block(self) -> Block:\n return self.blockchain[-1]", "def curr_name(self):\n return self.name_stack[-1]", "def _get_lback_index(self, model, last) -> int:\n assert last > 0\n # last state cannot be loop-back.\n assert model.get_value(self.totime(self._in_loop, last)).is_true()\n assert model.get_value(self.totime(self._in_loop, 0)).is_false()\n idx = last - 1\n while model.get_value(self.totime(self._in_loop, idx)).is_true():\n idx -= 1\n assert idx >= 0\n assert 
model.get_value(self.totime(self._in_loop, idx + 1)).is_true()\n assert model.get_value(self.totime(self._in_loop, idx)).is_false()\n assert model.get_value(self.totime(self.start_loop, idx)).is_true()\n return idx", "def get_cloc(self):\n return self.order_hist[-1]", "def stop(self):\n try:\n return self.index[-1]\n except:\n pass", "def getStackPosition(self):\r\n return self.callstack.getStack()", "def lineno():\n return str(' - Statement - line number: '+str(inspect.currentframe().f_back.f_lineno))", "def get_last_token(node: ast.AST) -> Token:\n return node.last_token # type: ignore", "def getLastEdgeNumber(self):\n return self.__lastEdgeNumber", "def get_last_framenumber(self):\n return self._frame_number", "def Current (cls):\n if cls.__ContextStack:\n return cls.__ContextStack[-1]\n return None", "def lineno():\n linenum = inspect.currentframe().f_back.f_lineno\n frameinfo = inspect.getframeinfo(inspect.currentframe())\n filename = frameinfo.filename\n return str(\"File: \" + str(filename) + \" Line: \" + str(linenum))", "def get_previous_index(self, current_index_string):\n # current index is a string, so cast to int\n current_index = int(current_index_string)\n\n return current_index-1", "def get_previous_block(self):\r\n return self.chain[-1] # Return the previous block\r", "def last_start(self):\n return self._last_start", "def get_last_blockchain_value():\n return blockchain[-1]", "def get_current_id(self):\n\n id = self.ids[-1]\n\n if id is None:\n raise KeyError()\n\n return id", "def _index(self) -> int:\n return -1", "def get_frame(self):\n return self.last_frame", "def linenum(self):\n return self.tos().linenum", "def next_position(self) -> List[int]:\n return self.__path_to_end()[1]", "def position(self):\n return self.stack.position()", "def get_sequence(self):\n self.__sequence = self.__sequence + 1\n return self.__sequence - 1" ]
[ "0.6989419", "0.68731844", "0.67984694", "0.65894204", "0.64901143", "0.64773256", "0.63904345", "0.6377186", "0.6306196", "0.6305273", "0.62961644", "0.62961644", "0.62961644", "0.62961644", "0.62961644", "0.62961644", "0.6286268", "0.62604773", "0.6259666", "0.61679256", "0.6141181", "0.6129842", "0.61106765", "0.60988796", "0.60984886", "0.60974604", "0.6097398", "0.607525", "0.60750955", "0.6071579", "0.6048553", "0.6042196", "0.6034944", "0.6022076", "0.6000809", "0.5994906", "0.59804094", "0.59804094", "0.59637433", "0.5955169", "0.593416", "0.591608", "0.591352", "0.5912524", "0.58957005", "0.589065", "0.58820856", "0.5874631", "0.5873716", "0.5846394", "0.5833156", "0.58206415", "0.5818932", "0.5798506", "0.57966536", "0.57901925", "0.57875365", "0.5773993", "0.57638645", "0.5763738", "0.5759113", "0.5756827", "0.5750298", "0.57480013", "0.57391024", "0.5737851", "0.57234067", "0.57221764", "0.5720618", "0.571175", "0.57061654", "0.569852", "0.56937647", "0.56839633", "0.5679437", "0.56776464", "0.56661016", "0.5657947", "0.564409", "0.5638953", "0.5638331", "0.5619061", "0.56178516", "0.56173164", "0.56054175", "0.5601731", "0.5594067", "0.5591657", "0.5589564", "0.55887556", "0.5583808", "0.55742663", "0.5573748", "0.5557299", "0.55551", "0.5551185", "0.5546666", "0.55404425", "0.5539483", "0.5538874" ]
0.66052157
3
Pop (get and remove) the last code indentation index tracked.
def PopIfIndex(self): return self.indentindex.pop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _decreaseindentation(self):\n self._curindent = self._indentlist.pop()", "def pop(self) -> int:\n return self.stack.pop()", "def pop(self) -> int:\n return self._stack.pop()", "def pop(self) -> int:\n for i in range(len(self.stack) - 1):\n self.stack.append(self.stack.pop())\n return self.stack.pop()", "def pop_current_line(self):\n self.current_line.pop()", "def indentation_level(self):\n return self._indentation_levels[-1]", "def pop(self) -> int:\n self._maybe_prepare_output_stack()\n return self._output_stack.pop()", "def pop(self) -> int:\n tmp = self.stack[-1]\n for i in range(len(self.stack)-1):\n self.stack.append(self.stack.popleft())\n self.stack.popleft()\n return tmp", "def DeIndentBuffer(self):\n if self.buffergrade == 0:\n raise Exception(\"You can't deindent more.\")\n self.buffergrade -= 1\n tmp = self.buffers[self.buffergrade + 1]\n del self.buffers[self.buffergrade + 1]\n return tmp", "def dedent(self):\n self._indent_first_line.pop()\n return self._indentation_levels.pop()", "def pop(self):\n try:\n frame = self.stack.pop()\n return frame[0]\n except:\n pass", "def scope_pop(self) -> None:\n self.scope_stack.popleft()", "def pop(self) -> int:\n self.move()\n return self.outStack.pop()", "def pop_at(self, index):\n if len(self.stacks[index]) < 1:\n return\n popped = self.stacks[index].pop()\n if index == len(self.stacks)-1:\n return popped\n for i in range(index, len(self.stacks)-1):\n # append to last operation\n self.stacks[i].append(self.stacks[i+1].pop(0))\n if len(self.stacks[-1]) < 1:\n self.stacks.pop()\n return popped", "def pop(self):\n return self.stack.pop(0)", "def pop_last(self):\n self.pop_item(-1)", "def pop(self):\n if not self.isEmpty():\n self.top -= 1\n return self.stack.pop()\n else:\n raise Exception(\"Stack Underflow\")", "def pop(self, index=-1):\n if not self.stack:\n raise ReversePolishCalcError, \"Stack is empty\"\n try:\n del self.stack[index]\n except IndexError:\n errmsg = \"Cannot pop element '%s' from stack\" % index\n raise ReversePolishCalcError, errmsg\n return self.stack", "def pop(self):\n old = self.stack.pop()\n if self.stack:\n self.current = self.stack.pop()\n else:\n self.current = None\n return old", "def pop(self):\n return self.stack.pop()", "def pop(self) -> int:\r\n return self.items.pop(0)", "def pop(self) -> int:\n tmp = list()\n while self.stack:\n tmp.append(self.stack.pop())\n \n ret = tmp.pop()\n self.head = tmp[-1] if tmp else None\n while tmp:\n self.stack.append(tmp.pop())\n \n print(self.stack)\n return ret", "def GetIfIndex(self):\n return self.indentindex[-1]", "def pop(self):\n\n return self.stack.pop()", "def pop(self):\n return self.the_stack.pop()", "def pop(self):\n item = self.stack[-1]\n self.stack = self.stack[:-1]\n return item", "def pop(self):\n return self._stack.pop()", "def pop(self):\n stack = self.stack\n if len(stack)>1:\n stack.pop()\n self._setspaces()", "def rollback(self):\n if len(self.__stack) == 0:\n raise EmptyStackException()\n self.__current_pos = self.__stack[-1][0]\n self.line = self.__stack[-1][1]\n self.linePos = self.__stack[-1][2]\n self.__stack = self.__stack[:-1]", "def pop(self):\n if self.stack:\n return self.stack.pop()", "def leave(self):\n assert(self.indent > 0)\n self.indent -= 1", "def indent_level(self):\n return len(self._tagstack) - 1", "def goBack(self):\r\n if self.currLoc > 0:\r\n self.currLoc -= 1\r\n return self.history[self.currLoc]", "def pop(self) -> None:\n \n self.minStack.pop()\n return self.stack.pop()", "def pop(self) -> int:\n if 
len(self.output_stack) == 0:\n for _ in range(len(self.input_stack)):\n tmp = self.input_stack.pop()\n self.output_stack.append(tmp)\n\n return self.output_stack.pop()", "def pop(self) -> int:\n if self.input_stack is None and self.output_stack is None:\n return None\n else:\n for i in range(0,len(self.input_stack)):\n self.output_stack.append(self.input_stack.pop())\n return self.output_stack.pop()", "def pop(self):\n return self.stacks[self.activeStackIndex].pop()", "def pop(self):\n\n self.__max_stack.pop()\n\n return self.__stack.pop()", "def pop(state):\n return state.env.stack.pop()", "def remove(self):\n return self.stack_list.pop()", "def pop(self):\n x = -1\n if self.top == -1:\n print(\"Stack is Underflow\")\n else:\n x = self.arr[self.top]\n self.top -= 1\n return x", "def stack_pop(self):\n value = self.stack.pop()\n\n return value", "def pop(self) -> object:\n if self.is_empty()== True: # if size of array is 0, raise exception\n raise StackException\n else:\n top_stack = self.da.get_at_index(self.size()-1) # initialize the top of the stack (last element)\n self.da.remove_at_index(self.size()-1) # remove it\n return top_stack # return variable\n pass", "def peek(self) -> int:\n if len(self.stackOut) ==0:\n while len(self.stackIn) !=0:\n y = self.stackIn.pop()\n self.stackOut.append(y)\n return self.stackOut[-1]\n else:\n return self.stackOut[-1]", "def pop(self) -> int:\n return self._deque.pop(0)", "def get(self):\n return self.stack.pop()", "def undo(self) -> CompilerEnv:\n if not self.stack:\n return\n self.env.close()\n self.env = self.stack.pop()\n return self.env", "def _get_last_code_line():\n return max(_code_lines) + 2", "def pop(self):\n self._stack.pop()", "def pop(self):\n try:\n item = self._items.pop()\n # This operation decrements the number of items\n # in the stack, we need to update the count variable\n self._update_count()\n return item\n except IndexError:\n raise IndexError(\"Stack is empty\")", "def undo(self):\n self.setIndex(self._index-1)", "def pop(self):\n try:\n return self._items.pop()\n except:\n raise IndexError('The stack is empty.')", "def pop(self): ##################### <-\n value = self.lst[-1]\n self.lst = self.lst[:-1]\n return value", "def BackTab(self):\n sel = self.GetSelection()\n if sel[0] == sel[1]:\n # There is no selection\n cpos = self.GetCurrentPos()\n cline = self.GetCurrentLine()\n cipos = self.GetLineIndentPosition(cline)\n if cpos <= cipos:\n # In indentation so simply backtab\n super(EditraBaseStc, self).BackTab()\n else:\n # In middle of line somewhere\n text = self.GetLine(cline)\n column = max(0, self.GetColumn(cpos) - 1)\n if len(text) > column and text[column].isspace():\n\n # Find the end of the whitespace\n end = column\n while end < len(text) and \\\n text[end].isspace() and \\\n text[end] not in '\\r\\n':\n end += 1\n\n # Find the start of the whitespace\n end -= 1\n start = end\n while end > 0 and text[start].isspace():\n start -= 1\n\n diff = end - start\n if diff > 1:\n # There is space to compress\n isize = self.GetIndent()\n if isize < diff:\n # More space than indent to remove\n repeat = isize\n else:\n # Less than one indent width to remove\n repeat = end - (start + 1)\n\n # Update the control\n self.BeginUndoAction()\n self.SetCurrentPos(cpos + (end - column))\n for x in range(repeat):\n self.DeleteBack()\n self.EndUndoAction()\n\n else:\n # There is a selection\n super(EditraBaseStc, self).BackTab()", "def pop(self):\n return self.history.pop()", "def pop(self):\n if len(self._data) == 0:\n raise 
StackError(\"Stek je prazan. Ne moze se izvrsiti funkcija pop.\")\n\n vrh_steka = self._data[-1]\n del self._data[-1]\n return vrh_steka", "def unindent(self):\r\n editor = self.get_current_editor()\r\n if editor is not None:\r\n editor.unindent()", "def pop(self):\n try:\n if self.size() > 0:\n top = self.top()\n self.items.pop()\n return top\n else:\n raise IndexError('Cannot pop item, stack is empty.')\n except IndexError as err:\n print(err)\n raise", "def pop(self):\n if self.is_empty():\n raise Exception(\"Stack is empty.\")\n\n self.size -= 1\n return self.arr.pop()", "def unget(self):\n self.lookahead += 1\n \n if self.lookahead == 4: \n raise ParseError(\"PANIC: too much lookahead!\", self.fileId, self.line)\n \n self.tokenIndex = (self.tokenIndex - 1) & 3", "def pop(self):\r\n return self.buff.pop(-1)", "def peek(self) -> int:\n return self.stack[len(self.stack)-1]", "def remove_last_item_from_stack(self):\n if self.length > 0:\n return self.container.pop()\n else:\n return None", "def peek(self) -> int:\n self.move()\n return self.outStack[-1]", "def exp_pop(self) -> Any:\n return self.exp_stack.popleft()", "def pop(self, idx):\n tmp = np.copy(self.arr[idx])\n self.arr[idx] = self.arr[self.current-1] # the last one is moved before\n self.current -= 1\n return tmp", "def pop():\n stack = _get_stack()\n return _pop(stack)", "def pop(self):\n if self.stack:\n return self.stack.pop()\n return None", "def pop(self):\n return self.new_dll.shift()", "def __exit__(self, *_) -> None:\n self.__cursor.top.pop()", "def pop(self) -> int:\n\t\treturn self.s1.pop()", "def Pop(self):\n # Alternativly use built-in pop()\n #return self.list.pop()\n top = self.list[len(self.list) - 1]\n self.list.remove(top)\n return top", "def pop(self):\n if len(self._substacks) == 0:\n raise Exception('Stack is empty.')\n else:\n if self._substacks[self._current_stack_index].size() == 1: # if last element in current stack,\n deleted_element = self._substacks[self._current_stack_index].pop()\n self._substacks.pop(self._current_stack_index) # remove the empty stack\n self._current_stack_index -= 1\n else:\n deleted_element = self._substacks[self._current_stack_index].pop()\n return deleted_element", "def pop(self) -> int:\n return self.q.popleft()", "def top(self) -> int:\n return self.stack[-1]", "def top(self) -> int:\n return self.stack[-1]", "def deleteScope():\n global currScope\n scopeStack.pop()\n currScope = scopeStack[-1]", "def pop(self):\n self.restore(self.stack.pop())", "def cleartomark():\n stack = currentframe().f_back.f_locals.setdefault(SN, [])\n obj = object()\n while obj is not MARK:\n try:\n obj = stack.pop()\n except IndexError:\n pass\n if stack:\n return stack[-1]\n return MARK", "def pop(self):\n b = self.a[-1]\n del self.a[-1]\n return b", "def trace_stack_pop(trace_stack_var: ContextVar) -> None:\n trace_stack = trace_stack_var.get()\n trace_stack.pop()", "def _goBack(self) -> None:\n if abs(self._stackIndex) < len(self._pathStack):\n self._stackIndex -= 1\n self._openPath(path=self._pathStack[self._stackIndex], ignoreStack=True)", "def top(self):\n if self.stack:\n return self.stack[-1]\n raise ValueError", "def stop(self):\n try:\n return self.index[-1]\n except:\n pass", "def pop(self) -> int:\n #popleft()用于collections中,不带参数\n while len(self.data) > 1:\n self.help.append(self.data.popleft())\n tmp = self.data.popleft()\n self.data, self.help = self.help, self.data\n return tmp", "def pop(self):\n if self.stack == [] and self.maxx == []:\n return None\n \n if self.stack[-1] == 
self.maxx[-1]:\n self.maxx.pop(-1)\n return self.stack.pop(-1)", "def pop(self):\n return self.path.pop(0)", "def top(self):\n return self.stack[-1]", "def unindent(self):\n self.x_pos -= 10", "def peek(self) -> int:\n self._maybe_prepare_output_stack()\n return self._output_stack[-1]", "def pop(self):\n popped = self.__list[-1]\n self.__list = self.__list[:-1]\n return popped", "def _trim_end(self, tokens: list[Token]) -> Block:\n i = last_token = self.end - 1\n while tokens[i].name in NON_CODING_TOKENS | {'DEDENT', 'NEWLINE'}:\n # if we find an indented comment inside our block, keep it\n if (\n tokens[i].name in {'NL', 'NEWLINE'} and\n tokens[i + 1].name == UNIMPORTANT_WS and\n len(tokens[i + 1].src) > self._initial_indent(tokens)\n ):\n break\n # otherwise we've found another line to remove\n elif tokens[i].name in {'NL', 'NEWLINE'}:\n last_token = i\n i -= 1\n return self._replace(end=last_token + 1)", "def last(self, trace):\n return trace[-1]", "def pop(self):\n item = self.stack.pop()\n\n if item == self.max[-1]: # pop if the same element\n self.max.pop()\n\n return item", "def pop(self) -> int:\n #if len(self.sk2.stack) == 0:\n if self.sk2.is_empty():\n for i in range(len(self.sk1.stack)):\n self.sk2.push(self.sk1.top())\n self.sk1.pop()\n a = self.sk2.top()\n self.sk2.pop()\n return a", "def peek(self):\n return self.the_stack[-1]", "def pop(self) -> int:\n if len(self.a) != 0:\n while len(self.a) != 1:\n self.topvalue = self.a.popleft()\n self.b.append(self.topvalue)\n\n return self.a.popleft()\n else:\n while len(self.b) != 1:\n self.topvalue = self.b.popleft()\n self.a.append(self.topvalue)\n return self.b.popleft()", "def pop(self): # 06:30 Lecture Week 2 \"Stacks\" (16:24)\n if self.isEmpty():\n raise Exception(\"Stack underflow\")\n item = self.first.Item # save item to return\n self.first = self.first.Next # delete last Node added\n self.N -= 1\n return item # return the saved item", "def pop(self):\n if self.is_empty():\n raise RuntimeError(\"Attempt to pop the empty stack!\")\n item = self.top()\n self._items = self._items[:-1]\n return item", "def pop_token(self):\n return self.tokens.pop()" ]
[ "0.687736", "0.662047", "0.65572536", "0.64603764", "0.6362599", "0.6348772", "0.63447845", "0.630998", "0.6291612", "0.6259269", "0.624546", "0.61962366", "0.6184571", "0.61161476", "0.611117", "0.60742784", "0.6037302", "0.60337037", "0.60090023", "0.59744895", "0.5953556", "0.5936488", "0.5899665", "0.58855796", "0.58819073", "0.587673", "0.5873068", "0.5872587", "0.5872319", "0.5849548", "0.58432376", "0.58337134", "0.5808501", "0.57831645", "0.578296", "0.57282215", "0.5722751", "0.5720113", "0.5713412", "0.56910336", "0.56695974", "0.5639449", "0.5639418", "0.563377", "0.56309044", "0.5627808", "0.56211954", "0.56191343", "0.5618254", "0.5618001", "0.56064385", "0.5603996", "0.5603025", "0.5590198", "0.55737174", "0.55708325", "0.55633944", "0.555839", "0.55562985", "0.55497247", "0.55460924", "0.554251", "0.5540682", "0.55339485", "0.55307055", "0.5526477", "0.55184406", "0.5513343", "0.5511489", "0.5511272", "0.55050564", "0.5496445", "0.54949474", "0.5487328", "0.5467233", "0.5467233", "0.54626006", "0.5461494", "0.54603183", "0.5450041", "0.5444069", "0.5432102", "0.54260725", "0.5419035", "0.54147005", "0.541305", "0.5411303", "0.5411232", "0.539914", "0.53881216", "0.53752536", "0.53677326", "0.5362747", "0.53610986", "0.5360882", "0.53600776", "0.53567725", "0.5356655", "0.53554434", "0.535281" ]
0.802693
0
Initialization of protected Operation Object attribute for subclasses.
def __init__(self): self._OPERATION = None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n\n self.operations = {}", "def _init(self):\n raise NotImplementedError", "def __init__(self):\r\n self.operation_map = {}", "def init(self):\n raise NotImplementedError", "def init(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def _init(self):\n pass", "def initialize(cls):", "def __init__ (self):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(__self__):\n pass", "def __init__(self):\n raise NotImplementedError()", "def __init__(self):\n self.sharedRef=self\n #raise NotImplementedError, \"This is abstract class. No instance allowed.\"\n # Despite Shared is abstract, its children would inherit the constructor.", "def __init__(self, osi):\n self.osi = osi\n self._parameters = [self.op_type]\n self.to_process(osi)", "def __init__(self, osi):\n self.osi = osi\n self._parameters = [self.op_type]\n self.to_process(osi)", "def __init__(self, osi):\n self.osi = osi\n self._parameters = [self.op_type]\n self.to_process(osi)", "def __init__(self, osi):\n self.osi = osi\n self._parameters = [self.op_type]\n self.to_process(osi)", "def __init__(self, osi):\n self.osi = osi\n self._parameters = [self.op_type]\n self.to_process(osi)", "def __init__(self):\n self.inputs = []\n self.op = None\n self.const_attr = None\n self.name = \"\"", "def __init__(self, operations = []):\n self.operations = operations", "def __init__(self):\n self.instantiable = {self: self}\n self.is_generic = False", "def __init__(self):\n PrimaryObject.__init__(self)\n NoteBase.__init__(self)\n AddressBase.__init__(self)\n UrlBase.__init__(self)\n self.type = RepositoryType()\n self.name = \"\"", "def __init__(self, **kwds):\n raise NotImplementedError", "def __init__(self):\n\n super().__init__()\n self.operations = {}\n\n self.op = _OperationGetter(self)\n\n self.logger = get_logger()\n self.observer = LoggingContextObserver(self.logger)\n\n self.retry_allow = []\n self.retry_deny = []", "def __init__(self) -> None:\n # Values are already set on __new__.\n # Override this method when value modification on initialization is\n # required.\n raise NotImplementedError()", "def __init__(self, base, **kwargs):\n self.base = base", "def __init__(self, base):\n Category_realization_of_parent.__init__(self, base)", "def __init__(self, base):\n Category_realization_of_parent.__init__(self, base)", "def __init__():", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def __init__(object):", "def _setup_object(self) -> object:\n raise NotImplementedError", "def __init__(self):\n super().__init__()\n self.base = None", "def initialize(self):\n raise NotImplementedError", "def initialize(self):\n raise NotImplementedError", "def initialize(self):\n raise NotImplementedError", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.expected_cont_uuid = None\n self.daos_cmd = None", "def __init__(self):\n raise NotImplementedError('cannot create independent state')", "def 
__init__(self):\n self.soul = self._cnew(py_object(self),self._cmethods)\n self.name = self.__class__.__name__\n # required to keep this object around in the C world\n Py_INCREF(self)", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n\t\tpass", "def __init__(self):\n super(Modules, self).__init__()\n \n global superclasses\n superclasses['universe'] = []\n superclasses['actions'] = ['universe']\n superclasses['booleans'] = ['universe']\n\n global instances\n instances['universe'] = set()\n instances['actions'] = set()\n instances['booleans'] = set()", "def __init_subclass__(cls) -> None:\n super().__init_subclass__()\n dataclass(cls)", "def __init__(self):\n raise NotImplementedError(\"This class cannot be instantiated!\")", "def __init__(self, base):\r\n\r\n self.base = base", "def __init__(self):\r\n self.inputs = []\r\n self.op = None\r\n self.const_attr = None\r\n self.name = \"\"\r\n self.dtype = None", "def __init__(self):\n super().__init__(interface.RemoteControl, DEFAULT_PRIORITIES)", "def init(self) -> None:", "def __init__(self):\r\n\t\tpass", "def __init__(self) -> None:\n # TODO: Provide the complete constructor for this object", "def __init__(self, **kwargs):\n\n super(RefactoringOperation, self).__init__(**kwargs)", "def __init__(self, op_name, attr_key, attr_value):\n self.op = relay.op.get(op_name)\n self.attr_key = attr_key\n self.attr_value = attr_value", "def __init__(self, operation, constargs, randomargs):\n Operation.__init__(self)\n self.operation = operation\n self.constargs = constargs\n self.randomargs = randomargs\n if type(operation) is str:\n import CCAugmentation.outputs as cca_out\n import CCAugmentation.transformations as cca_trans\n self.operation = eval(self._get_op_str())\n self.args = {'operation': self.operation.__name__, 'constargs': constargs, 'randomargs': randomargs}", "def __init__(self):\r\n return", "def __init__(self):\r\n return", "def __init__(self):\r\n return", "def __init__(self):\r\n return", "def __init__(self):\n self.command_orchestrator = None # type: CommandOrchestrator", "def do_init(self):\n\n pass", "def init(self) -> None:\n ...", "def __init__(self, **kwargs):\n raise NotImplementedError", "def __init_subclass__(cls):\n super().__init_subclass__()\n if not isabstract(cls):\n cls._buffer_context = _CounterFuncContext(cls._flush_buffer)", "def init(self):\n self.is_init = True\n raise NotImplementedError", "def test_private_attr(self):\n obj = Base()\n self.assertTrue('_Base__nb_objects' in Base.__dict__)", "def _init(self):", "def __init__(self):\r\n self.__type = ALL_USERS\r\n self.__user = None", "def __init__(self, owner, uid, clsName, tag):\r\n Interface.__init__(self, owner, uid, tag)\r\n\r\n self._clsName = clsName", "def __init__(self):\n super(OperatorCodegen, self).__init__()", "def __init__(self, *args, **kwargs):\n raise NotImplementedError", "def __init__(self):\n # Define class API\n self.api = API2()\n # Initialize a list of SAVED_OBJECTS (used in get_ancestors)\n self.SAVED_OBJECTS = []\n # Initialize the total number of objects\n self.total = 0\n # Load saved objects from file to continue the last execution\n 
self.load_saved_objects()" ]
[ "0.69042325", "0.6817324", "0.66968143", "0.65770996", "0.65770996", "0.6513839", "0.6513839", "0.6513839", "0.6513839", "0.64662194", "0.6441285", "0.642362", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.6404217", "0.63641566", "0.6345559", "0.6332557", "0.6332557", "0.6332557", "0.6332557", "0.6332557", "0.6329752", "0.63220483", "0.6320713", "0.6317538", "0.6305521", "0.62721527", "0.62716806", "0.62627363", "0.62595296", "0.62595296", "0.6239322", "0.6213408", "0.6213408", "0.6213408", "0.6213408", "0.6213408", "0.6213408", "0.6213408", "0.6213408", "0.62101156", "0.6202952", "0.61907756", "0.617837", "0.617837", "0.617837", "0.61770475", "0.61755717", "0.6155811", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6149564", "0.6128192", "0.61228883", "0.61226827", "0.6117411", "0.6106822", "0.6101371", "0.60914814", "0.6091018", "0.6073759", "0.60736763", "0.60710895", "0.60673165", "0.60485643", "0.60485643", "0.60485643", "0.60485643", "0.6047894", "0.6020954", "0.60205734", "0.6018852", "0.60125184", "0.60118204", "0.60072887", "0.6005369", "0.59989834", "0.5998876", "0.59952974", "0.59882903", "0.59870875" ]
0.7562495
0
Get the Operation Object generated by the command.
def getOp(self): return self._OPERATION
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_operation_obect(self, method):\n pass", "def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]", "def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]", "def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]", "def operation(self, name):\n\n try:\n return self.operations[name]\n except KeyError:\n return self.operation_not_found(name)", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def GetOperation(\n self,\n request: google.longrunning.operations_pb2.GetOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.Operation:", "def get_operagion(self):\n if self.OP_GID not in self._data_dict:\n return None\n return dao.get_operation_by_gid(self._data_dict.get(self.OP_GID, None))", "def current_operation(self):\n return self._current_operation", "def current_operation(self):\n return self._current_operation", "def getOperation(self):\n return _libsbml.FluxBound_getOperation(self)", "def cloudflare_waf_get_operation_command(client: Client, operation_id) -> CommandResults:\n response = client.cloudflare_waf_get_operation_request(operation_id)\n output = response['result']\n\n readable_output = 'The command was executed successfully'\n return CommandResults(\n readable_output=readable_output,\n outputs_prefix='CloudflareWAF.Operation',\n outputs_key_field='id',\n outputs=output,\n raw_response=output\n )", "def command(self):\n return self.package(\"SyntaxObjects\").Command", "def get_operations(self):\n return self.operations[:] # Returns a copy instead of actual attribute", "def offending_op(self):\r\n return type(self.r.owner.op)", "def get_operation_old(operation_name):\n op = 
operations_api.get_operation(operation_name)\n return op", "def get_operation_by_name(operation_name: str) -> Operation:\n client = vmwareengine_v1.VmwareEngineClient()\n request = GetOperationRequest()\n request.name = operation_name\n return client.get_operation(request)", "def op(self):\n return self.__op", "def op(self):\n return self.__op", "def get_command(self):\n req_type = type(self.req)\n\n if req_type == ureq.CreateEntryRequest:\n return commands.CreateCommand(self.req.results)\n elif req_type == ureq.ReadEntryRequest:\n return commands.ReadCommand(self.req.results)\n elif req_type == ureq.UpdateEntryRequest:\n return commands.UpdateCommand(self.req.results)\n elif req_type == ureq.DeleteEntryRequest:\n return commands.DeleteCommand(self.req.results)", "def op(self):\n\n return self._op", "def get_command(self, object_name, user_key = None):\n\t\treturn self.get_object('command',object_name, user_key = user_key)", "def deserialize(cls, payload):\n return operations_pb2.Operation.FromString(payload)", "def operation(self) -> str:\n return self._operation", "def get_operation(project_id: str, region: str, operation_id: str) -> Operation:\n return get_operation_by_name(\n f\"projects/{project_id}/locations/{region}/operations/{operation_id}\"\n )", "def operation_command(self, persist=False):\n pass", "def op(self):\n return self.getop(self.pc)", "def getCommand(self):\n return self.__cmd", "def get_command(self):\n return self.c_dict['COMMAND']", "def GetOperation(name):\n client = GetClientInstance()\n messages = client.MESSAGES_MODULE\n request = messages.ApikeysOperationsGetRequest(name=name)\n try:\n return client.operations.Get(request)\n except (apitools_exceptions.HttpForbiddenError,\n apitools_exceptions.HttpNotFoundError) as e:\n exceptions.ReraiseError(e, exceptions.OperationErrorException)", "def command(self):\n return self._command", "def get_command(self):\n return self.command", "def make(self):\n return make_operation_space()", "def get_o(self):\n return self.o", "def operation_class(self):\n clazzname = self.name.title() + \"Op\"\n if clazzname == \"NopOp\":\n clazz = BaseOp\n else:\n clazz = globals()[clazzname]\n return clazz", "def op(self) -> Node:\n return self._step_execution_context.op", "def get_raw(self):\n if not self.ops:\n return\n ops = [self.operations.Op_wrapper(op=o) for o in list(self.ops)]\n data = {\n 'author': self.proposer,\n 'title': self.title,\n 'memo': self.memo,\n 'proposed_operations': [o.json() for o in ops],\n 'expiration_time': formatTimeFromNow(self.proposal_expiration),\n 'extensions': [],\n }\n if self.proposal_review:\n data.update({\"review_period_time\": formatTimeFromNow(self.proposal_review)})\n\n ops = self.operations.Proposal_create(**data)\n return self.operation_class(ops)", "def operation(self):\n pass", "def get_operation(operation):\n if operation == 'query':\n return banking_pb2.QUERY\n if operation == 'deposit':\n return banking_pb2.DEPOSIT\n if operation == 'withdraw':\n return banking_pb2.WITHDRAW", "def to_op(self):\n raise NotImplementedError", "def operation_id(self) -> Optional[str]:\n return pulumi.get(self, \"operation_id\")", "def restore_operation(cls, operation_record):\n classname = operation_record[\"OPE_TYPE\"]\n module = \"\" #TODO Implement modulename from database if Operation belongs to Module\n is_operation_of_module = False\n exec \"\"\"\ntry:\n type(%(class)s)\nexcept NameError,e:\n is_operation_of_module = True\"\"\"%{'class':classname}\n\n if is_operation_of_module:\n exec \"\"\"\nfrom 
%(module)s import %(class)s\noperation = %(class)s(cls._core)\"\"\"%{'class':classname,'module':module}\n else:\n exec \"\"\"\noperation = %(class)s(cls._core)\"\"\"%{'class':classname}\n\n operation.set_id(operation_record['OPE_ID'])\n db = cls._core.get_db()\n stmnt = \"SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation_record[\"OPE_ID\"],))\n for row in cur.fetchallmap():\n val = row[\"OPD_VALUE\"]\n exec \"\"\"val = %s(val)\"\"\"%row[\"OPD_TYPE\"]\n operation.set_value(row[\"OPD_KEY\"], val)\n return operation", "def get_cmd(self):\n return self.cmd", "def operation_type(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"operation_type\")", "def current_operation(self):\n return self.state", "def get_cmd(self, command):\n return self.commands[command][\"cmd\"]", "def post_get_operation(\n self, response: operations_pb2.Operation\n ) -> operations_pb2.Operation:\n return response", "def post_get_operation(\n self, response: operations_pb2.Operation\n ) -> operations_pb2.Operation:\n return response", "def get_svc(command):\n command_name = command\n\n svc = get_sqobject(command_name)\n return svc", "def getOperationByName(self, name):\n for item in self.operations:\n if item.name == name:\n return item\n raise KeyError, \"No operation named %s\" % name", "def to_operator(self) -> Operator:\n return Operator(self.to_instruction())", "def read_operation(self, opcode: int) -> int:\n\n if self.insight:\n self.insight.operation(opcode)\n\n return opcode & 0xF000", "def getOperationByName(self, name):\n for item in self.operations:\n if item.name == name:\n return item\n raise KeyError(\"No operation named %s\" % name)", "def getOperationByName(self, name):\n for item in self.operations:\n if item.name == name:\n return item\n raise KeyError(\"No operation named %s\" % name)", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def command(self) -> TelnetCommand:\n return self._command", "def get_op_type(self):\n return self.op_type", "def __init__(self):\n self._OPERATION = None", "def __getattr__(self, name):\n return Command(self.cmd, name)", "def operation(self, other=None, operator=None):\n terms = [self]\n if other is not None and operator is not EmptyQuery:\n terms.append(other)\n return Operation(terms, operator=operator)", "def get_operations(self):\n op = self.act.get_operations()\n op.extend(Character.decr_attr)\n return op", "def _get_op_str(self):\n import CCAugmentation.outputs as cca_out\n import CCAugmentation.transformations as cca_trans\n\n if type(self.operation) is str:\n op_name_str = self.operation\n else:\n op_name_str = self.operation.__name__\n\n try:\n getattr(cca_trans, op_name_str)\n op_str = f\"cca_trans.{op_name_str}\"\n except AttributeError:\n try:\n getattr(cca_out, op_name_str)\n op_str = f\"cca_out.{op_name_str}\"\n except AttributeError:\n op_str = op_name_str\n\n return op_str", "def operation_mode(self):\n return self._operation_mode", "def operation_definition(servicename, operationname):\n with open(service_definition_file(servicename), encoding=\"UTF-8\") as definition_file:\n service_definition = json.loads(definition_file.read())\n return service_definition['operations'][operationname]", "def trans_op(self):\n\n return self._trans_op", "def operation_template(cls):\n return 
relationship.many_to_one(cls, 'operation_template')", "def operation_template(cls):\n return relationship.many_to_one(cls, 'operation_template')", "def command(self):\n if self.model is self.model_action:\n return self.command_action\n else:\n return self.command_candidate", "def get_operation_id(self):\n operation_id = self.yaml_parser.object.get('operationId', None)\n if not operation_id:\n operation_id = self.method + \"-\" + self.path.strip(\"/\").replace(\"/\", \"-\")\n\n return operation_id", "def _GetTpuOperationRef(self, operation):\n return resources.REGISTRY.ParseRelativeName(\n operation.name, collection='tpu.projects.locations.operations')", "def get_op(self, op_complete_url):\n url_parsed = urlsplit(op_complete_url)\n op_url = url_parsed.path\n\n conf, op = self.best_match(op_url)\n if op is not None:\n return Operation(\n op_complete_url,\n op,\n conf[\"conf\"][op],\n conf[\"tp\"],\n conf[\"sparql_http_method\"],\n conf[\"addon\"],\n )\n else:\n sc = 404\n return (\n sc,\n \"HTTP status code %s: the operation requested does not exist\" % sc,\n \"text/plain\",\n )", "def op(self) -> str:\n return self._node.get(\"op\")", "def operation_list(self):\n return OPERATION_LIST", "def _extract_operation(self, words):\n operation = self.client\n\n for word in words:\n attr = getattr(operation, word, None)\n if attr is None:\n return operation, words[-1]\n\n operation = attr\n\n return operation, \"\"", "def lookup_operation(self, result):\r\n if (not isinstance(result, autodiff.tensor.Tensor) or \r\n result.id not in self.operation_map):\r\n return None\r\n return self.operation_map[result.id]", "def _GetCommand(self):\n cmd = [FLAGS.openstack_cli_path]\n cmd.extend(self.args)\n for flag_name, values in six.iteritems(self.flags):\n flag_name_str = '--%s' % flag_name\n if values is True:\n cmd.append(flag_name_str)\n else:\n values_iterable = values if isinstance(values, list) else [values]\n for value in values_iterable:\n cmd.append(flag_name_str)\n cmd.append(str(value))\n cmd.extend(self.additional_flags)\n return cmd", "def command_type(self):\n return self._command_type", "def get_operation(self, name, signature):\n # Get all signatures registered for the operation\n operations = self.operation_list(name)\n\n for op in operations:\n if op.signature == signature:\n return op\n raise OperationError(\"No operation '%s' with signature '%s'\" %\n (name, signature))", "def _create_operation(self,\n identifier,\n arguments=None,\n return_type=None,\n extended_attributes=None,\n node=None):\n if not return_type:\n return_type = self._create_type('void')\n elif isinstance(return_type, str):\n return_type = self._create_type(return_type)\n if isinstance(extended_attributes, dict):\n extended_attributes = self._create_extended_attributes(\n extended_attributes)\n debug_info = self._build_debug_info(node) if node else None\n\n return Operation.IR(\n identifier,\n arguments=(arguments or []),\n return_type=return_type,\n extended_attributes=extended_attributes,\n component=self._component,\n debug_info=debug_info)", "def opcode(self):\n if self.op is None:\n return None\n return self.op.value", "def current_operation(self):\n return GH_STATE_TO_HA[self._boiler.mode]", "def create_ops(self):\n return self._create_ops", "def get_undo_cmd(self):\r\n return self.command_manager.get_undo_cmd()", "def __convert(self, oplog):\n op = oplog['op']\n if op == 'u':\n # it could be an update or replace\n # @ref https://docs.mongodb.com/manual/reference/limits/#naming-restrictions\n is_update = False\n for 
key in oplog['o'].iterkeys():\n if key[0] == '$':\n is_update = True\n break\n if is_update:\n return pymongo.operations.UpdateOne({'_id': oplog['o2']['_id']}, oplog['o'])\n else:\n return pymongo.operations.ReplaceOne({'_id': oplog['o2']['_id']}, oplog['o'], upsert=True)\n elif op == 'i':\n return pymongo.operations.ReplaceOne({'_id': oplog['o']['_id']}, oplog['o'], upsert=True)\n elif op == 'd':\n return pymongo.operations.DeleteOne({'_id': oplog['o']['_id']})\n else:\n log.error('invaid op: %s' % oplog)\n return None", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def from_dict(cls, dikt) -> 'Operations':\n return util.deserialize_model(dikt, cls)", "def new_command(self, content=None):\n return PrimitiveControllerCommand(content)", "def get_operation(self, project, operation_id):\n headers = {'AppScale-Secret': self.secret}\n operation_url = '{prefix}/{project}/operations/{operation_id}'.format(\n prefix=self.prefix, project=project, operation_id=operation_id)\n response = requests.get(operation_url, headers=headers, verify=False)\n return self.extract_response(response)", "def get_operator(self):\n distribution = self.get_distribution_operator()\n temp = self.get_unit_conversion_operator()\n aperture = self.get_aperture_integration_operator()\n filter = self.get_filter_operator()\n projection = self.get_projection_operator()\n hwp = self.get_hwp_operator()\n polarizer = self.get_polarizer_operator()\n integ = self.get_detector_integration_operator()\n trans_inst = self.instrument.get_transmission_operator()\n trans_atm = self.scene.atmosphere.transmission\n response = self.get_detector_response_operator()\n\n with rule_manager(inplace=True):\n H = CompositionOperator([\n response, trans_inst, integ, polarizer, hwp * projection,\n filter, aperture, trans_atm, temp, distribution])\n if self.scene == 'QU':\n H = self.get_subtract_grid_operator()(H)\n return H", "def import_operation(self):\n return self._import_operation", "def get_copied_op(org_instance, graph, scope=\"\"):\n\n #The name of the copied instance\n if scope != '':\n new_name = scope + '/' + org_instance.name\n else:\n new_name = org_instance.name\n\n return graph.as_graph_element(new_name, allow_tensor=True,\n allow_operation=True)", "def get_command(self, ctx, name):\n commands = self._iter_commands()\n return commands[name].load()", "def _GetOperationDescription(\n self,\n router_method: Any,\n required_path_params: Iterable[FieldDescriptor],\n optional_path_params: Iterable[FieldDescriptor],\n query_params: Iterable[FieldDescriptor],\n body_params: Iterable[FieldDescriptor],\n ) -> Dict[str, Any]:\n\n # The `Operation Object` associated with the current http method.\n operation_obj = {\n \"tags\": [router_method.category or \"NoCategory\"],\n \"description\":\n router_method.doc or \"No description.\",\n \"operationId\":\n urlparse.quote(f\"{router_method.name}\"),\n \"parameters\":\n self._GetParameters(required_path_params, optional_path_params,\n query_params),\n \"responses\": {\n \"200\": (self._GetResponseObject200(router_method.result_type,\n router_method.name)),\n \"default\": self._GetResponseObjectDefault(router_method.name),\n },\n }\n # Only POST methods should have an associated 
`requestBody`.\n if body_params:\n operation_obj[\"requestBody\"] = self._GetRequestBody(body_params)\n\n return operation_obj" ]
[ "0.6874343", "0.67712283", "0.67712283", "0.67712283", "0.67545724", "0.6746998", "0.6746998", "0.66104776", "0.65664905", "0.6563017", "0.6563017", "0.6541429", "0.6439676", "0.64120996", "0.64114374", "0.640891", "0.64030427", "0.6372547", "0.6367869", "0.6367869", "0.6332308", "0.63110846", "0.62660116", "0.62420034", "0.62235296", "0.62012535", "0.61448616", "0.6118598", "0.6110245", "0.6091709", "0.60741967", "0.60731685", "0.6068771", "0.6030418", "0.6018848", "0.5999223", "0.5972263", "0.59685504", "0.5956263", "0.5934824", "0.5865418", "0.5857728", "0.58560103", "0.58410496", "0.58316344", "0.5824548", "0.58092964", "0.57720095", "0.57720095", "0.5762796", "0.57557714", "0.57520527", "0.5750632", "0.57327574", "0.57327574", "0.5702556", "0.5702556", "0.5702556", "0.5702556", "0.5700001", "0.5692228", "0.5680253", "0.567736", "0.567451", "0.5661543", "0.5655156", "0.56539994", "0.56345433", "0.56302136", "0.56261694", "0.56261694", "0.56220174", "0.5602088", "0.55944437", "0.55749196", "0.5568599", "0.5567907", "0.5556536", "0.5555606", "0.55391324", "0.5526357", "0.5525083", "0.5514969", "0.55037314", "0.55027187", "0.5495519", "0.5493191", "0.5489816", "0.5487014", "0.5487014", "0.5487014", "0.5487014", "0.5485618", "0.5477503", "0.5467234", "0.546019", "0.5460031", "0.54542017", "0.5453973", "0.54532766" ]
0.71553755
0
Creates a temporary image for manipulation, and handles optional RGB conversion.
def _create_tmp_image(self, content): content.seek(0) image = Image.open(content) if self.force_rgb and image.mode not in ('L', 'RGB', 'RGBA'): image = image.convert('RGB') return image
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def temporary_unsupported_image(self):\n image = Image.new('RGB', (1, 1))\n tmp_file = tempfile.NamedTemporaryFile(suffix='.ppm')\n image.save(tmp_file, 'ppm')\n # important because after save(),\n # the fp is already at the end of the file\n tmp_file.seek(0) # retrieves the created temp file\n return tmp_file", "def imageprepare():\r\n file_name = 'temp_image.png'\r\n im = Image.open(file_name).convert('L')\r\n im = im.resize((20, 20))\r\n p = Image.new('L', (28,28), (255))\r\n p.paste(im,(4,4,24,24))\r\n p.save(\"last_image.png\")\r\n\r\n tv = list(p.getdata()) # get pixel values\r\n # normalize pixels to 0 and 1. 0 is pure white, 1 is pure black.\r\n tva = [(255 - x) * 1.0 / 255.0 for x in tv]\r\n tva = np.reshape(tva, (28, 28))\r\n\r\n return tva", "def temporary_image(self):\n\n image = Image.new('RGB', (1, 1))\n tmp_file = tempfile.NamedTemporaryFile(suffix='.jpg')\n image.save(tmp_file, 'jpeg')\n # important because after save(),\n # the fp is already at the end of the file\n tmp_file.seek(0) # retrieves the created temp file\n return tmp_file", "def test_write_rgb(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = np.random.rand(2, 3, 3)\n img_save_val = (255 * img).astype(np.uint8)\n writer_obj = PILWriter(output_dtype=np.uint8)\n writer_obj.set_data_array(img, channel_dim=-1)\n writer_obj.write(image_name)\n out = np.asarray(Image.open(image_name))\n out = np.moveaxis(out, 0, 1)\n np.testing.assert_allclose(out, img_save_val)", "def _prepare_image(image, img_lib_name, img_mod, img_save_path):\n # Pillow\n if img_lib_name == \"pil\":\n pil_image = image.copy()\n\n try:\n chan = pil_image.split()\n\n if len(chan) == 4:\n pil_image = img_mod.merge(\"RGB\", (chan[0], chan[1], chan[2]))\n\n try:\n pil_image.save(img_save_path, \"BMP\")\n except IOError as e:\n _warn(\n \"_prepare_image: (Pillow) Could not save the image to '{0}'. \"\n \"I/O Error ({1}): {2}.\".format(img_save_path, e.errno, e.strerror)\n )\n except Exception as e:\n _warn(\n \"_prepare_image: (Pillow) Unable to split and convert \"\n \"the image to RGB. Error: {0}.\".format(e)\n )\n finally:\n del pil_image\n\n # wxPython\n elif img_lib_name == \"wx\":\n wx_image = image.Copy()\n\n try:\n # No idea if 'ClearAlpha' can raise an exception\n if wx_image.HasAlpha():\n wx_image.ClearAlpha()\n\n try:\n wx_image.SaveFile(img_save_path, img_mod.BITMAP_TYPE_BMP)\n except IOError as e:\n _warn(\n \"_prepare_image: (wxPython) Could not save the image to '{0}'. \"\n \"I/O Error({1}): {2}.\".format(img_save_path, e.errno, e.strerror)\n )\n except Exception as e:\n _warn(\n \"_prepare_image: (wxPython) Unable to remove the alpha channel \"\n \"from the image. Error: {0}.\".format(e)\n )\n finally:\n del wx_image\n\n # PyQt/PySide\n elif img_lib_name == \"qt\":\n qt_image = img_mod(image)\n\n try:\n if qt_image.hasAlphaChannel():\n qt_image = qt_image.convertToFormat(img_mod.Format_RGB32)\n\n try:\n # Save the image with max quality\n qt_image.save(img_save_path, \"BMP\", 100)\n except Exception as e:\n _warn(\n \"_prepare_image: (PyQt/PySide) Could not save the image to \"\n \"'{0}'. 
Error: {1}.\".format(img_save_path, e)\n )\n except Exception as e:\n _warn(\n \"_prepare_image: (PyQt/PySide) Unable to convert the image to RGB.\"\n \"Error: {0}.\".format(e)\n )\n finally:\n del qt_image\n\n # OpenCV\n elif img_lib_name == \"cv\":\n cv_image = image.copy()\n\n # OpenCV 'imwrite' require a valid file extension\n img_save_path_bmp = \"{0}.bmp\".format(img_save_path)\n\n try:\n if len(cv_image.shape) > 2 and cv_image.shape[2] == 4:\n rt, th = img_mod.threshold(\n cv_image[:, :, 3], 254, 255, img_mod.THRESH_BINARY\n )\n cv_image = img_mod.bitwise_not(\n img_mod.bitwise_not(cv_image[:, :, :3], mask=th)\n )\n\n if img_mod.imwrite(img_save_path_bmp, cv_image):\n try:\n os.rename(img_save_path_bmp, img_save_path)\n except OSError as e:\n _warn(\n \"_prepare_image: (OpenCV) Could not rename the image \"\n \"file from '{0}' to '{1}'. Error: {2}.\".format(\n img_save_path_bmp, img_save_path, e\n )\n )\n else:\n _warn(\n \"_prepare_image: (OpenCV) Could not save the image to \"\n \"'{0}'.\".format(img_save_path_bmp)\n )\n except Exception as e:\n _warn(\n \"_prepare_image: (OpenCV) Unable to remove the alpha channel \"\n \"from the image. Error: {0}.\".format(e)\n )\n finally:\n del cv_image", "def _create_image(self):\n if hasattr(self, '_image') and self._image:\n return self._image\n try:\n command = \"tex2im -b transparent -t cyan\"\n subprocess.run([*command.split(), self._formula])\n except Exception as e:\n import traceback\n print(traceback.format_exc())\n return None\n # tex2im converts to out.png by default\n img = Image.open('out.png').convert('RGBA')\n # create a new rgba image to blend the latex with the alpha\n subprocess.run([\"rm\", \"out.png\"])\n return img", "def new_image(x, y, out, data):\n img = Image.new('RGB', (x, y))\n img.putdata(data)\n img.save(out)", "def convert_img(self):\r\n self.img = self.img.convert('RGB')", "def new_test_image():\n warnings.warn(DeprecationWarning(\n \"new_test_image() is deprecated in favour of the get_sample_image() \"\n \"context manager.\"), stacklevel=2)\n image_name = 'test-{}.png'.format(uuid.uuid4())\n image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))\n ImageDraw.Draw(image)\n byte_io = BytesIO()\n image.save(byte_io, 'png')\n byte_io.seek(0)\n return image_name, ContentFile(byte_io.read(), image_name)", "def create_colorful_test_image(self):\n ch255 = np.full([100, 200, 1], 255, dtype=np.uint8)\n ch128 = np.full([100, 200, 1], 128, dtype=np.uint8)\n ch0 = np.full([100, 200, 1], 0, dtype=np.uint8)\n imr = np.concatenate((ch255, ch128, ch128), axis=2)\n img = np.concatenate((ch255, ch255, ch0), axis=2)\n imb = np.concatenate((ch255, ch0, ch255), axis=2)\n imw = np.concatenate((ch128, ch128, ch128), axis=2)\n imu = np.concatenate((imr, img), axis=1)\n imd = np.concatenate((imb, imw), axis=1)\n image = np.concatenate((imu, imd), axis=0)\n return image", "def imageprepare(argv):\n im = Image.open(argv).convert('L')\n width = float(im.size[0])\n height = float(im.size[1])\n newImage = Image.new('L', (28, 28), (255)) # creates white canvas of 28x28 pixels\n\n if width > height: # check which dimension is bigger\n # Width is bigger. 
Width becomes 20 pixels.\n nheight = int(round((20.0 / width * height), 0)) # resize height according to ratio width\n if (nheight == 0): # rare case but minimum is 1 pixel\n nheight = 1\n # resize and sharpen\n img = im.resize((20, nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wtop = int(round(((28 - nheight) / 2), 0)) # calculate horizontal position\n newImage.paste(img, (4, wtop)) # paste resized image on white canvas\n else:\n # Height is bigger. Heigth becomes 20 pixels.\n nwidth = int(round((20.0 / height * width), 0)) # resize width according to ratio height\n if (nwidth == 0): # rare case but minimum is 1 pixel\n nwidth = 1\n # resize and sharpen\n img = im.resize((nwidth, 20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wleft = int(round(((28 - nwidth) / 2), 0)) # caculate vertical pozition\n newImage.paste(img, (wleft, 4)) # paste resized image on white canvas\n\n # newImage.save(\"sample.png\n\n tv = list(newImage.getdata()) # get pixel values\n\n # normalize pixels to 0 and 1. 0 is pure white, 1 is pure black.\n tva = [(255 - x) * 1.0 / 255.0 for x in tv]\n print(tva)\n return tva", "def imageprepare(image_data):\n im = Image.open(io.BytesIO(image_data))\n im = remove_transparency(im)\n im = im.resize((28,28))\n width = float(im.size[0])\n height = float(im.size[1])\n new_image = Image.new('L', (28, 28), 255) # creates white canvas of 28x28 pixels\n\n if width > height: # check which dimension is bigger\n # Width is bigger. Width becomes 20 pixels.\n nheight = int(round((20.0 / width * height), 0)) # resize height according to ratio width\n if nheight == 0: # rare case but minimum is 1 pixel\n nheight = 1\n # resize and sharpen\n img = im.resize((20, nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wtop = int(round(((28 - nheight) / 2), 0)) # calculate horizontal position\n new_image.paste(img, (4, wtop)) # paste resized image on white canvas\n else:\n # Height is bigger. Heigth becomes 20 pixels.\n nwidth = int(round((20.0 / height * width), 0)) # resize width according to ratio height\n if nwidth == 0: # rare case but minimum is 1 pixel\n nwidth = 1\n # resize and sharpen\n img = im.resize((nwidth, 20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wleft = int(round(((28 - nwidth) / 2), 0)) # caculate vertical pozition\n new_image.paste(img, (wleft, 4)) # paste resized image on white canvas\n\n # new_image = ImageOps.invert(new_image)\n\n tv = list(new_image.getdata()) # get pixel values\n\n # normalize pixels to 0 and 1. 0 is pure white, 1 is pure black.\n tva = [(255 - x) * 1.0 / 255.0 for x in tv]\n return tva, new_image", "def imageprepare(argv):\r\n im = Image.open(argv).convert('L')\r\n width = float(im.size[0])\r\n height = float(im.size[1])\r\n newImage = Image.new('L', (28, 28), (255)) #creates white canvas of 28x28 pixels\r\n \r\n if width > height: #check which dimension is bigger\r\n #Width is bigger. Width becomes 20 pixels.\r\n nheight = int(round((20.0/width*height),0)) #resize height according to ratio width\r\n if (nheight == 0): #rare case but minimum is 1 pixel\r\n nheight = 1 \r\n # resize and sharpen\r\n img = im.resize((20,nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\r\n wtop = int(round(((28 - nheight)/2),0)) #caculate horizontal pozition\r\n newImage.paste(img, (4, wtop)) #paste resized image on white canvas\r\n else:\r\n #Height is bigger. Heigth becomes 20 pixels. 
\r\n nwidth = int(round((20.0/height*width),0)) #resize width according to ratio height\r\n if (nwidth == 0): #rare case but minimum is 1 pixel\r\n nwidth = 1\r\n # resize and sharpen\r\n img = im.resize((nwidth,20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\r\n wleft = int(round(((28 - nwidth)/2),0)) #caculate vertical pozition\r\n newImage.paste(img, (wleft, 4)) #paste resized image on white canvas\r\n \r\n #newImage.save(\"sample.png\")\r\n\r\n tv = list(newImage.getdata()) #get pixel values\r\n \r\n #normalize pixels to 0 and 1. 0 is pure white, 1 is pure black.\r\n tva = [ (255-x)*1.0/255.0 for x in tv] \r\n #print(tva)\r\n return tva", "def create_temporary_image(image):\n\n temp = tempfile.NamedTemporaryFile()\n temp.write(image)\n temp.seek(0)\n\n return temp", "def imageprepare(self,argv):\r\n\t\tim = Image.open(argv).convert('L')\r\n\t\twidth = float(im.size[0])\r\n\t\theight = float(im.size[1])\r\n\t\tnewImage = Image.new('L', (28, 28), (255)) # creates white canvas of 28x28 pixels\r\n\r\n\t\tif width > height: # check which dimension is bigger\r\n\t\t\t# Width is bigger. Width becomes 20 pixels.\r\n\t\t\tnheight = int(round((20.0 / width * height), 0)) # resize height according to ratio width\r\n\t\t\tif nheight == 0: # rare case but minimum is 1 pixel\r\n\t\t\t\tnheight = 1\r\n\t\t\t\t# resize and sharpen\r\n\t\t\timg = im.resize((20, nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\r\n\t\t\twtop = int(round(((28 - nheight) / 2), 0)) # caculate horizontal pozition\r\n\t\t\tnewImage.paste(img, (4, wtop)) # paste resized image on white canvas\r\n\t\telse:\r\n\t\t\t# Height is bigger. Heigth becomes 20 pixels.\r\n\t\t\tnwidth = int(round((20.0 / height * width), 0)) # resize width according to ratio height\r\n\t\t\tif (nwidth == 0): # rare case but minimum is 1 pixel\r\n\t\t\t\tnwidth = 1\r\n\t\t\t\t# resize and sharpen\r\n\t\t\timg = im.resize((nwidth, 20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\r\n\t\t\twleft = int(round(((28 - nwidth) / 2), 0)) # caculate vertical pozition\r\n\t\t\tnewImage.paste(img, (wleft, 4)) # paste resized image on white canvas\r\n\r\n\t\t# newImage.save(\"sample.png\")\r\n\r\n\t\ttv = list(newImage.getdata()) # get pixel values\r\n\r\n\t\t# normalize pixels to 0 and 1. 
0 is pure white, 1 is pure black.\r\n\t\ttva = [(255 - x) * 1.0 / 255.0 for x in tv]\r\n\t\treturn tva", "def createRGBImage(self, filepath, width=None, outdir=None):\n print('[createRGBImage] filepath, outdir', filepath, outdir)\n\n index = 0\n rgb_data = []\n\n # Read binary file\n binary_data = self.getBinaryData(filepath)\n\n # Create R,G,B pixels\n while (index + 3) < len(binary_data):\n R = binary_data[index]\n G = binary_data[index+1]\n B = binary_data[index+2]\n index += 3\n rgb_data.append((R, G, B))\n\n size = self.get_size(len(rgb_data), width)\n image = Image.new('RGB', size)\n image.putdata(rgb_data)\n if width > 0:\n image = image.resize((width, width))\n if outdir is not None:\n self.save_file(filepath, image, size, 'RGB', width, outdir)\n # print('np.array(image)', np.array(image).shape)\n return np.array(image)/255.0", "def create_image(self):\n\n self._image = 255 * np.ones((self._height, self._width, 3), np.uint8)", "def genrandimg(args) -> None:\n\n size = (int(args.x), int(args.y))\n fp = Image.new(\"RGB\", size)\n data = []\n\n if not args.c: # If color\n for i in range(size[0]*size[1]):\n r = random.choice([0x00, 0xff])\n data.append((r, r, r)) # Each RGB value is the same random value\n else: # Else black-and-white\n for i in range(size[0]*size[1]):\n r = [random.choice(range(0, 256)) for _ in range(0, 3)]\n r = (r[0], r[1], r[2]) # Choose 3 random numbers for different RGB values\n data.append(r)\n\n fp.putdata(data)\n print(\"Saving to %s...\" % args.o)\n fp.save(args.o)\n fp.close()", "def recreate_image(im_as_var):\n recreated_im = im_as_var.data.numpy()[0]\n recreated_im[recreated_im > 1] = 1\n recreated_im[recreated_im < 0] = 0\n # recreated_im = np.round(recreated_im * 255)\n return recreated_im", "def test_write_lossless_rgb(tmp_path):\n\n temp_file = str(tmp_path / \"temp.webp\")\n # temp_file = \"temp.webp\"\n\n pil_image = hopper(\"RGBA\")\n\n mask = Image.new(\"RGBA\", (64, 64), (128, 128, 128, 128))\n # Add some partially transparent bits:\n pil_image.paste(mask, (0, 0), mask)\n\n pil_image.save(temp_file, lossless=True)\n\n with Image.open(temp_file) as image:\n image.load()\n\n assert image.mode == \"RGBA\"\n assert image.size == pil_image.size\n assert image.format == \"WEBP\"\n image.load()\n image.getdata()\n\n assert_image_equal(image, pil_image)", "def test_no_rgb_colorspace(self):\n user = UserFactory.create()\n file_path = os.path.join(os.path.dirname(__file__), \"broken_colorspace.gif\")\n self._upload_photo(user, file_path)", "def red_filter(img):\r\n #with Image.open(filename) as img:\r\n w = img.width\r\n h = img.height\r\n\r\n newimg = Image.new('RGB', (w,h))\r\n for y in range(h):\r\n for x in range(w):\r\n r, g, b = img.getpixel((x,y))\r\n \r\n newimg.putpixel((x, y), (r, 0, 0))\r\n \r\n return newimg", "def rgb_processing(rgb_img, center, scale, rot=0):\n rgb_img = crop(rgb_img, center, scale, \n [constants.IMG_RES, constants.IMG_RES], rot=rot)\n # (3,224,224),float,[0,1]\n rgb_img = np.transpose(rgb_img.astype('float32'),(2,0,1))/255.0\n return rgb_img", "def imageprepare():\r\n file_name = '9-test.png'\r\n im = Image.open(file_name).convert('L')\r\n\r\n im.save(\"9-t.png\")\r\n plt.imshow(im)\r\n plt.show()\r\n tv = list(im.getdata())\r\n\r\n # normalize pixels to 0 and 1. 
0 is pure white, 1 is pure black.\r\n tva = [(255 - x) * 1.0 / 255.0 for x in tv]\r\n return tva", "def imageprepare(filename):\n img = Image.open(filename).convert('L')\n rect = img.getbbox()\n im = img.crop(rect)\n im.save(filename + '_pressprocessed.png')\n\n width = float(im.size[0])\n height = float(im.size[1])\n newImage = Image.new('L', (28, 28), (0)) #creates white canvas of 28x28 pixels\n if width > height: #check which dimension is bigger\n #Width is bigger. Width becomes 20 pixels.\n nheight = int(round((20.0/width*height),0)) #resize height according to ratio width\n if (nheight == 0): #rare case but minimum is 1 pixel\n nheight = 1\n # resize and sharpen\n img = im.resize((20,nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wtop = int(round(((28 - nheight)/2),0)) #caculate horizontal pozition\n newImage.paste(img, (4, wtop)) #paste resized image on white canvas\n newImage.save(filename + '_final.png')\n else:\n #Height is bigger. Heigth becomes 20 pixels. \n nwidth = int(round((20.0/height*width),0)) #resize width according to ratio height\n if (nwidth == 0): #rare case but minimum is 1 pixel\n nwidth = 1\n # resize and sharpen\n img = im.resize((nwidth,20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wleft = int(round(((28 - nwidth)/2),0)) #caculate vertical pozition\n newImage.paste(img, (wleft, 4)) #paste resized image on white canvas\n newImage.save(filename + '_final.png')\n tv = list(newImage.getdata()) #get pixel values\n #normalize pixels to 0 and 1. 0 is pure white, 1 is pure black.\n tva = [ (x)*1.0/255.0 for x in tv] \n return tva", "def recompute_final_image(self):\n if self._image is None:\n self.final_image = None\n return\n if isinstance(self._image, np.ndarray):\n if self._image.dtype == np.float and np.any(self._image > 1):\n im = self._image / 255\n else:\n im = self._image\n if self.cmap is not None:\n im = cm.get_cmap(self.cmap)(im)\n im = PIL.Image.fromarray((im * 255).astype(np.uint8))\n else: # we hope it is a PIL image or equivalent\n im = self._image\n im = im.convert('RGBA')\n if self.make_square:\n new_size = max(im.width, im.height)\n im = im.resize((new_size, new_size), PIL.Image.NEAREST)\n if self.resolution is not None:\n if self.resolution.size == 1:\n im = im.resize((self.resolution, self.resolution),\n PIL.Image.NEAREST)\n else:\n im = im.resize(self.resolution,\n PIL.Image.NEAREST)\n if self.circ_cut is not None:\n middle = np.array(im.size) / 2\n x = np.arange(im.size[0]) - middle[0] + 0.5\n x = x / np.max(np.abs(x))\n y = np.arange(im.size[1]) - middle[1] + 0.5\n y = y / np.max(np.abs(y))\n yy, xx = np.meshgrid(y, x)\n r = np.sqrt(xx ** 2 + yy ** 2)\n alpha = np.empty(r.shape)\n alpha[r > 1] = 0\n alpha[r <= self.circ_cut] = 1\n val = (r > self.circ_cut) & (r <= 1)\n alpha[val] = (\n 0.5 + 0.5 * np.cos(\n np.pi * (r[val] - self.circ_cut)\n / (1 - self.circ_cut)))\n alpha = alpha.T * np.array(im.getchannel('A'))\n alpha = PIL.Image.fromarray(np.uint8(alpha))\n im.putalpha(alpha)\n if self.col is not None:\n if self.border_type is None:\n pass\n elif self.border_type == 'alpha':\n bg_alpha = np.array(im.getchannel('A'))\n bg_alpha = bg_alpha > 0\n bg_alpha = PIL.Image.fromarray(255 * np.uint8(bg_alpha))\n bg = PIL.Image.new('RGBA', im.size, color=self.col)\n bg.putalpha(bg_alpha)\n im = PIL.Image.alpha_composite(bg, im)\n elif self.border_type == 'pad':\n im = PIL.ImageOps.expand(\n im,\n border=self.border_width,\n fill=self.col)\n elif self.border_type == 'conv':\n im = PIL.ImageOps.expand(\n im,\n border=self.border_width,\n 
fill=(0, 0, 0, 0))\n bg_alpha = im.getchannel('A')\n bg_alpha = bg_alpha.filter(PIL.ImageFilter.BoxBlur(\n self.border_width))\n bg_alpha = np.array(bg_alpha)\n bg_alpha = 255 * np.uint8(bg_alpha > 0)\n bg_alpha = PIL.Image.fromarray(bg_alpha)\n bg = PIL.Image.new('RGBA', im.size, color=self.col)\n bg.putalpha(bg_alpha)\n im = PIL.Image.alpha_composite(bg, im)\n self.final_image = im", "def blank_image(height, width):\n all_green = create_uniform_image(height, width, [0, 255, 0])\n return all_green", "def save_file(self, _filename):\n imgsize = (self.__resolution[0], self.__resolution[1])\n print imgsize\n\n if(self.__resolution[2] == 1):\n # grayscale -> convert to RGB\n bg_white = (255, 255, 255)\n img = Image.new(\"RGB\", imgsize, bg_white)\n\n for x in xrange(0, self.__resolution[0], 1):\n for y in xrange(0, self.__resolution[1], 1):\n col = self.get_color(_pos)\n # duplicate the channels\n ucharcol = (255 * col[0], 255 * col[0], 255 * col[0])\n img.putpixel((x, self.__resolution[1] - y - 1), ucharcol)\n\n elif(self.__resolution[2] == 3):\n # RGB\n bg_white = (255, 255, 255)\n img = Image.new(\"RGB\", imgsize, bg_white)\n\n for x in xrange(0, self.__resolution[0], 1):\n for y in xrange(0, self.__resolution[1], 1):\n col = self.get_color(_pos)\n ucharcol = (255 * col[0], 255 * col[1], 255 * col[2])\n img.putpixel((x, self.__resolution[1] - y - 1), ucharcol)\n\n elif(self.__resolution[2] == 4):\n # RGBA\n bg_white = (255, 255, 255, 255)\n img = Image.new(\"RGBA\", imgsize, bg_white)\n\n for x in xrange(0, self.__resolution[0], 1):\n for y in xrange(0, self.__resolution[1], 1):\n col = 255 * self.get_color((x, y))\n ucharcol = (int(col[0]), int(col[1]), int(col[2]), int(col[3]))\n img.putpixel((x, self.__resolution[1] - y - 1), ucharcol)\n else:\n raise StandardError, ('supported number of channels are 1, 3, and 4, only.')\n\n img.save(_filename)", "def __make_png(self, abspath_img_rgb):\n if not os.path.exists(DIR_PNG):\n os.makedirs(DIR_PNG)\n\n outsize = '{}%'.format(OUTSIZE_RGB)\n img_name_rgb = os.path.basename(abspath_img_rgb)\n suffix_extension_tif = Utils.get_suffix_tif(img_name_rgb)\n img_png = img_name_rgb.replace(suffix_extension_tif, '.png')\n path_img_png = os.path.join(DIR_PNG, img_png)\n\n command = \"gdal_translate -ot byte -of PNG -outsize {} {} \" \\\n \"-a_nodata 0 -q {} {}\".format(\n outsize, outsize, abspath_img_rgb, path_img_png\n )\n os.system(command)\n return os.path.join(DIR_PNG_TO_DB, img_png)", "def prepare_image(im):\n width, height = im.size\n if width > 256 or height > 256:\n factor = 256.0 / max(width, height)\n im = im.resize((int(factor * width), int(factor * height)),\n Image.BILINEAR)\n return im", "def test_fromarray_rgb_fail():\n arr = numpy.zeros((20, 10, 3), dtype='float')\n\n parameters = {'data': [arr]}\n\n images.fromarray(parameters).convert('RGB')", "def img_recolor(self, args, input_image_path):\n \n ec = encoder.Encoder(output_path=args.intermediate_representation, method=args.method,\n size=args.size, p=args.p, grid_size=args.grid_size, plot=args.plot, quantize=args.quantize)\n dc = decoder.Decoder(output_path=args.output_path, method=args.method, size=args.size, p=args.p, gpu_id=args.gpu_id, plot=args.plot)\n\n ec.encode(input_image_path)\n img_gray_name = ar_utils.gen_new_gray_filename(input_image_path)\n img_gray_path = os.path.join(args.intermediate_representation, img_gray_name)\n dc.decode(img_gray_path)\n\n if args.delete_gray and os.path.exists(img_gray_path):\n os.remove(img_gray_path)", "def _process_img_rgb(self, sensor_data):\n 
img = np.array(sensor_data.raw_data).reshape((self.img_y, self.img_x, 4))\n img = img[:, :, :3] # sensor is actualy rgba, we dont need alpha values\n self.rgb = img # need to scale rgb values to be {0,1}", "def create_image_if_needed(w, h, num_channels, dtype, image_in):\n if image_in is not None:\n dim = (w, h, num_channels)\n attrib_out = (dim, dtype)\n attrib_in = get_vtk_image_attrib(image_in)\n if attrib_in == attrib_out:\n return image_in\n # Otherwise, create new image.\n return create_image(w, h, num_channels, dtype)", "def make_image(self, save=False):\n\n # image_grid = np.full((self.size_x, self.size_y), '#888888', dtype=str)\n image_grid = np.full((self.size_x, self.size_y, 3), 0, dtype=np.uint8)\n\n # self.grid = np.flip(self.grid, 1)\n\n # self.grid = np.swapaxes(self.grid, 0, 0)\n \"\"\"\n image_grid[self.grid == 0] = 'FFFFFF'\n image_grid[self.grid == 1] = '000000'\n image_grid[self.grid == 2] = '00FF00'\n image_grid[self.grid == 3] = '0000FF'\n image_grid[self.grid == 4] = 'FFFF00'\n image_grid[self.grid == 5] = '00FFFF'\n image_grid[self.grid == 6] = 'FF00FF'\n \"\"\"\n image_grid[self.grid == 0] = (1, 1, 1)\n image_grid[self.grid == 1] = (0, 0, 0)\n image_grid[self.grid == 2] = (1, 0, 1)\n image_grid[self.grid == 3] = (0, 1, 0)\n image_grid[self.grid == 4] = (0, 0, 1)\n image_grid[self.grid == 5] = (0, 1, 1)\n image_grid[self.grid == 6] = (1, 1, 0)\n\n #for ant in self.ants:\n # image_grid[ant.x, ant.y] = (1, 0, 0)\n\n # image_grid = image_grid.swapaxes(0, 1)\n # self.grid = self.grid.swapaxes(0, 1)\n\n\n\n DPI = 100\n width, height = 1000, 1000\n fig = plt.figure(figsize=(width / DPI, height / DPI), dpi=DPI, facecolor='k')\n ax = fig.add_subplot()\n\n plt.axis('equal')\n plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)\n\n for y in range(self.size_x):\n for x in range(self.size_y):\n if self.grid[x, y] != 0:\n # Only plot a hexagon if its state is not zero.\n plot_hex(ax, x, y, image_grid[x, y])\n\n ax.set_xlim(0, self.size_x)\n ax.set_ylim(0, self.size_y)\n\n plt.show()\n\n logging.info(\"Finished Image Processing\")", "def _build_final_image(self, image):\n raise NotImplementedError", "def redlogo(original_image,red,template):\r\n #set the radius of the rounded corners\r\n width, height = template.size\r\n resized_image = original_image.resize((width,height))\r\n \r\n \r\n ###\r\n #create a mask\r\n ###\r\n \r\n #start with transparent mask\r\n rounded_mask = PIL.Image.new('RGBA', (width, height))\r\n drawing_layer = PIL.ImageDraw.Draw(rounded_mask)\r\n \r\n # Overwrite the RGBA values with A=255.\r\n # The 127 for RGB values was used merely for visualizing the mask\r\n rounded_mask.paste(red, (0,0))\r\n # Uncomment the following line to show the mask\r\n # plt.imshow(rounded_mask)\r\n \r\n # Make the new image, starting with all transparent\r\n \r\n #resizes the image to 200 to 200 for easy pasting\r\n \r\n \r\n result = PIL.Image.new('RGBA', template.size, (0,0,0,0))\r\n result.paste(resized_image, (0,0), mask=rounded_mask)\r\n return result", "def test_create_image(self):\n pass", "def create_blank(w, h, rgb_color=(0, 0, 0)):\n image = np.zeros((h, w), np.uint8)\n color = tuple(reversed(rgb_color))\n image[:] = 0\n return image", "def create_blank(w, h, rgb_color=(0, 0, 0)):\n image = np.zeros((h, w), np.uint8)\n color = tuple(reversed(rgb_color))\n image[:] = 0\n return image", "def _processimage(inpath, outpath, outformat=\"TIFF\", filter=\"ANTIALIAS\", scale=None, crop=None):\n\n try:\n image = Image.open(inpath)\n except (IOError, 
OSError):\n # workaround for Pillow not handling 16bit sRGB images\n if \"16-bit\" in check_output((\"identify\", inpath)):\n with NamedTemporaryFile() as tmpfile:\n check_call((\"convert\", inpath, \"-depth\", \"8\", tmpfile.name))\n image = Image.open(tmpfile.name)\n else:\n raise Exception\n\n if crop:\n image = image.crop(crop)\n\n if scale:\n imagefilter = getattr(Image, filter.upper())\n size = [x * scale for x in image.size]\n image.thumbnail(size, imagefilter)\n\n image.save(outpath, outformat)", "def test_keep_rgb_values_when_transparent(tmp_path):\n\n image = hopper(\"RGB\")\n\n # create a copy of the image\n # with the left half transparent\n half_transparent_image = image.copy()\n new_alpha = Image.new(\"L\", (128, 128), 255)\n new_alpha.paste(0, (0, 0, 64, 128))\n half_transparent_image.putalpha(new_alpha)\n\n # save with transparent area preserved\n temp_file = str(tmp_path / \"temp.webp\")\n half_transparent_image.save(temp_file, exact=True, lossless=True)\n\n with Image.open(temp_file) as reloaded:\n assert reloaded.mode == \"RGBA\"\n assert reloaded.format == \"WEBP\"\n\n # even though it is lossless, if we don't use exact=True\n # in libwebp >= 0.5, the transparent area will be filled with black\n # (or something more conducive to compression)\n assert_image_equal(reloaded.convert(\"RGB\"), image)", "def single_to_rgb(R_file,G_file,B_file): \n R=gdal_array.LoadFile(R_file)\n G=gdal_array.LoadFile(G_file)\n B=gdal_array.LoadFile(B_file)\n \n \n basename=os.path.basename(R_file)\n basename=basename[:3]+basename[4:]\n basename=basename[:-4]+\"_rgb_.tif\" \n \n\n file_path=os.path.dirname(os.path.abspath(R_file))+\"/\"+basename\n\n \n driver=osgeo.gdal.GetDriverByName(\"GTiff\")\n options = ['PHOTOMETRIC=RGB', 'PROFILE=GeoTIFF']\n print(file_path)\n print(np.max(np.array([R.shape[1],B.shape[1],G.shape[1]])), np.max(np.array([R.shape[0],B.shape[0],G.shape[0]])))\n Xlen=np.max(np.array([R.shape[1],B.shape[1],G.shape[1]]))\n Ylen= np.max(np.array([R.shape[0],B.shape[0],G.shape[0]]))\n dataset=driver.Create(file_path, int(Xlen),int(Ylen), 3, osgeo.gdal.GDT_UInt16, options) \n \n dataset.GetRasterBand(1).WriteArray(R)\n dataset.GetRasterBand(2).WriteArray(G)\n dataset.GetRasterBand(3).WriteArray(B)\n \n return file_path", "def _convert_to_yolo_img(self, img):\n\n img = img / 255.0\n h, w, c = img.shape\n img = img.transpose(2, 0, 1)\n outimg = make_image(w, h, c)\n img = img.reshape((w*h*c))\n data = c_array(c_float, img)\n outimg.data = data\n rgbgr_image(outimg)\n return outimg", "def prepare_image(path: str):\n\n im = Image.open(path).convert('L')\n width = float(im.size[0])\n height = float(im.size[1])\n new_image = Image.new('L', (28, 28), (255)) # creates white canvas of 28x28 pixels\n\n if width > height: # check which dimension is bigger\n # Width is bigger. Width becomes 20 pixels.\n nheight = int(round((20.0 / width * height), 0)) # resize height according to ratio width\n if (nheight == 0): # rare case but minimum is 1 pixel\n nheight = 1\n # resize and sharpen\n img = im.resize((20, nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wtop = int(round(((28 - nheight) / 2), 0)) # calculate horizontal position\n new_image.paste(img, (4, wtop)) # paste resized image on white canvas\n else:\n # Height is bigger. 
Heigth becomes 20 pixels.\n nwidth = int(round((20.0 / height * width), 0)) # resize width according to ratio height\n if (nwidth == 0): # rare case but minimum is 1 pixel\n nwidth = 1\n # resize and sharpen\n img = im.resize((nwidth, 20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)\n wleft = int(round(((28 - nwidth) / 2), 0)) # caculate vertical pozition\n new_image.paste(img, (wleft, 4)) # paste resized image on white canvas\n\n pixels = list(new_image.getdata()) # get pixel values\n pixels_normalized = [(255 - x) * 1.0 / 255.0 for x in pixels]\n\n # Need adequate shape\n adequate_shape = np.reshape(pixels_normalized, (1, 28, 28))\n output = torch.FloatTensor(adequate_shape).unsqueeze(0)\n return output", "def testImageProcessing():\n Im_pix = getRGB( 'in.png' ) # read in the in.png image\n print \"The first two pixels of the first row are\",\n print Im_pix[0][0:2]\n # remember that Im_pix is a list (the image)\n # of lists (each row) of lists (each pixel is [R,G,B])\n New_pix = [ [ [255 - num for num in p] for p in row ] for row in Im_pix ]\n # now, save to the file 'out.png'\n saveRGB( New_pix, 'out.png' )", "def open_image(self):\n self.orig_image = Image.open(self.filename)\n if self.in_rgb:\n self.orig_image = self.orig_image.convert(\"RGB\")\n if self.min_filter:\n self.orig_image.filter(ImageFilter.MinFilter(self.min_filter))", "def _dwd_create_RGB_image(self, channels, cranges):\n if not isinstance(channels, (list, tuple, set)) and \\\n not isinstance(cranges, (tuple, list, set)) and \\\n not len(channels) == len(cranges) and \\\n not (len(channels) == 3 or len(channels == 4)):\n raise ValueError(\"Channels and color ranges must be list/tuple/set \\\n and they must have the same length of 3 or 4 elements\")\n\n if len(channels) == 3:\n return geo_image.GeoImage(channels,\n self.area,\n get_first(self.time_slot),\n fill_value=(0, 0, 0),\n mode=\"RGB\",\n crange=cranges)\n if len(channels) == 4:\n return geo_image.GeoImage(channels,\n self.area,\n get_first(self.time_slot),\n fill_value=(0, 0, 0, 0),\n mode=\"RGBA\",\n crange=cranges)", "def img_operation(args):\n\n a = args.a\n b = args.b\n o = args.o\n op = args.operation\n\n convtoRGB(a)\n convtoRGB(b)\n a = Image.open(a)\n b = Image.open(b)\n\n if not ((a.mode and b.mode) == \"RGB\" and a.height == b.height and a.width == b.width):\n print(\"Error: the dimensions of the images do not match\")\n exit(-1)\n\n c = Image.new(a.mode, (a.width, a.height))\n a_data = a.getdata()\n b_data = b.getdata()\n c_data = []\n\n for i in range(len(a_data)):\n t_data = tuple_operation(a_data[i], b_data[i], op)\n c_data.append(t_data)\n c.putdata(c_data)\n\n if o:\n print(\"Saving to %s...\" % args.o)\n c.save(o)\n else:\n print(\"Saving to %s...\" % args.b)\n c.save(b)\n c.close()", "def _createimage(self, image):\n return self.cv.create_image(0, 0, image=image)", "def create_blank(width, height, rgb_color=(0, 0, 0)):\r\n # Create black blank image\r\n image = np.zeros((height, width, 3), np.uint8)\r\n\r\n # Since OpenCV uses BGR, convert the color first\r\n color = tuple(reversed(rgb_color))\r\n # Fill image with color\r\n image[:] = color\r\n\r\n return image", "def _build_final_image(self, image):\n if self._overlay_image:\n overlay = Image.open(self._overlay_image).convert('RGBA')\n overlay, _, _ = self._image_resize_keep_ratio(overlay, self.width, self.height, True)\n image = Image.alpha_composite(image.convert('RGBA'), overlay)\n image = image.convert('RGB')\n return image", "def test_write_rgba(tmp_path):\n\n temp_file = str(tmp_path / 
\"temp.webp\")\n\n pil_image = Image.new(\"RGBA\", (10, 10), (255, 0, 0, 20))\n pil_image.save(temp_file)\n\n if _webp.WebPDecoderBuggyAlpha():\n return\n\n with Image.open(temp_file) as image:\n image.load()\n\n assert image.mode == \"RGBA\"\n assert image.size == (10, 10)\n assert image.format == \"WEBP\"\n image.load()\n image.getdata()\n\n # Early versions of WebP are known to produce higher deviations:\n # deal with it\n if _webp.WebPDecoderVersion() <= 0x201:\n assert_image_similar(image, pil_image, 3.0)\n else:\n assert_image_similar(image, pil_image, 1.0)", "def convert_image(image_path):\n with Image.open(image_path) as img:\n new_image = img.convert(\"RGBA\" if img.format == \"PNG\" else \"RGB\")\n new_image.save(image_path)", "def create_test_image(dirname, array, normalization=None):\n filename = str(dirname / 'tmp.tif')\n create_test_file(filename, array)\n satellite = 'quickbird'\n image = Image(filename, satellite, normalization_parameters=normalization)\n return image", "def export_image(self, name):\n\t\tred = Color(\"red\")\n\t\tblue = Color(\"blue\")\n\t\twhite = Color(\"white\")\n\t\tblack = Color(\"black\")\n\t\tgold = Color(\"gold\")\n\t\trgb_gold = []\n\t\tfor part in gold.rgb:\n\t\t\tpart = part * 255\n\t\t\trgb_gold.append(part)\n\t\trgb_black = []\n\t\tfor part in black.rgb:\n\t\t\tpart = part * 255\n\t\t\trgb_black.append(part)\n\t\trgb_white = []\n\t\tfor part in white.rgb:\n\t\t\tpart = part * 255\n\t\t\trgb_white.append(part)\n\t\tcolours = list(red.range_to(blue, int(self.grains)))\n\t\timage = np.zeros([self.space.shape[1],self.space.shape[0], 3], dtype=np.uint(8))\n\t\tfor grain in range(self.grains+1):\n\t\t\trgb = []\n\t\t\tfor part in colours[grain-1].rgb:\n\t\t\t\tpart = part * 255\n\t\t\t\trgb.append(part)\n\t\t\tfor cell in self.space.flat:\n\t\t\t\tif cell.state == grain:\n\t\t\t\t\tx,y = cell.find_id()\n\t\t\t\t\timage[x,y] = rgb\n\t\t\t\tif cell.state == 999:\n\t\t\t\t\tx,y = cell.find_id()\n\t\t\t\t\timage[x,y] = rgb_black\n\t\t\t\tif cell.state == 500:\n\t\t\t\t\tx,y = cell.find_id()\n\t\t\t\t\timage[x,y] = rgb_gold\n\t\timg = Image.fromarray(image.astype('uint8'))\n\t\timg = img.resize((self.space.shape[1]*3,self.space.shape[0]*3))\n\t\timg.save('./static/temp/'+str(name)+'.png')", "def make_image(self, path):\n\t\treturn None", "def create_blank(width, height, rgb_color=(0, 0, 0)):\n # Create black blank image\n image = np.zeros((height, width, 3), np.uint8)\n\n # Since OpenCV uses BGR, convert the color first\n color = tuple(reversed(rgb_color))\n # Fill image with color\n image[:] = color\n\n return image", "def create_blank(width, height, rgb_color=(0, 0, 0)):\n # Create black blank image\n image = np.zeros((height, width, 3), np.uint8)\n\n # Since OpenCV uses BGR, convert the color first\n color = tuple(reversed(rgb_color))\n # Fill image with color\n image[:] = color\n\n return image", "def PMakeImage (inUV, outImage, channel, doBeam, doWeight, err):\n ################################################################\n # Checks\n if not UV.PIsA(inUV):\n raise TypeError,\"inUV MUST be a Python Obit UV\"\n if not Image.PIsA(outImage):\n print \"Actually \",outImage.__class__\n raise TypeError,\"outImage MUST be a Python Obit Image\"\n if not OErr.OErrIsA(err):\n raise TypeError,\"err MUST be an OErr\"\n #\n Obit.ImageUtilMakeImage(inUV.me, outImage.me, channel, doBeam, doWeight, err.me)\n if err.isErr:\n OErr.printErrMsg(err, \"Error creating Image from UV data\")\n # end PMakeImage", "def encode_image(text_to_encode, 
template_image=\"images/template_image.jpg\"):\n raw_image = Image.open(template_image)\n hidden_message = write_text(text_to_encode,raw_image.size)\n\n x_size = raw_image.size[0]\n y_size = raw_image.size[1]\n\n red_channel = raw_image.split()[0]\n green_channel = raw_image.split()[1]\n blue_channel = raw_image.split()[2]\n # get all channels from raw_image\n encoded_image = Image.new(\"RGB\", raw_image.size)\n\n for x in range(x_size):\n for y in range(y_size):\n hidden_pixel = hidden_message.getpixel((x, y))\n\n encoded_red_pixel = red_channel.getpixel((x, y))\n if (hidden_pixel == (255, 255, 255)):\n red_channel_pixel = red_channel.getpixel((x, y))\n red_binary = bin(red_channel_pixel)\n red_binary = red_binary[:-1] + \"1\"\n # change the last binary value\n encoded_red_pixel = int(red_binary,2)\n # covert binary back to int\n\n else: # if pixel doesnt = white, that means theres no value, set last binary = 0\n red_channel_pixel = red_channel.getpixel((x, y))\n red_binary = bin(red_channel_pixel)\n red_binary = red_binary[:-1] + \"0\"\n encoded_red_pixel = int(red_binary,2)\n\n encoded_rgb = (encoded_red_pixel,\n green_channel.getpixel((x, y)),\n blue_channel.getpixel((x, y)))\n\n encoded_image.putpixel((x, y), encoded_rgb)\n encoded_image.save(\"images/hidden_message_image.png\")", "def recreate_image(x):\n reverse_mean = [-0.485, -0.456, -0.406]\n reverse_std = [1/0.229, 1/0.224, 1/0.225]\n in_channel = x.shape[-1]\n recreated_im = copy.copy(x) # C, H, W\n if in_channel == 3:\n for c in range(in_channel):\n recreated_im[:, :, c] /= reverse_std[c]\n recreated_im[:, :, c] -= reverse_mean[c]\n elif in_channel == 1:\n recreated_im[:, :, 0] /= reverse_std[1]\n recreated_im[:, :, 0] -= reverse_mean[1]\n recreated_im[recreated_im > 1] = 1\n recreated_im[recreated_im < 0] = 0\n recreated_im = np.round(recreated_im * 255)\n\n recreated_im = np.uint8(recreated_im) # H, W, C\n return recreated_im", "def newimagefromimage(self, *args, **kwargs):\n return _image.image_newimagefromimage(self, *args, **kwargs)", "def dwd_RGB_12_12_1_N(self, backup_orig_data=False):\n self.check_channels(0.635, 0.85, \"HRV\", 10.8)\n\n if not self._dwd_channel_preparation([0.635, 0.85, \"HRV\", 10.8],\n backup_orig_data=backup_orig_data):\n return None\n\n img_type = self._dwd_get_image_type()\n if img_type is None:\n return None\n\n # get combination of HRV and VIS008 channel data\n hrvc_chn = self._dwd_get_hrvc_channel()\n\n if img_type == IMAGETYPES.DAY_ONLY:\n img = self._dwd_create_RGB_image(\n (hrvc_chn.data, hrvc_chn.data, self[0.635].data),\n ((0, 100),\n (0, 100),\n (0, 100)))\n img.enhance(gamma=(1.3, 1.3, 1.3))\n merge_masks(img)\n return img\n\n if img_type == IMAGETYPES.NIGHT_ONLY:\n img = self._dwd_create_RGB_image(\n (self[10.8].data, self[10.8].data, self[10.8].data),\n ((40, -87.5),\n (40, -87.5),\n (40, -87.5)))\n return img\n\n if img_type == IMAGETYPES.DAY_NIGHT:\n alpha_data = self._dwd_get_day_night_alpha_channel().data\n # create day image\n day_img = self._dwd_create_RGB_image(\n (hrvc_chn.data, hrvc_chn.data, self[0.635].data, alpha_data),\n ((0, 100),\n (0, 100),\n (0, 100),\n (0, 255)))\n day_img.enhance(\n inverse=(False, False, False, True), gamma=(1.3, 1.3, 1.3, 1.0))\n # create night image\n night_img = self._dwd_create_RGB_image(\n (self[10.8].data, self[10.8].data, self[10.8].data, alpha_data),\n ((40, -87.5),\n (40, -87.5),\n (40, -87.5),\n (0, 255)))\n # blend day over night\n night_img.blend(day_img)\n # remove alpha channels\n night_img.convert(\"RGB\")\n\n return night_img\n\n 
return None", "def upscale_pre(user_select):\n global result_img\n global pixel_buff\n print('=======================================')\n print('Starting upscale...')\n input_img_x, input_img_y = working_img.size\n working_img_mode = working_img.mode\n output_img = Image.new(working_img_mode, (input_img_x*2, input_img_y*2))\n outputx, outputy = output_img.size\n print(f'outputx set:{outputx} outputy set:{outputy}')\n if working_img_mode == \"RBGA\":\n pixel_buff = (0, 0, 0, 0)\n else:\n pixel_buff = (0, 0, 0)\n print('Making space...')\n #Inserting input image pixel and black pixel\n ix = iy = 0\n for oy in range(0, outputy, 2):\n for ox in range(0, outputx):\n if ox % 2 != 1:\n output_img.putpixel((ox, oy), working_img.getpixel((ix,iy)))\n ix += 1\n else:\n output_img.putpixel((ox, oy), pixel_buff)\n if ix == input_img_x:\n ix = 0\n #print(f'pixel:{ix, iy} input:{working_img.getpixel((ix,iy))} / pixel:{ox, oy} output:{output_img.getpixel((ox,oy))}')\n iy += 1\n #Inserting Black line\n ix = iy = 0\n for oy in range(1, outputy, 2):\n for ox in range(0, outputx):\n output_img.putpixel((ox, oy), pixel_buff)\n #print(f'pixel:{ix, iy} input:{working_img.getpixel((ix,iy))} / pixel:{ox, oy} output:{output_img.getpixel((ox,oy))}')\n return output_img", "def convert_to_jpg(raw_file):\n match = re.match('(\\d+)x(\\d+)x(\\d+)x(\\d+)_(\\w+)', raw_file)\n\n if match:\n # print(match.group(1))\n # print(match.group(2))\n # print(match.group(3))\n # print(match.group(4))\n # print(match.group(5))\n x = int(match.group(1))\n y = int(match.group(2))\n bpp = int(match.group(3))\n dimension = int(match.group(4))\n filename = match.group(0)\n\n rawData = open(raw_file, 'rb').read()\n imgSize = (x, y)\n # Use the PIL raw decoder to read the data.\n # the 'F;16' informs the raw decoder that we are reading\n # a little endian, unsigned integer 16 bit data.\n # img = Image.fromstring('L', imgSize, rawData, 'raw', 'F;32')\n\n img = Image.frombuffer('L', imgSize, rawData, 'raw')\n img = img.rotate(180)\n img = img.transpose(Image.FLIP_LEFT_RIGHT)\n img.save(filename + \".jpg\")", "def bluelogo(original_image,blue,template):\r\n #set the radius of the rounded corners\r\n width, height = template.size\r\n resized_image = original_image.resize((width,height))\r\n \r\n \r\n ###\r\n #create a mask\r\n ###\r\n \r\n #start with transparent mask\r\n rounded_mask = PIL.Image.new('RGBA', (width, height))\r\n drawing_layer = PIL.ImageDraw.Draw(rounded_mask)\r\n \r\n # Overwrite the RGBA values with A=255.\r\n # The 127 for RGB values was used merely for visualizing the mask\r\n rounded_mask.paste(blue, (0,0))\r\n # Uncomment the following line to show the mask\r\n # plt.imshow(rounded_mask)\r\n \r\n # Make the new image, starting with all transparent\r\n \r\n #resizes the image to 200 to 200 for easy pasting\r\n \r\n \r\n result = PIL.Image.new('RGBA', template.size, (0,0,0,0))\r\n result.paste(resized_image, (0,0), mask=rounded_mask)\r\n return result", "def mock_raw_image(x_dim=1024, y_dim=1024, num_channels=3,\n output_path=None, write_image=True):\n\n rand_shape = (x_dim, y_dim, num_channels)\n\n if num_channels != 3:\n raise NotImplementedError(\"mock_raw_image for channels != 3 not yet \"\n \"implemented.\")\n\n img = np.random.random(rand_shape)\n img = np.uint8(img*255)\n\n if write_image:\n image_obj = allen_brain.PIL_Image()\n pil_img = image_obj.fromarray(img, mode=\"RGB\")\n with tf.gfile.Open(output_path, \"w\") as f:\n pil_img.save(f, \"jpeg\")\n\n return img", "def _initialize_rgb_image_to_tensor(self):\n 
norm_transform = transforms.Normalize(self.get_image_mean(), self.get_image_std_dev())\n self._rgb_image_to_tensor_unnormalized = transforms.ToTensor()\n self._rgb_image_to_tensor_normalized = transforms.Compose([transforms.ToTensor(), norm_transform])", "def generate_image(color):\n color_tuple = int_rgb_tuple(color)\n return Image.new('RGB', (500, 500), color=color_tuple)", "def generate_image(self):\n pass", "def GetImage( self, w, h, imdata ):\n global biBitCount\n\n if biBitCount == 32:\n ## 32-bit: BGRA.\n image = Image.frombytes('RGBA', (w, h), imdata, 'raw', 'BGRA', 0, -1)\n elif biBitCount == 24:\n ## 24-bit: BGR.\n image = Image.frombytes('RGB', (w, h), imdata, 'raw', 'BGR', 0, -1)\n ## Fix error screen (background isn't trasparent).\n ## Get max color (background).\n backg = max(image.getcolors(w * h))[1] \n ## Put trasparency where's background.\n image = image.convert('RGBA')\n data = numpy.array(image)\n red, green, blue, alpha = data.T\n areas = (red == backg[0]) & (green == backg[1]) & (blue == backg[2])\n data[areas.T] = (backg[0], backg[1], backg[2], 0)\n image = Image.fromarray(data)\n \n return image", "def convert_and_save_image(image, path):\n img = Image.open(image)\n r, g, b, a = img.split()\n img = Image.merge(\"RGB\", (r, g, b))\n image_name = image.split(\".\")[0].split('/')[-1]\n img.save(f'{path}/{image_name}.bmp')\n img.close()", "def plt_to_img(dummy: any = None, **kwargs) -> PIL.Image.Image:\n return PIL.Image.open(plot_to_file(**kwargs))", "def prepare_image(img):\n img = img.filter(ImageFilter.SMOOTH_MORE)\n img = img.filter(ImageFilter.SMOOTH_MORE)\n if 'L' != img.mode:\n img = img.convert('L')\n return img", "def test_image(filename, x_size=def_x_size, y_size=def_y_size):\n # Create image and loop over all pixels\n im = Image.new(\"RGB\", (x_size, y_size))\n pixels = im.load()\n for i in range(x_size):\n for j in range(y_size):\n x = remap_interval(i, 0, x_size, -1, 1)\n y = remap_interval(j, 0, y_size, -1, 1)\n pixels[i, j] = (random.randint(0, 255), # Red channel\n random.randint(0, 255), # Green channel\n random.randint(0, 255)) # Blue channel\n im.save(filename)", "def img_prep(img, shape=(128, 128)):\n # Resize\n img = resize_volume(img, (shape[0], shape[1]))\n\n img = numpy.multiply(255, _normalize(img)).astype(numpy.uint8)\n\n return img", "def generate_image(filename, x_size=350, y_size=350):\n global timeflag\n timeflag = 0\n\n # Functions for red, green, and blue channels - where the magic happens!\n red_function = build_random_function(13, 15)\n green_function = build_random_function(13, 15)\n blue_function = build_random_function(13,15)\n print \"red_function:\\t\" + str(red_function)+\"\\n\"\n print \"green_function:\\t\" + str(green_function)+\"\\n\"\n print \"blue_function:\\t\" + str(blue_function)+\"\\n\"\n\n # Create image and loop over all pixels\n im = Image.new(\"RGB\", (x_size, y_size))\n pixels = im.load()\n for i in range(x_size):\n for j in range(y_size):\n x = remap_interval(i, 0, x_size, -1, 1)\n y = remap_interval(j, 0, y_size, -1, 1)\n pixels[i, j] = (\n color_map(evaluate_random_function(red_function, x, y)),\n color_map(evaluate_random_function(green_function, x, y)),\n color_map(evaluate_random_function(blue_function, x, y))\n )\n\n im.save(filename)", "def _prepare_image(self, grid):\n grid = np.array(grid, dtype=np.uint8)\n\n width = int(grid.shape[1] * self.scale_percent)\n height = int(grid.shape[0] * self.scale_percent)\n grid = cv2.resize(grid, (width, height), interpolation=cv2.INTER_AREA)\n return grid", "def 
convtoRGB(filename: str):\n fp = Image.open(filename)\n if fp.mode == \"RGB\":\n return 0\n fp = fp.convert(\"RGB\")\n fp.save(filename)\n fp.close()\n return 1", "def test_write_unsupported_mode_PA(tmp_path):\n\n temp_file = str(tmp_path / \"temp.webp\")\n file_path = \"Tests/images/transparent.gif\"\n with Image.open(file_path) as im:\n im.save(temp_file)\n with Image.open(temp_file) as image:\n assert image.mode == \"RGBA\"\n assert image.size == (200, 150)\n assert image.format == \"WEBP\"\n\n image.load()\n image.getdata()\n with Image.open(file_path) as im:\n target = im.convert(\"RGBA\")\n\n assert_image_similar(image, target, 25.0)", "def create_image(pixels, out_path, scaled=False):\n if scaled:\n # e.g. flirIrCamera\n Gmin = pixels[pixels>0].min()\n Gmax = pixels.max()\n scaled_px = (pixels-Gmin)/(Gmax - Gmin)\n plt.imsave(out_path, cm.get_cmap('jet')(scaled_px))\n else:\n # e.g. PSII\n # TODO: Can we make these use same library?\n # TODO: plt.imsave(out_path, pixels)\n Image.fromarray(pixels).save(out_path)", "def image_to_scratch(im, scratch_image_name):\n\tim.save(scratch_image_name, dpi=(200,200))", "def get_blank_image(width: int, height: int, n_channels: int, cval=255) -> np.ndarray:\n if n_channels == 0:\n image = np.zeros((height, width)) + 255\n else:\n image = np.zeros((height, width, n_channels)) + cval\n return image.astype(\"uint8\")", "def _toRgbImage(self, fractal, colors, color_offset):\n soln_real = adjustRange(fractal[0], 0, 127)\n soln_imag = adjustRange(fractal[1], 0, 127)\n iters = adjustRange(fractal[2], 0, 128)\n\n rgb_image = np.array([\n soln_real + iters,\n soln_imag + iters,\n iters\n ]\n ).astype(dtype=np.uint8)\n\n return rgb_image.T", "def process_image(image):\n image = resize(image)\n return image", "def glance_create_new_image(glance, images_location, image_info, image_name_prefix=None):\n # image raw file path\n image_raw_source = image_info['image_raw_source']\n image_file = os.path.join(images_location, image_raw_source)\n\n if not os.path.isfile(image_file):\n logger.warning(\"image raw file:'%s' not found!\", image_file)\n return None\n\n fimg = None\n try:\n fimg = open(image_file, 'rb')\n except Exception:\n logger.error(\"Opening raw image file:'%s' failed\", image_file)\n return None\n\n try:\n # image name\n image_name = image_info['image_name']\n if image_name_prefix:\n image_name = \"{}{}\".format(image_name_prefix, image_name)\n logger.debug(\"image_name: %s\", image_name)\n\n # image min_disk\n if image_info['image_min_disk'] == 'auto':\n # compute the size of the file -> min disk size in GB\n imagesize = os.fstat(fimg.fileno()).st_size\n image_min_disk = (imagesize/1024/1024/1024)+1\n else:\n image_min_disk = image_info['image_min_disk']\n logger.debug(\"image_min_disk: %s\", image_min_disk)\n\n # image min_ram\n image_min_ram = image_info['image_min_ram']\n logger.debug(\"image_min_ram: %s\", image_min_ram)\n\n # image properties (dictionary)\n image_properties = image_info['image_properties']\n logger.debug(\"image_properies: %s\", image_properties)\n\n logger.debug(\"glance image create (private): '%s'\", image_name)\n image = glance.images.create(name=image_name,\n visibility='private',\n disk_format='raw',\n container_format='bare',\n min_disk=int(image_min_disk),\n min_ram=int(image_min_ram))\n logger.debug(\"glance image upload: '%s' -> '%s'\", fimg.name, image_name)\n glance.images.upload(image.id, fimg)\n\n except Exception:\n logger.exception(\"Creating and uploading Glance image '%s' failed\", image_name)\n return 
None\n\n return image", "def _tile_image(self, data):\n image = Image.open(BytesIO(data))\n return image.convert('RGBA')", "def process_image(image):\r\n image = random_brightness(image)\r\n image = crop_image(image)\r\n image = resize(image)\r\n return image", "def test_scale_image_rgb_identity(self):\n scaled = scale_image(self.rgb_image, 1)\n self.assertEqual(self.rgb_image.shape, scaled.shape)", "def _tile_image(self, data):\n image = Image.open(StringIO(data))\n return image.convert('RGBA')", "def newimage(self, infile):\n return _image.image_newimage(self, infile)", "def test_image(filename, x_size=350, y_size=350):\n # Create image and loop over all pixels\n im = Image.new(\"RGB\", (x_size, y_size))\n pixels = im.load()\n for i in range(x_size):\n for j in range(y_size):\n pixels[i, j] = (random.randint(0, 255), # Red channel\n random.randint(0, 255), # Green channel\n random.randint(0, 255)) # Blue channel\n im.save(filename)\n return 'saved'", "def image(self):\r\n\r\n if sys.version < '3':\r\n imageio = StringIO.StringIO(self._image_data)\r\n else:\r\n imageio = StringIO.BytesIO(self._image_data)\r\n\r\n try:\r\n source_image = PILImage.open(imageio)\r\n img = PILImage.new('RGBA', source_image.size, (0, 0, 0, 0))\r\n\r\n if source_image.mode == 'L':\r\n alpha = source_image.split()[0]\r\n transparency = source_image.info.get('transparency')\r\n mask = PILImage.eval(alpha, lambda a: 0 if a == transparency else 255)\r\n img.paste(source_image, (0, 0), mask=mask)\r\n else:\r\n img.paste(source_image, (0, 0))\r\n except IOError, e:\r\n raise PILUnavailableError(e.args[0].split()[1])\r\n finally:\r\n imageio.close()\r\n\r\n self.original_width, self.original_height = img.size\r\n\r\n # Crop the image searching for the smallest possible bounding box\r\n # without losing any non-transparent pixel.\r\n # This crop is only used if the crop flag is set in the config.\r\n if self.config['crop']:\r\n img = img.crop(img.split()[-1].getbbox())\r\n return img", "def dwd_RGB_12_12_9i_N(self, backup_orig_data=False):\n self.check_channels(\"HRV\", 0.85, 10.8, 3.75, 12.0)\n\n if not self._dwd_channel_preparation([\"HRV\", 0.85, 10.8, 3.75, 12.0],\n backup_orig_data=backup_orig_data):\n return None\n\n # get combination of HRV and VIS008 channel data\n hrvc_chn = self._dwd_get_hrvc_channel()\n\n img_type = self._dwd_get_image_type()\n if img_type is None:\n return None\n\n if img_type == IMAGETYPES.DAY_ONLY:\n img = self._dwd_create_RGB_image(\n (hrvc_chn.data, hrvc_chn.data, self[10.8].data),\n ((0, 100),\n (0, 100),\n (323 - CONVERSION, 203 - CONVERSION)))\n merge_masks(img)\n return img\n\n if img_type == IMAGETYPES.NIGHT_ONLY:\n img = self._dwd_create_RGB_image(\n (self[3.75].data, self[10.8].data, self[12.0].data),\n ((40, -87.5),\n (40, -87.5),\n (40, -87.5)))\n img.enhance(stretch=\"histogram\")\n return img\n\n if img_type == IMAGETYPES.DAY_NIGHT:\n alpha_data = self._dwd_get_day_night_alpha_channel().data\n # create day image\n day_img = self._dwd_create_RGB_image(\n (hrvc_chn.data, hrvc_chn.data, self[10.8].data, alpha_data),\n ((0, 100),\n (0, 100),\n (323 - CONVERSION, 203 - CONVERSION),\n (0, 255)))\n day_img.enhance(inverse=(False, False, False, True))\n # create night image\n night_img = self._dwd_create_RGB_image(\n (self[3.75].data, self[10.8].data, self[12.0].data, alpha_data),\n ((40, -87.5),\n (40, -87.5),\n (40, -87.5),\n (0, 255)))\n night_img.enhance(stretch=\"histogram\")\n # blend day over night\n night_img.blend(day_img)\n # remove alpha channels before saving\n 
night_img.convert(\"RGB\")\n\n return night_img\n\n return None", "def guitar(target_size: Optional[Tuple[int, int]] = None, rgb: bool = True) -> Tensor:\n return imread(HERE+'guitar.jpg', target_size=target_size, rgb=rgb)", "def convertColorSpace(\n self,\n img, # Image in some color space\n srcColorSpace = 'BGR', # Source color space\n tgtColorSpace = 'RGB', # Traget color space\n ):\n\n if srcColorSpace == tgtColorSpace:\n return img\n\n if srcColorSpace == 'BGR':\n img_bgr = img\n elif srcColorSpace == 'RGB':\n img_bgr = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)\n elif srcColorSpace == 'HSV':\n img_bgr = cv2.cvtColor(img, cv2.COLOR_HSV2BGR)\n elif srcColorSpace == 'HLS':\n img_bgr = cv2.cvtColor(img, cv2.COLOR_HLS2BGR)\n elif srcColorSpace == 'LUV':\n img_bgr = cv2.cvtColor(img, cv2.COLOR_LUV2BGR)\n elif srcColorSpace == 'YUV':\n img_bgr = cv2.cvtColor(img, cv2.COLOR_YUV2BGR)\n elif srcColorSpace == 'YCrCb':\n img_bgr = cv2.cvtColor(img, cv2.COLOR_YCrCb2BGR)\n else:\n raise Exception(\"Incorrect color space: {}\".format(srcColorSpace))\n\n if tgtColorSpace == 'BGR':\n img_tgt = img_bgr\n elif tgtColorSpace == 'RGB':\n img_tgt = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB)\n elif tgtColorSpace == 'HSV':\n img_tgt = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2HSV)\n elif tgtColorSpace == 'HLS':\n img_tgt = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2HLS)\n elif tgtColorSpace == 'LUV':\n img_tgt = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2LUV)\n elif tgtColorSpace == 'YUV':\n img_tgt = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2YUV)\n elif tgtColorSpace == 'YCrCb':\n img_tgt = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2YCrCb)\n else:\n raise Exception(\"Incorrect color space: {}\".format(tgtColorSpace))\n\n return img_tgt", "def handle_image(name):\n from_path = args.from_dir + name\n to_path = args.to_dir + name\n\n if width != args.width:\n subprocess.call('jpegtran -rotate 90 -grayscale ' + from_path + ' > ' \\\n + to_path, shell=True)\n else:\n subprocess.call('jpegtran -grayscale ' + from_path + ' > ' + to_path,\\\n shell=True)", "def generate_art_3(filename, x_size=350, y_size=350, t_size=30):\n # Functions for red, green, and blue channels - where the magic happens!\n r_lb = random.randint(1, 5)\n g_lb = random.randint(1, 10)\n b_lb = random.randint(1, 5)\n red_function = build_random_function_3(r_lb, r_lb+1)\n green_function = build_random_function_3(g_lb, g_lb+1)\n blue_function = build_random_function_3(b_lb, b_lb+1)\n\n # Create image and loop over all pixels\n im = Image.new(\"RGB\", (x_size, y_size))\n pixels = im.load()\n for time in range(t_size):\n for i in range(x_size):\n for j in range(y_size):\n t = remap_interval(time, 0, t_size, -1, 1)\n x = remap_interval(i, 0, x_size, -1, 1)\n y = remap_interval(j, 0, y_size, -1, 1)\n pixels[i, j] = (\n color_map(eval_r_func_3(red_function, x, y, t)),\n color_map(eval_r_func_3(green_function, x, y, t)),\n color_map(eval_r_func_3(blue_function, x, y, t))\n )\n str_num = '0' * (5 - len(str(time))) + str(time)\n print(str_num)\n im.save(filename + str_num + '.png')\n return 'saved'", "def __init__(self, *args):\n _itkRGBAPixelPython.itkRGBAPixelUS_swiginit(self,_itkRGBAPixelPython.new_itkRGBAPixelUS(*args))" ]
[ "0.67979735", "0.6447679", "0.6425846", "0.6331001", "0.6304116", "0.6232688", "0.6175286", "0.60538095", "0.6020988", "0.6015573", "0.59706867", "0.59613866", "0.5947222", "0.5861616", "0.5856823", "0.5853021", "0.5808916", "0.5805031", "0.57975703", "0.5789881", "0.57089484", "0.57057846", "0.57044923", "0.56997156", "0.5680265", "0.5663609", "0.56351167", "0.5620828", "0.56049573", "0.5595417", "0.55769134", "0.55485445", "0.55423766", "0.55403054", "0.551625", "0.5512906", "0.54829895", "0.5481677", "0.5474257", "0.5474257", "0.54714406", "0.5448405", "0.5419939", "0.5419186", "0.5418111", "0.5410817", "0.54072267", "0.5395995", "0.53903884", "0.5381219", "0.53811103", "0.5376679", "0.5371874", "0.53500116", "0.5340348", "0.53400546", "0.53226477", "0.53223884", "0.53223884", "0.53137153", "0.5309274", "0.52991027", "0.5289104", "0.52798295", "0.5271648", "0.52682257", "0.52630246", "0.52625024", "0.5260942", "0.5257261", "0.52538484", "0.5244438", "0.5244319", "0.52425736", "0.5236653", "0.52205056", "0.5218714", "0.5215547", "0.52092904", "0.5207537", "0.5201432", "0.5198489", "0.51975584", "0.5195038", "0.51897943", "0.51883286", "0.5183609", "0.5177018", "0.5167128", "0.5157733", "0.5154942", "0.5149551", "0.51485217", "0.5147033", "0.51430243", "0.5134909", "0.5127007", "0.5124831", "0.5119758", "0.5119589" ]
0.70588124
0
Returns image data as a ``ContentFile``.
def _create_content_file(self, content): io = BytesIO() content.save(io, self._normalize_format(), quality=self.quality) return ContentFile(io.getvalue())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getImgContentFile(img):\n format, imgstr = img.split(';base64,')\n ext = format.split('/')[-1]\n file = ContentFile(base64.b64decode(imgstr), name='temp.' + ext)\n return file", "def contents(self):\n if not self._contents:\n if self._path:\n # Read file into memory so we don't run out of file descriptors\n f = open(self._path, \"rb\")\n self._contents = f.read()\n f.close()\n elif self._pil_image:\n # Write PIL image to string\n f = StringIO()\n self._pil_image.save(f, self.format)\n self._contents = f.getvalue()\n return self._contents", "def content(self) -> FrameworkDataImageContentInterface:\n return self._content", "def get_file_content(self):\n s = StringIO.StringIO()\n\n s.write(self.get_header())\n s.write(self.get_content())\n\n return s.getvalue()", "def open(self):\n return ContentFile(self.get_contents())", "def generate(self, content):\n\n tmp = self._create_tmp_image(content)\n rendered = self._render(tmp)\n return self._create_content_file(rendered)", "def data(self):\n return self.image", "def get_dicom_file_content(self) -> bytes:\n return self.orthanc.get_instance_file(self.identifier)", "def _get_file_content(self):\n with open(self.file_name, mode='rb') as file:\n self.file_content = file.read()", "def get_image_data(self):\n raise NotImplementedError(str(type(self)) + 'does not'\n 'implement get_image.')", "def file_data(self):\n return self.read(self.file)", "def image(request):\n response = HttpResponse(request.content.data, content_type=request.mime_type)\n filename = re.sub(\n r'[^\\w\\.]', '_', request.patch.filename.encode('ascii', 'replace'))\n response['Content-Disposition'] = 'attachment; filename=\"%s\"' % filename\n response['Cache-Control'] = 'no-cache, no-store'\n return response", "def image(request):\n response = HttpResponse(request.content.data, content_type=request.mime_type)\n filename = re.sub(\n r'[^\\w\\.]', '_', request.patch.filename.encode('ascii', 'replace'))\n response['Content-Disposition'] = 'attachment; filename=\"%s\"' % filename\n response['Cache-Control'] = 'no-cache, no-store'\n return response", "def to_internal_value(self, data):\n if isinstance(data, str) and data.startswith('data:image'):\n # Found image is encoded, and must be decoded\n format, imgstr = data.split(';base64,')\n ext = format.split('/')[-1] # Extract file extension\n id = uuid.uuid4()\n data = ContentFile(base64.b64decode(imgstr), name = id.urn[9:] + '.' 
+ ext)\n return super(Base64ImageField, self).to_internal_value(data)", "def data(self) -> List[JpegImageFile]:\n return self._data", "def get_image():\n response = send_file(tempFileObj, as_attachment=True, attachment_filename='marked_image.png')\n return response", "def get_file_content(*args, **kwargs):\n return get_file_content_async(*args, **kwargs).get_result()", "def get_data(self, get_saved_image, stream=False):\n\n # reads the image's data and returns\n return get_saved_image(self.short_hash,\n stream=stream)", "def get_image(self, data_base):\n cursor = data_base.cursor(dictionary=True)\n cursor.execute(f\"SELECT data FROM image WHERE id = '{self.image_id}'\")\n image = cursor.fetchone()\n cursor.close()\n return b64encode(image['data']).decode('utf-8')", "def get_content(self, filename):\n f_id = self.face.FACES.files.find_one({ \"filename\" : filename }, { \"_id\" : 1 })\n return self.face_fs.get(f_id['_id']).read()", "def get_image(self):\n return self.process_raw_image(self.get_raw_image())", "def value_from_datadict(self, data, files, name):\n cover_image_binascii = data[self.cover_image_name]\n cover_image_binascii = cover_image_binascii[len(IMAGE_HEADER):]\n cover_image_binary = a2b_base64(cover_image_binascii) \n print('-------')\n print(cover_image_binary)\n print('-------')\n \n image_name = data.get(\n self.cover_image_metadata_name, 'default.jpg')\n content_type, encoding = guess_type(image_name)\n\n # create a file\n named_temp_file = NamedTemporaryFile() \n size = named_temp_file.write(cover_image_binary)\n image = UploadedFile(\n named_temp_file,\n name,\n image_name,\n content_type,\n size,\n encoding,\n )\n return image", "def data(self):\n return self._img", "def read_content(self):\n return self.file.read()", "def _get_image_content(image_url):\n response = requests.get(image_url)\n return response.content", "def to_internal_value(self, data):\n try: # ToDo penetrate in order test if it has any security flaws\n decoded = base64.b64decode(data)\n mime_type = magic.from_buffer(decoded, mime=True)\n file_ext = mimetypes.guess_extension(mime_type)\n except TypeError:\n raise serializers.ValidationError(\n _('Not a valid base64 file')\n )\n\n if file_ext not in settings.VALID_FILE_EXTENSIONS:\n raise serializers.ValidationError(\n _('Forbidden file extension')\n )\n\n file_name = \"{0}{1}\".format(uuid.uuid4(), file_ext)\n data = ContentFile(decoded, name=file_name)\n return data", "def sample_file_content(self) -> DownloadableDataFileContentInterface:\n return self._sample_file_content", "def contents(self,imagefile=None,icat_fallback=True):\n if imagefile is None : imagefile=self.imagefile\n if imagefile is None : raise ValueError,\"imagefile is unknown\"\n if self.encrypted() : raise ValueError,\"Cannot generate content for encrypted files\"\n if self.compressed() or imagefile.name.endswith(\".aff\") or imagefile.name.endswith(\".E01\"):\n if icat_fallback:\n # \n # For now, compressed files rely on icat rather than python interface\n #\n offset = safeInt(self.volume.offset)\n block_size = safeInt(self.volume.block_size)\n if block_size==0: block_size = 512\n inode = self.inode()\n if inode :\n block_size = 512\n cmd = ['icat','-b',str(block_size),'-o',str(offset/block_size),imagefile.name,str(inode)] \n (data,err) = Popen(cmd, stdout=PIPE,stderr=PIPE).communicate()\n # Check for an error\n if len(err) > 0 : \n raise ValueError, \"icat error: \"+\" \".join(cmd)\n return data\n else :\n raise ValueError, \"Inode missing from file in compressed format.\"\n raise 
ValueError,\"Cannot read raw bytes in compressed disk image\"\n res = []\n for run in self.byteruns():\n res.append(self.content_for_run(run=run,imagefile=imagefile))\n return \"\".join(res)", "def __init__(self, file, name):\n if isinstance(file, ContentFile):\n image_data = file\n else:\n image_data = ContentFile(file.read(), name=name)\n self.image_data = image_data\n file.close()", "def image(self):\n # TODO: make sure this method works for png, gif, tiff\n if self.has_metadata:\n self.extract_metadata()\n tempdir_path = self.make_tempdir()\n tempfile_path = os.path.join(tempdir_path, self.filename)\n warnings.simplefilter('error', Image.DecompressionBombWarning)\n try: # Do image conversions\n img_in = Image.open(self.src_path)\n img_out = Image.frombytes(img_in.mode, img_in.size, img_in.tobytes())\n img_out.save(tempfile_path)\n self.src_path = tempfile_path\n except Exception as e: # Catch decompression bombs\n # TODO: change this from all Exceptions to specific DecompressionBombWarning\n self.add_error(e, \"Caught exception (possible decompression bomb?) while translating file {}.\".format(self.src_path))\n self.make_dangerous()\n self.add_file_string('Image file')\n self.set_property('processing_type', 'image')", "def get_dicom_file_content(self) -> bytes:\n return self.client.get_instances_id_file(self.id_)", "def file(self):\n # ImageField (both django's and factory_boy's) require PIL.\n # Try to import it along one of its known installation paths.\n try:\n from PIL import Image as PILimage\n except ImportError:\n import Image as PILimage\n\n thumb = PILimage.new(\"RGB\", (100, 100), \"blue\")\n thumb_io = io.BytesIO()\n thumb.save(thumb_io, format=\"JPEG\")\n\n return File(thumb_io, name=self.original_filename)", "def image(self):\n return self.chunks.get('image')", "def get_content(url):\n img=requests.get(url).content\n return img", "def getImage():\n\tsaveimage = request.files['image']\n\n\timagename = saveimage.filename\n\tsaveimage.save(os.path.join(app.config['uploadFolder'], imagename))\n\tfiles = {'image': open(imagename, 'rb')}\n\tjsonfile = getData(files)\t\t\n\treturn render_template('index.html', json_data = jsonfile)", "def getContent(self):\r\n if self.virtual_res:\r\n return EmptyFileObject()\r\n return self.nibbler.open_file(self.path)", "def image(self):\n return self.__getattr__(\"image\", _returnType=\"value\", _location=\"remote\")", "def getImage( self ):\n return self.__image;", "def get_file_content(self, upload_id: str, file_path: str, token: str) \\\n -> Tuple[IO[bytes], dict]:\n return self.request_file(f'/{upload_id}/{file_path}/content', token)", "def get_content_data(file: str) -> str:\n file = file[1:]\n file_extension = file.split(\".\")\n\n if file_extension[-1] == \"jpg\":\n content_type = \"Content-Type: image/jpeg\"\n elif file_extension[-1] == \"png\" or file_extension[-1] == \"gif\":\n content_type = \"Content-Type: image/\" + file_extension[-1]\n else:\n content_type = \"Content-Type: text/\" + file_extension[-1]\n\n content_length = \"Content-Length: \" + str(os.stat(file).st_size)\n\n return content_length + \"\\r\\n\" + content_type", "def photo(request, entry_uuid):\n resp = HttpResponse()\n metadata, photo = download_from_bucket(entry_uuid, GeoPostBase.imageBucket)\n resp.write(base64.b64encode(photo))\n resp['Content-Type'] = metadata['contentType']\n return resp", "def data_for_src(self, file_id):\n data, metadata = self.load(file_id, True)\n return \"data:image/gif;base64,%s\" % data.encode('base64')", "def content(self) -> str:\n 
if self._content is None:\n with open(self.name, mode='r', encoding='utf-8') as f:\n self._content = f.read()\n return self._content", "def read(self):\n with self.lock:\n return self.image", "def get_content(self, file_path: str) -> Tuple[defaultdict, str]:\n tags = self.__metadata__(file_path)\n html = self.__html__(tags)\n tags[\"file_path\"] = f\"images/gallery/{file_path.split(os.path.sep)[-1]}\"\n self.image_paths.append(file_path)\n return tags, html", "def get_raw_data(self):\n return self.content", "def imagefile(self):\n return os.path.join(self.__folder, self.__name + '.jpg')", "def raw_image(self):\n return self._image", "def getimage(self):", "def get_image(self):\n return self.image", "def get_image(self):\n return self.image", "def get_image(self):\n return self.image", "def content(self):\n if hasattr(self.raw_content, 'read'):\n if hasattr(self.raw_content, 'seek'):\n self.raw_content.seek(0)\n data = self.raw_content.read()\n else:\n data = self.raw_content\n\n if isinstance(data, str):\n data = data.encode(\"utf-8\")\n return data", "def read_data(reader: UFOReader, filename: str) -> bytes:\n return reader.readImage(filename) # type: ignore", "def GetImage(self):\r\n\r\n return self._image", "def get_content(self, file_path:str):\n raise NotImplementedError", "def test_get_file_binary_content(self):\n content = image_helper.get_file_binary_content(self.subject)\n\n self.assertGreater(len(content), 0)\n\n with open(self.subject, \"rb\") as f:\n original_content = f.read()\n\n self.assertEqual(content, original_content)", "def image(self) -> object:\n return self._image", "def data(self):\n return self.content", "def get_content(self) -> Content:\n pass", "def contents(self):\n if not self._contents:\n if self._path:\n # Read file into memory so we don't run out of file descriptors\n f = open(self._path, \"rb\")\n self._contents = f.read()\n f.close()\n return self._contents", "def get_file(self, path):\n file = self.get('data_request?id=file&parameters=%s' % path)\n return file", "def open(self):\n if isinstance(self.raw_content, (bytes, str)):\n return BytesIO(self.content)\n fileobj = self.raw_content.open()\n\n # TODO remove when Django 1 is no longer supported\n if fileobj is None:\n assert not isinstance(self.raw_content, BlobMeta), repr(self)\n # work around Django 1.11 bug, fixed in 2.0\n # https://github.com/django/django/blob/1.11.15/django/core/files/base.py#L131-L137\n # https://github.com/django/django/blob/2.0/django/core/files/base.py#L128\n return self.raw_content\n\n return fileobj", "def deserialize_image(self, data, give_file_name):\r\n # Generate a random 8-character name\r\n # name = \"img_\" + self.generate_random_name() + \".png\"\r\n name = give_file_name + \".png\"\r\n file_path = os.path.join(self.temp_dir, name)\r\n img = Image.frombytes(data['mode'], data['size'], data['pixels'])\r\n img.save(file_path)\r\n return file_path", "def make_content(museum, image_url, image_name, image_artist, filename):\n message = \"From the \" + museum\n # if image_name is not None:\n # message += \" with title \" + image_name\n if image_artist is not None:\n message += \" by \" + image_artist\n\n r = requests.get(image_url)\n if r.status_code == 200:\n with open(filename, mode=\"wb\") as image:\n for chunk in r:\n image.write(chunk)\n else:\n return None\n return (message)", "def get_image ( self, object ):\n return self.image", "def image(self):\n return self._image", "def content(self):\n if self._content is not None:\n return self._content\n\n binaries = 
self._kdb.kdb.obj_root.Meta.Binaries\n xpath = './Binary[@ID=\"{}\"]'.format(self.ref)\n binary = binaries.xpath(xpath)[0]\n result = b64decode(binary.text)\n\n if (binary.attrib['Compressed']):\n result = zlib.decompress(result, 16+zlib.MAX_WBITS)\n\n self._content = result\n return self._content", "def open(self):\n return File(open(self.get_path()), \"rb\")", "def image_data(verbose=False):\n # This is a principled use of the `global` statement; don't lint me.\n global _IMAGE_DATA # pylint: disable=global-statement\n if _IMAGE_DATA is None:\n if verbose:\n logger.info(\"--- Downloading image.\")\n with contextlib.closing(urllib.request.urlopen(IMAGE_URL)) as infile:\n _IMAGE_DATA = infile.read()\n return _IMAGE_DATA", "def save_content(obj):\n filename, _, _, content = obj\n storage = get_storage(config['Instance']['content_dir'])\n return storage.save(filename, ContentFile(content))", "def get(self, img):\n\n return send_from_directory(\"images\", img)", "def image_to_fp(image, image_format):\n # type: (Any, str) -> IO[bytes]\n fp = io.BytesIO()\n image.save(fp, format=image_format) # save the content to fp\n fp.seek(0)\n return fp", "def __get_image_file(self):\n if file_utils.file_exists(self.image_file_path):\n return open(self.image_file_path, 'r')\n else:\n if not os.path.exists(self.download_path):\n os.makedirs(self.download_path)\n logger.info('Found existing image file')\n return self.__download_image_file()", "def data_file(self, path):\n return open(os.path.join(self.resource_path, path)).read()", "def get_content(self):\n return self.content", "def _blob(self):\n return self._load_blob", "def download(self):\n data = urllib.urlopen(self.remoteurl).read()\n s = StringIO.StringIO(data)\n return Image.open(s)", "def image(self):\n return self._image", "def image(self):\n return self._image", "def image(self):\n return self._image", "def image(self):\n return self._image", "def image(self):\n return self._image", "def image(self):\n return self._image", "def image(self):\n return self.datasource.data[\"values\"]", "def getImage(filename):\n if not fs.exists(filename=filename):\n raise Exception(\"mongo file does not exist! 
{0}\".format(filename)) \n im_stream = fs.get_last_version(filename)\n im = Image.open(im_stream)\n img_io = BytesIO() \n im.save(img_io, 'JPEG', quality=70)\n img_io.seek(0)\n return img_io\n \n #return serve_pil_image(im)\n\n #d = ObjectId('5ad204a5c2eb5d031a7fd7e5') \n #connection = MongoClient()\n #database = connection['image']\n # create a new gridfs object.\n #fs = gridfs.GridFS(database)\n #outputdata = fs.get(d).read()\n #decode=outputdata#.decode()\n #return decode", "def image(self) -> PIL.Image.Image:\n try:\n data = io.BytesIO(self.data)\n return PIL.Image.open(data)\n except Exception: # Image data is incorrect, fix as a simple transparent image\n return PIL.Image.new('RGBA', Image.MAX_IMAGE_SIZE)", "def read_file_content_as_base64(file_path: str) -> str:\n file = open(file_path, \"rb\")\n image_base64 = base64.b64encode(file.read()).decode(\"utf-8\")\n file.close()\n\n return image_base64", "def image(request, ef_id):\n ef = get_object_or_404(ExamFile, id=ef_id)\n thumb = get_thumbnail_path(ef)\n daimage = file(thumb, 'rb').read()\n return HttpResponse(content=daimage, mimetype='image/png')", "def get_image(self):\n return self.camera.getImage()", "def get_upload_content(self, upload_id: str, token: str) \\\n -> Tuple[IO[bytes], dict]:\n return self.request_file(f'/{upload_id}/content', token)", "def file(self):\n file = BytesIO(b\"the_file\")\n file.name = self.name\n return File(file)", "def _create_tmp_image(self, content):\n\n content.seek(0)\n image = Image.open(content)\n if self.force_rgb and image.mode not in ('L', 'RGB', 'RGBA'):\n image = image.convert('RGB')\n return image", "def getContent(self) -> object:\n ...", "def get_file(self):\n return self.theFile", "async def async_image(self) -> bytes | None:\n image_path = Path(__file__).parent / self._image_filename\n return await self.hass.async_add_executor_job(image_path.read_bytes)", "def storage_get_file(self, group='', key=''):\n try:\n obj = None\n content = None\n if key != '':\n if self.config['type'] == 's3':\n obj = self.s3.Object(bucket_name=self.bucket, key='corr-{0}s/{1}'.format(group,key))\n res = obj.get()\n content = res['Body'].read()\n elif self.config['type'] == 'filesystem':\n with open('{0}/corr-{1}s/{2}'.format(self.storage_path, group, key), \"rb\") as obj:\n content = obj.read()\n else:\n content = None\n\n except:\n print(traceback.print_exc())\n content = None\n\n try:\n if self.config['type'] == 's3':\n file_buffer = BytesIO()\n elif self.config['type'] == 'filesystem':\n file_buffer = BytesIO()\n file_buffer.write(content)\n file_buffer.seek(0)\n return file_buffer\n except:\n self.app.logger.error(traceback.print_exc())\n print(traceback.print_exc())\n return None", "def get_image(self):\n self.drawer.flush()\n return self.img", "def picture_bytes(self):\n return bytearray(self.picture_base64[\"data\"][\"data\"])", "def contents(self):\n if not hasattr(self, \"_content\"):\n self._content = self.get_content()\n return self._content" ]
[ "0.74447596", "0.6932529", "0.67421204", "0.67015547", "0.65835774", "0.6578695", "0.6555087", "0.6495914", "0.63615793", "0.62989324", "0.6253196", "0.6239247", "0.6239247", "0.62345475", "0.61542207", "0.61115354", "0.6059986", "0.6055182", "0.6054496", "0.60452163", "0.60135317", "0.59980893", "0.59692657", "0.5962641", "0.5912276", "0.5885956", "0.58818024", "0.5877296", "0.58757365", "0.58665687", "0.5853968", "0.58520067", "0.58460283", "0.5843553", "0.5830814", "0.5822896", "0.5820423", "0.58147687", "0.5808004", "0.57911927", "0.5782579", "0.57550895", "0.5752051", "0.57434636", "0.57413244", "0.57377577", "0.572889", "0.57279205", "0.57265455", "0.5714601", "0.5714601", "0.5714601", "0.5706397", "0.56613433", "0.56577456", "0.5650447", "0.5638895", "0.5637415", "0.5633535", "0.5627005", "0.5606255", "0.56050295", "0.56041515", "0.55984116", "0.55974495", "0.5577776", "0.5571255", "0.55618256", "0.5561803", "0.55560154", "0.5554451", "0.55489033", "0.554091", "0.5540653", "0.5539968", "0.5536585", "0.55218065", "0.55201894", "0.5519838", "0.5519838", "0.5519838", "0.5519838", "0.5519838", "0.5519838", "0.55194604", "0.55167574", "0.5509476", "0.55008966", "0.54987675", "0.5496507", "0.5489982", "0.54820454", "0.54620653", "0.5459907", "0.54588795", "0.5457357", "0.54512805", "0.5447697", "0.54438454", "0.5439221" ]
0.63306314
9
Resizes a valid image, and returns as a Django ``ContentFile``.
def generate(self, content): tmp = self._create_tmp_image(content) rendered = self._render(tmp) return self._create_content_file(rendered)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resize_img(self,scale=1):\n reduced = self.image.reduce((scale,scale))\n reduced.save(\"../edited/{}\".format(self.image.filename))\n\n reduced = Image.open(\"../edited/{}\".format(self.image.filename))\n return reduced", "def resize_image(first_image, width, height):\n resizing_image = Image()\n temp_image = ((Pil_image.open(first_image.picture)).convert(\"RGB\")).resize((width, height), Pil_image.ANTIALIAS)\n filestream = BytesIO()\n temp_image.save(filestream, 'JPEG', quality=90)\n filestream.seek(0)\n name = f\"{str(first_image.picture).split('.')[0]}_{width}_{height}.{str(first_image.picture).split('.')[1]}\"\n image = InMemoryUploadedFile(filestream, 'picture', name, 'jpeg/image', sys.getsizeof(filestream), None)\n resizing_image.url = first_image.url\n resizing_image.width = width\n resizing_image.height = height\n resizing_image.parent = first_image.id\n resizing_image.picture = image\n resizing_image.save()\n return resizing_image", "def get_resized_image(memory_uploaded_image, height):\n img = PillowImage.open(memory_uploaded_image)\n og_width, og_height = img.size\n proportions = height / og_height\n width = int(og_width * proportions)\n resized = img.resize((width, height), PillowImage.ANTIALIAS)\n thumb_io = BytesIO()\n resized.save(thumb_io, format=img.format)\n\n return InMemoryUploadedFile(thumb_io, u\"image\", str(memory_uploaded_image), img.format, None, None)", "def process_image(image):\n image = resize(image)\n return image", "def getURLResizedImage(self, path, w, h, conserve_aspect_ration=True):\n image = self.getOrDownloadImageObject(path)\n return self.performResize(image, w, h, conserve_aspect_ration)", "def resize_image(image, size=(926, 617)):\n\n im = Image.open(image)\n im.convert('RGB')\n im.thumbnail(size)\n thumb_io = BytesIO()\n im.save(thumb_io, 'JPEG', quality=85)\n thumbnail = File(thumb_io, name=image.name)\n return thumbnail", "def _resize_image(self, filename, resize_source):\n from PIL import Image, ImageOps\n img = Image.open(filename)\n img = ImageOps.fit(img, resize_source['size'], Image.ANTIALIAS)\n try:\n img.save(filename, optimize=1)\n except IOError:\n img.save(filename)", "def clean_image(self):\n image = self.cleaned_data.get('image')\n if image and image.size > 5242880:\n raise forms.ValidationError(u'This image is too big, limit 500kb')\n return image", "def __processImage(self, f):\n try:\n imgobj = Image.open(f).convert('RGB')\n except:\n return None\n w, h = imgobj.size\n if w < h:\n # reduce width to required dimension and adjust height accordingly\n new_h = int(h * self.PROCESSING_DIM / w)\n resizedImg = imgobj.resize((self.PROCESSING_DIM, new_h))\n\n y_start = int(new_h / 2 - self.PROCESSING_DIM / 2)\n processedImage = resizedImg.crop((0, y_start, self.PROCESSING_DIM, y_start + self.PROCESSING_DIM))\n\n else:\n # reduce height to required dimension and adjust width accordingly\n new_w = int(w * self.PROCESSING_DIM / h)\n resizedImg = imgobj.resize((new_w, self.PROCESSING_DIM))\n\n x_start = int(new_w / 2 - self.PROCESSING_DIM / 2)\n processedImage = resizedImg.crop((x_start, 0, x_start + self.PROCESSING_DIM, self.PROCESSING_DIM))\n\n return processedImage", "def generate_test_image(name, size=(36, 36)):\n return ContentFile(\n factory.django.ImageField()._make_data(\n {'width': size[0], 'height': size[1]}\n ), '{}.jpg'.format(name))", "def rescale(self, data, width, height, force=True):\n import Image as pil\n from cStringIO import StringIO\n\n max_width = width\n max_height = height\n\n input_file = StringIO(data)\n img = 
pil.open(input_file)\n img = img.convert('RGB')\n\n if not force:\n img.thumbnail((max_width, max_height), pil.ANTIALIAS)\n else:\n src_width, src_height = img.size\n src_ratio = float(src_width) / float(src_height)\n dst_width, dst_height = max_width, max_height\n dst_ratio = float(dst_width) / float(dst_height)\n\n if dst_ratio < src_ratio:\n crop_height = src_height\n crop_width = crop_height * dst_ratio\n x_offset = float(src_width - crop_width) / 2\n y_offset = 0\n else:\n crop_width = src_width\n crop_height = crop_width / dst_ratio\n x_offset = 0\n y_offset = float(src_height - crop_height) / 3\n img = img.crop(\n (int(x_offset), int(y_offset), int(x_offset) + int(crop_width), int(y_offset) + int(crop_height)))\n img = img.resize((dst_width, dst_height), pil.ANTIALIAS)\n\n tmp = StringIO()\n img.save(tmp, 'JPEG')\n tmp.seek(0)\n output_data = tmp.getvalue()\n input_file.close()\n tmp.close()\n\n return output_data", "def _create_resized_images(self, raw_field, save):\r\n # Derive base filename (strip out the relative directory).\r\n filename = os.path.split(self.image.name)[-1]\r\n ctype = guess_type(filename)[0]\r\n\r\n # Generate resized copy of image.\r\n remove_model_image(self, 'image_resized')\r\n bb = self.is_event and settings.EVENT_RESIZED_IMAGE_BOUNDING_BOX or settings.CAMPAIGN_RESIZED_IMAGE_BOUNDING_BOX\r\n resize, crop, img = get_perfect_fit_resize_crop(bb, input_image=self.image.path)\r\n resized_contents = resize_in_memory(img, resize, crop=crop)\r\n resized_file = str_to_file(resized_contents)\r\n resized_field = InMemoryUploadedFile(resized_file, None, None, ctype, len(resized_contents), None)\r\n self.image_resized.save(name='resized-%s' % filename, content=resized_field, save=save)\r\n resized_file.close()\r\n\r\n # Generate avatar.\r\n remove_model_image(self, 'image_avatar')\r\n avatar_contents = resize_in_memory(self.image.path, settings.CAMPAIGN_AVATAR_IMAGE_CROP, crop=settings.CAMPAIGN_AVATAR_IMAGE_CROP, crop_before_resize=True)\r\n avatar_file = str_to_file(avatar_contents)\r\n avatar_field = InMemoryUploadedFile(avatar_file, None, None, ctype, len(avatar_contents), None)\r\n self.image_avatar.save(name='avatar-%s' % filename, content=avatar_field, save=save)\r\n avatar_file.close()", "def resize(img):\n size = (500, 500)\n img.thumbnail(size)\n return img", "def file(self):\n # ImageField (both django's and factory_boy's) require PIL.\n # Try to import it along one of its known installation paths.\n try:\n from PIL import Image as PILimage\n except ImportError:\n import Image as PILimage\n\n thumb = PILimage.new(\"RGB\", (100, 100), \"blue\")\n thumb_io = io.BytesIO()\n thumb.save(thumb_io, format=\"JPEG\")\n\n return File(thumb_io, name=self.original_filename)", "def resize(self, size):\n return Image(self.pil_image.resize(size, PIL.Image.ANTIALIAS))", "def resizeImage(self):\n ratio = float(self.qIma.width()) / float(self.qIma.height())\n if self.qIma.width() > self.qIma.height():\n maxWidth = 300\n maxHeight = int(300 / ratio)\n else:\n maxWidth = int(300 / ratio)\n maxHeight = 300\n img = self.qIma.toImage().scaled(maxWidth, maxHeight, QtCore.Qt.KeepAspectRatio)\n return img", "def getResizedImage(self, path, w, h, conserve_aspect_ration=True):\n image = self.getPILFromPath(path)\n return self.performResize(image, w, h, conserve_aspect_ration)", "def to_python(self, data):\n\n min_width, min_height, formats, max_size = config['MIN_WIDTH'], config['MIN_HEIGHT'], config['FORMATS'], \\\n config['IMAGE_MAX_SIZE']\n\n # While a lot of validation will only be 
carried out in-depth on the backend,\n # due to the difficulty of writing it, this size validation will be on the\n # frontend as well. This is because allowing somebody to upload a\n # large file just to get it kicked back would be a huge UX degradation\n # and also a bandwidth hog. This size validation will be accompanied by nginx\n # giving users who try to upload a truly massive file a much ruder experience\n # (dropped connection) to prevent huge server load on our end\n if data.size > max_size:\n # Translators: Error message for people who try to bypass image upload restrictions\n raise serializers.ValidationError(detail=_('Image is too large'))\n\n file = super(RestrictedDjangoImageField, self).to_python(data)\n\n width, height = file.image.size\n\n if width < min_width or height < min_height:\n # Translators: Error message when image is too small\n raise serializers.ValidationError(detail=_('Image does not meet '\n 'minimum width and height'\n ' requirements'))\n format = file.image.format\n\n if format not in formats:\n # Translators: Error message when image is not one of the allowed formats\n raise serializers.ValidationError(detail=_('Image does not meet '\n 'formatting requirements'))\n\n return file", "def _resize_image(filename, size):\n width, height = 0, 1\n\n try:\n import Image, ImageOps\n except ImportError:\n from PIL import Image, ImageOps\n\n if not size['resample']:\n resample = Image.ANTIALIAS\n\n img = Image.open(filename)\n if (img.size[width] > size['width'] or\n img.size[height] > size['height']):\n\n #If the image is big resize it with the cheapest resize algorithm\n factor = 1\n while (img.size[0] / factor > 2 * size['width'] and\n img.size[1] * 2 / factor > 2 * size['height']):\n factor *= 2\n if factor > 1:\n img.thumbnail((int(img.size[0] / factor),\n int(img.size[1] / factor)), resample=resample)\n\n if size['crop']:\n img = ImageOps.fit(img, (size['width'], size['height']), method=resample)\n else:\n img.thumbnail((size['width'], size['height']), resample=resample)\n\n try:\n img.save(filename, optimize=1)\n except IOError:\n img.save(filename)", "def make_thumbnail(image, size=(100, 100)):\n logging.debug(image)\n\n im = create_colorblind_image(image)\n\n thumb_io = BytesIO() # create a BytesIO object\n\n im.save(thumb_io, 'PNG', quality=85) # save image to BytesIO object\n\n thumbnail = File(thumb_io, name=image.name) # create a django friendly File object\n\n return thumbnail", "def restore_to_correct_size(self, file_description):\n file_name = os.listdir('utils_dfn/output/result')[0]\n file_name_with_ext, file_name = extract_file_name(file_name)\n new_file = os.path.join('static/rescaled_images', self.file_name + file_description + '.png')\n img = cv2.imread(os.path.join('utils_dfn/output/result', file_name_with_ext))\n if self.new_width > self.new_height:\n new_size = self.new_width\n else:\n new_size = self.new_height\n\n img = resize_image(img, new_size, new_size)\n img = crop_image(img, bottom=self.pad_to_bottom, right=self.pad_to_right)\n cv2.imwrite(new_file, img)\n self.rescaled_file_name_with_ext, _ = extract_file_name(new_file)", "def image(request, ef_id):\n ef = get_object_or_404(ExamFile, id=ef_id)\n thumb = get_thumbnail_path(ef)\n daimage = file(thumb, 'rb').read()\n return HttpResponse(content=daimage, mimetype='image/png')", "def get_resized_image(photo_data):\n photo_stream_data = BytesIO(photo_data.stream.read())\n image_to_process = Image.open(photo_stream_data)\n width,height = image_to_process.size\n new_size = width\n if height 
< width:\n new_size = height\n\n square_image = image_to_process.resize((new_size,new_size))\n return square_image", "def read_image_and_resize(image_path: str,\n new_WH: Tuple[int, int]=(512, 512),\n save_dir: str=\"resize\") -> str:\n assert os.path.exists(save_dir) is True\n new_path = os.path.join(save_dir, os.path.basename(image_path))\n image = cv2.imread(image_path)\n image = cv2.resize(image, new_WH, interpolation=cv2.INTER_AREA)\n cv2.imwrite(new_path, image)\n\n return image_path", "def save(self):\n im = Image.open(self.picture)\n output = BytesIO()\n im.thumbnail((350, 350))\n im.save(output, format='JPEG', quality=100)\n output.seek(0)\n self.picture = InMemoryUploadedFile(output, 'ImageField', \"%s.jpg\" % self.picture.name.split('.')[0],\n 'image/jpeg', sys.getsizeof(output), None)\n super(Tire, self).save()", "def img_resize(infile, size):\n try:\n infile.thumbnail(size, Image.ANTIALIAS)\n except:\n print(\"cannot create thumbnail for '%s'\" % infile)\n return infile", "def resize_image(image_path: str):\n image: Image.Image = Image.open(image_path)\n w = image.width\n h = image.height\n scale_factor = IMAGE_WIDTH_DEFAULT / w\n image.resize((int(w * scale_factor), int(h * scale_factor))).save(image_path)", "def image(self):\n # TODO: make sure this method works for png, gif, tiff\n if self.has_metadata:\n self.extract_metadata()\n tempdir_path = self.make_tempdir()\n tempfile_path = os.path.join(tempdir_path, self.filename)\n warnings.simplefilter('error', Image.DecompressionBombWarning)\n try: # Do image conversions\n img_in = Image.open(self.src_path)\n img_out = Image.frombytes(img_in.mode, img_in.size, img_in.tobytes())\n img_out.save(tempfile_path)\n self.src_path = tempfile_path\n except Exception as e: # Catch decompression bombs\n # TODO: change this from all Exceptions to specific DecompressionBombWarning\n self.add_error(e, \"Caught exception (possible decompression bomb?) 
while translating file {}.\".format(self.src_path))\n self.make_dangerous()\n self.add_file_string('Image file')\n self.set_property('processing_type', 'image')", "def image(request):\n response = HttpResponse(request.content.data, content_type=request.mime_type)\n filename = re.sub(\n r'[^\\w\\.]', '_', request.patch.filename.encode('ascii', 'replace'))\n response['Content-Disposition'] = 'attachment; filename=\"%s\"' % filename\n response['Cache-Control'] = 'no-cache, no-store'\n return response", "def image(request):\n response = HttpResponse(request.content.data, content_type=request.mime_type)\n filename = re.sub(\n r'[^\\w\\.]', '_', request.patch.filename.encode('ascii', 'replace'))\n response['Content-Disposition'] = 'attachment; filename=\"%s\"' % filename\n response['Cache-Control'] = 'no-cache, no-store'\n return response", "def edit(request, image_id):\n Image = get_image_model()\n ImageForm = get_image_form(Image)\n\n image = get_object_or_404(Image, id=image_id)\n\n cropped_images = CroppedImage.objects.filter(full_image=image)\n\n if not permission_policy.user_has_permission_for_instance(request.user, 'change', image):\n return permission_denied(request)\n\n if request.POST:\n original_file = image.file\n form = ImageForm(request.POST, request.FILES, instance=image, user=request.user)\n if form.is_valid():\n if 'file' in form.changed_data:\n # if providing a new image file, delete the old one and all renditions.\n # NB Doing this via original_file.delete() clears the file field,\n # which definitely isn't what we want...\n original_file.storage.delete(original_file.name)\n image.renditions.all().delete()\n\n # Set new image file size\n image.file_size = image.file.size\n\n form.save()\n\n # Reindex the image to make sure all tags are indexed\n for backend in get_search_backends():\n backend.add(image)\n\n messages.success(request, _(\"Image '{0}' updated.\").format(image.title), buttons=[\n messages.button(reverse('wagtailimages:edit', args=(image.id,)), _('Edit again'))\n ])\n\n if request.POST.get(\"clipping\"):\n return redirect(\"/admin/clipping/\"+image_id)\n return redirect('wagtailimages:index')\n else:\n messages.error(request, _(\"The image could not be saved due to errors.\"))\n else:\n form = ImageForm(instance=image, user=request.user)\n\n # Check if we should enable the frontend url generator\n try:\n reverse('wagtailimages_serve', args=('foo', '1', 'bar'))\n url_generator_enabled = True\n except NoReverseMatch:\n url_generator_enabled = False\n\n if image.is_stored_locally():\n # Give error if image file doesn't exist\n if not os.path.isfile(image.file.path):\n messages.error(request, _(\n \"The source image file could not be found. 
Please change the source or delete the \"\n \"image.\"\n ).format(image.title), buttons=[\n messages.button(reverse('wagtailimages:delete', args=(image.id,)), _('Delete'))\n ])\n\n return render(request, \"wagtailimages/images/edit.html\", {\n 'image': image,\n 'cropped_images': cropped_images,\n 'form': form,\n 'url_generator_enabled': url_generator_enabled,\n 'filesize': image.get_file_size(),\n 'user_can_delete': permission_policy.user_has_permission_for_instance(\n request.user, 'delete', image\n ),\n })", "def resize_image(self, width=200):\n self.new_width = width\n aspect_ratio = self.original_height/float(self.original_width)\n self.new_height = int(aspect_ratio * self.new_width)\n\n resized_image = self.image.resize((self.new_width, self.new_height), Image.BILINEAR)\n return resized_image", "def resize_image(filename, out=None, thumbnail=False):\n\n if thumbnail:\n image_size = frappe.db.get_value(\n 'eBay Manager Settings', filters=None,\n fieldname='ebay_thumbnail_size')\n else:\n image_size = frappe.db.get_value(\n 'eBay Manager Settings', filters=None,\n fieldname='ebay_image_size')\n image_size = int(image_size)\n\n if image_size < 1:\n frappe.throw('Invalid image size: ' + str(image_size))\n\n size_string = '{}x{}>'.format(image_size, image_size)\n if out is not None:\n subprocess.call(\n ['convert', '-auto-orient', '-resize', size_string,\n filename, out])\n else:\n subprocess.call(\n ['mogrify', '-auto-orient', '-resize', size_string, filename])", "def resizeImage(self, obj, w, h, conserve_aspect_ration=True):\n image = self.getPILFromObject(obj)\n return self.performResize(image, w, h, conserve_aspect_ration)", "def test_converted_larger_image(self):\n user = UserFactory.create()\n file_path = os.path.join(os.path.dirname(__file__), \"broken_marshal.jpg\")\n self._upload_photo(user, file_path)", "def resize_img(self, filename: str, size: Tuple[int, int] = (299, 299)):\n img = Image.open(join(self.source_dir, filename))\n width, height = img.size\n orig_shape = np.array(img.size)\n wanted_shape = np.array(size)\n ratios = wanted_shape / orig_shape\n wanted_width, wanted_height = size\n ratio_w, ratio_h = wanted_width / width, wanted_height / height\n\n if np.alltrue(ratios > 1):\n # Both sides of the image are shorter than the desired dimension,\n # so take the side that's closer in size and enlarge the image\n # in both directions to make that one fit\n factor = min(ratio_h, ratio_w)\n img = img.resize((int(width * factor), int(height * factor)))\n\n # Now we have an image that's either larger than the desired shape\n # or at least one side matches the desired shape and we can resize\n # with contain\n cover = resizeimage.resize_contain(img, size)\n cover.save(join(self.dest_dir, filename), 'JPEG')", "def rescale_image(self, img_file, new_width, new_height, model_path, file_description):\n cwd = os.getcwd()\n self.new_width = new_width\n self.new_height = new_height\n self.extract_file_name(img_file)\n shutil.copy(img_file, os.path.join('utils_dfn/temp', self.file_name_with_ext))\n self.run_padding()\n self.run_dfn(model_path)\n self.restore_to_correct_size(file_description)\n clean()", "def resized(self, source='image', id='imagekit:thumbnail',\n\t\tdest=None, **kwargs):\n\n\t\tif dest and hasattr(self, dest):\n\t\t\treturn getattr(self, dest)\n\n\t\tkwargs['source'] = getattr(self, source)\n\n\t\tgenerator = generator_registry.get(id, **kwargs)\n\t\timage = ImageCacheFile(generator)\n\t\tif dest:\n\t\t\tsetattr(self, dest, image)\n\t\treturn image", "def save(self, 
*args, **kwargs):\n if not self.pk: # on create\n image = Image.open(self.file)\n image.thumbnail((400, 400), Image.ANTIALIAS)\n\n thumb = io.BytesIO()\n image.save(\n thumb, format=\"jpeg\", quality=80, optimize=True, progressive=True\n )\n self.thumbnail = InMemoryUploadedFile(\n thumb, None, self.file.name, 'image/jpeg', thumb.tell(), None\n )\n\n super(File, self).save(*args, **kwargs)", "def handle_image_file(file: _FileLike) -> Image.Image:\n try:\n im = Image.open(file)\n return im\n except IOError as e:\n raise HTTPException(500, detail=str(e))", "def test_image_resize_anuncio_premium(self):\n self.anuncio.is_premium = True\n self.anuncio.save()\n image_obj = self.image_model()\n image_obj.anuncio = self.anuncio\n image_obj.image = simple_uploaded_file(self.image_path)\n image_obj.save()\n\n self.assertEqual(image_obj.image.width, 1000)\n self.assertEqual(image_obj.image.height, 625)\n\n # Eliminar obj para que elimine las imágenes.\n image_obj.delete()", "def cache_image(self):\n img_temp = NamedTemporaryFile()\n # Header required for HTTPS connections\n request = Request(self.url, headers={'User-Agent': ''})\n response = urlopen(request)\n type_file = dict(response.info()._headers)['Content-Type']\n if 'image' not in type_file:\n raise ValidationError(\"The URL does not contains any image. (Content-Type: {0}) (URL: {1})\".format(type, self.url))\n # Store the filename with extension\n url_image = urlparse(self.url)\n filename, file_ext = splitext(basename(url_image.path))\n # If the file doesn't have a extension, find it out from the header\n if file_ext == '':\n file_ext = type_file.replace('image/', '')\n self.filename = \"{0}.{1}\".format(filename, file_ext)\n source_data = response.read()\n # Compress the image\n source_data = optimize(source_data)\n img_temp.write(source_data)\n img_temp.flush()\n # Save the image in the server\n self.image .save(self.url, File(img_temp))", "def test_get_file_image(self):\n image = image_helper.get_file_image(self.subject)\n\n self.assertEqual(image.size, (800, 450))", "def process_image_attachment(image_id):\n image_attachment_model = import_image_attachment()\n image = image_attachment_model.objects.get(pk=image_id)\n image.create_display_size()\n image.create_thumbnail()", "def test_transform_image_resize_and_crop_landscape(self):\n self.expect_open_image('SomeBlobKey', (1200, 1600))\n self.expect_crop(top_y=0.0, bottom_y=0.75)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def test_get_image(self):\n with open(self.subject, \"rb\") as f:\n content = f.read()\n\n image = image_helper.get_image(content)\n\n self.assertEqual(image.size, (800, 450))", "def test_im_file_resize(self):\n self._test_img_resize(IMBackend())", "def check_and_save_img(file, path):\n img_bytes = file.read()\n if not secure_filetype(file):\n return \"Not an image. \"\n input_image = Image.open(io.BytesIO(img_bytes))\n input_image.save(path)\n if not secure_filesize(path):\n return \"Too large given file. 
\"\n return ''", "def resize_action():\n fn = request.args.get(\"filename\")\n size = int(request.args.get(\"size\"))\n resize_file(fn, size)\n return Response(status=200)", "def compress_image(image: BinaryIO, size: Tuple[int, int] = (512, 512)) -> File:\n im = Image.open(image)\n im_format = im.format\n im.thumbnail(size)\n if im.mode in (\"P\", \"RGBA\"):\n im = im.quantize()\n im_io = BytesIO()\n im.save(im_io, im_format)\n return File(im_io, image.name)", "def resize_image(self, filename, size=(299,299,3)):\n path = join(self.source_dir, filename)\n img = Image.open(path)\n img = img.resize(size)\n img.save(join(self.dest_dir, filename), 'JPEG', optimize=True)", "def test_transform_image_original_size(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200))\n self.expect_encode_image('SomeImageInJpeg')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's0'))\n self.mox.VerifyAll()", "def create_img_object(file, scale):\n with Raw(filename=file) as raw:\n img = Image.open(io.BytesIO(raw.thumbnail_to_buffer())).convert('RGBA')\n resize_dims = list(map(int, (i * scale for i in img.size)))\n img = img.resize(resize_dims)\n return img", "def resize(file_path, width, height):\n folder, file_name, ext = parse_file_path(file_path)\n url = addr + '/img/resize'\n\n img = cv2.imread(file_path)\n _, img_encoded = cv2.imencode('.jpg', img)\n payload = img_encoded.tostring()\n \n response = requests.post(url, data=payload, params = {'w':width, 'h':height})\n if response.status_code != 200:\n print(json.load(response.content))\n return None\n \n img_array = uncompress_nparr(response.content)\n file_path = os.path.join(folder, file_name, + '_' + random_string() + '-resized' + ext)\n cv2.imwrite(file_path, img_array)\n return file_path", "def _rename_resize_image(self, instance=None, **kwargs):\n if getattr(instance, self.name):\n filename = getattr(instance, self.name).path\n ext = os.path.splitext(filename)[1].lower().replace('jpg', 'jpeg')\n dst = self.generate_filename(instance, '%s_%s%s' % (self.name,\n instance._get_pk_val(), ext))\n dst_fullpath = os.path.join(settings.MEDIA_ROOT, dst)\n if os.path.abspath(filename) != os.path.abspath(dst_fullpath):\n os.rename(filename, dst_fullpath)\n for variation in self.variations:\n variation_filename = self._get_variation_filename(variation, dst_fullpath)\n shutil.copyfile(dst_fullpath, variation_filename)\n self._resize_image(variation_filename, variation)\n setattr(instance, self.attname, dst)\n instance.save()", "def preprocess_image(image: Image.Image, max_size: int = 1200) -> Image.Image:\n width_0, height_0 = image.size\n\n if max((width_0, height_0)) <= max_size:\n return image\n\n if width_0 > height_0:\n aspect_ratio = max_size / float(width_0)\n new_height = int(float(height_0) * float(aspect_ratio))\n image = image.resize((max_size, new_height), Image.ANTIALIAS)\n return image\n else:\n aspect_ratio = max_size / float(height_0)\n new_width = int(float(width_0) * float(aspect_ratio))\n image = image.resize((max_size, new_width), Image.ANTIALIAS)\n return image", "async def resize_photo(photo):\n image = Image.open(photo)\n maxsize = (512, 512)\n if (image.width and image.height) < 512:\n size1 = image.width\n size2 = image.height\n if image.width > image.height:\n scale = 512 / size1\n size1new = 512\n size2new = size2 * scale\n else:\n scale = 512 / size2\n size1new = size1 * scale\n size2new = 512\n size1new = math.floor(size1new)\n size2new = math.floor(size2new)\n 
sizenew = (size1new, size2new)\n image = image.resize(sizenew)\n else:\n image.thumbnail(maxsize)\n\n return image", "def load_and_resize(src_path, size):\n \n if os.path.exists(src_path):\n src = Image.open(src_path).convert('RGB')\n else:\n raise Exception(\"could not find image {}\".format(src_path))\n\n src = Image.open(src_path).convert('RGB')\n\n height, width = src.size\n if min(height, width) < size:\n # raise Exception(\"input image is smaller than {}\".format(size))\n print(\"Warning: {} is smaller than {}\".format(src_path, size))\n # resize and make square\n dst = src.resize((size, size), Image.BICUBIC)\n assert(dst.size[0] == dst.size[1])\n return dst", "def convertImage(self):\n saveAs = config.get(\"saveFormat\")\n saveLocation = config.get(\"saveLocation\")\n image = Image.open(self.folderLocation.text())\n fileName = Path(image.filename).stem\n if saveAs == 'ico':\n size = 256, 256\n destPath = saveLocation+fileName+\".\"+saveAs\n im = Image.open(self.folderLocation.text())\n im.thumbnail(size)\n im.save(destPath, 'jpeg')\n else:\n image.save(saveLocation+fileName+\".\"+saveAs, saveAs)", "def test_transform_image_resize_and_crop_portrait_png(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='PNG')\n self.expect_crop(left_x=0.125, right_x=0.875)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c',\n images_service_pb.OutputSettings.PNG)\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/png'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def image_response(image,format,mimetype):\n buf = StringIO()\n im = image.save(buf,format)\n return Response(buf.getvalue(), mimetype=mimetype)", "def test_transform_image_resize_and_crop_portrait(self):\n self.expect_open_image('SomeBlobKey', (148, 215))\n self.expect_crop(top_y=0.0, bottom_y=0.68837209302325575)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def _resize_image_tuple(self, image_tup):\n if self.api_info is None:\n self.get_info() # sets the image size and other such info from server.\n try:\n MIN_SIZE = self.api_info['min_image_size']\n MAX_SIZE = self.api_info['max_image_size']\n img = Image.open(image_tup[0])\n min_dimension = min(img.size)\n max_dimension = max(img.size)\n min_ratio = float(MIN_SIZE) / min_dimension\n max_ratio = float(MAX_SIZE) / max_dimension\n def get_newsize(img, ratio, SIZE):\n if img.size[0] == min_dimension:\n newsize = (SIZE, int(round(ratio * img.size[1])))\n else:\n newsize = (int(round(ratio * img.size[0])), SIZE)\n return newsize\n im_changed = False\n # Only resample if min size is > 512 or < 256\n if max_ratio < 1.0: # downsample to MAX_SIZE\n newsize = get_newsize(img, max_ratio, MAX_SIZE)\n img = img.resize(newsize, Image.BILINEAR)\n im_changed = True\n elif min_ratio > 1.0: # upsample to MIN_SIZE\n newsize = get_newsize(img, min_ratio, MIN_SIZE)\n img = img.resize(newsize, Image.BICUBIC)\n im_changed = True\n else: # no changes needed so rewind file-object.\n image_tup[0].seek(0)\n # Finally make sure we have RGB images.\n if img.mode != \"RGB\":\n img = img.convert(\"RGB\")\n im_changed = True\n if im_changed:\n io = StringIO()\n img.save(io, 'jpeg', quality=IM_QUALITY)\n io.seek(0) # rewind file-object to read() below is good to go.\n image_tup = (io, image_tup[1])\n except IOError, e:\n logger.warning('Could 
not open image file: %s, still sending to server.', image_tup[1])\n return image_tup", "def set_image_from_file(self, file):\n if isinstance(file, str):\n try:\n file = self.files.filter(current=True, path=file)[0]\n except IndexError:\n return\n\n content = file.get_content()\n format = file.get_format()\n ext = format.default_extension if format else \"\"\n\n # The file name needs to be unique to bust any caches.\n file = ContentFile(content)\n file.name = f\"{self.id}-{shortuuid.uuid()}{ext}\"\n\n self.image_file = file\n self.image_updated = timezone.now()\n self.save()", "def generate(self, save=True):\n stretched_photo, crop_box = self._generate_img()\n\n # set crop_box to (0,0,0,0) if photo not cropped\n if not crop_box:\n crop_box = 0, 0, 0, 0\n\n self.crop_left, self.crop_top, right, bottom = crop_box\n self.crop_width = right - self.crop_left\n self.crop_height = bottom - self.crop_top\n\n self.width, self.height = stretched_photo.size\n\n f = BytesIO()\n imgf = (self.photo._get_image().format or\n Image.EXTENSION[path.splitext(self.photo.image.name)[1]])\n\n stretched_photo.save(f, format=imgf, quality=self.format.resample_quality)\n f.seek(0)\n\n self.image.save(self.file(), ContentFile(f.read()), save)", "def upload_image_to_minio_directly(request):\n myRequest = request\n image = myRequest.FILES[\"image_to_upload\"]\n minioClient = create_minio_client_from_settings()\n\n # Using Pillow to fetch metadata\n width, height, size, imageFormat, name = fetch_metadata(image)\n\n serializer = ImageForMinioSerializer(data={\n \"name\": name,\n \"image\": image,\n \"height\": height,\n \"width\": width,\n \"size\": size, # pillow_image.size will return (width, height)\n \"path_to_image\": \"NEEDSTOBESET\"\n }\n )\n\n if (\n serializer.is_valid(raise_exception=True) and\n is_image(image.content_type)\n ):\n # Resetting the cursor to the beginning of the file\n # 'django-storage-minio' takes care of this automatically\n image.file.seek(0)\n etag = minioClient.put_object(\n \"anas\",\n image.name,\n image.file,\n image.size\n )\n\n return Response({\"response\": etag})", "def make_thumbnail(self):\n # https://gist.github.com/valberg/2429288\n\n # make sure image data is set\n if not self.image_data:\n return False\n\n if self.proxy_data:\n return True\n\n # Create a resized version of the image\n image = Image.open(self.image_data)\n image.thumbnail(THUMBNAIL_SIZE, Image.BICUBIC)\n\n # Save the thumbnail to in-memory 'file'\n temp_thumb = BytesIO()\n image.save(temp_thumb, 'jpeg')\n temp_thumb.seek(0) # rewinds the file\n\n # Save image to a SimpleUploadFile which can be saved\n # into ImageField\n # TODO figure out how to pass base image's UUID before\n # image is committed to DB\n basename = os.path.basename(self.image_data.name)\n uuidname = os.path.splitext(basename)[0]\n suf = SimpleUploadedFile(uuidname,\n temp_thumb.read(), content_type='image/jpeg')\n thumb_filename = '{}_thumb.jpeg'.format(suf.name)\n\n # set save=False, or else it will infinite loop\n self.proxy_data.save(thumb_filename,\n suf,\n save=False)\n\n # Also store the real dimensions for the Pillow thumbnail\n self.proxy_width, self.proxy_height = image.size\n\n temp_thumb.close()\n\n return True", "def scale_image(image_path, size, method='scale'):\n (original_path, file_name, file_ext) = split_filepath(image_path)\n cached_filename = '%s.%s.%dx%d_%s.jpg' % (file_name, file_ext, size[0], size[1], method)\n cached_file_path = '%s/%s' % (original_path, cached_filename)\n\n if not os.path.exists(image_path):\n return 
False\n\n if not os.path.exists(cached_file_path):\n try:\n import Image\n except ImportError:\n try:\n from PIL import Image\n except ImportError:\n raise ImportError('Cannot import the Python Image Library.')\n\n image = Image.open(image_path)\n\n # normalize image mode\n if image.mode != 'RGBA':\n image = image.convert('RGBA')\n\n if format == 'PNG':\n pixdata = image.load()\n for y in xrange(image.size[1]):\n for x in xrange(image.size[0]):\n if pixdata[x, y] == (0, 0, 0, 0):\n pixdata[x, y] = (255, 255, 255, 0)\n\n if method == 'scale':\n image.thumbnail(size, Image.ANTIALIAS)\n image.save(cached_file_path, 'JPEG')\n elif method == 'crop':\n try:\n import ImageOps\n except ImportError:\n from PIL import ImageOps\n\n ImageOps.fit(image, size, Image.ANTIALIAS).save(cached_file_path, 'JPEG', quality=80)\n\n #return os.path.abspath(cached_file_path).replace(os.path.abspath(settings.BASE_PATH), '')\n return cached_filename", "def test_pil_file_resize(self):\n self._test_img_resize(PILBackend())", "def image_optimize(file, width, height, force=False, resize=False):\n import Image as pil\n \n max_width = width\n max_height = height\n\n img = pil.open(file)\n \n #Fixing problem with gif\n if img.mode != \"RGB\":\n img = img.convert(\"RGB\")\n\n src_width, src_height = img.size\n src_ratio = float(src_width) / float(src_height)\n dst_width, dst_height = max_width, max_height\n dst_ratio = float(dst_width) / float(dst_height)\n \n if not force:\n if src_height > src_width:\n max_width = dst_height\n max_height = dst_width\n img.thumbnail((max_width, max_height), pil.ANTIALIAS)\n else:\n if dst_ratio < src_ratio:\n crop_height = src_height \n crop_width = crop_height * dst_ratio \n x_offset = int((src_width - crop_width)/2) \n y_offset = 0 \n else: \n crop_width = src_width\n crop_height = crop_width / dst_ratio\n x_offset = 0\n y_offset = int((src_height - crop_height)/3)\n img = img.crop((x_offset, y_offset, x_offset+int(crop_width),\n y_offset+int(crop_height)))\n img = img.resize((dst_width, dst_height), pil.ANTIALIAS)\n \n return img", "def _get_thumbnail_image_from_file(dir_path, image_file):\n # Get image\n img = _get_image_from_file(dir_path, image_file)\n # If it's not supported, exit now\n if img is None:\n return None\n if img.format.lower() == 'gif':\n return None\n # Get image dimensions\n img_width, img_height = img.size\n # We need to perform a resize - first, work out the scale ratio to take the\n # image width to args.width (args.width:img_width ratio)\n scale_ratio = args.width / float(img_width)\n # Work out target image height based on the scale ratio\n target_height = int(scale_ratio * img_height)\n # Perform the resize\n try:\n img.thumbnail((args.width, target_height), resample=RESAMPLE)\n except IOError as exptn:\n print('WARNING: IOError when thumbnailing %s/%s: %s' % (\n dir_path, image_file, exptn\n ))\n return None\n # Return the resized image\n return img", "def resize_image(data, sz=(256, 256)):\n from PIL import Image as PIL_Image\n\n im = PIL_Image.open(BytesIO(data))\n if im.mode != \"RGB\":\n im = im.convert('RGB')\n imr = im.resize(sz, resample=PIL_Image.BILINEAR)\n fh_im = BytesIO()\n imr.save(fh_im, format='JPEG')\n fh_im.seek(0)\n return bytearray(fh_im.read())", "def resize(self, new_size):\n resized_img = opencv.resize(self.img, new_size)\n return Image(resized_img)", "def resize_profile_pic(sender, instance, **kwargs):\n profile_pic = instance.profile_picture\n if profile_pic.name != \"default.png\":\n img = Image.open(profile_pic.path)\n if img.height > 
300 or img.width > 300:\n output_size = (300, 300)\n img.thumbnail(output_size)\n img.save(profile_pic.path)", "def to_internal_value(self, data):\n if isinstance(data, str) and data.startswith('data:image'):\n # Found image is encoded, and must be decoded\n format, imgstr = data.split(';base64,')\n ext = format.split('/')[-1] # Extract file extension\n id = uuid.uuid4()\n data = ContentFile(base64.b64decode(imgstr), name = id.urn[9:] + '.' + ext)\n return super(Base64ImageField, self).to_internal_value(data)", "def _load_image(path):\r\n image = Image.open(path)\r\n size = image.size\r\n \r\n image = image.resize((550,550), Image.ANTIALIAS)\r\n# image = image.thumbnail((200,200), Image.ANTIALIAS)\r\n return image", "def thumbnail(self, size, resample=BICUBIC):\r\n # preserve aspect ratio\r\n x, y = self.size\r\n if x > size[0]:\r\n y = int(max(y * size[0] / x, 1))\r\n x = int(size[0])\r\n if y > size[1]:\r\n x = int(max(x * size[1] / y, 1))\r\n y = int(size[1])\r\n size = x, y\r\n if size == self.size:\r\n return\r\n self.draft(None, size)\r\n self._instance = self.resize(size, resample, image=self._instance)\r\n self.readonly = 0\r\n self.pyaccess = None", "def resize(fname, width, height):\n image = cv2.imread(fname) # read priginal image\n cv2.imshow('Original Image', image) # show original image\n cv2.waitKey(0) # stop\n\n org_height, org_width = image.shape[0:2] # original width and height\n\n # print width and height\n print(\"width\", org_width)\n print(\"height\", org_height)\n\n # check image height and width and resize to new size\n if org_width >= org_height:\n new_image = cv2.resize(image, (width, height))\n else:\n new_image = cv2.resize(image, (height, width))\n\n return fname, new_image", "def resize_and_process_image(data: dict, context):\n file_name = data[\"name\"]\n bucket_name = data[\"bucket\"]\n _, temp_local_filename = tempfile.mkstemp(suffix=file_name)\n blob = storage_client.bucket(bucket_name).get_blob(file_name)\n blob_bytes = blob.download_as_bytes()\n output = io.BytesIO(blob_bytes)\n output.seek(0)\n image = Image.open(output)\n print(\"trying to resize image\")\n # resizes image\n resized_image = resize_image(image)\n resized_image.save(fp=temp_local_filename)\n print(\"Image resized\")\n\n # Upload result to second bucket\n print(\"Trying to upload resized image to second bucket\")\n second_bucket_name = os.getenv(\"SECOND_BUCKET\")\n second_bucket = storage_client.bucket(second_bucket_name)\n print(\"second bucket found\")\n new_blob = second_bucket.blob(file_name)\n new_blob.metadata = blob.metadata\n print(\"created new blob\")\n new_blob.upload_from_filename(temp_local_filename)\n print(\"uploaded resized image from file\")\n os.remove(temp_local_filename)", "def resize(path_to_image, width, height):\n\n fd_img = open(path_to_image, 'rb')\n img = Image.open(fd_img)\n img = resizeimage.resize_cover(img, [int(width), int(height)])\n img.save(path_to_image, img.format)\n fd_img.close()", "def scale(self, infile: str, outfile: str):\n\n with Image(filename=infile) as src:\n width, height = self.new_dimensions(src)\n with src.clone() as dest:\n dest.resize(width=width,height=height)\n logging.debug(\n f'Converted \"{infile}\" ({src.width}x{src.height})'\n f' -> \"{outfile}\" ({dest.width}x{dest.height})')\n dest.save(filename=outfile)", "def get_image(request, image_class, image_name):\n image = image_cache[image_class][image_name]\n return HttpResponse(image, content_type='image/png')", "def resize_image(self, resize_percent, origFile):\n origresize = 
Image.open(origFile)\n width, height = origresize.size\n width_resize = int(resize_percent) + width\n height_resize = int(resize_percent) + height\n origresize = origresize.resize((int(round(width_resize)), int(round(height_resize))), Image.ANTIALIAS)\n \n return origresize", "def generate_fit_image(self):\n if self.fit_mode == \"stretch\":\n return self.im.resize((self.width, self.height))\n else:\n raise NotImplementedError(\"%s: This fit mode doesn't exist\" % self.fit_mode)", "def getImgContentFile(img):\n format, imgstr = img.split(';base64,')\n ext = format.split('/')[-1]\n file = ContentFile(base64.b64decode(imgstr), name='temp.' + ext)\n return file", "def test_resize_photo_poorly():\n somepic = get_image_path('mozilla.png')\n src = tempfile.NamedTemporaryFile(\n mode='r+b', suffix='.png', delete=False, dir=settings.TMP_PATH\n )\n shutil.copyfile(somepic, src.name)\n src_image = Image.open(src.name)\n assert src_image.size == (339, 128)\n\n resize_photo(src.name, src.name)\n\n # assert nothing happened\n src_image = Image.open(src.name)\n assert src_image.size == (339, 128)", "def test_transform_image_no_resize_png(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='PNG')\n self.expect_resize(blob_image._DEFAULT_SERVING_SIZE)\n self.expect_encode_image('SomeImageInPng',\n images_service_pb.OutputSettings.PNG)\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInPng', 'image/png'),\n self.app._transform_image('SomeBlobKey', ''))\n self.mox.VerifyAll()", "def test_image_uploads_on_save(self):\n \n files_count = len(os.listdir(settings.MEDIA_ROOT + '/persons'))\n with open('media/test_images/test.jpg') as f:\n self.client.post(reverse('edit'), {'ava': f})\n files_count_after = len(os.listdir(settings.MEDIA_ROOT + '/persons'))\n # added file and thumbnail\n self.assertEquals(files_count_after - files_count, 2) \n \n # test image scales \n from PIL import Image\n im = Image.open(settings.MEDIA_ROOT + '/persons/test.thumbnail.jpg')\n thumbnail_size = Person.thumbnail_size\n self.assertEquals((thumbnail_size,thumbnail_size), im.size)", "def resize_and_upload(input, output, width, height):\n image = PIL.Image.open(input)\n if image.mode == \"CMYK\":\n image = image.convert(\"RGB\")\n if height:\n if height > image.size[1]:\n width = int(image.size[0] * (width / height))\n height = image.size[1]\n elif width > image.size[0]:\n height = int(image.size[1] * (height / width))\n width = image.size[0]\n # crop\n new_image = PIL.ImageOps.fit(\n image,\n (height, width),\n PIL.Image.ANTIALIAS\n )\n else:\n # if a bigger size of the original is asked, use original image\n if width > image.size[0]:\n new_image = image\n else:\n wpercent = (width / float(image.size[0]))\n height = int((float(image.size[1]) * float(wpercent)))\n new_image = image.resize((width, height), PIL.Image.ANTIALIAS)\n\n out_img = io.BytesIO()\n new_image.save(out_img, 'PNG')\n out_img.seek(0)\n uploaded = s3.Bucket(settings.AWS_STORAGE_BUCKET_NAME).put_object(\n ACL='public-read',\n Key=os.path.join(output, uuid.uuid4().hex + '.png'),\n Body=out_img,\n ContentDisposition='inline',\n ContentType='image/png',\n )\n if hasattr(settings, 'AWS_S3_CUSTOM_DOMAIN') and settings.AWS_S3_CUSTOM_DOMAIN:\n host = 'https://{}'.format(settings.AWS_S3_CUSTOM_DOMAIN)\n else:\n host = 'https://s3.{}.amazonaws.com/{}'.format(\n settings.AWS_S3_REGION_NAME,\n settings.AWS_STORAGE_BUCKET_NAME\n )\n return {\n 'href': '{}/{}'.format(\n host,\n uploaded.key,\n ),\n 'width': new_image.size[0],\n 'height': new_image.size[1],\n 
}", "def resizeImage(image, newSize):\n\n # scale image\n scaledImage = cv2.resize(image, newSize)\n return scaledImage", "def thumbnail(self, item):\n if self._has_image_field(item) and self._field_is_visible(\"image\"):\n tile_conf = self.get_tile_configuration()\n image_conf = tile_conf.get(\"image\", None)\n if image_conf:\n scaleconf = image_conf[\"imgsize\"]\n # scale string is something like: 'mini 200:200' and\n # we need the name only: 'mini'\n if scaleconf == \"_original\":\n scale = None\n else:\n scale = scaleconf.split(\" \")[0]\n scales = item.restrictedTraverse(\"@@images\")\n return scales.scale(\"image\", scale)", "def resize(image_path, target_dimensions, image_format):\n with Image.open(image_path) as img:\n img = img.resize(target_dimensions, resample=Image.LANCZOS)\n if image_format == 'PNG':\n img = img.convert('RGBA')\n else:\n img = img.convert('RGB')\n img.save(image_path, format=image_format, quality=95)", "def test_transform_image_not_upscaled(self):\n self.expect_open_image('SomeBlobKey', (400, 300))\n self.expect_encode_image('SomeImageInJpeg')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', ''))\n self.mox.VerifyAll()", "def scale_jpeg_camera_image(cam_image: Image, width: int, height: int) -> bytes:\n turbo_jpeg = TurboJPEGSingleton.instance()\n if not turbo_jpeg:\n return cam_image.content\n\n try:\n (current_width, current_height, _, _) = turbo_jpeg.decode_header(\n cam_image.content\n )\n except OSError:\n return cam_image.content\n\n scaling_factor = find_supported_scaling_factor(\n current_width, current_height, width, height\n )\n if scaling_factor is None:\n return cam_image.content\n\n return cast(\n bytes,\n turbo_jpeg.scale_with_quality(\n cam_image.content,\n scaling_factor=scaling_factor,\n quality=JPEG_QUALITY,\n ),\n )", "def resize_from_img(img, imgname, size=(128, 128)):\n try:\n img = img.resize(size, Image.LANCZOS)\n img.save(imgname)\n except Exception as error:\n logging.error(traceback.format_exc())", "def resize_image(self, nNewSize, nFlags):\n\t\treturn Job(SDK.PrlVmDev_ResizeImage(self.handle, nNewSize, nFlags)[0])", "def coerce_image(image):\n\n if not image:\n if hasattr(settings, 'BETTY_DEFAULT_IMAGE') and settings.BETTY_DEFAULT_IMAGE != None:\n # If we have a default image, let's use that.\n return AnonymousImageField(settings.BETTY_DEFAULT_IMAGE)\n else:\n return None\n\n if not isinstance(image, ImageFieldFile):\n # If this isn't an ImageField, coerce it\n try:\n image_id = int(image)\n except:\n if settings.BETTY_DEFAULT_IMAGE:\n image_id = settings.BETTY_DEFAULT_IMAGE\n else:\n return None\n image = AnonymousImageField(image_id)\n\n return image", "def save_form_data(self, instance, data):\r\n if data and isinstance(data, UploadedFile):\r\n # A new file is being uploaded. 
So delete the old one.\r\n remove_model_image(instance, 'image')\r\n super(CampaignImageField, self).save_form_data(instance, data)\r\n instance._create_resized_images(raw_field=data, save=False)", "def image_resize(job_object):\n try:\n job = json.loads(job_object.arg)\n base64_file = job['image']\n args = job['args'] if 'args' in job else {}\n del job['image']\n logging.info(job)\n \n def write_file(local_path,filename,file_b64):\n logging.debug(\"about to save to \" + \"%s/%s\" % (local_path,filename))\n if not os.path.exists(local_path): os.makedirs(local_path)\n image_file = base64.b64decode(file_b64)\n local_file = open(\"%s/%s\" % (local_path,filename), \"w\")\n local_file.write(image_file)\n local_file.close()\n \n def download_file(url,local_path,filename):\n print \"downloading \" + url\n f = urllib2.urlopen(urllib2.Request(url))\n print \"about to save to \" + \"%s/%s\" % (local_path,filename)\n if not os.path.exists(local_path): os.makedirs(local_path)\n # Open our local file for writing\n local_file = open(\"%s/%s\" % (local_path,filename), \"w\")\n local_file.write(f.read())\n local_file.close()\n \n local_path = '%s/upload/%s' % (options.asset_root,job['path'])\n local_path_wfile = '%s/%s%s' % (local_path,job['file'],job['extension'])\n filename = '%s%s' % (job['file'],job['extension'])\n #download_file(job['url'],local_path,filename)\n write_file(local_path,filename,base64_file)\n \n def resize_and_save(local_file,new_file,maxsize=None,maxh=None,maxw=None,crop=None):\n \"\"\"Resize the image and save\"\"\"\n logging.debug(\"maxw = %s, maxsize=%s, crop=%s\" % (maxw,maxsize,crop))\n img = Image.open(local_file)\n width,height = img.size\n width,height = float(width), float(height)\n ratio = float(1)\n if crop is not None:\n size = float(maxsize)\n if width <= height and width > size:\n ratio = size/width\n elif height < width and height > size:\n ratio = size/height\n else: \n ratio = 1 # too small\n elif maxsize:\n size = float(maxsize)\n if width >= height and width > size:\n ratio = size/width\n elif height > width and height > size:\n ratio = size/height\n else: \n ratio = 1 # too small\n elif maxh:\n size = maxh\n if height > size:\n ratio = size/height\n else:\n # too small\n ratio = 1\n elif maxw:\n size = maxw\n if width > size:\n ratio = size/width\n else:\n # too small\n ratio = 1\n else:\n raise Exception(\"must specify max width, OR max size\")\n \n print(\"old: ratio = %s: size(x,y) = %s,%s\" % (ratio,width,height))\n height = int(height*ratio)\n width = int(width*ratio)\n print(\"new ratio = %s: size(x,y) = %s,%s\" % (ratio,width,height))\n img = img.resize((width, height),Image.ANTIALIAS)\n if crop is not None:\n log.debug(\"in crop %s\" % crop)\n crop = int(crop)\n if width > crop:\n amt = int((int(width) - crop)/2)\n img = img.crop((amt,0,amt + crop, crop))\n elif height > crop:\n amt = int((int(height) - crop)/2)\n img = img.crop((0,amt,crop,amt+crop))\n \n log.debug(\"saving new file %s\" % new_file)\n if img.mode != \"RGB\":\n img = img.convert(\"RGB\")\n img.save(new_file)\n \n \n if os.path.exists(local_path_wfile):\n if args != {}:\n ext = args['extension'] if 'extension' in args else \"_t\"\n resize_and_save(local_path_wfile,\n '%s/%s%s.jpg' % (local_path,job['file'],ext),\n maxsize=args['maxsize'],\n crop=args['crop'])\n else:\n resize_and_save(local_path_wfile,'%s/%s_t.jpg' % (local_path,job['file']),maxsize=100)\n resize_and_save(local_path_wfile,'%s/%s_m.jpg' % (local_path,job['file']),maxw=317)\n resize_and_save(local_path_wfile,'%s/%s_l.jpg' % 
(local_path,job['file']),maxsize=800)\n keeptrying = False\n else:\n logging.error(\"haven't found file? %s\" % local_path_wfile)\n \n # delete original\n logging.debug(\"About to delete original %s\" % local_path_wfile)\n os.remove(local_path_wfile)\n \n except:\n traceback.print_exc()", "def test_clean_only_image(image):\n resource = models.MediaResource(image=image)\n\n resource.clean()", "def test_transform_image_no_resize_tiff(self):\n self.expect_open_image('SomeBlobKey', (1600, 1200), mime_type='TIFF')\n self.expect_resize(blob_image._DEFAULT_SERVING_SIZE)\n # TIFF is not servable, so we transcode to JPEG.\n self.expect_encode_image('SomeImageInJpeg')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageInJpeg', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', ''))\n self.mox.VerifyAll()" ]
[ "0.6446886", "0.63090426", "0.6298052", "0.61945766", "0.60929877", "0.6076873", "0.60629976", "0.6004527", "0.5964197", "0.59304476", "0.59220564", "0.5894676", "0.58834445", "0.5864532", "0.58378077", "0.58318716", "0.5828034", "0.58193", "0.57947105", "0.5785676", "0.57840073", "0.5783209", "0.5761495", "0.57607", "0.57519597", "0.5749564", "0.57414055", "0.5733752", "0.57236433", "0.57236433", "0.57224673", "0.57046884", "0.5702856", "0.5697195", "0.5695649", "0.5670628", "0.56673175", "0.56544614", "0.56447184", "0.56367534", "0.56357986", "0.5630397", "0.56240225", "0.5618668", "0.5612683", "0.56082577", "0.559279", "0.5564833", "0.55635554", "0.5555942", "0.55490434", "0.55475837", "0.5536569", "0.5534449", "0.5530213", "0.5529918", "0.55222094", "0.5504336", "0.5495834", "0.54941505", "0.5483196", "0.5473025", "0.547278", "0.54508847", "0.5437339", "0.5415132", "0.5407541", "0.54064506", "0.5399145", "0.53923434", "0.5390737", "0.5388473", "0.53661716", "0.53475064", "0.5334799", "0.53329986", "0.53254205", "0.5319276", "0.52989876", "0.5289327", "0.528618", "0.5285182", "0.5282056", "0.52792865", "0.5279141", "0.5266317", "0.5263303", "0.52628195", "0.5262674", "0.5242416", "0.52390176", "0.52160585", "0.5214422", "0.5213513", "0.52096474", "0.52086127", "0.5188748", "0.51794225", "0.5176854", "0.5167363", "0.5155041" ]
0.0
-1
Renders the image. Override this method when creating a custom renderer.
def _render(self, image): raise NotImplementedError('Override this method to render images!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render(self) -> None:\n if self.native_rendering:\n self._render()\n else:\n self.renderer.render_image(self.get_rendered_image())", "def _render(self):\n self.dirty = False\n self.image = self.font.render(self._text, self.aa, self.color_fg)\n self.rect = self.image.get_rect()", "def render(self):\n\n self.desert_image.render()\n self.cannon_image.render()\n self.play_button.render()\n self.escape_label.render()", "def display(self):\n display(self.image)", "def render_image(self,\n frame=None,\n factor=4,\n antialias=True,\n trim=False,\n transparent=False):\n if frame is not None:\n self.frame = frame\n params = dict(\n factor=factor,\n antialias=antialias,\n trim=trim,\n transparent=transparent)\n self._remote_call('_exportImage', target='Widget', kwargs=params)", "def render(self):\n if self.frame_pos:\n self.pos = [\n self.frame_pos[0] + self.position[0] - (self.size[0] / 2),\n self.frame_pos[1] + self.position[1] - (self.size[1] / 2),\n ]\n if self.variable_text:\n self.image = self.fontA.render(self.text, 1, self.color)", "def display(self, image):\n raise NotImplementedError()", "def __draw_image(self):\n if self.image_name is not None:\n img = mpimg.imread(self.image_name)\n extent = (0.5, self.xmax+0.5, -0.5, self.ymax-0.5)\n self.ax.imshow(img, extent=extent, origin='lower',\n alpha=self.image_alpha)", "def draw(self):\n if self.dirty or (self.image is None):\n self._render()\n self.screen.blit(self.image, self.rect)", "def draw(self):\r\n self.screen.blit(self.image, self.image.get_rect())", "def draw(self):\n self.write_image()\n self.update()", "def render(self):\n raise NotImplementedError(\"Renderer is an abstract class\")", "def generateImage(self):\n self.image = self.font.render(self.text, True, self.color)\n self.rect = self.image.get_rect()\n self.rect.center = self.xy", "def render(self):\n raise RenderNotImplemented('Render function is not implemented.')", "def draw(self):\n self.screen.blit(self.image, self.rect)", "def draw(self, surface):\r\n surface.blit(self.image, self.rect)", "def draw(self, surface):\n surface.blit(self.image, self.rect)", "def draw(self, surface):\n surface.blit(self.image, self.rect)", "def Draw(self):\n\t\tGameImage.Draw(self, self.coords)", "def render(self, output, image_size,\n face_colors=(\"#477984\", \"#EEAA4D\", \"#74C3F2\")):\n pass", "def render(self, screen):\n # print(\"Drawing scene {}\".format(self.imgname))\n screen.fill(self.color)", "def render(self):\n raise NotImplementedError", "def generate_image(self):\n pass", "def render(self, mode='human'):\n self.rendering_mode = mode\n\n if self.viewer is None:\n self.viewer = EnvViewer(self, offscreen=self.offscreen)\n\n self.enable_auto_render = not self.offscreen\n\n # If the frame has already been rendered, do nothing\n if self.should_update_rendering:\n self.viewer.display()\n\n if mode == 'rgb_array':\n image = self.viewer.get_image()\n if not self.viewer.offscreen:\n self.viewer.handle_events()\n self.viewer.handle_events()\n return image\n elif mode == 'human':\n if not self.viewer.offscreen:\n self.viewer.handle_events()\n self.should_update_rendering = False", "def process_image(self):\n pass", "def _render(self) -> None:\n pass", "def render(self):\n raise NotImplementedError()", "def render(self, **kwargs):\n return Draw.MolToImage(self._state, **kwargs)", "def render_image(self, rgbobj, dst_x, dst_y):\n self.logger.debug(\"redraw pixmap=%s\" % (self.pixmap))\n if self.pixmap is None:\n return\n self.logger.debug(\"drawing to pixmap\")\n\n # Prepare array for 
rendering\n arr = rgbobj.get_array(self.rgb_order, dtype=np.uint8)\n (height, width) = arr.shape[:2]\n\n return self._render_offscreen(self.pixmap, arr, dst_x, dst_y,\n width, height)", "def draw(self):\n self.screen.blit(self.msg_image, self.msg_image_rect)", "def draw_image(self):\n self.PDF.saveState()\n self.PDF.scale(1, -1)\n # self.PDF.drawImage(\n # LOGO, 490, -78, width=80, preserveAspectRatio=True, mask=\"auto\"\n # )\n self.PDF.restoreState()", "def draw(self, surface):\n\n\t\tsurface.blit(self.image, self.rect.topleft)", "def render(self):\r\n super().render()", "def display(self):\n image_qt = ImageQt.ImageQt(self.view_state.get_image())\n self.imageLabel.setPixmap(QtGui.QPixmap.fromImage(image_qt))\n self.imageLabel.adjustSize()", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def draw(self):\n return ImageDraw.Draw(self.buffer)", "def draw(self, camera):\n camera.draw_image(self._image, self._rectangle.center(),\n self._rectangle.rotation, self._rectangle.size()[1])", "def _render_callback(self, _sim, _viewer):\n pass", "def draw(self, surface):\r\n if self.visible:\r\n surface.blit(self.image, (self.x, self.y))", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def update(self):\n self.image = self.__font.render(str(self.__score), 1, (255, 255, 255))", "def paint(self, event):\r\n width, height = self.imageView.Size\r\n dimension = min(width, height)\r\n\r\n if dimension < self.image.dimension:\r\n resizeQuality = wx.IMAGE_QUALITY_BICUBIC\r\n elif dimension < self.image.dimension * 2:\r\n resizeQuality = wx.IMAGE_QUALITY_BILINEAR\r\n else:\r\n resizeQuality = wx.IMAGE_QUALITY_NORMAL\r\n\r\n image = self.image.image().Scale(dimension, dimension, resizeQuality)\r\n\r\n self.imageView.Refresh()\r\n\r\n dc = wx.AutoBufferedPaintDC(self.imageView)\r\n dc.Clear()\r\n dc.DrawBitmap(wx.Bitmap(image),\r\n (width - dimension) // 2,\r\n (height - dimension) // 2)", "def _render_static_image_annotation(self):\n cv2.rectangle(self._image,\n (0,0), (640, 40),\n (0, 0, 0),\n -1)\n \n cv2.putText(self._image,self._current_mode, (40, 25),\n cv2.FONT_HERSHEY_SIMPLEX, 0.7, 255, 2)\n\n cv2.putText(self._image, time.asctime(), (400, 460),\n cv2.FONT_HERSHEY_SIMPLEX, 0.7, 255, 2)", "def render(self):\n if not self.renderer:\n raise NoRendererError('Field %s has no renderer assigned.' 
%\n self.id)\n return self.renderer.render(self)", "def render(self, renderer: RenderingManager):\n self.grader.render(renderer)", "def render(self, r):\n raise NotImplementedError", "def render(self, renderer, right=False):\n pass # pragma: no cover", "def render_image(self, arr, order, win_coord):\n self.logger.debug(\"redraw surface\")\n if self.surface is None:\n return\n\n dst_x, dst_y = win_coord[:2]\n\n daht, dawd, depth = arr.shape\n self.logger.debug(\"arr shape is %dx%dx%d\" % (dawd, daht, depth))\n\n cr = cairo.Context(self.surface)\n # TODO: is it really necessary to hang on to this context?\n self.cr = cr\n\n # fill surface with background color\n imgwin_wd, imgwin_ht = self.viewer.get_window_size()\n cr.rectangle(0, 0, imgwin_wd, imgwin_ht)\n r, g, b = self.viewer.get_bg()\n cr.set_source_rgba(r, g, b)\n cr.fill()\n\n stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32,\n dawd)\n img_surface = cairo.ImageSurface.create_for_data(arr,\n cairo.FORMAT_ARGB32,\n dawd, daht, stride)\n\n cr.set_source_surface(img_surface, dst_x, dst_y)\n cr.set_operator(cairo.OPERATOR_SOURCE)\n\n cr.mask_surface(img_surface, dst_x, dst_y)\n cr.fill()", "def draw(self, screen):\n\n if self.exist:\n screen.blit(self._img, self._rect)", "def draw_img(self, i, j, k):\n if k < len(self.images):\n img = self.images[k]\n r = self.get_rect(i, j)\n self.screen.blit(img, r)", "def on_draw(self):\n\t\tself.render()", "def on_draw(self):\n\t\tself.render()", "def draw_image(self, image, position = (0, 0), anchor= 'topleft'):\n offset = self._calculate_offset(anchor, image._surf.get_size())\n self._surf.blit(image._surf, position + offset)", "def _image(self):\n print(\"imaging\")\n self.images.append(self.device_control.image())\n yield", "def render(self, axes=None):\n raise NotImplementedError()", "def on_draw_over_image(self):", "def render(self, scene: scenes.Scene):\n\n logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n\n time_0 = time.time()\n logging.info(f\"Starting to render scene with size \"\n f\"{self.hsize}, {self.vsize}\")\n image = canvas.Canvas(self.hsize, self.vsize)\n\n for y in range(self.vsize):\n print(y)\n for x in range(self.hsize):\n ray = self.ray_for_pixel(x, y)\n color, _ = scene.color_at(ray)\n image.set(x, y, color)\n\n time_1 = time.time()\n logging.info(f\"Rendered scene in {round(time_1-time_0, 2)}s \"\n f\"({round(self.hsize*self.vsize/(time_1-time_0), 2)} pps)\")\n\n return image", "def dspyRender(self):\n pass", "def on_image(self, image):", "def render(self):\n\n # If you comment this out, you cannot change the light state with the same pose.\n # if np.all(\n # self._rendered_pos == self.camera_coords.worldpos()) and np.all(\n # self._rendered_rot == self.camera_coords.worldrot()):\n # return self._rendered\n\n target = self.camera_coords.worldpos() + \\\n self.camera_coords.rotate_vector([0, 0, 1.])\n up = self.camera_coords.rotate_vector([0, -1.0, 0])\n # up = self.camera_coords.copy().rotate_vector([0, 1.0, 0])\n\n vm = pybullet.computeViewMatrix(\n self.camera_coords.worldpos(), target, up)\n\n i_arr = pybullet.getCameraImage(\n self.im_width, self.im_height, vm, self.pm,\n shadow=0,\n renderer=pybullet.ER_TINY_RENDERER,\n lightDirection=self.lightDirection,\n lightColor=self.lightColor,\n lightDistance=self.lightDistance,\n lightAmbientCoeff=self.lightAmbientCoeff,\n lightDiffuseCoeff=self.lightDiffuseCoeff,\n lightSpecularCoeff=self.lightSpecularCoeff\n )\n\n self._rendered = i_arr\n self._rendered_pos = 
self.camera_coords.worldpos().copy()\n self._rendered_rot = self.camera_coords.worldrot().copy()\n\n return i_arr", "def render(self):\n self.axial.Render()\n self.coronal.Render()\n self.sagittal.Render()\n #self.isosurface.Render()\n #self.rwi_pcp.Render()", "def render(self, window):\n body = pygame.image.load(IMAGE_SNAKE).convert_alpha() # loading image\n for block in self.body:\n window.blit(body, (block[0]*SPRITE_SIZE, block[1]*SPRITE_SIZE)) # painting a beautiful snek\n if self.neural_net: # calls for neural net rendering\n self.neural_net.render(window, self.vision)", "def paint(self):\r\n pass", "def display(self):\n nrow = 1\n ncol = len(self.views) + 1\n rows = [(self.views[0].image, len(self.views) + 1)]\n fig, axes = plt.subplots(nrows=nrow, ncols=ncol,\n figsize=self._figsize(rows),\n squeeze=True)\n for ax, (title, img) in zip(axes.ravel(),\n [(v.position.id, v.image) for v in self.views] + [\n ('combined', self.image)]):\n ax.imshow(img)\n ax.axis('off')\n ax.xaxis.set_visible(False)\n ax.yaxis.set_visible(False)\n ax.set(title=title)\n fig.tight_layout()\n fig.canvas.draw()\n img_array = np.array(fig.canvas.renderer._renderer)\n plt.close('all')\n return img_array", "def render(self, bbox, width=None, height=None):\n width = width or self.tilesize\n height = height or self.tilesize\n self._prepare_rendering(bbox, width=width, height=height)\n # Render image with default Agg renderer\n tmpfile = NamedTemporaryFile(delete=False)\n im = mapnik.Image(width, height)\n mapnik.render(self._mapnik, im)\n im.save(tmpfile.name, 'png256') # TODO: mapnik output only to file?\n tmpfile.close()\n content = open(tmpfile.name).read()\n os.unlink(tmpfile.name)\n return content", "def get_image(self):\n self.drawer.flush()\n return self.img", "def blit(self):\n self.screen.blit(self.image, self.rect)", "def showImage(self, image):\n \n self.image = img", "def draw(self):\n\t\tpass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def render(self):\n self.screen.fill(prepare.BACKGROUND_COLOR)\n self.health_bar()\n # self.enemy_health()\n self.energy_bar()\n self.level.draw(self.screen)\n pg.display.update()", "def getimage(self):", "def render(self):\n self._surface.fill(Color('black'))\n for y in range(0, self.height):\n for x in range(0, self.length):\n if self.grid.get_cell(x, y) == CellType.snake:\n self._surface.blit(self.snake_cell_image,\n (x * self.cell_size, y * self.cell_size))\n elif self.grid.get_cell(x, y) == CellType.apple:\n self._surface.blit(self.apple_cell_image,\n (x * self.cell_size, y * self.cell_size))\n pg.display.update()", "def render(self):\n try:\n if self.permit():\n return self.renderer.render(self)\n except AttributeError:\n if self.renderer is None:\n raise NotImplementedError(\"Should have implemented a renderer for {0}\".format(self.name))\n else:\n raise\n return ''", "def render(self):\n dirty = self.sprites.draw(self.screen)\n\n if self.notification.is_active:\n self.notification.render(self.screen)\n\n # Display the text.\n pygame.display.update()\n\n # Only update sprites, not the whole screen.\n pygame.display.update(dirty)", "def draw_image(self, image, position=(0, 0), anchor='topleft'):\n offset = self._calculate_offset(anchor, image._surf.get_size())\n self._surf.blit(image._surf, position + offset)\n self._version += 1\n spyral.util.scale_surface.clear(self._surf)\n return self", "def RenderTexture(self, vtkVolume, vtkRenderer, p_int=..., p_int=..., *args, **kwargs):\n ...", "def 
update(self):\n rect = self.img_rect.center\n self.font_img = self.font.render(self.text, True, color1)\n self.img_rect = self.font_img.get_rect()\n # self.img_rect.center = [self.title_img.get_width() + self.font_img.get_width()//2 + padding, 5*Y//8]\n self.img_rect.center = rect", "def render(self):\n self.camera.SimulationStep(0.01)\n return self.camera.GetSensorData().imagedata;", "def update(self):\n self.image = self.__font.render( self.message, 1, self.colour)\n self.rect = self.image.get_rect()\n self.rect.center = self.position", "def show(self, exec_rasterize = False):\n\n if (exec_rasterize):\n self.rasterize()\n\n Image.fromarray(self._image).show()", "def process(self, image):", "def display(self):\n\t\tself.imgDisplay.set_from_pixbuf(self.getVisible())\n\t\tgc.collect()", "def draw(self, surface):\n surface.blit(self.image, (0,0))\n for widget in self.widgets:\n widget.draw(surface)", "def render(self, window):\r\n # Update images\r\n for i in self.images:\r\n i.undraw()\r\n\r\n if self.alive == False:\r\n self.images[3].draw(window)\r\n elif self.scared == True:\r\n self.images[2].draw(window)\r\n elif self.a == True:\r\n self.images[0].draw(window)\r\n elif self.a == False:\r\n self.images[1].draw(window)\r\n\r\n # Move images\r\n toX = (self.boundingBox.pos.getX() - (self.images[0].getAnchor().getX() - 20))\r\n toY = (self.boundingBox.pos.getY() - (self.images[0].getAnchor().getY() - 20))\r\n\r\n for i in self.images:\r\n i.move(toX, toY)", "def draw(self):\n self.game.screen.blit(self.image, self.game.off(self.pos))", "def make_image(self):\n\n if self.type == 'passthrough':\n return\n render_template(\n os.path.dirname(self.main_module),\n os.path.basename(self.main_module_path),\n self.language,\n self.requirements,\n self.whitelist,\n self.type,\n into=self.code_dir)\n self.build()", "def render(self, screen) -> None:\n screen.fill(self.background_color)\n self.draw_center_circle(screen)\n self.draw_rectangle_field(screen)\n self.draw_defense_areas(screen)\n self.draw_field_divider(screen)\n self.draw_goals(screen)", "def render(self, *args, **kwargs):\r\n raise NotImplementedError", "def draw(self, img, idx=None):\n if idx is None:\n for rhombus in self.rhombuses:\n img = self.__draw_rhombus(img, rhombus)\n else:\n img = self.__draw_rhombus(img, self.rhombuses[idx])\n\n return img" ]
[ "0.83629304", "0.7514363", "0.738157", "0.71521425", "0.7143713", "0.7124317", "0.7027804", "0.69824153", "0.697108", "0.69224477", "0.69172174", "0.69109416", "0.68451834", "0.676605", "0.6748223", "0.66155213", "0.660075", "0.660075", "0.65852135", "0.64857286", "0.64756054", "0.6467235", "0.64649606", "0.6449069", "0.639439", "0.6379378", "0.6378728", "0.6329882", "0.6296905", "0.62766397", "0.62709296", "0.6242086", "0.6234971", "0.62324166", "0.61855394", "0.61855394", "0.61855394", "0.61855394", "0.61855394", "0.61855394", "0.61756504", "0.6169834", "0.61585695", "0.61495537", "0.614531", "0.614531", "0.614531", "0.6142228", "0.61237085", "0.6118498", "0.6116703", "0.61098254", "0.6108429", "0.60943645", "0.609424", "0.6094048", "0.6079516", "0.60737807", "0.60737807", "0.6073463", "0.60673547", "0.60558164", "0.60552937", "0.60509056", "0.60506004", "0.6045226", "0.60397184", "0.6028453", "0.602592", "0.60228086", "0.60178113", "0.60155475", "0.6014473", "0.6014336", "0.60132897", "0.6009112", "0.60077125", "0.60077125", "0.60077125", "0.60077125", "0.5989808", "0.5988152", "0.59859914", "0.59856", "0.5967391", "0.59659934", "0.59649193", "0.5958046", "0.59523755", "0.5931555", "0.5930769", "0.59293556", "0.59015816", "0.5900659", "0.5895832", "0.58920044", "0.58865845", "0.5885952", "0.5879704", "0.5878351" ]
0.86215574
0
Version management for migrations.
def __eq__(self, other): return ( self.bleed == other.bleed and self.width == other.width and self.height == other.height )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def migration():", "def migrate(cr, version):\n pass", "def version(self):\r\n print migration.db_version()", "def post_migrations(self):", "def _run_migrations(self, current_migration_version: int):\n logger.debug(\"Checking for necessary database migrations...\")\n\n while current_migration_version < latest_migration_version:\n next_migration_version = current_migration_version + 1\n logger.info(\n f\"Migrating the database from v{current_migration_version} to v{next_migration_version}...\",\n )\n\n migration = importlib.import_module(f\".migrations.{str(next_migration_version).rjust(3, '0')}\", \"middleman\")\n # noinspection PyUnresolvedReferences\n migration.migrate(self)\n\n # Update the stored migration version\n self._execute(\"UPDATE migration_version SET version = ?\", (next_migration_version,))\n\n logger.info(f\"Database migrated to v{next_migration_version}\")\n current_migration_version += 1", "def makemigration(self):\n template = os.path.join(os.path.dirname(__file__),\n 'migration_template.py')\n ver = self.latest(quiet=True) + 1\n destination = os.path.abspath(self.config.get('migrate', 'location'))\n if not os.path.exists(destination):\n os.makedirs(destination)\n fname = 'version_{}.py'.format(ver)\n shutil.copyfile(template, os.path.join(destination, fname))\n self.logger.info('Migration \\'{}\\' created'.format(fname))\n self.latest()", "def migrate(self):\n\tpass", "def version(self):", "async def migrate(self):\n # Controlla se ci sono tabelle nel db\n async with self.db.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute(\"\"\"SELECT COUNT(DISTINCT table_name) as c\n FROM information_schema.columns\n WHERE table_schema = %s\"\"\", (conn.db,))\n db_empty = (await cur.fetchone())[\"c\"] <= 0\n\n # Se ci sono tabelle, prova a leggere `db_version`\n if not db_empty:\n await cur.execute(\"SELECT db_version FROM db_version LIMIT 1\")\n db_version_in_db = await cur.fetchone()\n db_version = 0 if db_version_in_db is None else db_version_in_db[\"db_version\"]\n else:\n db_version = 0\n\n # Prendi la lista di file sql e py da eseguire\n new_migrations = [x for x in self.migrations if x.id > db_version]\n\n # Controlla se ci sono migration da eseguire\n if not new_migrations:\n self.logger.info(\"No new migrations. 
The database is already up to date!\")\n return\n\n # Esegui migrations\n self.logger.info(\"Current db version: @{}\".format(db_version))\n db_version += 1\n current_migration = self.get_migration(db_version)\n while current_migration is not None:\n self.logger.info(\"Executing {}\".format(current_migration.file_name))\n\n if current_migration.type == \"sql\":\n # Leggi ed esegui file sql\n with open(\n os.path.join(os.path.dirname(__file__), \"migrations/{}\".format(current_migration.file_name)), \"r\"\n ) as f:\n data = f.read()\n async with self.db.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute(data)\n await conn.commit()\n elif current_migration.type == \"py\":\n # Importa modulo py\n module = importlib.import_module(\"migrator.migrations.{}\".format(current_migration.file_name[:-3]))\n migr = getattr(module, \"do\")\n await migr()\n\n # Migration eseguita, aggiorna `db_version`\n self.logger.info(\"Migration {} executed with no errors\".format(current_migration.file_name))\n await self.save_db_version(db_version)\n\n # Vai alla prossima migration\n db_version += 1\n current_migration = self.get_migration(db_version)\n self.logger.info(\"All migrations executed correctly\")", "def create_versions_after_migration(**kwargs):\n migrations = [migration\n for migration, rollback in kwargs.get('plan', [])\n if not rollback]\n models: Set[Any] = set()\n for migration in migrations:\n models.update(getattr(migration, 'REVISED_MODELS', []))\n\n with transaction.atomic():\n for model in reversion_models(models):\n create_revisions_for(model)", "def upgrade(self, version):\n try:\n version = int(version)\n except:\n if version != 'latest':\n self.logger.error('Unable to parse version \"{}\"'.format(version))\n return\n\n # check the current db version\n current_version = self.inspect()\n if current_version is None:\n self.logger.error('Unable to inspect your database. '\n 'Perhaps you need to run \\'jambi inpsect\\'?')\n return\n\n # get the migrations\n migrations = self.find_migrations()\n latest_version = migrations[-1][1] if any(migrations) else 0\n migrations = tuple(filter(lambda x: x[1] > current_version, migrations))\n\n if current_version > latest_version:\n self.logger.error('Your database version is higher than the '\n 'current database version. '\n '(current: {}, latest: {})'.format(current_version,\n latest_version))\n elif current_version == latest_version:\n self.logger.info('You are already up to date. '\n '(version: {})'.format(current_version))\n return\n\n # filter out migrations that are beyond the desired version\n if version == 'latest':\n version = latest_version\n migrations = tuple(filter(lambda x: x[1] <= version, migrations))\n if not any(migrations):\n self.logger.info('You are already up to date. 
'\n '(version: {})'.format(current_version))\n return\n\n # run the migrations\n self.logger.info('Now performing the migration to version {}...'.format(version))\n self.db.connect()\n with self.db.atomic():\n for n, v, m in migrations:\n self.logger.info('>>> [{}] Attempting...'.format(v))\n migrator = PostgresqlMigrator(self.db)\n upgrades = m.upgrade(migrator)\n migrate(*upgrades)\n self._set_version(v)\n self.logger.info('>>> [{}] Success!'.format(v))\n self.db.close()\n self.logger.info('Successfully migrated to version {}...'.format(version))\n return", "def _migrate(self):\n with self.engine.begin() as conn:\n context = alembic.migration.MigrationContext.configure(conn)\n current_rev = context.get_current_revision()\n self.log.debug('Current migration revision: %s' % current_rev)\n\n config = alembic.config.Config()\n config.set_main_option(\"script_location\",\n \"zuul:driver/sql/alembic\")\n config.set_main_option(\"sqlalchemy.url\",\n self.connection_config.get('dburi'))\n\n # Alembic lets us add arbitrary data in the tag argument. We can\n # leverage that to tell the upgrade scripts about the table prefix.\n tag = {'table_prefix': self.table_prefix}\n alembic.command.upgrade(config, 'head', tag=tag)", "def upgrade(self):", "def upgrade(self):", "async def manage_version():\n\n try:\n repo = git.Repo(search_parent_directories=True)\n version = repo.git.describe('--tags')\n except Exception:\n version = \"v0.0.0\"\n\n base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n creation_time = time.ctime(os.path.getmtime(base_dir))\n\n response = {'version': version, 'deployedOn': creation_time}\n return OK(response)", "def test_changeVersions(self):\n self._testVersionChanging(8, 2, 3)", "def do_version(self):\n return \"1.0.0\", True", "def _get_version(self):", "def _set_database_version(db, version):\n if not isinstance(version, int):\n raise TypeError(\"Version must be integer, not %s : %s\" % (\n version, type(version)))\n create_metadata = \\\n \"CREATE TABLE %s (version INT)\" % METADATA_COLUMN_NAME\n execute_sql(db, create_metadata)\n insert_version = \\\n \"INSERT INTO %s VALUES (%s)\" % (METADATA_COLUMN_NAME, version)\n execute_sql(db, insert_version)", "def update_db_version():\n print(\"Checking Database states...\")\n os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"ADSM.settings\")\n try:\n call_command('migrate', database='scenario_db', interactive=False, fake_initial=True)\n call_command('migrate', database='default', interactive=False, fake_initial=True)\n except:\n print(\"Error: Migration failed.\")\n print('Done migrating databases.')", "def upgrade_schema():\n\n db_version = get_db_version()\n try:\n while db_version < CURRENT_DATABASE_VERSION:\n db_version += 1\n upgrade_script = 'upgrade_to_'+str(db_version)\n globals()[upgrade_script]()\n except KeyError as e:\n logging.exception('Attempted to upgrade using script that does not exist: {}'.format(e))\n sys.exit(1)\n except Exception as e:\n logging.exception('Incremental upgrade of db failed')\n sys.exit(1)\n else:\n config.db.singletons.update_one({'_id': 'version'}, {'$set': {'database': CURRENT_DATABASE_VERSION}})\n sys.exit(0)", "def upgrade(self, old_version, new_version):\n pass", "def db_upgrade():\n generate_migration_file()\n dbu_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')\n\n for time_step in [_.strip('.sql') for _ in migration_files()]:\n decide = MySQLScheme.fetch_one(REVISION_EXISTS,\n **{\"args\": {'revision': time_step}})\n if not decide:\n 
MySQLScheme.commit(getattr(dbu_query, f\"upgrade_{time_step}\").sql)\n LOGGER.info(f\"successful migration: {time_step}\")\n else:\n LOGGER.info(f'migration already exists: {time_step}')", "def version(self):\n pass", "def version(self):\n pass", "def version(self):\n pass", "def sync(self, version=None):\r\n return migration.db_sync(version)", "def run(self):\n\n version_str = (\n get_git_version(here))\n\n version_uniparser_dict = (\n get_uniparser_version())\n\n if (version_str is not None or\n version_uniparser_dict is not None):\n\n with open(\n os.path.join(here, 'lingvodoc', 'version.py'), 'w',\n encoding = 'utf-8') as version_py_file:\n\n version_py_file.write(\n self.version_py_template.format(\n repr(version_str),\n repr(version_uniparser_dict)))\n\n # Continuing with setup.\n\n super().run()", "def ready(self):\n import django_better_migrations.migration_writer_patch # noqa", "def version(self, newVersion=None):\n pass", "def migrate(ctx, start_version, end_version):\n if ctx.obj['TYPE'] == 'file':\n if ctx.obj['DATA_FILE']:\n file_path = ctx.obj['DATA_FILE']\n else:\n file_path = os.path.join(ctx.obj['DATA_DIR'], ctx.obj['NAMESPACE'] + '.json')\n\n # todo make this more like alemebic and determine/load versions automatically\n with open(file_path, 'r') as f:\n data = json.loads(f.read())\n\n data = run_migration(data, start_version, end_version)\n with open(file_path, 'w') as f:\n f.write(json.dumps(data))", "def Version(self) -> _n_0_t_12:", "def Version(self) -> _n_0_t_12:", "def test_version(self):\n pass", "def test_semantic_version():\n semantic_version.Version(settings.VERSION)", "def version():\n\n pass", "def _testVersionChanging(self, major, minor, micro, prerelease=None):\n versionUpdates = []\n def myVersionChanger(sourceTree, versionTemplate):\n versionUpdates.append((sourceTree, versionTemplate))\n versionChanger = ChangeVersionsScript()\n versionChanger.changeAllProjectVersions = myVersionChanger\n version = \"%d.%d.%d\" % (major, minor, micro)\n if prerelease is not None:\n version += \"pre%d\" % (prerelease,)\n versionChanger.main([version])\n self.assertEquals(len(versionUpdates), 1)\n self.assertEquals(versionUpdates[0][0], FilePath(\".\"))\n self.assertEquals(versionUpdates[0][1].major, major)\n self.assertEquals(versionUpdates[0][1].minor, minor)\n self.assertEquals(versionUpdates[0][1].micro, micro)\n self.assertEquals(versionUpdates[0][1].prerelease, prerelease)", "def test_version_control_specified(self):\n # Establish version control on this database\n version = 0\n dbcontrol = ControlledSchema.create(self.engine, self.repos, version)\n self.assertEqual(dbcontrol.version, version)\n\n # Correct when we load it, too\n dbcontrol = ControlledSchema(self.engine, self.repos)\n self.assertEqual(dbcontrol.version, version)\n\n dbcontrol.drop()\n\n # Now try it with a nonzero value\n version = 10\n for i in range(version):\n self.repos.create_script('')\n self.assertEqual(self.repos.latest, version)\n\n # Test with some mid-range value\n dbcontrol = ControlledSchema.create(self.engine,self.repos, 5)\n self.assertEqual(dbcontrol.version, 5)\n dbcontrol.drop()\n\n # Test with max value\n dbcontrol = ControlledSchema.create(self.engine, self.repos, version)\n self.assertEqual(dbcontrol.version, version)\n dbcontrol.drop()", "def _set_version(self, version):\n with self.db.atomic():\n JambiModel.delete().execute()\n JambiModel.create(ref=str(version))\n self.logger.debug('Set jambi version to {}'.format(version))", "def set_version_db(apps, schema_editor):\n Version 
= apps.get_model(\"reversion\", \"Version\")\n content_types = Version.objects.values_list(\"content_type\", flat=True).distinct()\n for content_type in content_types:\n model_class = content_type.model_class()\n db = router.db_for_write(model_class)\n Version.objects.filter(content_type=content_type).update(db=db)", "def get_versions(self):\n raise NotImplementedError", "def _update_version(self) -> None:\n # Implement in child class.\n raise NotImplementedError", "def version_updater(logging_level=logging.WARNING):\n # connect to db\n do_db_setup()\n\n # set Qt lib\n set_qt_lib()\n\n from anima.ui import version_updater, models\n from anima.env import mayaEnv\n reload(mayaEnv)\n reload(version_updater)\n reload(models)\n m = Maya()\n import pymel\n m.name = \"Maya\" + str(pymel.versions.current())[0:4]\n\n logger.setLevel(logging_level)\n\n # generate a reference_resolution\n version_updater.UI(environment=m)", "def ongeza(self, type_):\n switch = {\n 'm': semver.bump_major,\n 'n': semver.bump_minor,\n 'p': semver.bump_patch,\n 'major': semver.bump_major,\n 'minor': semver.bump_minor,\n 'patch': semver.bump_patch}\n\n new_version = switch.get(type_)(self.version)\n\n if new_version in set(self.versions):\n self.logger.error('version `%s` already present', new_version)\n new_version = None\n\n return new_version", "def upgrade_to_1():\n config.db.singletons.insert_one({'_id': 'version', 'database': 1})", "def _walk_versions(self, config, engine, downgrade=True, snake_walk=False):\n\n revisions = self._revisions()\n for dest, curr in revisions:\n self._migrate_up(config, engine, dest, curr, with_data=True)\n\n if snake_walk and dest != 'None':\n # NOTE(I159): Pass reversed arguments into `_migrate_down`\n # method because we have been upgraded to a destination\n # revision and now we going to downgrade back.\n self._migrate_down(config, curr, dest, with_data=True)\n self._migrate_up(config, dest, curr, with_data=True)\n\n if downgrade:\n revisions = self._revisions(downgrade)\n for dest, curr in revisions:\n self._migrate_down(config, engine, dest, curr, with_data=True)\n if snake_walk:\n self._migrate_up(config, engine, curr, dest,\n with_data=True)\n self._migrate_down(config, engine, dest, curr,\n with_data=True)", "def run_migration_checks():\n check_model_state()\n check_migration_state()", "def run_migration(env, upgrade_type):\n pass", "def get_version(self):\n pass", "def main(arguments):\n migration = Migration(arguments)\n return migration.run()", "def migrate(ctx):\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"migrate\")", "def setup_before_migration(self, apps):", "def _current_versions(self, job):\n raise NotImplementedError", "def migrate(cls)->None:\n pass", "async def load_db_migrations(self, conn: Optional[Pool] = None) -> None:\n\n def get_version_from_name(name: str) -> int:\n return int(name.split(\"_\", 1)[0])\n\n file_ = max(\n (f for f in settings.migrations_directory.iterdir() if f.suffix == \".sql\"),\n key=lambda f: get_version_from_name(f.name),\n )\n\n async with MaybeAcquire(conn, self.pool) as conn:\n query = \"SELECT current_setting('mg.version') as version\"\n try:\n schema_version = await conn.fetchval(query)\n except UndefinedObjectError:\n schema_version = 0\n else:\n schema_version = int(schema_version)\n\n logger.debug(\"Schema version: %d\", schema_version)\n\n file_version = get_version_from_name(file_.name)\n if file_version <= schema_version:\n logger.debug(\"Skipping migration: %s\", file_.name)\n return\n\n sql_commands = 
read_sql_file(file_)\n logger.info(\"Applying migration: %s\", file_.name)\n\n await conn.execute(sql_commands)\n await self._set_database_version(file_version)", "def versioned(command_subclass):\n VERSION_PY = \"\"\"\n# This file is updated from Git information by running 'python setup.py\n# version'.\n__version__ = '%s'\n\"\"\"\n orig_callable = command_subclass.run\n\n def modified_callable(self):\n if not os.path.isdir(\".git\"):\n print \"This does not appear to be a Git repository.\"\n return\n try:\n p = subprocess.Popen([\"git\", \"describe\",\n \"--tags\", \"--always\"],\n stdout=subprocess.PIPE)\n except EnvironmentError:\n print \"unable to run git, leaving idss-seration/_version.py alone\"\n return\n stdout = p.communicate()[0]\n if p.returncode != 0:\n print \"unable to run git, leaving idss-seriation/_version.py alone\"\n return\n # our tags are like: v2.2\n ver = stdout[len(\"v\"):].strip()\n f = open(\"sklearn_mmadsen/_version.py\", \"w\")\n f.write(VERSION_PY % ver)\n f.close()\n print \"updated _version.py to '%s'\" % ver\n orig_callable(self)\n\n command_subclass.run = modified_callable\n return command_subclass", "def test_get_version(self):\n pass", "def test_version_01(self):\n\n version = self.sqlbak([\"--version\"])\n self.assertTrue(\"sqlbak v\" in version)", "def stampdb(self, args):\n revision = REVISION_MAPPING[args.configversion]\n print(f\"Based on config version {args.configversion} \"\n f\"we think your results schema is version {revision} and are upgrading to it\")\n stamp_db(revision, args.dbfile)", "async def _upgrade_db(self) -> None:\n cur_version = await self._get_db_version()\n for n in range(cur_version + 1, sql_data.CUR_VERSION + 1):\n log.msg('Upgrading database to version %d' % n)\n if n in sql_data.SQL_UPGRADES:\n for command in sql_data.SQL_UPGRADES[n]:\n await self.operation(command)\n if cur_version != sql_data.CUR_VERSION:\n await self._set_db_version(sql_data.CUR_VERSION)", "def _version(self):\n # TODO: Can we delete this method and just print the line from the\n # reqs file verbatim instead?\n def version_of_archive(filename, package_name):\n # Since we know the project_name, we can strip that off the left, strip\n # any archive extensions off the right, and take the rest as the\n # version.\n for ext in ARCHIVE_EXTENSIONS:\n if filename.endswith(ext):\n filename = filename[:-len(ext)]\n break\n # Handle github sha tarball downloads.\n if is_git_sha(filename):\n filename = package_name + '-' + filename\n if not filename.lower().replace('_', '-').startswith(package_name.lower()):\n # TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?\n give_up(filename, package_name)\n return filename[len(package_name) + 1:] # Strip off '-' before version.\n\n def version_of_wheel(filename, package_name):\n # For Wheel files (http://legacy.python.org/dev/peps/pep-0427/#file-\n # name-convention) we know the format bits are '-' separated.\n whl_package_name, version, _rest = filename.split('-', 2)\n # Do the alteration to package_name from PEP 427:\n our_package_name = re.sub(r'[^\\w\\d.]+', '_', package_name, re.UNICODE)\n if whl_package_name != our_package_name:\n give_up(filename, whl_package_name)\n return version\n\n def give_up(filename, package_name):\n raise RuntimeError(\"The archive '%s' didn't start with the package name '%s', so I couldn't figure out the version number. 
My bad; improve me.\" %\n (filename, package_name))\n\n get_version = (version_of_wheel\n if self._downloaded_filename().endswith('.whl')\n else version_of_archive)\n return get_version(self._downloaded_filename(), self._project_name())", "def set_version(self, version):\n\n def update_version(version, filepath):\n with open(filepath, \"r\") as stream:\n contents = stream.read()\n\n new_contents = _fix_contents_version(contents, version)\n assert contents != new_contents\n with open(filepath, \"w\") as stream:\n stream.write(new_contents)\n\n update_version(version, os.path.join(\".\", \"package.json\"))\n update_version(version, os.path.join(\".\", \"src\", \"setup.py\"))\n update_version(\n version, os.path.join(\".\", \"src\", \"robocorp_code\", \"__init__.py\")\n )", "def switch_to_version(self, version):\n self.current_version = version\n self.save()", "def run (args = None):\n cmds.init.require_init()\n (options, args) = optargs (args)\n try:\n revision = int(args[0])\n except (IndexError, ValueError):\n revision = repo.migration.latest_number()\n\n current = repo.revision.current()\n\n if options.dry_run:\n repo.revision.set_current(revision)\n print \"\"\"Revision set to #%s\"\"\" % revision\n return\n\n\n if current == revision and not options.abandon_current:\n print \"\"\"Nothing to update.\"\"\"\n return\n\n print \"\"\"Updating to migration #%s.\"\"\" % revision\n\n outstanding_changes = repo.has_outstanding_changes()\n if outstanding_changes:\n apply_after_update = repo.outstanding_changes()\n print \"\"\"Undoing outstanding changes.\"\"\"\n db.dump.load (repo.outstanding_changes(undo = True))\n \n if revision < current:\n # Downgrading\n while current > revision:\n try:\n Migration = repo.migration.Migration (current)\n print \"\"\"Downgrading migration #%s: %s.\"\"\" % (current, Migration.message)\n Migration.down()\n current = current - 1\n repo.revision.set_current (current)\n except MigrationFailedError:\n break\n \n else:\n # Upgrading\n while current < revision:\n try:\n current = current + 1\n Migration = repo.migration.Migration (current)\n print \"\"\"Upgrading migration #%s: %s.\"\"\" % (current, Migration.message)\n Migration.up()\n repo.revision.save_to_file(current)\n repo.revision.set_current (current)\n except MigrationFailedError:\n break\n\n print \"\"\"Updated to revision #%s.\"\"\" % repo.revision.current()\n\n if outstanding_changes and not options.abandon_current:\n print \"\"\"Reapplying outstanding changes.\"\"\"\n db.dump.load (apply_after_update)", "def upgrade(self, revision: str = \"head\") -> \"Alembic\":\n\n self.configure()\n\n if not self.migrator_base.does_table_exists(\n \"alembic_version\"\n ) or self.is_revision_different(revision):\n PyFunceble.facility.Logger.info(\n \"Started update (%r) of the database schema(s).\", revision\n )\n\n alembic_command.upgrade(self.alembic_config, revision)\n\n PyFunceble.facility.Logger.info(\n \"Finished update (%r) of the database schema(s).\", revision\n )", "def test_defaultChangeVersionsVersionChanger(self):\n versionChanger = ChangeVersionsScript()\n self.assertEquals(versionChanger.changeAllProjectVersions,\n changeAllProjectVersions)", "def find_migrations(self):\n fileloc = self.config.get('migrate', 'location')\n fullpath = os.path.abspath(fileloc)\n if not os.path.exists(fullpath):\n os.makedirs(fullpath)\n try:\n filenames = os.listdir(fullpath)\n except FileNotFoundError:\n self.logger.error('Unable to find migration folder '\n '\"{}\"'.format(fullpath))\n return []\n\n def 
is_valid_migration_name(n):\n return n.startswith('version_') and n.endswith('.py')\n filenames = filter(lambda x: is_valid_migration_name(x), filenames)\n filepaths = [(os.path.join(fullpath, f), f.replace('.py', ''))\n for f in filenames]\n migrations = []\n for fp, mn in filepaths:\n module_name = '.'.join([fileloc.replace('/', '.').strip('.'), mn])\n try:\n ver = int(re.search(r'version_(\\d+)', mn).group(1))\n except:\n self.logger.warning('Cannot parse version number from \"{}\", '\n 'skipping'.format(mn))\n continue\n self.logger.debug('Found {} at version {}'.format(module_name,\n ver))\n migrations.append(\n (module_name, ver, importlib.import_module(module_name))\n )\n return sorted(migrations, key=lambda x: x[1])", "def set_version(self, bundle, ctx, filename, version):", "def migrate_up(self, version, with_data=False):\n # NOTE(xek): This is a list of migrations where we allow dropping\n # things. The rules for adding exceptions are very very specific.\n # Chances are you don't meet the critera.\n # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE\n exceptions = [\n 64, # drop constraint\n 86, # drop watch_rule/watch_data tables\n ]\n # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE\n\n # NOTE(xek): We start requiring things be additive in\n # liberty, so ignore all migrations before that point.\n LIBERTY_START = 63\n\n if version >= LIBERTY_START and version not in exceptions:\n banned = ['Table', 'Column']\n else:\n banned = None\n with BannedDBSchemaOperations(banned):\n super(HeatMigrationsCheckers, self).migrate_up(version, with_data)", "def GetMigrationStatus(self, instance):\n raise HypervisorError(\"Migration not supported by the chroot hypervisor\")", "def migrate(t):\n\tapp = Application(__name__)\n\taction = t.option.action\n\toptions = argparse.Namespace()\n\tif not action:\n\t\t# no argument, then migrate version to latest\n\t\toptions.version = None\n\t\tMigration('go').run(config=app.config, options=options)\n\telse:\n\t\ttry:\n\t\t\t# specified the version, then migrate to it\n\t\t\tversion = int(action)\n\t\t\toptions.version = version\n\t\t\tMigration('go').run(config=app.config, options=options)\n\t\texcept ValueError:\n\t\t\t# specified some command to run, push the 'action' back first.\n\t\t\tt.argv.insert(0, action)\n\t\t\toptions = parser_options(t)\n\t\t\tMigration(options.migrate).run(options=options, config=app.config)", "def _load_migrations():\n\n upgrade_script_rex = re.compile(\n r'^upgrade_(0|[1-9][0-9]*)_to_([1-9][0-9]*)\\.py$')\n migrations = {}\n\n # Currently, we only load migrations for a '__core__' schema, and only from\n # the migrations directory. 
One idea if we need to eventually support\n # migrations for the per-testsuite tables is to add subdirectories keyed on\n # the testsuite.\n for schema_name in ('__core__',):\n schema_migrations_path = os.path.join(os.path.dirname(__file__),\n 'migrations')\n schema_migrations = {}\n for item in os.listdir(schema_migrations_path):\n # Ignore certain known non-scripts.\n if item in ('README.txt', '__init__.py', 'new_suite.py',\n 'util.py') or item.endswith('.pyc'):\n continue\n\n # Ignore non-matching files.\n m = upgrade_script_rex.match(item)\n if m is None:\n logger.warning(\n \"ignoring item %r in schema migration directory: %r\",\n item, schema_migrations_path)\n continue\n\n # Check the version numbers for validity.\n version, next_version = map(int, m.groups())\n if next_version != version + 1:\n logger.error(\n \"invalid script name %r in schema migration directory: %r\",\n item, schema_migrations_path)\n continue\n\n schema_migrations[version] = os.path.join(\n schema_migrations_path, item)\n\n # Ignore directories with no migrations.\n if not schema_migrations:\n logger.warning(\"ignoring empty migrations directory: %r\",\n schema_migrations_path)\n continue\n\n # Check the provided versions for sanity.\n current_version = max(schema_migrations) + 1\n for i in range(current_version):\n if i not in schema_migrations:\n logger.error(\"schema %r is missing migration for version: %r\",\n schema_name, i)\n\n # Store the current version as another item in the per-schema migration\n # dictionary.\n schema_migrations['current_version'] = current_version\n\n # Store the schema migrations.\n migrations[schema_name] = schema_migrations\n\n return migrations", "def upgradedb(ctx):\n path = Path(__file__).resolve().parent.parent\n conf = Config(str(path / \"migrations\" / \"alembic.ini\"))\n conf.set_main_option(\"script_location\", str(path / \"migrations\"))\n command.upgrade(conf, \"heads\")", "def test_upper_version(self):\n filename = str(uuid.uuid4())\n _silentremove(filename)\n dburi = \"sqlite:///%s\" % filename\n db = connection.connect(dburi, create=True, verbose=False)\n dbversion = DBVersion()\n dbversion.version = CURRENT_DB_VERSION + 1\n dbversion.version_number = CURRENT_DB_VERSION + 1\n dbversion.version_timestamp = datetime.datetime.now().strftime(\"%s\")\n db.add(dbversion)\n db.commit()\n\n self.assertRaises(DBAdminError, db_current_version, db)\n self.assertRaises(DBAdminError, db_verify, db)\n\n _remove(filename)", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def increment_version_on_insert(obj):\n history_model = obj.previous_version()\n\n if history_model is not None:\n obj.version = history_model.version + 1", "def test_valid_versions(self):\n instance = ClassWithVersion()\n versions = [\"1.2.3\", \"1.2.*\", \"1.*\", \"*\", \"1.1.1\", \"1.0.1rc1\"]\n for version in versions:\n instance.version = version\n self.assertEqual(instance.version(), version)", "def get_version_number():\n return [0, 1, 0]", "def AddDatabaseVersion(\n parser, restrict_choices=True, hidden=False, support_default_version=True\n):\n # Section for engine-specific content.\n # This section is auto-generated by //cloud/storage_fe/sql/sync_engines.\n # Do not make manual edits.\n choices = [\n 'MYSQL_5_6',\n 'MYSQL_5_7',\n 'MYSQL_8_0',\n 'POSTGRES_9_6',\n 'POSTGRES_10',\n 'POSTGRES_11',\n 'POSTGRES_12',\n 'POSTGRES_13',\n 'POSTGRES_14',\n 
'POSTGRES_15',\n 'SQLSERVER_2017_EXPRESS',\n 'SQLSERVER_2017_WEB',\n 'SQLSERVER_2017_STANDARD',\n 'SQLSERVER_2017_ENTERPRISE',\n 'SQLSERVER_2019_EXPRESS',\n 'SQLSERVER_2019_WEB',\n 'SQLSERVER_2019_STANDARD',\n 'SQLSERVER_2019_ENTERPRISE',\n 'SQLSERVER_2022_EXPRESS',\n 'SQLSERVER_2022_WEB',\n 'SQLSERVER_2022_STANDARD',\n 'SQLSERVER_2022_ENTERPRISE',\n ]\n # End of engine-specific content.\n\n help_text_unspecified_part = (\n DEFAULT_INSTANCE_DATABASE_VERSION + ' is used.'\n if support_default_version\n else 'no changes occur.'\n )\n help_text = (\n 'The database engine type and versions. If left unspecified, '\n + help_text_unspecified_part\n + ' See the list of database versions at '\n + 'https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1beta4/SqlDatabaseVersion.'\n )\n\n if restrict_choices:\n help_text += (\n ' Apart from listed major versions, DATABASE_VERSION also accepts'\n ' supported minor versions.'\n )\n\n parser.add_argument(\n '--database-version',\n required=False,\n default=DEFAULT_INSTANCE_DATABASE_VERSION\n if support_default_version\n else None,\n choices=_MajorVersionMatchList(choices) if restrict_choices else None,\n help=help_text,\n hidden=hidden,\n )", "def do_create_version(**kwargs):\n version_params = {\n \"name\": kwargs['dag_run'].conf.get('model_version'),\n \"description\": 'Version 1',\n \"runtimeVersion\": kwargs['dag_run'].conf.get('tf_version'),\n \"deploymentUri\": 'gs://{}/{}'.format(COMPOSER_BUCKET_NAME, PREFIX_FINAL_MODEL)\n }\n\n ti = kwargs['ti']\n\n mle = MLEngineHook()\n\n model_name = kwargs['dag_run'].conf.get('model_name')\n model_versions = ti.xcom_pull(key='model_versions', task_ids='list_versions')\n\n version_path = 'projects/{}/models/{}/versions/{}'.format(PROJECT,\n model_name,\n version_params['name'])\n\n if version_path in [v['name'] for v in model_versions]:\n logging.info(\"Delete previously version of the model to overwrite.\")\n mle.delete_version(PROJECT, model_name, version_params['name'])\n\n mle.create_version(PROJECT, model_name, version_params)", "def db_migrate():\n when = str(int(time.time()))\n sql_file = os.path.join(MIGRATION_FOLDER, f\"{when}.sql\")\n\n with open(sql_file, 'w') as save_sql:\n up = MYSQL_UP.format(f\"upgrade-{when}\", when, MIGRATION_TABLE)\n down = MYSQL_DOWN.format(f\"downgrade-{when}\", when, MIGRATION_TABLE)\n\n save_sql.write(\"\\n\\n\".join([up, down]))\n LOGGER.info(f\"migration file: {os.path.join('migrations', sql_file)}\")", "def strategy(self) -> AwesomeVersionStrategy:\n return version_strategy(self.string)", "def db_setup(self):\n revision: Table = Table(self.revision_table,\n self.sql_metadata,\n Column(self._MigrationTableColumns.revisions.value, Text, primary_key=True),\n schema=self.revision_table_schema)\n revision.create(self.psql_engine)", "def migratedb(rollback=False):\n\n require(\"virtualenv_path\", \"project_path\", \"sudo_user\")\n\n #\n # Some things need to be done first (i.e. 
if they need a different\n # database connection or some custom args)\n #\n if \"migratedb_first\" in env:\n\n for app, args in env.migratedb_first.iteritems():\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version, args)\n\n #\n # Do the rest afterwards\n #\n if has_version_info():\n\n apps = env.south_migrations.keys()\n\n for app in apps:\n\n print app\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version)\n\n #\n # If we know nothing, just migrate everything\n #\n else:\n migrate_app_db()", "def __version__(self):\n return self.instance.__version__", "def version(self):\n raise NotImplementedError('version')", "def version_number() -> int:\n return 0", "def create(self, migration_name):\n # original version vith timestamp version format\n # timestamp = strftime(\"%Y%m%d%H%M%S\", localtime())\n next_ver = self.get_next_version()\n file_name = \"%s_%s%s\" % (next_ver, migration_name, Migration.MIGRATION_FILES_EXTENSION)\n if not Migration.is_file_name_valid(file_name):\n raise Exception(\n \"invalid migration name ('%s'); it should contain only letters, numbers and/or underscores\"\n % file_name)\n\n new_file_name = os.path.join(self.__migrations_dir, file_name)\n\n try:\n f = codecs.open(new_file_name, \"w\", \"utf-8\")\n f.write(Migration.TEMPLATE)\n f.close()\n except IOError:\n raise Exception(\"could not create file ('%s')\" % new_file_name)\n\n migration = Migration(new_file_name)\n self.__migrations.append(migration)\n return migration", "def do_upgrade(env, ver, cursor):\n cursor.execute('UPDATE system SET name=%s WHERE name=%s',\n (\"agiletools_version\", \"taskboard_schema\"))", "def migrate(cls)->None:\n database.cursor.execute(\"\"\"CREATE TABLE IF NOT EXISTS votes(\n id serial PRIMARY KEY,\n question integer,\n user_id integer,\n value integer\n )\"\"\")\n database.connection.commit()", "def genVersion(*args, **kwargs):\n return generateVersionFileData(Version(*args, **kwargs))", "def __update_version(self):\r\n\r\n db_version = self.__get_db_version_int()\r\n if db_version == SCHEMA_VERSION:\r\n return\r\n\r\n #\r\n # Define functions for upgrading between schema versions\r\n #\r\n def update_2xto30():\r\n \"\"\"Incremental update of database from Freeseer 2.x and older to 3.0\r\n\r\n SCHEMA_VERSION is 300\r\n \"\"\"\r\n if db_version > 300:\r\n log.debug('Database newer than schema version 300.')\r\n return # No update needed\r\n\r\n log.debug('Updating to schema 300.')\r\n QtSql.QSqlQuery('ALTER TABLE presentations RENAME TO presentations_old') # temporary table\r\n self.__create_presentations_table(PRESENTATIONS_SCHEMA_300)\r\n QtSql.QSqlQuery(\"\"\"INSERT INTO presentations\r\n SELECT Id, Title, Speaker, Description, Level, Event, Room, Time FROM presentations_old\"\"\")\r\n QtSql.QSqlQuery('DROP TABLE presentations_old')\r\n\r\n def update_30to31():\r\n \"\"\"Performs incremental update of database from 3.0 and older to 3.1.\"\"\"\r\n QtSql.QSqlQuery('ALTER TABLE presentations RENAME TO presentations_old')\r\n self.__create_presentations_table(PRESENTATIONS_SCHEMA_310)\r\n QtSql.QSqlQuery(\"\"\"INSERT INTO presentations\r\n SELECT Id, Title, Speaker, Description, Level, Event, Room, Time, Time, Time\r\n FROM presentations_old\"\"\")\r\n QtSql.QSqlQuery('DROP TABLE presentations_old')\r\n\r\n #\r\n # Perform the upgrade\r\n #\r\n updaters = [update_2xto30, update_30to31]\r\n for updater in updaters:\r\n updater()\r\n\r\n QtSql.QSqlQuery('PRAGMA user_version = %i' % SCHEMA_VERSION)\r\n log.info('Upgraded 
presentations database from version {} to {}'.format(db_version, SCHEMA_VERSION))", "def updateVersions(self):\r\n f = open('../versions.pckl', 'wb')\r\n pickle.dump(self.versions, f)\r\n f.close()", "def update_migrations_run(self, migration: str):\n pass", "def upgrade(revision, sql):\n alembic_command.upgrade(alembic_config, revision, sql=sql)", "def checkVersions():\n item = Item(fromScene=True)\n\n for ns, componentMData in item.components.iteritems():\n if ns == 'cam':\n # todo tratar versoes da camera\n continue\n\n if componentMData['assembleMode'] == 'reference':\n refComponent = ReferenceComponent(ns, componentMData, parent=item)\n refComponent.checkDBForNewVersion()\n\n elif componentMData['assembleMode'] == 'xlo':\n xloComponent = XloComponent(ns, componentMData, parent=item)\n xloComponent.checkDBForNewVersion()\n xloComponent.checkDBForNewCacheVersion()\n\n elif componentMData['assembleMode'] == 'cache':\n cacheComponent = CacheComponent(ns, componentMData, parent=item)\n cacheComponent.checkDBForNewVersion()\n\n item.putDataToDB()", "def versions():\n result = timeline.versions()\n if result:\n click.echo('\\n'.join(result))", "def version(self):\n self.version_list[-1] = self.revision\n version = '.'.join(self.version_list)\n return version" ]
[ "0.7271674", "0.6912402", "0.6781953", "0.66197056", "0.6540998", "0.64763844", "0.63564444", "0.6351916", "0.6346999", "0.627028", "0.6214616", "0.61989695", "0.61721605", "0.61721605", "0.6119614", "0.6112119", "0.60870755", "0.6083832", "0.6082907", "0.6082181", "0.59738785", "0.5948841", "0.59176534", "0.5889575", "0.5889575", "0.5889575", "0.5860145", "0.58464795", "0.5839337", "0.58341116", "0.5826495", "0.58219796", "0.58219796", "0.5812942", "0.5764615", "0.57588995", "0.5756661", "0.5748352", "0.57469314", "0.5744396", "0.5728743", "0.5728247", "0.57269204", "0.57248694", "0.5701582", "0.5701496", "0.56878936", "0.56848305", "0.5682562", "0.5678865", "0.5675738", "0.56652933", "0.56620955", "0.56585944", "0.5658057", "0.56419104", "0.5641308", "0.56294596", "0.5621609", "0.5620805", "0.5618515", "0.5612417", "0.5607297", "0.5605022", "0.55979496", "0.5591854", "0.5591306", "0.5585797", "0.558445", "0.55815715", "0.5572332", "0.5570798", "0.55696195", "0.55685604", "0.55653995", "0.55653995", "0.55653995", "0.55653995", "0.5563189", "0.55580366", "0.5544274", "0.55277026", "0.5527281", "0.55239904", "0.55147594", "0.5513377", "0.5500819", "0.5487873", "0.5481971", "0.5478464", "0.54727185", "0.54708093", "0.5470133", "0.54601073", "0.5459747", "0.54519325", "0.545027", "0.5446997", "0.54469264", "0.5421024", "0.54132605" ]
0.0
-1
Version management for migrations.
def __eq__(self, other): return ( self.constrain == other.constrain and self.width == other.width and self.height == other.height and self.upscale == other.upscale )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def migration():", "def migrate(cr, version):\n pass", "def version(self):\r\n print migration.db_version()", "def post_migrations(self):", "def _run_migrations(self, current_migration_version: int):\n logger.debug(\"Checking for necessary database migrations...\")\n\n while current_migration_version < latest_migration_version:\n next_migration_version = current_migration_version + 1\n logger.info(\n f\"Migrating the database from v{current_migration_version} to v{next_migration_version}...\",\n )\n\n migration = importlib.import_module(f\".migrations.{str(next_migration_version).rjust(3, '0')}\", \"middleman\")\n # noinspection PyUnresolvedReferences\n migration.migrate(self)\n\n # Update the stored migration version\n self._execute(\"UPDATE migration_version SET version = ?\", (next_migration_version,))\n\n logger.info(f\"Database migrated to v{next_migration_version}\")\n current_migration_version += 1", "def makemigration(self):\n template = os.path.join(os.path.dirname(__file__),\n 'migration_template.py')\n ver = self.latest(quiet=True) + 1\n destination = os.path.abspath(self.config.get('migrate', 'location'))\n if not os.path.exists(destination):\n os.makedirs(destination)\n fname = 'version_{}.py'.format(ver)\n shutil.copyfile(template, os.path.join(destination, fname))\n self.logger.info('Migration \\'{}\\' created'.format(fname))\n self.latest()", "def migrate(self):\n\tpass", "def version(self):", "async def migrate(self):\n # Controlla se ci sono tabelle nel db\n async with self.db.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute(\"\"\"SELECT COUNT(DISTINCT table_name) as c\n FROM information_schema.columns\n WHERE table_schema = %s\"\"\", (conn.db,))\n db_empty = (await cur.fetchone())[\"c\"] <= 0\n\n # Se ci sono tabelle, prova a leggere `db_version`\n if not db_empty:\n await cur.execute(\"SELECT db_version FROM db_version LIMIT 1\")\n db_version_in_db = await cur.fetchone()\n db_version = 0 if db_version_in_db is None else db_version_in_db[\"db_version\"]\n else:\n db_version = 0\n\n # Prendi la lista di file sql e py da eseguire\n new_migrations = [x for x in self.migrations if x.id > db_version]\n\n # Controlla se ci sono migration da eseguire\n if not new_migrations:\n self.logger.info(\"No new migrations. 
The database is already up to date!\")\n return\n\n # Esegui migrations\n self.logger.info(\"Current db version: @{}\".format(db_version))\n db_version += 1\n current_migration = self.get_migration(db_version)\n while current_migration is not None:\n self.logger.info(\"Executing {}\".format(current_migration.file_name))\n\n if current_migration.type == \"sql\":\n # Leggi ed esegui file sql\n with open(\n os.path.join(os.path.dirname(__file__), \"migrations/{}\".format(current_migration.file_name)), \"r\"\n ) as f:\n data = f.read()\n async with self.db.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute(data)\n await conn.commit()\n elif current_migration.type == \"py\":\n # Importa modulo py\n module = importlib.import_module(\"migrator.migrations.{}\".format(current_migration.file_name[:-3]))\n migr = getattr(module, \"do\")\n await migr()\n\n # Migration eseguita, aggiorna `db_version`\n self.logger.info(\"Migration {} executed with no errors\".format(current_migration.file_name))\n await self.save_db_version(db_version)\n\n # Vai alla prossima migration\n db_version += 1\n current_migration = self.get_migration(db_version)\n self.logger.info(\"All migrations executed correctly\")", "def create_versions_after_migration(**kwargs):\n migrations = [migration\n for migration, rollback in kwargs.get('plan', [])\n if not rollback]\n models: Set[Any] = set()\n for migration in migrations:\n models.update(getattr(migration, 'REVISED_MODELS', []))\n\n with transaction.atomic():\n for model in reversion_models(models):\n create_revisions_for(model)", "def upgrade(self, version):\n try:\n version = int(version)\n except:\n if version != 'latest':\n self.logger.error('Unable to parse version \"{}\"'.format(version))\n return\n\n # check the current db version\n current_version = self.inspect()\n if current_version is None:\n self.logger.error('Unable to inspect your database. '\n 'Perhaps you need to run \\'jambi inpsect\\'?')\n return\n\n # get the migrations\n migrations = self.find_migrations()\n latest_version = migrations[-1][1] if any(migrations) else 0\n migrations = tuple(filter(lambda x: x[1] > current_version, migrations))\n\n if current_version > latest_version:\n self.logger.error('Your database version is higher than the '\n 'current database version. '\n '(current: {}, latest: {})'.format(current_version,\n latest_version))\n elif current_version == latest_version:\n self.logger.info('You are already up to date. '\n '(version: {})'.format(current_version))\n return\n\n # filter out migrations that are beyond the desired version\n if version == 'latest':\n version = latest_version\n migrations = tuple(filter(lambda x: x[1] <= version, migrations))\n if not any(migrations):\n self.logger.info('You are already up to date. 
'\n '(version: {})'.format(current_version))\n return\n\n # run the migrations\n self.logger.info('Now performing the migration to version {}...'.format(version))\n self.db.connect()\n with self.db.atomic():\n for n, v, m in migrations:\n self.logger.info('>>> [{}] Attempting...'.format(v))\n migrator = PostgresqlMigrator(self.db)\n upgrades = m.upgrade(migrator)\n migrate(*upgrades)\n self._set_version(v)\n self.logger.info('>>> [{}] Success!'.format(v))\n self.db.close()\n self.logger.info('Successfully migrated to version {}...'.format(version))\n return", "def _migrate(self):\n with self.engine.begin() as conn:\n context = alembic.migration.MigrationContext.configure(conn)\n current_rev = context.get_current_revision()\n self.log.debug('Current migration revision: %s' % current_rev)\n\n config = alembic.config.Config()\n config.set_main_option(\"script_location\",\n \"zuul:driver/sql/alembic\")\n config.set_main_option(\"sqlalchemy.url\",\n self.connection_config.get('dburi'))\n\n # Alembic lets us add arbitrary data in the tag argument. We can\n # leverage that to tell the upgrade scripts about the table prefix.\n tag = {'table_prefix': self.table_prefix}\n alembic.command.upgrade(config, 'head', tag=tag)", "def upgrade(self):", "def upgrade(self):", "async def manage_version():\n\n try:\n repo = git.Repo(search_parent_directories=True)\n version = repo.git.describe('--tags')\n except Exception:\n version = \"v0.0.0\"\n\n base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n creation_time = time.ctime(os.path.getmtime(base_dir))\n\n response = {'version': version, 'deployedOn': creation_time}\n return OK(response)", "def test_changeVersions(self):\n self._testVersionChanging(8, 2, 3)", "def do_version(self):\n return \"1.0.0\", True", "def _get_version(self):", "def _set_database_version(db, version):\n if not isinstance(version, int):\n raise TypeError(\"Version must be integer, not %s : %s\" % (\n version, type(version)))\n create_metadata = \\\n \"CREATE TABLE %s (version INT)\" % METADATA_COLUMN_NAME\n execute_sql(db, create_metadata)\n insert_version = \\\n \"INSERT INTO %s VALUES (%s)\" % (METADATA_COLUMN_NAME, version)\n execute_sql(db, insert_version)", "def update_db_version():\n print(\"Checking Database states...\")\n os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"ADSM.settings\")\n try:\n call_command('migrate', database='scenario_db', interactive=False, fake_initial=True)\n call_command('migrate', database='default', interactive=False, fake_initial=True)\n except:\n print(\"Error: Migration failed.\")\n print('Done migrating databases.')", "def upgrade_schema():\n\n db_version = get_db_version()\n try:\n while db_version < CURRENT_DATABASE_VERSION:\n db_version += 1\n upgrade_script = 'upgrade_to_'+str(db_version)\n globals()[upgrade_script]()\n except KeyError as e:\n logging.exception('Attempted to upgrade using script that does not exist: {}'.format(e))\n sys.exit(1)\n except Exception as e:\n logging.exception('Incremental upgrade of db failed')\n sys.exit(1)\n else:\n config.db.singletons.update_one({'_id': 'version'}, {'$set': {'database': CURRENT_DATABASE_VERSION}})\n sys.exit(0)", "def upgrade(self, old_version, new_version):\n pass", "def db_upgrade():\n generate_migration_file()\n dbu_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')\n\n for time_step in [_.strip('.sql') for _ in migration_files()]:\n decide = MySQLScheme.fetch_one(REVISION_EXISTS,\n **{\"args\": {'revision': time_step}})\n if not decide:\n 
MySQLScheme.commit(getattr(dbu_query, f\"upgrade_{time_step}\").sql)\n LOGGER.info(f\"successful migration: {time_step}\")\n else:\n LOGGER.info(f'migration already exists: {time_step}')", "def version(self):\n pass", "def version(self):\n pass", "def version(self):\n pass", "def sync(self, version=None):\r\n return migration.db_sync(version)", "def run(self):\n\n version_str = (\n get_git_version(here))\n\n version_uniparser_dict = (\n get_uniparser_version())\n\n if (version_str is not None or\n version_uniparser_dict is not None):\n\n with open(\n os.path.join(here, 'lingvodoc', 'version.py'), 'w',\n encoding = 'utf-8') as version_py_file:\n\n version_py_file.write(\n self.version_py_template.format(\n repr(version_str),\n repr(version_uniparser_dict)))\n\n # Continuing with setup.\n\n super().run()", "def ready(self):\n import django_better_migrations.migration_writer_patch # noqa", "def version(self, newVersion=None):\n pass", "def migrate(ctx, start_version, end_version):\n if ctx.obj['TYPE'] == 'file':\n if ctx.obj['DATA_FILE']:\n file_path = ctx.obj['DATA_FILE']\n else:\n file_path = os.path.join(ctx.obj['DATA_DIR'], ctx.obj['NAMESPACE'] + '.json')\n\n # todo make this more like alemebic and determine/load versions automatically\n with open(file_path, 'r') as f:\n data = json.loads(f.read())\n\n data = run_migration(data, start_version, end_version)\n with open(file_path, 'w') as f:\n f.write(json.dumps(data))", "def Version(self) -> _n_0_t_12:", "def Version(self) -> _n_0_t_12:", "def test_version(self):\n pass", "def test_semantic_version():\n semantic_version.Version(settings.VERSION)", "def version():\n\n pass", "def _testVersionChanging(self, major, minor, micro, prerelease=None):\n versionUpdates = []\n def myVersionChanger(sourceTree, versionTemplate):\n versionUpdates.append((sourceTree, versionTemplate))\n versionChanger = ChangeVersionsScript()\n versionChanger.changeAllProjectVersions = myVersionChanger\n version = \"%d.%d.%d\" % (major, minor, micro)\n if prerelease is not None:\n version += \"pre%d\" % (prerelease,)\n versionChanger.main([version])\n self.assertEquals(len(versionUpdates), 1)\n self.assertEquals(versionUpdates[0][0], FilePath(\".\"))\n self.assertEquals(versionUpdates[0][1].major, major)\n self.assertEquals(versionUpdates[0][1].minor, minor)\n self.assertEquals(versionUpdates[0][1].micro, micro)\n self.assertEquals(versionUpdates[0][1].prerelease, prerelease)", "def test_version_control_specified(self):\n # Establish version control on this database\n version = 0\n dbcontrol = ControlledSchema.create(self.engine, self.repos, version)\n self.assertEqual(dbcontrol.version, version)\n\n # Correct when we load it, too\n dbcontrol = ControlledSchema(self.engine, self.repos)\n self.assertEqual(dbcontrol.version, version)\n\n dbcontrol.drop()\n\n # Now try it with a nonzero value\n version = 10\n for i in range(version):\n self.repos.create_script('')\n self.assertEqual(self.repos.latest, version)\n\n # Test with some mid-range value\n dbcontrol = ControlledSchema.create(self.engine,self.repos, 5)\n self.assertEqual(dbcontrol.version, 5)\n dbcontrol.drop()\n\n # Test with max value\n dbcontrol = ControlledSchema.create(self.engine, self.repos, version)\n self.assertEqual(dbcontrol.version, version)\n dbcontrol.drop()", "def _set_version(self, version):\n with self.db.atomic():\n JambiModel.delete().execute()\n JambiModel.create(ref=str(version))\n self.logger.debug('Set jambi version to {}'.format(version))", "def set_version_db(apps, schema_editor):\n Version 
= apps.get_model(\"reversion\", \"Version\")\n content_types = Version.objects.values_list(\"content_type\", flat=True).distinct()\n for content_type in content_types:\n model_class = content_type.model_class()\n db = router.db_for_write(model_class)\n Version.objects.filter(content_type=content_type).update(db=db)", "def get_versions(self):\n raise NotImplementedError", "def _update_version(self) -> None:\n # Implement in child class.\n raise NotImplementedError", "def version_updater(logging_level=logging.WARNING):\n # connect to db\n do_db_setup()\n\n # set Qt lib\n set_qt_lib()\n\n from anima.ui import version_updater, models\n from anima.env import mayaEnv\n reload(mayaEnv)\n reload(version_updater)\n reload(models)\n m = Maya()\n import pymel\n m.name = \"Maya\" + str(pymel.versions.current())[0:4]\n\n logger.setLevel(logging_level)\n\n # generate a reference_resolution\n version_updater.UI(environment=m)", "def ongeza(self, type_):\n switch = {\n 'm': semver.bump_major,\n 'n': semver.bump_minor,\n 'p': semver.bump_patch,\n 'major': semver.bump_major,\n 'minor': semver.bump_minor,\n 'patch': semver.bump_patch}\n\n new_version = switch.get(type_)(self.version)\n\n if new_version in set(self.versions):\n self.logger.error('version `%s` already present', new_version)\n new_version = None\n\n return new_version", "def upgrade_to_1():\n config.db.singletons.insert_one({'_id': 'version', 'database': 1})", "def _walk_versions(self, config, engine, downgrade=True, snake_walk=False):\n\n revisions = self._revisions()\n for dest, curr in revisions:\n self._migrate_up(config, engine, dest, curr, with_data=True)\n\n if snake_walk and dest != 'None':\n # NOTE(I159): Pass reversed arguments into `_migrate_down`\n # method because we have been upgraded to a destination\n # revision and now we going to downgrade back.\n self._migrate_down(config, curr, dest, with_data=True)\n self._migrate_up(config, dest, curr, with_data=True)\n\n if downgrade:\n revisions = self._revisions(downgrade)\n for dest, curr in revisions:\n self._migrate_down(config, engine, dest, curr, with_data=True)\n if snake_walk:\n self._migrate_up(config, engine, curr, dest,\n with_data=True)\n self._migrate_down(config, engine, dest, curr,\n with_data=True)", "def run_migration_checks():\n check_model_state()\n check_migration_state()", "def run_migration(env, upgrade_type):\n pass", "def get_version(self):\n pass", "def main(arguments):\n migration = Migration(arguments)\n return migration.run()", "def migrate(ctx):\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"migrate\")", "def setup_before_migration(self, apps):", "def _current_versions(self, job):\n raise NotImplementedError", "def migrate(cls)->None:\n pass", "async def load_db_migrations(self, conn: Optional[Pool] = None) -> None:\n\n def get_version_from_name(name: str) -> int:\n return int(name.split(\"_\", 1)[0])\n\n file_ = max(\n (f for f in settings.migrations_directory.iterdir() if f.suffix == \".sql\"),\n key=lambda f: get_version_from_name(f.name),\n )\n\n async with MaybeAcquire(conn, self.pool) as conn:\n query = \"SELECT current_setting('mg.version') as version\"\n try:\n schema_version = await conn.fetchval(query)\n except UndefinedObjectError:\n schema_version = 0\n else:\n schema_version = int(schema_version)\n\n logger.debug(\"Schema version: %d\", schema_version)\n\n file_version = get_version_from_name(file_.name)\n if file_version <= schema_version:\n logger.debug(\"Skipping migration: %s\", file_.name)\n return\n\n sql_commands = 
read_sql_file(file_)\n logger.info(\"Applying migration: %s\", file_.name)\n\n await conn.execute(sql_commands)\n await self._set_database_version(file_version)", "def versioned(command_subclass):\n VERSION_PY = \"\"\"\n# This file is updated from Git information by running 'python setup.py\n# version'.\n__version__ = '%s'\n\"\"\"\n orig_callable = command_subclass.run\n\n def modified_callable(self):\n if not os.path.isdir(\".git\"):\n print \"This does not appear to be a Git repository.\"\n return\n try:\n p = subprocess.Popen([\"git\", \"describe\",\n \"--tags\", \"--always\"],\n stdout=subprocess.PIPE)\n except EnvironmentError:\n print \"unable to run git, leaving idss-seration/_version.py alone\"\n return\n stdout = p.communicate()[0]\n if p.returncode != 0:\n print \"unable to run git, leaving idss-seriation/_version.py alone\"\n return\n # our tags are like: v2.2\n ver = stdout[len(\"v\"):].strip()\n f = open(\"sklearn_mmadsen/_version.py\", \"w\")\n f.write(VERSION_PY % ver)\n f.close()\n print \"updated _version.py to '%s'\" % ver\n orig_callable(self)\n\n command_subclass.run = modified_callable\n return command_subclass", "def test_get_version(self):\n pass", "def test_version_01(self):\n\n version = self.sqlbak([\"--version\"])\n self.assertTrue(\"sqlbak v\" in version)", "def stampdb(self, args):\n revision = REVISION_MAPPING[args.configversion]\n print(f\"Based on config version {args.configversion} \"\n f\"we think your results schema is version {revision} and are upgrading to it\")\n stamp_db(revision, args.dbfile)", "async def _upgrade_db(self) -> None:\n cur_version = await self._get_db_version()\n for n in range(cur_version + 1, sql_data.CUR_VERSION + 1):\n log.msg('Upgrading database to version %d' % n)\n if n in sql_data.SQL_UPGRADES:\n for command in sql_data.SQL_UPGRADES[n]:\n await self.operation(command)\n if cur_version != sql_data.CUR_VERSION:\n await self._set_db_version(sql_data.CUR_VERSION)", "def _version(self):\n # TODO: Can we delete this method and just print the line from the\n # reqs file verbatim instead?\n def version_of_archive(filename, package_name):\n # Since we know the project_name, we can strip that off the left, strip\n # any archive extensions off the right, and take the rest as the\n # version.\n for ext in ARCHIVE_EXTENSIONS:\n if filename.endswith(ext):\n filename = filename[:-len(ext)]\n break\n # Handle github sha tarball downloads.\n if is_git_sha(filename):\n filename = package_name + '-' + filename\n if not filename.lower().replace('_', '-').startswith(package_name.lower()):\n # TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?\n give_up(filename, package_name)\n return filename[len(package_name) + 1:] # Strip off '-' before version.\n\n def version_of_wheel(filename, package_name):\n # For Wheel files (http://legacy.python.org/dev/peps/pep-0427/#file-\n # name-convention) we know the format bits are '-' separated.\n whl_package_name, version, _rest = filename.split('-', 2)\n # Do the alteration to package_name from PEP 427:\n our_package_name = re.sub(r'[^\\w\\d.]+', '_', package_name, re.UNICODE)\n if whl_package_name != our_package_name:\n give_up(filename, whl_package_name)\n return version\n\n def give_up(filename, package_name):\n raise RuntimeError(\"The archive '%s' didn't start with the package name '%s', so I couldn't figure out the version number. 
My bad; improve me.\" %\n (filename, package_name))\n\n get_version = (version_of_wheel\n if self._downloaded_filename().endswith('.whl')\n else version_of_archive)\n return get_version(self._downloaded_filename(), self._project_name())", "def set_version(self, version):\n\n def update_version(version, filepath):\n with open(filepath, \"r\") as stream:\n contents = stream.read()\n\n new_contents = _fix_contents_version(contents, version)\n assert contents != new_contents\n with open(filepath, \"w\") as stream:\n stream.write(new_contents)\n\n update_version(version, os.path.join(\".\", \"package.json\"))\n update_version(version, os.path.join(\".\", \"src\", \"setup.py\"))\n update_version(\n version, os.path.join(\".\", \"src\", \"robocorp_code\", \"__init__.py\")\n )", "def switch_to_version(self, version):\n self.current_version = version\n self.save()", "def run (args = None):\n cmds.init.require_init()\n (options, args) = optargs (args)\n try:\n revision = int(args[0])\n except (IndexError, ValueError):\n revision = repo.migration.latest_number()\n\n current = repo.revision.current()\n\n if options.dry_run:\n repo.revision.set_current(revision)\n print \"\"\"Revision set to #%s\"\"\" % revision\n return\n\n\n if current == revision and not options.abandon_current:\n print \"\"\"Nothing to update.\"\"\"\n return\n\n print \"\"\"Updating to migration #%s.\"\"\" % revision\n\n outstanding_changes = repo.has_outstanding_changes()\n if outstanding_changes:\n apply_after_update = repo.outstanding_changes()\n print \"\"\"Undoing outstanding changes.\"\"\"\n db.dump.load (repo.outstanding_changes(undo = True))\n \n if revision < current:\n # Downgrading\n while current > revision:\n try:\n Migration = repo.migration.Migration (current)\n print \"\"\"Downgrading migration #%s: %s.\"\"\" % (current, Migration.message)\n Migration.down()\n current = current - 1\n repo.revision.set_current (current)\n except MigrationFailedError:\n break\n \n else:\n # Upgrading\n while current < revision:\n try:\n current = current + 1\n Migration = repo.migration.Migration (current)\n print \"\"\"Upgrading migration #%s: %s.\"\"\" % (current, Migration.message)\n Migration.up()\n repo.revision.save_to_file(current)\n repo.revision.set_current (current)\n except MigrationFailedError:\n break\n\n print \"\"\"Updated to revision #%s.\"\"\" % repo.revision.current()\n\n if outstanding_changes and not options.abandon_current:\n print \"\"\"Reapplying outstanding changes.\"\"\"\n db.dump.load (apply_after_update)", "def upgrade(self, revision: str = \"head\") -> \"Alembic\":\n\n self.configure()\n\n if not self.migrator_base.does_table_exists(\n \"alembic_version\"\n ) or self.is_revision_different(revision):\n PyFunceble.facility.Logger.info(\n \"Started update (%r) of the database schema(s).\", revision\n )\n\n alembic_command.upgrade(self.alembic_config, revision)\n\n PyFunceble.facility.Logger.info(\n \"Finished update (%r) of the database schema(s).\", revision\n )", "def test_defaultChangeVersionsVersionChanger(self):\n versionChanger = ChangeVersionsScript()\n self.assertEquals(versionChanger.changeAllProjectVersions,\n changeAllProjectVersions)", "def find_migrations(self):\n fileloc = self.config.get('migrate', 'location')\n fullpath = os.path.abspath(fileloc)\n if not os.path.exists(fullpath):\n os.makedirs(fullpath)\n try:\n filenames = os.listdir(fullpath)\n except FileNotFoundError:\n self.logger.error('Unable to find migration folder '\n '\"{}\"'.format(fullpath))\n return []\n\n def 
is_valid_migration_name(n):\n return n.startswith('version_') and n.endswith('.py')\n filenames = filter(lambda x: is_valid_migration_name(x), filenames)\n filepaths = [(os.path.join(fullpath, f), f.replace('.py', ''))\n for f in filenames]\n migrations = []\n for fp, mn in filepaths:\n module_name = '.'.join([fileloc.replace('/', '.').strip('.'), mn])\n try:\n ver = int(re.search(r'version_(\\d+)', mn).group(1))\n except:\n self.logger.warning('Cannot parse version number from \"{}\", '\n 'skipping'.format(mn))\n continue\n self.logger.debug('Found {} at version {}'.format(module_name,\n ver))\n migrations.append(\n (module_name, ver, importlib.import_module(module_name))\n )\n return sorted(migrations, key=lambda x: x[1])", "def set_version(self, bundle, ctx, filename, version):", "def migrate_up(self, version, with_data=False):\n # NOTE(xek): This is a list of migrations where we allow dropping\n # things. The rules for adding exceptions are very very specific.\n # Chances are you don't meet the critera.\n # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE\n exceptions = [\n 64, # drop constraint\n 86, # drop watch_rule/watch_data tables\n ]\n # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE\n\n # NOTE(xek): We start requiring things be additive in\n # liberty, so ignore all migrations before that point.\n LIBERTY_START = 63\n\n if version >= LIBERTY_START and version not in exceptions:\n banned = ['Table', 'Column']\n else:\n banned = None\n with BannedDBSchemaOperations(banned):\n super(HeatMigrationsCheckers, self).migrate_up(version, with_data)", "def GetMigrationStatus(self, instance):\n raise HypervisorError(\"Migration not supported by the chroot hypervisor\")", "def migrate(t):\n\tapp = Application(__name__)\n\taction = t.option.action\n\toptions = argparse.Namespace()\n\tif not action:\n\t\t# no argument, then migrate version to latest\n\t\toptions.version = None\n\t\tMigration('go').run(config=app.config, options=options)\n\telse:\n\t\ttry:\n\t\t\t# specified the version, then migrate to it\n\t\t\tversion = int(action)\n\t\t\toptions.version = version\n\t\t\tMigration('go').run(config=app.config, options=options)\n\t\texcept ValueError:\n\t\t\t# specified some command to run, push the 'action' back first.\n\t\t\tt.argv.insert(0, action)\n\t\t\toptions = parser_options(t)\n\t\t\tMigration(options.migrate).run(options=options, config=app.config)", "def _load_migrations():\n\n upgrade_script_rex = re.compile(\n r'^upgrade_(0|[1-9][0-9]*)_to_([1-9][0-9]*)\\.py$')\n migrations = {}\n\n # Currently, we only load migrations for a '__core__' schema, and only from\n # the migrations directory. 
One idea if we need to eventually support\n # migrations for the per-testsuite tables is to add subdirectories keyed on\n # the testsuite.\n for schema_name in ('__core__',):\n schema_migrations_path = os.path.join(os.path.dirname(__file__),\n 'migrations')\n schema_migrations = {}\n for item in os.listdir(schema_migrations_path):\n # Ignore certain known non-scripts.\n if item in ('README.txt', '__init__.py', 'new_suite.py',\n 'util.py') or item.endswith('.pyc'):\n continue\n\n # Ignore non-matching files.\n m = upgrade_script_rex.match(item)\n if m is None:\n logger.warning(\n \"ignoring item %r in schema migration directory: %r\",\n item, schema_migrations_path)\n continue\n\n # Check the version numbers for validity.\n version, next_version = map(int, m.groups())\n if next_version != version + 1:\n logger.error(\n \"invalid script name %r in schema migration directory: %r\",\n item, schema_migrations_path)\n continue\n\n schema_migrations[version] = os.path.join(\n schema_migrations_path, item)\n\n # Ignore directories with no migrations.\n if not schema_migrations:\n logger.warning(\"ignoring empty migrations directory: %r\",\n schema_migrations_path)\n continue\n\n # Check the provided versions for sanity.\n current_version = max(schema_migrations) + 1\n for i in range(current_version):\n if i not in schema_migrations:\n logger.error(\"schema %r is missing migration for version: %r\",\n schema_name, i)\n\n # Store the current version as another item in the per-schema migration\n # dictionary.\n schema_migrations['current_version'] = current_version\n\n # Store the schema migrations.\n migrations[schema_name] = schema_migrations\n\n return migrations", "def upgradedb(ctx):\n path = Path(__file__).resolve().parent.parent\n conf = Config(str(path / \"migrations\" / \"alembic.ini\"))\n conf.set_main_option(\"script_location\", str(path / \"migrations\"))\n command.upgrade(conf, \"heads\")", "def test_upper_version(self):\n filename = str(uuid.uuid4())\n _silentremove(filename)\n dburi = \"sqlite:///%s\" % filename\n db = connection.connect(dburi, create=True, verbose=False)\n dbversion = DBVersion()\n dbversion.version = CURRENT_DB_VERSION + 1\n dbversion.version_number = CURRENT_DB_VERSION + 1\n dbversion.version_timestamp = datetime.datetime.now().strftime(\"%s\")\n db.add(dbversion)\n db.commit()\n\n self.assertRaises(DBAdminError, db_current_version, db)\n self.assertRaises(DBAdminError, db_verify, db)\n\n _remove(filename)", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def increment_version_on_insert(obj):\n history_model = obj.previous_version()\n\n if history_model is not None:\n obj.version = history_model.version + 1", "def test_valid_versions(self):\n instance = ClassWithVersion()\n versions = [\"1.2.3\", \"1.2.*\", \"1.*\", \"*\", \"1.1.1\", \"1.0.1rc1\"]\n for version in versions:\n instance.version = version\n self.assertEqual(instance.version(), version)", "def get_version_number():\n return [0, 1, 0]", "def AddDatabaseVersion(\n parser, restrict_choices=True, hidden=False, support_default_version=True\n):\n # Section for engine-specific content.\n # This section is auto-generated by //cloud/storage_fe/sql/sync_engines.\n # Do not make manual edits.\n choices = [\n 'MYSQL_5_6',\n 'MYSQL_5_7',\n 'MYSQL_8_0',\n 'POSTGRES_9_6',\n 'POSTGRES_10',\n 'POSTGRES_11',\n 'POSTGRES_12',\n 'POSTGRES_13',\n 'POSTGRES_14',\n 
'POSTGRES_15',\n 'SQLSERVER_2017_EXPRESS',\n 'SQLSERVER_2017_WEB',\n 'SQLSERVER_2017_STANDARD',\n 'SQLSERVER_2017_ENTERPRISE',\n 'SQLSERVER_2019_EXPRESS',\n 'SQLSERVER_2019_WEB',\n 'SQLSERVER_2019_STANDARD',\n 'SQLSERVER_2019_ENTERPRISE',\n 'SQLSERVER_2022_EXPRESS',\n 'SQLSERVER_2022_WEB',\n 'SQLSERVER_2022_STANDARD',\n 'SQLSERVER_2022_ENTERPRISE',\n ]\n # End of engine-specific content.\n\n help_text_unspecified_part = (\n DEFAULT_INSTANCE_DATABASE_VERSION + ' is used.'\n if support_default_version\n else 'no changes occur.'\n )\n help_text = (\n 'The database engine type and versions. If left unspecified, '\n + help_text_unspecified_part\n + ' See the list of database versions at '\n + 'https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1beta4/SqlDatabaseVersion.'\n )\n\n if restrict_choices:\n help_text += (\n ' Apart from listed major versions, DATABASE_VERSION also accepts'\n ' supported minor versions.'\n )\n\n parser.add_argument(\n '--database-version',\n required=False,\n default=DEFAULT_INSTANCE_DATABASE_VERSION\n if support_default_version\n else None,\n choices=_MajorVersionMatchList(choices) if restrict_choices else None,\n help=help_text,\n hidden=hidden,\n )", "def do_create_version(**kwargs):\n version_params = {\n \"name\": kwargs['dag_run'].conf.get('model_version'),\n \"description\": 'Version 1',\n \"runtimeVersion\": kwargs['dag_run'].conf.get('tf_version'),\n \"deploymentUri\": 'gs://{}/{}'.format(COMPOSER_BUCKET_NAME, PREFIX_FINAL_MODEL)\n }\n\n ti = kwargs['ti']\n\n mle = MLEngineHook()\n\n model_name = kwargs['dag_run'].conf.get('model_name')\n model_versions = ti.xcom_pull(key='model_versions', task_ids='list_versions')\n\n version_path = 'projects/{}/models/{}/versions/{}'.format(PROJECT,\n model_name,\n version_params['name'])\n\n if version_path in [v['name'] for v in model_versions]:\n logging.info(\"Delete previously version of the model to overwrite.\")\n mle.delete_version(PROJECT, model_name, version_params['name'])\n\n mle.create_version(PROJECT, model_name, version_params)", "def db_migrate():\n when = str(int(time.time()))\n sql_file = os.path.join(MIGRATION_FOLDER, f\"{when}.sql\")\n\n with open(sql_file, 'w') as save_sql:\n up = MYSQL_UP.format(f\"upgrade-{when}\", when, MIGRATION_TABLE)\n down = MYSQL_DOWN.format(f\"downgrade-{when}\", when, MIGRATION_TABLE)\n\n save_sql.write(\"\\n\\n\".join([up, down]))\n LOGGER.info(f\"migration file: {os.path.join('migrations', sql_file)}\")", "def strategy(self) -> AwesomeVersionStrategy:\n return version_strategy(self.string)", "def db_setup(self):\n revision: Table = Table(self.revision_table,\n self.sql_metadata,\n Column(self._MigrationTableColumns.revisions.value, Text, primary_key=True),\n schema=self.revision_table_schema)\n revision.create(self.psql_engine)", "def migratedb(rollback=False):\n\n require(\"virtualenv_path\", \"project_path\", \"sudo_user\")\n\n #\n # Some things need to be done first (i.e. 
if they need a different\n # database connection or some custom args)\n #\n if \"migratedb_first\" in env:\n\n for app, args in env.migratedb_first.iteritems():\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version, args)\n\n #\n # Do the rest afterwards\n #\n if has_version_info():\n\n apps = env.south_migrations.keys()\n\n for app in apps:\n\n print app\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version)\n\n #\n # If we know nothing, just migrate everything\n #\n else:\n migrate_app_db()", "def __version__(self):\n return self.instance.__version__", "def version(self):\n raise NotImplementedError('version')", "def version_number() -> int:\n return 0", "def create(self, migration_name):\n # original version vith timestamp version format\n # timestamp = strftime(\"%Y%m%d%H%M%S\", localtime())\n next_ver = self.get_next_version()\n file_name = \"%s_%s%s\" % (next_ver, migration_name, Migration.MIGRATION_FILES_EXTENSION)\n if not Migration.is_file_name_valid(file_name):\n raise Exception(\n \"invalid migration name ('%s'); it should contain only letters, numbers and/or underscores\"\n % file_name)\n\n new_file_name = os.path.join(self.__migrations_dir, file_name)\n\n try:\n f = codecs.open(new_file_name, \"w\", \"utf-8\")\n f.write(Migration.TEMPLATE)\n f.close()\n except IOError:\n raise Exception(\"could not create file ('%s')\" % new_file_name)\n\n migration = Migration(new_file_name)\n self.__migrations.append(migration)\n return migration", "def do_upgrade(env, ver, cursor):\n cursor.execute('UPDATE system SET name=%s WHERE name=%s',\n (\"agiletools_version\", \"taskboard_schema\"))", "def migrate(cls)->None:\n database.cursor.execute(\"\"\"CREATE TABLE IF NOT EXISTS votes(\n id serial PRIMARY KEY,\n question integer,\n user_id integer,\n value integer\n )\"\"\")\n database.connection.commit()", "def genVersion(*args, **kwargs):\n return generateVersionFileData(Version(*args, **kwargs))", "def __update_version(self):\r\n\r\n db_version = self.__get_db_version_int()\r\n if db_version == SCHEMA_VERSION:\r\n return\r\n\r\n #\r\n # Define functions for upgrading between schema versions\r\n #\r\n def update_2xto30():\r\n \"\"\"Incremental update of database from Freeseer 2.x and older to 3.0\r\n\r\n SCHEMA_VERSION is 300\r\n \"\"\"\r\n if db_version > 300:\r\n log.debug('Database newer than schema version 300.')\r\n return # No update needed\r\n\r\n log.debug('Updating to schema 300.')\r\n QtSql.QSqlQuery('ALTER TABLE presentations RENAME TO presentations_old') # temporary table\r\n self.__create_presentations_table(PRESENTATIONS_SCHEMA_300)\r\n QtSql.QSqlQuery(\"\"\"INSERT INTO presentations\r\n SELECT Id, Title, Speaker, Description, Level, Event, Room, Time FROM presentations_old\"\"\")\r\n QtSql.QSqlQuery('DROP TABLE presentations_old')\r\n\r\n def update_30to31():\r\n \"\"\"Performs incremental update of database from 3.0 and older to 3.1.\"\"\"\r\n QtSql.QSqlQuery('ALTER TABLE presentations RENAME TO presentations_old')\r\n self.__create_presentations_table(PRESENTATIONS_SCHEMA_310)\r\n QtSql.QSqlQuery(\"\"\"INSERT INTO presentations\r\n SELECT Id, Title, Speaker, Description, Level, Event, Room, Time, Time, Time\r\n FROM presentations_old\"\"\")\r\n QtSql.QSqlQuery('DROP TABLE presentations_old')\r\n\r\n #\r\n # Perform the upgrade\r\n #\r\n updaters = [update_2xto30, update_30to31]\r\n for updater in updaters:\r\n updater()\r\n\r\n QtSql.QSqlQuery('PRAGMA user_version = %i' % SCHEMA_VERSION)\r\n log.info('Upgraded 
presentations database from version {} to {}'.format(db_version, SCHEMA_VERSION))", "def updateVersions(self):\r\n f = open('../versions.pckl', 'wb')\r\n pickle.dump(self.versions, f)\r\n f.close()", "def update_migrations_run(self, migration: str):\n pass", "def upgrade(revision, sql):\n alembic_command.upgrade(alembic_config, revision, sql=sql)", "def checkVersions():\n item = Item(fromScene=True)\n\n for ns, componentMData in item.components.iteritems():\n if ns == 'cam':\n # todo tratar versoes da camera\n continue\n\n if componentMData['assembleMode'] == 'reference':\n refComponent = ReferenceComponent(ns, componentMData, parent=item)\n refComponent.checkDBForNewVersion()\n\n elif componentMData['assembleMode'] == 'xlo':\n xloComponent = XloComponent(ns, componentMData, parent=item)\n xloComponent.checkDBForNewVersion()\n xloComponent.checkDBForNewCacheVersion()\n\n elif componentMData['assembleMode'] == 'cache':\n cacheComponent = CacheComponent(ns, componentMData, parent=item)\n cacheComponent.checkDBForNewVersion()\n\n item.putDataToDB()", "def versions():\n result = timeline.versions()\n if result:\n click.echo('\\n'.join(result))", "def version(self):\n self.version_list[-1] = self.revision\n version = '.'.join(self.version_list)\n return version" ]
[ "0.7271674", "0.6912402", "0.6781953", "0.66197056", "0.6540998", "0.64763844", "0.63564444", "0.6351916", "0.6346999", "0.627028", "0.6214616", "0.61989695", "0.61721605", "0.61721605", "0.6119614", "0.6112119", "0.60870755", "0.6083832", "0.6082907", "0.6082181", "0.59738785", "0.5948841", "0.59176534", "0.5889575", "0.5889575", "0.5889575", "0.5860145", "0.58464795", "0.5839337", "0.58341116", "0.5826495", "0.58219796", "0.58219796", "0.5812942", "0.5764615", "0.57588995", "0.5756661", "0.5748352", "0.57469314", "0.5744396", "0.5728743", "0.5728247", "0.57269204", "0.57248694", "0.5701582", "0.5701496", "0.56878936", "0.56848305", "0.5682562", "0.5678865", "0.5675738", "0.56652933", "0.56620955", "0.56585944", "0.5658057", "0.56419104", "0.5641308", "0.56294596", "0.5621609", "0.5620805", "0.5618515", "0.5612417", "0.5607297", "0.5605022", "0.55979496", "0.5591854", "0.5591306", "0.5585797", "0.558445", "0.55815715", "0.5572332", "0.5570798", "0.55696195", "0.55685604", "0.55653995", "0.55653995", "0.55653995", "0.55653995", "0.5563189", "0.55580366", "0.5544274", "0.55277026", "0.5527281", "0.55239904", "0.55147594", "0.5513377", "0.5500819", "0.5487873", "0.5481971", "0.5478464", "0.54727185", "0.54708093", "0.5470133", "0.54601073", "0.5459747", "0.54519325", "0.545027", "0.5446997", "0.54469264", "0.5421024", "0.54132605" ]
0.0
-1
Version management for migrations.
def __eq__(self, other):
    return (
        self.bg_color == other.bg_color
        and self.width == other.width
        and self.height == other.height
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def migration():", "def migrate(cr, version):\n pass", "def version(self):\r\n print migration.db_version()", "def post_migrations(self):", "def _run_migrations(self, current_migration_version: int):\n logger.debug(\"Checking for necessary database migrations...\")\n\n while current_migration_version < latest_migration_version:\n next_migration_version = current_migration_version + 1\n logger.info(\n f\"Migrating the database from v{current_migration_version} to v{next_migration_version}...\",\n )\n\n migration = importlib.import_module(f\".migrations.{str(next_migration_version).rjust(3, '0')}\", \"middleman\")\n # noinspection PyUnresolvedReferences\n migration.migrate(self)\n\n # Update the stored migration version\n self._execute(\"UPDATE migration_version SET version = ?\", (next_migration_version,))\n\n logger.info(f\"Database migrated to v{next_migration_version}\")\n current_migration_version += 1", "def makemigration(self):\n template = os.path.join(os.path.dirname(__file__),\n 'migration_template.py')\n ver = self.latest(quiet=True) + 1\n destination = os.path.abspath(self.config.get('migrate', 'location'))\n if not os.path.exists(destination):\n os.makedirs(destination)\n fname = 'version_{}.py'.format(ver)\n shutil.copyfile(template, os.path.join(destination, fname))\n self.logger.info('Migration \\'{}\\' created'.format(fname))\n self.latest()", "def migrate(self):\n\tpass", "def version(self):", "async def migrate(self):\n # Controlla se ci sono tabelle nel db\n async with self.db.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute(\"\"\"SELECT COUNT(DISTINCT table_name) as c\n FROM information_schema.columns\n WHERE table_schema = %s\"\"\", (conn.db,))\n db_empty = (await cur.fetchone())[\"c\"] <= 0\n\n # Se ci sono tabelle, prova a leggere `db_version`\n if not db_empty:\n await cur.execute(\"SELECT db_version FROM db_version LIMIT 1\")\n db_version_in_db = await cur.fetchone()\n db_version = 0 if db_version_in_db is None else db_version_in_db[\"db_version\"]\n else:\n db_version = 0\n\n # Prendi la lista di file sql e py da eseguire\n new_migrations = [x for x in self.migrations if x.id > db_version]\n\n # Controlla se ci sono migration da eseguire\n if not new_migrations:\n self.logger.info(\"No new migrations. 
The database is already up to date!\")\n return\n\n # Esegui migrations\n self.logger.info(\"Current db version: @{}\".format(db_version))\n db_version += 1\n current_migration = self.get_migration(db_version)\n while current_migration is not None:\n self.logger.info(\"Executing {}\".format(current_migration.file_name))\n\n if current_migration.type == \"sql\":\n # Leggi ed esegui file sql\n with open(\n os.path.join(os.path.dirname(__file__), \"migrations/{}\".format(current_migration.file_name)), \"r\"\n ) as f:\n data = f.read()\n async with self.db.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute(data)\n await conn.commit()\n elif current_migration.type == \"py\":\n # Importa modulo py\n module = importlib.import_module(\"migrator.migrations.{}\".format(current_migration.file_name[:-3]))\n migr = getattr(module, \"do\")\n await migr()\n\n # Migration eseguita, aggiorna `db_version`\n self.logger.info(\"Migration {} executed with no errors\".format(current_migration.file_name))\n await self.save_db_version(db_version)\n\n # Vai alla prossima migration\n db_version += 1\n current_migration = self.get_migration(db_version)\n self.logger.info(\"All migrations executed correctly\")", "def create_versions_after_migration(**kwargs):\n migrations = [migration\n for migration, rollback in kwargs.get('plan', [])\n if not rollback]\n models: Set[Any] = set()\n for migration in migrations:\n models.update(getattr(migration, 'REVISED_MODELS', []))\n\n with transaction.atomic():\n for model in reversion_models(models):\n create_revisions_for(model)", "def upgrade(self, version):\n try:\n version = int(version)\n except:\n if version != 'latest':\n self.logger.error('Unable to parse version \"{}\"'.format(version))\n return\n\n # check the current db version\n current_version = self.inspect()\n if current_version is None:\n self.logger.error('Unable to inspect your database. '\n 'Perhaps you need to run \\'jambi inpsect\\'?')\n return\n\n # get the migrations\n migrations = self.find_migrations()\n latest_version = migrations[-1][1] if any(migrations) else 0\n migrations = tuple(filter(lambda x: x[1] > current_version, migrations))\n\n if current_version > latest_version:\n self.logger.error('Your database version is higher than the '\n 'current database version. '\n '(current: {}, latest: {})'.format(current_version,\n latest_version))\n elif current_version == latest_version:\n self.logger.info('You are already up to date. '\n '(version: {})'.format(current_version))\n return\n\n # filter out migrations that are beyond the desired version\n if version == 'latest':\n version = latest_version\n migrations = tuple(filter(lambda x: x[1] <= version, migrations))\n if not any(migrations):\n self.logger.info('You are already up to date. 
'\n '(version: {})'.format(current_version))\n return\n\n # run the migrations\n self.logger.info('Now performing the migration to version {}...'.format(version))\n self.db.connect()\n with self.db.atomic():\n for n, v, m in migrations:\n self.logger.info('>>> [{}] Attempting...'.format(v))\n migrator = PostgresqlMigrator(self.db)\n upgrades = m.upgrade(migrator)\n migrate(*upgrades)\n self._set_version(v)\n self.logger.info('>>> [{}] Success!'.format(v))\n self.db.close()\n self.logger.info('Successfully migrated to version {}...'.format(version))\n return", "def _migrate(self):\n with self.engine.begin() as conn:\n context = alembic.migration.MigrationContext.configure(conn)\n current_rev = context.get_current_revision()\n self.log.debug('Current migration revision: %s' % current_rev)\n\n config = alembic.config.Config()\n config.set_main_option(\"script_location\",\n \"zuul:driver/sql/alembic\")\n config.set_main_option(\"sqlalchemy.url\",\n self.connection_config.get('dburi'))\n\n # Alembic lets us add arbitrary data in the tag argument. We can\n # leverage that to tell the upgrade scripts about the table prefix.\n tag = {'table_prefix': self.table_prefix}\n alembic.command.upgrade(config, 'head', tag=tag)", "def upgrade(self):", "def upgrade(self):", "async def manage_version():\n\n try:\n repo = git.Repo(search_parent_directories=True)\n version = repo.git.describe('--tags')\n except Exception:\n version = \"v0.0.0\"\n\n base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n creation_time = time.ctime(os.path.getmtime(base_dir))\n\n response = {'version': version, 'deployedOn': creation_time}\n return OK(response)", "def test_changeVersions(self):\n self._testVersionChanging(8, 2, 3)", "def do_version(self):\n return \"1.0.0\", True", "def _get_version(self):", "def _set_database_version(db, version):\n if not isinstance(version, int):\n raise TypeError(\"Version must be integer, not %s : %s\" % (\n version, type(version)))\n create_metadata = \\\n \"CREATE TABLE %s (version INT)\" % METADATA_COLUMN_NAME\n execute_sql(db, create_metadata)\n insert_version = \\\n \"INSERT INTO %s VALUES (%s)\" % (METADATA_COLUMN_NAME, version)\n execute_sql(db, insert_version)", "def update_db_version():\n print(\"Checking Database states...\")\n os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"ADSM.settings\")\n try:\n call_command('migrate', database='scenario_db', interactive=False, fake_initial=True)\n call_command('migrate', database='default', interactive=False, fake_initial=True)\n except:\n print(\"Error: Migration failed.\")\n print('Done migrating databases.')", "def upgrade_schema():\n\n db_version = get_db_version()\n try:\n while db_version < CURRENT_DATABASE_VERSION:\n db_version += 1\n upgrade_script = 'upgrade_to_'+str(db_version)\n globals()[upgrade_script]()\n except KeyError as e:\n logging.exception('Attempted to upgrade using script that does not exist: {}'.format(e))\n sys.exit(1)\n except Exception as e:\n logging.exception('Incremental upgrade of db failed')\n sys.exit(1)\n else:\n config.db.singletons.update_one({'_id': 'version'}, {'$set': {'database': CURRENT_DATABASE_VERSION}})\n sys.exit(0)", "def upgrade(self, old_version, new_version):\n pass", "def db_upgrade():\n generate_migration_file()\n dbu_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')\n\n for time_step in [_.strip('.sql') for _ in migration_files()]:\n decide = MySQLScheme.fetch_one(REVISION_EXISTS,\n **{\"args\": {'revision': time_step}})\n if not decide:\n 
MySQLScheme.commit(getattr(dbu_query, f\"upgrade_{time_step}\").sql)\n LOGGER.info(f\"successful migration: {time_step}\")\n else:\n LOGGER.info(f'migration already exists: {time_step}')", "def version(self):\n pass", "def version(self):\n pass", "def version(self):\n pass", "def sync(self, version=None):\r\n return migration.db_sync(version)", "def run(self):\n\n version_str = (\n get_git_version(here))\n\n version_uniparser_dict = (\n get_uniparser_version())\n\n if (version_str is not None or\n version_uniparser_dict is not None):\n\n with open(\n os.path.join(here, 'lingvodoc', 'version.py'), 'w',\n encoding = 'utf-8') as version_py_file:\n\n version_py_file.write(\n self.version_py_template.format(\n repr(version_str),\n repr(version_uniparser_dict)))\n\n # Continuing with setup.\n\n super().run()", "def ready(self):\n import django_better_migrations.migration_writer_patch # noqa", "def version(self, newVersion=None):\n pass", "def migrate(ctx, start_version, end_version):\n if ctx.obj['TYPE'] == 'file':\n if ctx.obj['DATA_FILE']:\n file_path = ctx.obj['DATA_FILE']\n else:\n file_path = os.path.join(ctx.obj['DATA_DIR'], ctx.obj['NAMESPACE'] + '.json')\n\n # todo make this more like alemebic and determine/load versions automatically\n with open(file_path, 'r') as f:\n data = json.loads(f.read())\n\n data = run_migration(data, start_version, end_version)\n with open(file_path, 'w') as f:\n f.write(json.dumps(data))", "def Version(self) -> _n_0_t_12:", "def Version(self) -> _n_0_t_12:", "def test_version(self):\n pass", "def test_semantic_version():\n semantic_version.Version(settings.VERSION)", "def version():\n\n pass", "def _testVersionChanging(self, major, minor, micro, prerelease=None):\n versionUpdates = []\n def myVersionChanger(sourceTree, versionTemplate):\n versionUpdates.append((sourceTree, versionTemplate))\n versionChanger = ChangeVersionsScript()\n versionChanger.changeAllProjectVersions = myVersionChanger\n version = \"%d.%d.%d\" % (major, minor, micro)\n if prerelease is not None:\n version += \"pre%d\" % (prerelease,)\n versionChanger.main([version])\n self.assertEquals(len(versionUpdates), 1)\n self.assertEquals(versionUpdates[0][0], FilePath(\".\"))\n self.assertEquals(versionUpdates[0][1].major, major)\n self.assertEquals(versionUpdates[0][1].minor, minor)\n self.assertEquals(versionUpdates[0][1].micro, micro)\n self.assertEquals(versionUpdates[0][1].prerelease, prerelease)", "def test_version_control_specified(self):\n # Establish version control on this database\n version = 0\n dbcontrol = ControlledSchema.create(self.engine, self.repos, version)\n self.assertEqual(dbcontrol.version, version)\n\n # Correct when we load it, too\n dbcontrol = ControlledSchema(self.engine, self.repos)\n self.assertEqual(dbcontrol.version, version)\n\n dbcontrol.drop()\n\n # Now try it with a nonzero value\n version = 10\n for i in range(version):\n self.repos.create_script('')\n self.assertEqual(self.repos.latest, version)\n\n # Test with some mid-range value\n dbcontrol = ControlledSchema.create(self.engine,self.repos, 5)\n self.assertEqual(dbcontrol.version, 5)\n dbcontrol.drop()\n\n # Test with max value\n dbcontrol = ControlledSchema.create(self.engine, self.repos, version)\n self.assertEqual(dbcontrol.version, version)\n dbcontrol.drop()", "def _set_version(self, version):\n with self.db.atomic():\n JambiModel.delete().execute()\n JambiModel.create(ref=str(version))\n self.logger.debug('Set jambi version to {}'.format(version))", "def set_version_db(apps, schema_editor):\n Version 
= apps.get_model(\"reversion\", \"Version\")\n content_types = Version.objects.values_list(\"content_type\", flat=True).distinct()\n for content_type in content_types:\n model_class = content_type.model_class()\n db = router.db_for_write(model_class)\n Version.objects.filter(content_type=content_type).update(db=db)", "def get_versions(self):\n raise NotImplementedError", "def _update_version(self) -> None:\n # Implement in child class.\n raise NotImplementedError", "def version_updater(logging_level=logging.WARNING):\n # connect to db\n do_db_setup()\n\n # set Qt lib\n set_qt_lib()\n\n from anima.ui import version_updater, models\n from anima.env import mayaEnv\n reload(mayaEnv)\n reload(version_updater)\n reload(models)\n m = Maya()\n import pymel\n m.name = \"Maya\" + str(pymel.versions.current())[0:4]\n\n logger.setLevel(logging_level)\n\n # generate a reference_resolution\n version_updater.UI(environment=m)", "def ongeza(self, type_):\n switch = {\n 'm': semver.bump_major,\n 'n': semver.bump_minor,\n 'p': semver.bump_patch,\n 'major': semver.bump_major,\n 'minor': semver.bump_minor,\n 'patch': semver.bump_patch}\n\n new_version = switch.get(type_)(self.version)\n\n if new_version in set(self.versions):\n self.logger.error('version `%s` already present', new_version)\n new_version = None\n\n return new_version", "def upgrade_to_1():\n config.db.singletons.insert_one({'_id': 'version', 'database': 1})", "def _walk_versions(self, config, engine, downgrade=True, snake_walk=False):\n\n revisions = self._revisions()\n for dest, curr in revisions:\n self._migrate_up(config, engine, dest, curr, with_data=True)\n\n if snake_walk and dest != 'None':\n # NOTE(I159): Pass reversed arguments into `_migrate_down`\n # method because we have been upgraded to a destination\n # revision and now we going to downgrade back.\n self._migrate_down(config, curr, dest, with_data=True)\n self._migrate_up(config, dest, curr, with_data=True)\n\n if downgrade:\n revisions = self._revisions(downgrade)\n for dest, curr in revisions:\n self._migrate_down(config, engine, dest, curr, with_data=True)\n if snake_walk:\n self._migrate_up(config, engine, curr, dest,\n with_data=True)\n self._migrate_down(config, engine, dest, curr,\n with_data=True)", "def run_migration_checks():\n check_model_state()\n check_migration_state()", "def run_migration(env, upgrade_type):\n pass", "def get_version(self):\n pass", "def main(arguments):\n migration = Migration(arguments)\n return migration.run()", "def migrate(ctx):\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"migrate\")", "def setup_before_migration(self, apps):", "def _current_versions(self, job):\n raise NotImplementedError", "def migrate(cls)->None:\n pass", "async def load_db_migrations(self, conn: Optional[Pool] = None) -> None:\n\n def get_version_from_name(name: str) -> int:\n return int(name.split(\"_\", 1)[0])\n\n file_ = max(\n (f for f in settings.migrations_directory.iterdir() if f.suffix == \".sql\"),\n key=lambda f: get_version_from_name(f.name),\n )\n\n async with MaybeAcquire(conn, self.pool) as conn:\n query = \"SELECT current_setting('mg.version') as version\"\n try:\n schema_version = await conn.fetchval(query)\n except UndefinedObjectError:\n schema_version = 0\n else:\n schema_version = int(schema_version)\n\n logger.debug(\"Schema version: %d\", schema_version)\n\n file_version = get_version_from_name(file_.name)\n if file_version <= schema_version:\n logger.debug(\"Skipping migration: %s\", file_.name)\n return\n\n sql_commands = 
read_sql_file(file_)\n logger.info(\"Applying migration: %s\", file_.name)\n\n await conn.execute(sql_commands)\n await self._set_database_version(file_version)", "def versioned(command_subclass):\n VERSION_PY = \"\"\"\n# This file is updated from Git information by running 'python setup.py\n# version'.\n__version__ = '%s'\n\"\"\"\n orig_callable = command_subclass.run\n\n def modified_callable(self):\n if not os.path.isdir(\".git\"):\n print \"This does not appear to be a Git repository.\"\n return\n try:\n p = subprocess.Popen([\"git\", \"describe\",\n \"--tags\", \"--always\"],\n stdout=subprocess.PIPE)\n except EnvironmentError:\n print \"unable to run git, leaving idss-seration/_version.py alone\"\n return\n stdout = p.communicate()[0]\n if p.returncode != 0:\n print \"unable to run git, leaving idss-seriation/_version.py alone\"\n return\n # our tags are like: v2.2\n ver = stdout[len(\"v\"):].strip()\n f = open(\"sklearn_mmadsen/_version.py\", \"w\")\n f.write(VERSION_PY % ver)\n f.close()\n print \"updated _version.py to '%s'\" % ver\n orig_callable(self)\n\n command_subclass.run = modified_callable\n return command_subclass", "def test_get_version(self):\n pass", "def test_version_01(self):\n\n version = self.sqlbak([\"--version\"])\n self.assertTrue(\"sqlbak v\" in version)", "def stampdb(self, args):\n revision = REVISION_MAPPING[args.configversion]\n print(f\"Based on config version {args.configversion} \"\n f\"we think your results schema is version {revision} and are upgrading to it\")\n stamp_db(revision, args.dbfile)", "async def _upgrade_db(self) -> None:\n cur_version = await self._get_db_version()\n for n in range(cur_version + 1, sql_data.CUR_VERSION + 1):\n log.msg('Upgrading database to version %d' % n)\n if n in sql_data.SQL_UPGRADES:\n for command in sql_data.SQL_UPGRADES[n]:\n await self.operation(command)\n if cur_version != sql_data.CUR_VERSION:\n await self._set_db_version(sql_data.CUR_VERSION)", "def _version(self):\n # TODO: Can we delete this method and just print the line from the\n # reqs file verbatim instead?\n def version_of_archive(filename, package_name):\n # Since we know the project_name, we can strip that off the left, strip\n # any archive extensions off the right, and take the rest as the\n # version.\n for ext in ARCHIVE_EXTENSIONS:\n if filename.endswith(ext):\n filename = filename[:-len(ext)]\n break\n # Handle github sha tarball downloads.\n if is_git_sha(filename):\n filename = package_name + '-' + filename\n if not filename.lower().replace('_', '-').startswith(package_name.lower()):\n # TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?\n give_up(filename, package_name)\n return filename[len(package_name) + 1:] # Strip off '-' before version.\n\n def version_of_wheel(filename, package_name):\n # For Wheel files (http://legacy.python.org/dev/peps/pep-0427/#file-\n # name-convention) we know the format bits are '-' separated.\n whl_package_name, version, _rest = filename.split('-', 2)\n # Do the alteration to package_name from PEP 427:\n our_package_name = re.sub(r'[^\\w\\d.]+', '_', package_name, re.UNICODE)\n if whl_package_name != our_package_name:\n give_up(filename, whl_package_name)\n return version\n\n def give_up(filename, package_name):\n raise RuntimeError(\"The archive '%s' didn't start with the package name '%s', so I couldn't figure out the version number. 
My bad; improve me.\" %\n (filename, package_name))\n\n get_version = (version_of_wheel\n if self._downloaded_filename().endswith('.whl')\n else version_of_archive)\n return get_version(self._downloaded_filename(), self._project_name())", "def set_version(self, version):\n\n def update_version(version, filepath):\n with open(filepath, \"r\") as stream:\n contents = stream.read()\n\n new_contents = _fix_contents_version(contents, version)\n assert contents != new_contents\n with open(filepath, \"w\") as stream:\n stream.write(new_contents)\n\n update_version(version, os.path.join(\".\", \"package.json\"))\n update_version(version, os.path.join(\".\", \"src\", \"setup.py\"))\n update_version(\n version, os.path.join(\".\", \"src\", \"robocorp_code\", \"__init__.py\")\n )", "def switch_to_version(self, version):\n self.current_version = version\n self.save()", "def run (args = None):\n cmds.init.require_init()\n (options, args) = optargs (args)\n try:\n revision = int(args[0])\n except (IndexError, ValueError):\n revision = repo.migration.latest_number()\n\n current = repo.revision.current()\n\n if options.dry_run:\n repo.revision.set_current(revision)\n print \"\"\"Revision set to #%s\"\"\" % revision\n return\n\n\n if current == revision and not options.abandon_current:\n print \"\"\"Nothing to update.\"\"\"\n return\n\n print \"\"\"Updating to migration #%s.\"\"\" % revision\n\n outstanding_changes = repo.has_outstanding_changes()\n if outstanding_changes:\n apply_after_update = repo.outstanding_changes()\n print \"\"\"Undoing outstanding changes.\"\"\"\n db.dump.load (repo.outstanding_changes(undo = True))\n \n if revision < current:\n # Downgrading\n while current > revision:\n try:\n Migration = repo.migration.Migration (current)\n print \"\"\"Downgrading migration #%s: %s.\"\"\" % (current, Migration.message)\n Migration.down()\n current = current - 1\n repo.revision.set_current (current)\n except MigrationFailedError:\n break\n \n else:\n # Upgrading\n while current < revision:\n try:\n current = current + 1\n Migration = repo.migration.Migration (current)\n print \"\"\"Upgrading migration #%s: %s.\"\"\" % (current, Migration.message)\n Migration.up()\n repo.revision.save_to_file(current)\n repo.revision.set_current (current)\n except MigrationFailedError:\n break\n\n print \"\"\"Updated to revision #%s.\"\"\" % repo.revision.current()\n\n if outstanding_changes and not options.abandon_current:\n print \"\"\"Reapplying outstanding changes.\"\"\"\n db.dump.load (apply_after_update)", "def upgrade(self, revision: str = \"head\") -> \"Alembic\":\n\n self.configure()\n\n if not self.migrator_base.does_table_exists(\n \"alembic_version\"\n ) or self.is_revision_different(revision):\n PyFunceble.facility.Logger.info(\n \"Started update (%r) of the database schema(s).\", revision\n )\n\n alembic_command.upgrade(self.alembic_config, revision)\n\n PyFunceble.facility.Logger.info(\n \"Finished update (%r) of the database schema(s).\", revision\n )", "def test_defaultChangeVersionsVersionChanger(self):\n versionChanger = ChangeVersionsScript()\n self.assertEquals(versionChanger.changeAllProjectVersions,\n changeAllProjectVersions)", "def find_migrations(self):\n fileloc = self.config.get('migrate', 'location')\n fullpath = os.path.abspath(fileloc)\n if not os.path.exists(fullpath):\n os.makedirs(fullpath)\n try:\n filenames = os.listdir(fullpath)\n except FileNotFoundError:\n self.logger.error('Unable to find migration folder '\n '\"{}\"'.format(fullpath))\n return []\n\n def 
is_valid_migration_name(n):\n return n.startswith('version_') and n.endswith('.py')\n filenames = filter(lambda x: is_valid_migration_name(x), filenames)\n filepaths = [(os.path.join(fullpath, f), f.replace('.py', ''))\n for f in filenames]\n migrations = []\n for fp, mn in filepaths:\n module_name = '.'.join([fileloc.replace('/', '.').strip('.'), mn])\n try:\n ver = int(re.search(r'version_(\\d+)', mn).group(1))\n except:\n self.logger.warning('Cannot parse version number from \"{}\", '\n 'skipping'.format(mn))\n continue\n self.logger.debug('Found {} at version {}'.format(module_name,\n ver))\n migrations.append(\n (module_name, ver, importlib.import_module(module_name))\n )\n return sorted(migrations, key=lambda x: x[1])", "def set_version(self, bundle, ctx, filename, version):", "def migrate_up(self, version, with_data=False):\n # NOTE(xek): This is a list of migrations where we allow dropping\n # things. The rules for adding exceptions are very very specific.\n # Chances are you don't meet the critera.\n # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE\n exceptions = [\n 64, # drop constraint\n 86, # drop watch_rule/watch_data tables\n ]\n # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE\n\n # NOTE(xek): We start requiring things be additive in\n # liberty, so ignore all migrations before that point.\n LIBERTY_START = 63\n\n if version >= LIBERTY_START and version not in exceptions:\n banned = ['Table', 'Column']\n else:\n banned = None\n with BannedDBSchemaOperations(banned):\n super(HeatMigrationsCheckers, self).migrate_up(version, with_data)", "def GetMigrationStatus(self, instance):\n raise HypervisorError(\"Migration not supported by the chroot hypervisor\")", "def migrate(t):\n\tapp = Application(__name__)\n\taction = t.option.action\n\toptions = argparse.Namespace()\n\tif not action:\n\t\t# no argument, then migrate version to latest\n\t\toptions.version = None\n\t\tMigration('go').run(config=app.config, options=options)\n\telse:\n\t\ttry:\n\t\t\t# specified the version, then migrate to it\n\t\t\tversion = int(action)\n\t\t\toptions.version = version\n\t\t\tMigration('go').run(config=app.config, options=options)\n\t\texcept ValueError:\n\t\t\t# specified some command to run, push the 'action' back first.\n\t\t\tt.argv.insert(0, action)\n\t\t\toptions = parser_options(t)\n\t\t\tMigration(options.migrate).run(options=options, config=app.config)", "def _load_migrations():\n\n upgrade_script_rex = re.compile(\n r'^upgrade_(0|[1-9][0-9]*)_to_([1-9][0-9]*)\\.py$')\n migrations = {}\n\n # Currently, we only load migrations for a '__core__' schema, and only from\n # the migrations directory. 
One idea if we need to eventually support\n # migrations for the per-testsuite tables is to add subdirectories keyed on\n # the testsuite.\n for schema_name in ('__core__',):\n schema_migrations_path = os.path.join(os.path.dirname(__file__),\n 'migrations')\n schema_migrations = {}\n for item in os.listdir(schema_migrations_path):\n # Ignore certain known non-scripts.\n if item in ('README.txt', '__init__.py', 'new_suite.py',\n 'util.py') or item.endswith('.pyc'):\n continue\n\n # Ignore non-matching files.\n m = upgrade_script_rex.match(item)\n if m is None:\n logger.warning(\n \"ignoring item %r in schema migration directory: %r\",\n item, schema_migrations_path)\n continue\n\n # Check the version numbers for validity.\n version, next_version = map(int, m.groups())\n if next_version != version + 1:\n logger.error(\n \"invalid script name %r in schema migration directory: %r\",\n item, schema_migrations_path)\n continue\n\n schema_migrations[version] = os.path.join(\n schema_migrations_path, item)\n\n # Ignore directories with no migrations.\n if not schema_migrations:\n logger.warning(\"ignoring empty migrations directory: %r\",\n schema_migrations_path)\n continue\n\n # Check the provided versions for sanity.\n current_version = max(schema_migrations) + 1\n for i in range(current_version):\n if i not in schema_migrations:\n logger.error(\"schema %r is missing migration for version: %r\",\n schema_name, i)\n\n # Store the current version as another item in the per-schema migration\n # dictionary.\n schema_migrations['current_version'] = current_version\n\n # Store the schema migrations.\n migrations[schema_name] = schema_migrations\n\n return migrations", "def upgradedb(ctx):\n path = Path(__file__).resolve().parent.parent\n conf = Config(str(path / \"migrations\" / \"alembic.ini\"))\n conf.set_main_option(\"script_location\", str(path / \"migrations\"))\n command.upgrade(conf, \"heads\")", "def test_upper_version(self):\n filename = str(uuid.uuid4())\n _silentremove(filename)\n dburi = \"sqlite:///%s\" % filename\n db = connection.connect(dburi, create=True, verbose=False)\n dbversion = DBVersion()\n dbversion.version = CURRENT_DB_VERSION + 1\n dbversion.version_number = CURRENT_DB_VERSION + 1\n dbversion.version_timestamp = datetime.datetime.now().strftime(\"%s\")\n db.add(dbversion)\n db.commit()\n\n self.assertRaises(DBAdminError, db_current_version, db)\n self.assertRaises(DBAdminError, db_verify, db)\n\n _remove(filename)", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def increment_version_on_insert(obj):\n history_model = obj.previous_version()\n\n if history_model is not None:\n obj.version = history_model.version + 1", "def test_valid_versions(self):\n instance = ClassWithVersion()\n versions = [\"1.2.3\", \"1.2.*\", \"1.*\", \"*\", \"1.1.1\", \"1.0.1rc1\"]\n for version in versions:\n instance.version = version\n self.assertEqual(instance.version(), version)", "def get_version_number():\n return [0, 1, 0]", "def AddDatabaseVersion(\n parser, restrict_choices=True, hidden=False, support_default_version=True\n):\n # Section for engine-specific content.\n # This section is auto-generated by //cloud/storage_fe/sql/sync_engines.\n # Do not make manual edits.\n choices = [\n 'MYSQL_5_6',\n 'MYSQL_5_7',\n 'MYSQL_8_0',\n 'POSTGRES_9_6',\n 'POSTGRES_10',\n 'POSTGRES_11',\n 'POSTGRES_12',\n 'POSTGRES_13',\n 'POSTGRES_14',\n 
'POSTGRES_15',\n 'SQLSERVER_2017_EXPRESS',\n 'SQLSERVER_2017_WEB',\n 'SQLSERVER_2017_STANDARD',\n 'SQLSERVER_2017_ENTERPRISE',\n 'SQLSERVER_2019_EXPRESS',\n 'SQLSERVER_2019_WEB',\n 'SQLSERVER_2019_STANDARD',\n 'SQLSERVER_2019_ENTERPRISE',\n 'SQLSERVER_2022_EXPRESS',\n 'SQLSERVER_2022_WEB',\n 'SQLSERVER_2022_STANDARD',\n 'SQLSERVER_2022_ENTERPRISE',\n ]\n # End of engine-specific content.\n\n help_text_unspecified_part = (\n DEFAULT_INSTANCE_DATABASE_VERSION + ' is used.'\n if support_default_version\n else 'no changes occur.'\n )\n help_text = (\n 'The database engine type and versions. If left unspecified, '\n + help_text_unspecified_part\n + ' See the list of database versions at '\n + 'https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1beta4/SqlDatabaseVersion.'\n )\n\n if restrict_choices:\n help_text += (\n ' Apart from listed major versions, DATABASE_VERSION also accepts'\n ' supported minor versions.'\n )\n\n parser.add_argument(\n '--database-version',\n required=False,\n default=DEFAULT_INSTANCE_DATABASE_VERSION\n if support_default_version\n else None,\n choices=_MajorVersionMatchList(choices) if restrict_choices else None,\n help=help_text,\n hidden=hidden,\n )", "def do_create_version(**kwargs):\n version_params = {\n \"name\": kwargs['dag_run'].conf.get('model_version'),\n \"description\": 'Version 1',\n \"runtimeVersion\": kwargs['dag_run'].conf.get('tf_version'),\n \"deploymentUri\": 'gs://{}/{}'.format(COMPOSER_BUCKET_NAME, PREFIX_FINAL_MODEL)\n }\n\n ti = kwargs['ti']\n\n mle = MLEngineHook()\n\n model_name = kwargs['dag_run'].conf.get('model_name')\n model_versions = ti.xcom_pull(key='model_versions', task_ids='list_versions')\n\n version_path = 'projects/{}/models/{}/versions/{}'.format(PROJECT,\n model_name,\n version_params['name'])\n\n if version_path in [v['name'] for v in model_versions]:\n logging.info(\"Delete previously version of the model to overwrite.\")\n mle.delete_version(PROJECT, model_name, version_params['name'])\n\n mle.create_version(PROJECT, model_name, version_params)", "def db_migrate():\n when = str(int(time.time()))\n sql_file = os.path.join(MIGRATION_FOLDER, f\"{when}.sql\")\n\n with open(sql_file, 'w') as save_sql:\n up = MYSQL_UP.format(f\"upgrade-{when}\", when, MIGRATION_TABLE)\n down = MYSQL_DOWN.format(f\"downgrade-{when}\", when, MIGRATION_TABLE)\n\n save_sql.write(\"\\n\\n\".join([up, down]))\n LOGGER.info(f\"migration file: {os.path.join('migrations', sql_file)}\")", "def strategy(self) -> AwesomeVersionStrategy:\n return version_strategy(self.string)", "def db_setup(self):\n revision: Table = Table(self.revision_table,\n self.sql_metadata,\n Column(self._MigrationTableColumns.revisions.value, Text, primary_key=True),\n schema=self.revision_table_schema)\n revision.create(self.psql_engine)", "def migratedb(rollback=False):\n\n require(\"virtualenv_path\", \"project_path\", \"sudo_user\")\n\n #\n # Some things need to be done first (i.e. 
if they need a different\n # database connection or some custom args)\n #\n if \"migratedb_first\" in env:\n\n for app, args in env.migratedb_first.iteritems():\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version, args)\n\n #\n # Do the rest afterwards\n #\n if has_version_info():\n\n apps = env.south_migrations.keys()\n\n for app in apps:\n\n print app\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version)\n\n #\n # If we know nothing, just migrate everything\n #\n else:\n migrate_app_db()", "def __version__(self):\n return self.instance.__version__", "def version(self):\n raise NotImplementedError('version')", "def version_number() -> int:\n return 0", "def create(self, migration_name):\n # original version vith timestamp version format\n # timestamp = strftime(\"%Y%m%d%H%M%S\", localtime())\n next_ver = self.get_next_version()\n file_name = \"%s_%s%s\" % (next_ver, migration_name, Migration.MIGRATION_FILES_EXTENSION)\n if not Migration.is_file_name_valid(file_name):\n raise Exception(\n \"invalid migration name ('%s'); it should contain only letters, numbers and/or underscores\"\n % file_name)\n\n new_file_name = os.path.join(self.__migrations_dir, file_name)\n\n try:\n f = codecs.open(new_file_name, \"w\", \"utf-8\")\n f.write(Migration.TEMPLATE)\n f.close()\n except IOError:\n raise Exception(\"could not create file ('%s')\" % new_file_name)\n\n migration = Migration(new_file_name)\n self.__migrations.append(migration)\n return migration", "def do_upgrade(env, ver, cursor):\n cursor.execute('UPDATE system SET name=%s WHERE name=%s',\n (\"agiletools_version\", \"taskboard_schema\"))", "def migrate(cls)->None:\n database.cursor.execute(\"\"\"CREATE TABLE IF NOT EXISTS votes(\n id serial PRIMARY KEY,\n question integer,\n user_id integer,\n value integer\n )\"\"\")\n database.connection.commit()", "def genVersion(*args, **kwargs):\n return generateVersionFileData(Version(*args, **kwargs))", "def __update_version(self):\r\n\r\n db_version = self.__get_db_version_int()\r\n if db_version == SCHEMA_VERSION:\r\n return\r\n\r\n #\r\n # Define functions for upgrading between schema versions\r\n #\r\n def update_2xto30():\r\n \"\"\"Incremental update of database from Freeseer 2.x and older to 3.0\r\n\r\n SCHEMA_VERSION is 300\r\n \"\"\"\r\n if db_version > 300:\r\n log.debug('Database newer than schema version 300.')\r\n return # No update needed\r\n\r\n log.debug('Updating to schema 300.')\r\n QtSql.QSqlQuery('ALTER TABLE presentations RENAME TO presentations_old') # temporary table\r\n self.__create_presentations_table(PRESENTATIONS_SCHEMA_300)\r\n QtSql.QSqlQuery(\"\"\"INSERT INTO presentations\r\n SELECT Id, Title, Speaker, Description, Level, Event, Room, Time FROM presentations_old\"\"\")\r\n QtSql.QSqlQuery('DROP TABLE presentations_old')\r\n\r\n def update_30to31():\r\n \"\"\"Performs incremental update of database from 3.0 and older to 3.1.\"\"\"\r\n QtSql.QSqlQuery('ALTER TABLE presentations RENAME TO presentations_old')\r\n self.__create_presentations_table(PRESENTATIONS_SCHEMA_310)\r\n QtSql.QSqlQuery(\"\"\"INSERT INTO presentations\r\n SELECT Id, Title, Speaker, Description, Level, Event, Room, Time, Time, Time\r\n FROM presentations_old\"\"\")\r\n QtSql.QSqlQuery('DROP TABLE presentations_old')\r\n\r\n #\r\n # Perform the upgrade\r\n #\r\n updaters = [update_2xto30, update_30to31]\r\n for updater in updaters:\r\n updater()\r\n\r\n QtSql.QSqlQuery('PRAGMA user_version = %i' % SCHEMA_VERSION)\r\n log.info('Upgraded 
presentations database from version {} to {}'.format(db_version, SCHEMA_VERSION))", "def updateVersions(self):\r\n f = open('../versions.pckl', 'wb')\r\n pickle.dump(self.versions, f)\r\n f.close()", "def update_migrations_run(self, migration: str):\n pass", "def upgrade(revision, sql):\n alembic_command.upgrade(alembic_config, revision, sql=sql)", "def checkVersions():\n item = Item(fromScene=True)\n\n for ns, componentMData in item.components.iteritems():\n if ns == 'cam':\n # todo tratar versoes da camera\n continue\n\n if componentMData['assembleMode'] == 'reference':\n refComponent = ReferenceComponent(ns, componentMData, parent=item)\n refComponent.checkDBForNewVersion()\n\n elif componentMData['assembleMode'] == 'xlo':\n xloComponent = XloComponent(ns, componentMData, parent=item)\n xloComponent.checkDBForNewVersion()\n xloComponent.checkDBForNewCacheVersion()\n\n elif componentMData['assembleMode'] == 'cache':\n cacheComponent = CacheComponent(ns, componentMData, parent=item)\n cacheComponent.checkDBForNewVersion()\n\n item.putDataToDB()", "def versions():\n result = timeline.versions()\n if result:\n click.echo('\\n'.join(result))", "def version(self):\n self.version_list[-1] = self.revision\n version = '.'.join(self.version_list)\n return version" ]
[ "0.7271674", "0.6912402", "0.6781953", "0.66197056", "0.6540998", "0.64763844", "0.63564444", "0.6351916", "0.6346999", "0.627028", "0.6214616", "0.61989695", "0.61721605", "0.61721605", "0.6119614", "0.6112119", "0.60870755", "0.6083832", "0.6082907", "0.6082181", "0.59738785", "0.5948841", "0.59176534", "0.5889575", "0.5889575", "0.5889575", "0.5860145", "0.58464795", "0.5839337", "0.58341116", "0.5826495", "0.58219796", "0.58219796", "0.5812942", "0.5764615", "0.57588995", "0.5756661", "0.5748352", "0.57469314", "0.5744396", "0.5728743", "0.5728247", "0.57269204", "0.57248694", "0.5701582", "0.5701496", "0.56878936", "0.56848305", "0.5682562", "0.5678865", "0.5675738", "0.56652933", "0.56620955", "0.56585944", "0.5658057", "0.56419104", "0.5641308", "0.56294596", "0.5621609", "0.5620805", "0.5618515", "0.5612417", "0.5607297", "0.5605022", "0.55979496", "0.5591854", "0.5591306", "0.5585797", "0.558445", "0.55815715", "0.5572332", "0.5570798", "0.55696195", "0.55685604", "0.55653995", "0.55653995", "0.55653995", "0.55653995", "0.5563189", "0.55580366", "0.5544274", "0.55277026", "0.5527281", "0.55239904", "0.55147594", "0.5513377", "0.5500819", "0.5487873", "0.5481971", "0.5478464", "0.54727185", "0.54708093", "0.5470133", "0.54601073", "0.5459747", "0.54519325", "0.545027", "0.5446997", "0.54469264", "0.5421024", "0.54132605" ]
0.0
-1
Normalize, pad and batch the input images.
def preprocess_image(self, batched_inputs):
    images = [x.to(self.device) for x in batched_inputs]
    norms = [self.normalizer(x) for x in images]
    size = (norms[0].shape[1], norms[0].shape[2])
    images = ImageList.from_tensors(norms, self.backbone.size_divisibility)
    return images, size
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def normalize_images(data, blend_cat, Args):\n im = data['X_train']['blend_image']\n std = np.std(im)\n mean = np.mean(im)\n data['X_train']['blend_image'] = (im - mean) / std\n data['X_val']['blend_image'] = (data['X_val']['blend_image'] - mean) / std\n data['X_train'] = normalize_other_inputs(data['X_train'], Args)\n data['X_val'] = normalize_other_inputs(data['X_val'], Args)\n for key in data['Y_train'].keys():\n data['Y_train'][key] = (data['Y_train'][key] - mean) / std\n data['Y_val'][key] = (data['Y_val'][key] - mean) / std\n blend_cat['std'] = std\n blend_cat['mean'] = mean\n return data", "def preprocess_image(self, batched_inputs):\n images = [x[\"image\"].to(self.device) for x in batched_inputs]\n images_aug = [x[\"image_color\"].to(self.device) for x in batched_inputs]\n\n images = [self.normalizer(x) for x in images]\n images_aug = [self.normalizer(x) for x in images_aug]\n\n images = ImageList.from_tensors(images,\n self.backbone.size_divisibility)\n images_aug = ImageList.from_tensors(images_aug,\n self.backbone.size_divisibility)\n return images, images_aug", "def _normalize_images(self, images: th.Tensor) -> th.Tensor:\n output = ((images+2)/4 - self._norm_mean)/self._norm_std\n return output", "def preprocess(imgs):\n imgs_p = np.ndarray((len(imgs), img_rows, img_cols), dtype=np.float32)\n for i in range(len(imgs)):\n imgs_p[i] = imgs[i].reshape((img_rows, img_cols))/255.\n\n imgs_p = imgs_p[..., np.newaxis]\n\n # Perform data normalization\n mean = imgs_p.mean()\n std = imgs_p.std()\n imgs_p -= mean\n imgs_p /= std\n\n return imgs_p", "def preprocess_image(self, batched_inputs):\n images = [x[\"image\"].float().to(self.device) for x in batched_inputs]\n images = [self.normalizer(img) for img in images]\n images = ImageList.from_tensors(images, self.backbone.size_divisibility)\n return images", "def normalization(imgs):\n\n imgs = np.asarray(imgs).astype(np.float32)\n imgs = np.expand_dims(imgs / 255, axis=-1)\n return imgs", "def process_batch(self, inputs):\n for key, ipt in inputs.items():\n inputs[key] = ipt.to(self.device)\n\n # we only feed the image with frame_id 0 through the depth encoder\n features = self.models[\"encoder\"](inputs[\"color_aug\", 0, 0])\n outputs = self.models[\"depth\"](features)\n\n outputs.update(self.predict_poses(inputs, features))\n\n self.generate_images_pred(inputs, outputs)\n losses = self.compute_losses(inputs, outputs)\n\n return outputs, losses", "def preprocess_image(self, inputs):\n raise NotImplementedError('preprocess_image method not implemented.')", "def _preprocessing(self, input_image):\n if self.resize:\n input_image = self._np_resize_image(input_image,\n self.input_size,\n dtype='int')\n image = self._np_transpose(input_image)\n image = self._np_normalize(image)\n image = self._np_flip_n_cat(image)\n return image", "def __call__(self, in_data):\n # There are five data augmentation steps\n # 1. Color augmentation\n # 2. Random expansion\n # 3. Random cropping\n # 4. Resizing with random interpolation\n # 5. Random horizontal flipping\n if self.count % 10 == 0 and self.count % self.batchsize == 0 and self.count != 0:\n self.i += 1\n i = self.i % len(self.dim)\n self.output_shape = (self.dim[i], self.dim[i])\n # print(self.count, self.i, self.output_shape)\n self.count += 1\n\n img, bbox, label = in_data\n\n # 1. Color augmentation\n img = random_distort(img, brightness_delta=32,\n contrast_low=0.5, contrast_high=1.5,\n saturation_low=0.5, saturation_high=1.5,\n hue_delta=25)\n\n # Normalize. 
range is [0, 1]\n img /= 255.0\n\n _, H, W = img.shape\n scale = np.random.uniform(0.25, 2)\n random_expand = np.random.uniform(0.8, 1.2, 2)\n net_h, net_w = self.output_shape\n out_h = net_h * scale # random_expand[0]\n out_w = net_w * scale # random_expand[1]\n if H > W:\n out_w = out_h * (float(W) / H) * np.random.uniform(0.8, 1.2)\n elif H < W:\n out_h = out_w * (float(H) / W) * np.random.uniform(0.8, 1.2)\n\n out_h = int(out_h)\n out_w = int(out_w)\n\n img = resize_with_random_interpolation(img, (out_h, out_w))\n bbox = transforms.resize_bbox(bbox, (H, W), (out_h, out_w))\n\n if out_h < net_h and out_w < net_w:\n img, param = expand(img, out_h=net_h, out_w=net_w,\n fill=self.value, return_param=True)\n bbox = transforms.translate_bbox(\n bbox, y_offset=param['y_offset'], x_offset=param['x_offset'])\n else:\n out_h = net_h if net_h > out_h else int(out_h * 1.05)\n out_w = net_w if net_w > out_w else int(out_w * 1.05)\n img, param = expand(img, out_h=out_h, out_w=out_w,\n fill=self.value, return_param=True)\n bbox = transforms.translate_bbox(\n bbox, y_offset=param['y_offset'], x_offset=param['x_offset'])\n\n img, param = crop_with_bbox_constraints(\n img, bbox, return_param=True,\n crop_height=net_h, crop_width=net_w)\n bbox, param = transforms.crop_bbox(\n bbox, y_slice=param['y_slice'], x_slice=param['x_slice'],\n allow_outside_center=False, return_param=True)\n label = label[param['index']]\n\n\n # 5. Random horizontal flipping # OK\n img, params = transforms.random_flip(\n img, x_random=True, return_param=True)\n bbox = transforms.flip_bbox(\n bbox, self.output_shape, x_flip=params['x_flip'])\n\n # Preparation for Yolov2 network\n bbox[:, ::2] /= self.output_shape[0] # y\n bbox[:, 1::2] /= self.output_shape[1] # x\n\n num_bbox = len(bbox)\n len_max = max(num_bbox, self.max_target)\n\n gmap = create_map_anchor_gt(bbox, self.anchors, self.output_shape,\n self.downscale, self.n_boxes, len_max)\n\n out_bbox = np.zeros((len_max, 4), dtype='f')\n out_bbox[:num_bbox] = bbox[:num_bbox]\n out_label = np.zeros((len_max), dtype='i')\n out_label[:num_bbox] = label\n\n gmap = gmap[:self.max_target]\n out_bbox = out_bbox[:self.max_target]\n out_label = out_label[:self.max_target]\n num_array = min(num_bbox, self.max_target)\n\n img = np.clip(img, 0, 1)\n return img, out_bbox, out_label, gmap, np.array([num_array], dtype='i')", "def preprocess_train(im, boxes, classes, inst_masks, mask, input_size, min_size=2,\n use_augment=False, training_scale=[0.3, 0.5, 0.7, 1.0]):\n ori_im = np.copy(im)\n target_h, target_w = input_size\n\n # ---------- old data_augmentation ----------\n if use_augment:\n if np.random.choice([0, 1]) != 0:\n scale = np.random.choice(training_scale) # adding more small objects\n im, inst_masks, mask, boxes, classes = random_scale(im, inst_masks, mask, boxes, classes, scale=scale)\n min_obj_cover = np.random.choice([0.8, 0.9, 1.0])\n # truncted examples may lead to multiple-detections..\n im, inst_masks, mask, boxes, classes = random_aspect_ratio(im, inst_masks, mask, boxes, classes,\n min_aspect_ratio=0.5, max_aspect_ratio=2.0,\n min_obj_cover=min_obj_cover)\n #\n # # r = np.random.randint(0, 3)\n # if np.random.rand() < 0.75:\n # im, inst_masks, mask, boxes, classes = fixed_scale(im, inst_masks, mask, boxes, classes, target_h, target_w)\n # else:\n # im, inst_masks, mask, boxes, classes = center_crop2fixed_pad(im, inst_masks, mask, boxes, classes, target_w, target_h,\n # min_size=min_size)\n\n # ---------- old data_augmentation ----------\n\n # ---------- none 
data_augmentation ----------\n im, inst_masks, mask, boxes, classes = fixed_scale(im, inst_masks, mask, boxes, classes, target_h, target_w)\n im, inst_masks, mask, boxes, classes = random_flip(im, inst_masks, mask, boxes, classes)\n # ---------- none data_augmentation ----------\n\n # ---------- old data_augmentation ----------\n im = distort_color(im)\n # ---------- old data_augmentation ----------\n\n im = imcv2_recolor(im)\n\n # add this because zeros numpy array will cause errors in torch Dataloader\n inst_masks = np.zeros([1, target_h, target_w], dtype=inst_masks.dtype) if inst_masks.size == 0 else inst_masks\n\n boxes = np.asarray(boxes, dtype=np.float32)\n return im, boxes, classes, inst_masks, mask, ori_im", "def Batch_Size_Normalization(batch, batch_len, pad_token, batch_size):\n max_length = max(batch_len)\n current_batch_len = len(batch)\n need_more = batch_size-current_batch_len\n if need_more==0:\n return batch\n\n padding_array = np.ones(max_length)*pad_token\n for i in range(need_more):\n batch.append(padding_array)\n return batch", "def pre_process(self, images: Union[np.ndarray, List]) -> np.ndarray:\n images = validate_image(images)\n image_sizes = []\n image_arr = []\n for image in images:\n image_sizes.append(image.shape)\n image = resize(image,\n height=self.in_h,\n width=self.in_w)\n image = normalize(image)\n image_arr.append(image)\n image_arr = np.array(image_arr)\n return image_arr, image_sizes", "def space_to_batch(images, labels, tiles, n_tiles, paddings_image, paddings_tiles, shape_padded_image, shape_padded_label, shape_input, shape_output, b_with_labels=False, b_verbose=False):\n\n # map parse function to each zipped element\n print(paddings_tiles, shape_padded_label, shape_output)\n assert any([a % b <= 0 for a, b in zip(shape_padded_label, shape_output)])\n\n paddings_both = [a + b for a, b in zip(paddings_image, paddings_tiles)]\n shape_padded_both = [a + 2 * b for a, b in zip(shape_padded_image, paddings_tiles)]\n scale_factor = [float(a/b) for a, b in zip(shape_padded_both, shape_padded_image)]\n\n paddings_labels = [(x, x) for x in paddings_tiles] + [(0, 0)]\n paddings_both = [(x, x) for x in paddings_both] + [(0, 0)]\n\n if b_verbose:\n print('Padding/ padding_img: ', paddings_labels, paddings_both, scale_factor)\n logging.info('Using %d patches to predict a whole image', n_tiles)\n\n # process labels into patches\n if b_with_labels:\n # print('labels prior: ', labels)\n labels = tf.pad(labels, paddings_labels)\n labels = tf.expand_dims(labels, axis=0)\n batch_shape = tf.stack([n_tiles, *shape_output, tf.shape(labels)[-1]])\n labels = tf.reshape(labels, batch_shape)\n # print('labels post: ', labels)\n\n # process images into patches\n # Note: a simple reshape is not possible due to the overlapping of inputs\n # map_fn or tf while_loops or sth similar might help\n images = tf.pad(images, paddings_both)\n if b_verbose:\n images = tf.Print(images, [tf.shape(images), tiles], 'Temporary patch shape - before: ', summarize=5)\n\n patches = [None for _ in range(n_tiles)]\n # patch_indices = list(range(n_tiles))\n positions = [None for _ in range(n_tiles)]\n offset_image = [int(x / 2) for x in shape_input]\n idx_tile = 0\n for idx_0 in range(tiles[0]):\n for idx_1 in range(tiles[1]):\n for idx_2 in range(tiles[2]):\n start_pos = [shape_output[0] * idx_0, shape_output[1] * idx_1, shape_output[2] * idx_2, 0]\n positions[idx_tile] = [float(a + b) for a, b in zip(start_pos[0:3], offset_image)]\n patches[idx_tile] = tf.slice(images, start_pos, shape_input + 
[tf.shape(images)[-1]])\n idx_tile += 1\n # images = tf.Print(images, [tf.shape(images), idx_0, idx_1, idx_2, start_pos], 'performed crop at: ')\n\n if b_verbose:\n patches[0] = tf.Print(patches[0], [tf.shape(patches[0])], 'Temporary patch shape - within: ', summarize=5)\n images = tf.stack(patches, axis=0)\n\n positions_t = tf.stack(positions, axis=0)\n positions_t = tf.cast(tf.multiply((tf.divide(positions_t, shape_padded_both) - 0.5) * 2, scale_factor), dtype=tf.float32) # rescale it | account for larger padded size\n if b_verbose:\n images = tf.Print(images, [tf.shape(images)], 'Temporary patch shape - after: ', summarize=5)\n\n return images, labels, positions_t", "def preprocess_image(image, training):\r\n if training:\r\n ### YOUR CODE HERE\r\n hpad = np.zeros((32,4,3))\r\n image = np.hstack((image,hpad))\r\n image = np.hstack((hpad,image))\r\n\r\n vpad = np.zeros((4,40, 3))\r\n image = np.vstack((image, vpad))\r\n image = np.vstack((vpad, image))\r\n\r\n #print(np.shape(image))\r\n # Resize the image to add four extra pixels on each side.\r\n\r\n ### YOUR CODE HERE\r\n\r\n ### YOUR CODE HERE\r\n # Randomly crop a [32, 32] section of the image.\r\n # HINT: randomly generate the upper left point of the image\r\n rx = np.random.randint(8)\r\n ry = np.random.randint(8)\r\n crp_img = image[rx:rx+32,ry:ry+32,:]\r\n #print(np.shape(crp_img))\r\n\r\n ### YOUR CODE HERE\r\n\r\n ### YOUR CODE HERE\r\n # Randomly flip the image horizontally.\r\n # for i in range(crp_img.shape[0]):\r\n # crp_img[i] = np.fliplr(crp_img[i])\r\n rf = np.random.randint(2)\r\n if(rf == 0):\r\n crp_img = np.fliplr(crp_img)\r\n #print(np.shape(crp_img))\r\n image = crp_img\r\n\r\n\r\n ### YOUR CODE HERE\r\n\r\n ### YOUR CODE HERE\r\n # Subtract off the mean and divide by the standard deviation of the pixels.\r\n cmean = []\r\n cstd = []\r\n for i in range(np.shape(image)[2]):\r\n arr = image[:,:,i]\r\n cmean = np.mean(arr)\r\n cstd = (np.std(arr))\r\n lfn = lambda x : (x-cmean)/cstd\r\n image[:,:,i] = lfn(arr)\r\n #print(np.shape(image))\r\n\r\n ### YOUR CODE HERE\r\n\r\n return image", "def pre_process_data(input_path: list, cuts: int, shape: int = 32, normalize: bool = True) -> list:\n images = []\n images_uncut = []\n for files_path in input_path:\n\n files = os.listdir(files_path) # TODO paths\n for f in files:\n file_path = f'{files_path}/{f}'\n im_uncut = cv2.imread(file_path)\n im_uncut = cv2.cvtColor(im_uncut, cv2.COLOR_RGB2GRAY)\n images_uncut.append(cv2.resize(im_uncut, (shape * cuts, shape * cuts)))\n x = np.array(images_uncut)\n\n if normalize:\n x_mean = np.mean(x, axis=(0, 1, 2))\n x_std = np.std(x, axis=(0, 1, 2))\n x = (x - x_mean) / (x_std + 1e-9)\n\n for im in x:\n height = im.shape[0]\n width = im.shape[1]\n frac_h = height // cuts\n frac_w = width // cuts\n i = 0\n image = []\n for h in range(cuts):\n for w in range(cuts):\n crop = im[h * frac_h:(h + 1) * frac_h, w * frac_w:(w + 1) * frac_w]\n crop_rehaped = cv2.resize(crop, (shape, shape))\n image.append([crop_rehaped, i, number_to_angle(i, cuts), neighbours(i, cuts)])\n i = i + 1\n images.append(image)\n # return np.array(images) # todo back to array\n return images", "def _pad_img(self, results):\n pad_val = self.pad_val.get('img', 0)\n for key in results.get('img_fields', ['img']):\n if self.pad_to_square:\n max_size = max(results[key].shape[:2])\n self.size = (max_size, max_size)\n if self.size is not None:\n padded_img = general_ocr.impad(\n results[key], shape=self.size, pad_val=pad_val)\n elif self.size_divisor is not None:\n padded_img = 
general_ocr.impad_to_multiple(\n results[key], self.size_divisor, pad_val=pad_val)\n results[key] = padded_img\n results['pad_shape'] = padded_img.shape\n results['pad_fixed_size'] = self.size\n results['pad_size_divisor'] = self.size_divisor", "def preprocess(\n self,\n images: ImageInput,\n do_resize: Optional[bool] = None,\n size: Optional[Dict[str, int]] = None,\n resample: Optional[\"PILImageResampling\"] = None,\n do_rescale: Optional[bool] = None,\n rescale_factor: Optional[Union[int, float]] = None,\n do_normalize: Optional[bool] = None,\n image_mean: Optional[Union[float, List[float]]] = None,\n image_std: Optional[Union[float, List[float]]] = None,\n do_pad: Optional[bool] = None,\n pad_size: Optional[Dict[str, int]] = None,\n do_convert_rgb: bool = None,\n return_tensors: Optional[Union[str, TensorType]] = None,\n data_format: ChannelDimension = ChannelDimension.FIRST,\n input_data_format: Optional[Union[str, ChannelDimension]] = None,\n **kwargs,\n ):\n do_resize = do_resize if do_resize is not None else self.do_resize\n size = size if size is not None else self.size\n size = get_size_dict(max_size=size, default_to_square=False) if not isinstance(size, dict) else size\n resample = resample if resample is not None else self.resample\n do_rescale = do_rescale if do_rescale is not None else self.do_rescale\n rescale_factor = rescale_factor if rescale_factor is not None else self.rescale_factor\n do_normalize = do_normalize if do_normalize is not None else self.do_normalize\n image_mean = image_mean if image_mean is not None else self.image_mean\n image_std = image_std if image_std is not None else self.image_std\n do_pad = do_pad if do_pad is not None else self.do_pad\n pad_size = pad_size if pad_size is not None else self.pad_size\n pad_size = get_size_dict(pad_size, default_to_square=True)\n do_convert_rgb = do_convert_rgb if do_convert_rgb is not None else self.do_convert_rgb\n\n images = make_list_of_images(images)\n\n if not valid_images(images):\n raise ValueError(\n \"Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, \"\n \"torch.Tensor, tf.Tensor or jax.ndarray.\"\n )\n\n if do_resize and (size is None or resample is None):\n raise ValueError(\"Size and resample must be specified if do_resize is True.\")\n\n if do_rescale and rescale_factor is None:\n raise ValueError(\"Rescale factor must be specified if do_rescale is True.\")\n\n if do_normalize and (image_mean is None or image_std is None):\n raise ValueError(\"Image mean and std must be specified if do_normalize is True.\")\n\n if do_pad and pad_size is None:\n raise ValueError(\"Pad size must be specified if do_pad is True.\")\n\n # PIL RGBA images are converted to RGB\n if do_convert_rgb:\n images = [convert_to_rgb(image) for image in images]\n\n # All transformations expect numpy arrays.\n images = [to_numpy_array(image) for image in images]\n\n if is_scaled_image(images[0]) and do_rescale:\n logger.warning_once(\n \"It looks like you are trying to rescale already rescaled images. 
If the input\"\n \" images have pixel values between 0 and 1, set `do_rescale=False` to avoid rescaling them again.\"\n )\n\n if input_data_format is None:\n # We assume that all images have the same channel dimension format.\n input_data_format = infer_channel_dimension_format(images[0])\n\n original_sizes = [get_image_size(image, channel_dim=input_data_format) for image in images]\n\n if do_resize:\n images = [\n self.resize(image=image, size=size, resample=resample, input_data_format=input_data_format)\n for image in images\n ]\n\n reshaped_input_sizes = [get_image_size(image, channel_dim=input_data_format) for image in images]\n\n if do_rescale:\n images = [\n self.rescale(image=image, scale=rescale_factor, input_data_format=input_data_format)\n for image in images\n ]\n\n if do_normalize:\n images = [\n self.normalize(image=image, mean=image_mean, std=image_std, input_data_format=input_data_format)\n for image in images\n ]\n\n if do_pad:\n images = [\n self.pad_image(image=image, pad_size=pad_size, input_data_format=input_data_format) for image in images\n ]\n\n images = [\n to_channel_dimension_format(image, data_format, input_channel_dim=input_data_format) for image in images\n ]\n encoded_outputs = BatchFeature(\n data={\n \"pixel_values\": images,\n \"original_sizes\": original_sizes,\n \"reshaped_input_sizes\": reshaped_input_sizes,\n },\n tensor_type=return_tensors,\n )\n return encoded_outputs", "def normalize(batch_img: np.ndarray) -> np.ndarray:\n batch_img = batch_img.astype('float32')\n return batch_img / 127.5 - 1", "def dimension_postprocess(self, chunked_data, original_data, scale=1, padding=True):\r\n\r\n assert len(original_data.shape) == 2, \"data dimension expected to be (xline ,samp_point)\"\r\n assert len(chunked_data.shape) == 3, \"Chunked data dimension expected to be (batch_size, xline, samp_point)\"\r\n\r\n if padding:\r\n if original_data.shape[0] < self.rows:\r\n new_images = []\r\n for data in chunked_data:\r\n new_images.append(data[0:scale * original_data.shape[0], :])\r\n chunked_data = np.array(new_images)\r\n\r\n if original_data.shape[1] < self.cols:\r\n new_images = []\r\n for data in chunked_data:\r\n new_images.append(data[:, 0:scale * original_data.shape[1]])\r\n chunked_data = np.array(new_images)\r\n\r\n new_shape = (\r\n original_data.shape[0] * scale,\r\n original_data.shape[1] * scale\r\n )\r\n reconstruction = np.zeros(new_shape)\r\n x_chunks, y_chunks = self.get_chunks(original_data)\r\n\r\n i = 0\r\n s = scale\r\n for x in x_chunks:\r\n for y in y_chunks:\r\n prior_fill = reconstruction != 0\r\n chunk = np.zeros(new_shape)\r\n chunk[x[0] * s:x[1] * s, y[0] * s:y[1] * s] += chunked_data[i]\r\n chunk_fill = chunk != 0\r\n reconstruction += chunk\r\n reconstruction[prior_fill & chunk_fill] = reconstruction[prior_fill & chunk_fill] / 2\r\n i += 1\r\n return reconstruction", "def _pre_process_images(images, details):\n # If the images are gray-scale, the number of channels (1) must be \"added\" to the size of the samples.\n if details['channels'] == 1:\n img_rows, img_cols = details['sample size']\n\n # The place of the dimension with 1 depends on the backend used by Keras.\n if K.image_data_format() == 'channels_first':\n images = images.reshape(images.shape[0], 1, img_rows, img_cols)\n else:\n images = images.reshape(images.shape[0], img_rows, img_cols, 1)\n\n # Normalize pixel values to be in the interval [0, 1]\n images = images.astype('float32')\n max_bit_value = 2 ** details['bits per sample'] - 1\n images /= max_bit_value\n return 
images", "def main():\n\n # Just grab all files - we'll use try/except to filter\n images = glob.glob(os.path.join(args.input_dir, '*.*'))\n if not os.path.exists(args.output_dir):\n os.makedirs(args.output_dir)\n for img_file in images:\n print(img_file)\n try:\n np_img = plt.imread(img_file)\n print(np_img.shape)\n img_name = img_file.split(os.sep)[-1]\n new_img_file = os.path.join(args.output_dir, img_name)\n pad_image(np_img, new_img_file)\n except Exception as e:\n print('Warning: {}. Skpping file.'.format(e))\n continue", "def _augment_images(self, images, random_state, parents, hooks):\n nb_images = len(images)\n samples = self.p.draw_samples((nb_images,), random_state=random_state)\n for i in sm.xrange(nb_images):\n if samples[i] == 1:\n if self.axis == 1:\n images[i] = np.fliplr(images[i])\n elif self.axis == 0:\n images[i] = np.flipud(images[i])\n self.samples = samples\n return images", "def process_batch(self, image_batch):\n images = []\n for image_data in image_batch:\n image_resize = cv2.resize(image_data, (0,0), fx=0.5, fy=0.5) #NOTE\n images.append(image_resize)\n\n return np.array(images)", "def batch_image_preprocess(raw_images,\n image_size: Union[int, Tuple[int, int]],\n mean_rgb,\n stddev_rgb,\n batch_size: int = None):\n if not batch_size:\n # map_fn is a little bit slower due to some extra overhead.\n # map_fn -> vectorized_map (fully parallelizes the batch).\n map_fn = functools.partial(\n image_preprocess,\n image_size=image_size,\n mean_rgb=mean_rgb,\n stddev_rgb=stddev_rgb)\n images, scales = tf.vectorized_map(map_fn, raw_images, warn=False)\n images = tf.stop_gradient(tf.cast(images, tf.float32))\n scales = tf.stop_gradient(tf.cast(scales, tf.float32))\n return (images, scales)\n\n # If batch size is known, use a simple loop.\n scales, images = [], []\n for i in range(batch_size):\n image, scale = image_preprocess(raw_images[i], image_size, mean_rgb,\n stddev_rgb)\n scales.append(scale)\n images.append(image)\n images = tf.stack(images)\n scales = tf.stack(scales)\n return (images, scales)", "def _normalize(images):\n images -= images.mean(axis=0, keepdims=True)\n images /= np.maximum(images.std(axis=0, keepdims=True), 3e-1)", "def _normalize(images):\n images -= images.mean(axis=0, keepdims=True)\n images /= np.maximum(images.std(axis=0, keepdims=True), 3e-1)", "def dimension_preprocess(self, data, padding=True):\r\n\r\n assert len(data.shape) == 2, \"Data dimension expected to be ( xline, samp_point)\"\r\n if padding:\r\n if data.shape[0] < self.rows:\r\n padding = np.ones((self.rows - data.shape[0], data.shape[1]))\r\n data = np.concatenate((data, padding), axis=0)\r\n if data.shape[1] < self.cols:\r\n padding = np.ones((data.shape[0], self.cols - data.shape[1]))\r\n data = np.concatenate((data, padding), axis=1)\r\n x_chunks, y_chunks = self.get_chunks(data)\r\n images = []\r\n for x in x_chunks:\r\n for y in y_chunks:\r\n images.append(\r\n data[x[0]:x[1], y[0]:y[1]]\r\n )\r\n images = np.array(images)\r\n\r\n return images", "def preprocess_batch(self,image_batch):\n if len(image_batch.shape) == 4 and image_batch.shape[1:] != (72,72,1):\n assert (False),'wrong batch shape'\n\n if len(image_batch.shape) == 3:\n if image_batch.shape == (72, 72, 3):\n image_batch = np.mean(image_batch,axis=2).reshape(72,72,1)\n elif image_batch.shape == (72, 72, 1):\n pass\n else:\n assert(False),'wrong batch shape'\n\n return image_batch/255", "def batch_preprocess(self, input_folder, output_folder, padding=20):\n input_files = glob.glob(input_folder + '/*')\n for 
input_path in input_files:\n subject_name = re.search(self.KEY_WORD_FILE, input_path).group()\n output_path = output_folder + '/' + subject_name\n\n data, options = nrrd.read(input_path)\n data, options = self.pad_upper(data, options, padding)\n data, options = self.filter_background_to_air(data, options)\n\n print 'write ' + output_path\n nrrd.write(output_path, data, options) # too slow in Python", "def normalise(image):", "def postprocess(self, images):\n if not isinstance(images, np.ndarray):\n raise ValueError(f'Images should be with type `numpy.ndarray`!')\n\n if images.ndim != 4 or images.shape[1] != self.image_channels:\n raise ValueError(f'Input should be with shape [batch_size, channel, '\n f'height, width], where channel equals to '\n f'{self.image_channels}!\\n'\n f'But {images.shape} is received!')\n images = (images - self.min_val) * 255 / (self.max_val - self.min_val)\n images = np.clip(images + 0.5, 0, 255).astype(np.uint8)\n images = images.transpose(0, 2, 3, 1)\n if self.image_channels == 3 and self.channel_order == 'BGR':\n images = images[:, :, :, ::-1]\n\n return images", "def preprocess(img):\n \n scaler=StandardScaler() ## scaler object to perform preprocessing\n img=scaler.fit_transform(img) ## zero-center and normalize\n \n return img", "def make_iterator_extract_scores_from_images_batched(dataloader, net, logger, image_batch_size, is_cuda,\n num_random_pyramid_scales=0, num_random_negative_labels=-1,\n class_image_augmentation=\"\"):\n\n logger.info(\"Extracting scores from all images\")\n # get images of all classes\n class_images, class_aspect_ratios, class_ids = dataloader.get_all_class_images()\n num_classes = len(class_images)\n assert len(class_aspect_ratios) == num_classes\n assert len(class_ids) == num_classes\n query_img_sizes = [FeatureMapSize(img=img) for img in class_images]\n \n # the current code works only with class batch == 1, this in inefficient in some place, but good in others\n # is there a better way?\n class_batch_size = 1\n\n # extract all class convolutions from batched class images\n class_conv_layer_batched = []\n logger.info(\"Extracting weights from {0} classes{1}\".format(num_classes,\n f\" with {class_image_augmentation} augmentation\" if class_image_augmentation else \"\"))\n for i in range(0, num_classes, class_batch_size):\n batch_class_ids = class_ids[i : i + class_batch_size]\n\n batch_class_images = []\n for i_label in range(len(batch_class_ids)):\n im = class_images[i + i_label].squeeze(0)\n if is_cuda:\n im = im.cuda()\n batch_class_images.append(im)\n if not class_image_augmentation:\n num_class_views = 1\n elif class_image_augmentation == \"rotation90\":\n im90 = im.rot90(1, [1, 2])\n im180 = im90.rot90(1, [1, 2])\n im270 = im180.rot90(1, [1, 2])\n batch_class_images.append(im90)\n batch_class_images.append(im180)\n batch_class_images.append(im270)\n num_class_views = 4\n elif class_image_augmentation == \"horflip\":\n im_flipped = im.flip(2)\n batch_class_images.append(im_flipped)\n num_class_views = 2\n elif class_image_augmentation == \"horflip_rotation90\":\n im90 = im.rot90(1, [1, 2])\n im180 = im90.rot90(1, [1, 2])\n im270 = im180.rot90(1, [1, 2])\n im_flipped = im.flip(2)\n im90_flipped = im90.flip(2)\n im180_flipped = im180.flip(2)\n im270_flipped = im270.flip(2)\n\n for new_im in [im90, im180, im270, im_flipped, im90_flipped, im180_flipped, im270_flipped]:\n batch_class_images.append(new_im)\n\n num_class_views = len(batch_class_images)\n else:\n raise RuntimeError(f\"Unknown value of class_image_augmentation: 
{class_image_augmentation}\")\n\n for b_im in batch_class_images:\n class_feature_maps = net.net_label_features([b_im])\n class_conv_layer = net.os2d_head_creator.create_os2d_head(class_feature_maps)\n class_conv_layer_batched.append(class_conv_layer)\n \n # loop over all images\n iterator_batches = dataloader.make_iterator_for_all_images(image_batch_size, num_random_pyramid_scales=num_random_pyramid_scales)\n for batch_ids, pyramids_batch, box_transforms_batch, initial_img_size_batch in iterator_batches:\n t_start_batch = time.time()\n # select labels to use for search at this batch\n if num_random_negative_labels >= 0 :\n # randomly shuffle labels\n neg_labels = torch.randperm(len(class_conv_layer_batched))\n neg_labels = neg_labels[:num_random_negative_labels]\n # add positive labels\n pos_labels = dataloader.get_class_ids_for_image_ids(batch_ids)\n pos_labels = dataloader.convert_label_ids_global_to_local(pos_labels, class_ids)\n batch_labels_local = torch.cat([neg_labels, pos_labels], 0).unique()\n else:\n # take all the labels - needed for evaluation\n batch_labels_local = torch.arange(len(class_conv_layer_batched))\n \n batch_class_ids = [class_ids[l // num_class_views] for l in batch_labels_local]\n batch_query_img_sizes = [query_img_sizes[l // num_class_views] for l in batch_labels_local]\n\n # extract features at all pyramid levels\n batch_images_pyramid = []\n loc_scores = []\n class_scores = []\n fm_sizes = []\n transform_corners = []\n num_pyramid_levels = len(pyramids_batch)\n \n t_cum_features = 0.0\n t_cum_labels = 0.0\n for batch_images in pyramids_batch:\n if is_cuda:\n batch_images = batch_images.cuda()\n \n t_start_features = time.time()\n feature_maps = net.net_feature_maps(batch_images)\n torch.cuda.synchronize()\n t_cum_features += time.time() - t_start_features\n\n # batch class images\n loc_scores.append([])\n class_scores.append([])\n fm_sizes.append([])\n transform_corners.append([])\n t_start_labels = time.time()\n assert class_batch_size == 1, \"the iterator on images works only with labels batches of size 1\"\n\n for i_class_batch in batch_labels_local:\n # apply net at this pyramid level\n loc_s_p, class_s_p, _, fm_sizes_p, transform_corners_p = \\\n net(class_head=class_conv_layer_batched[i_class_batch],\n feature_maps=feature_maps)\n loc_scores[-1].append(loc_s_p)\n class_scores[-1].append(class_s_p)\n fm_sizes[-1].append(fm_sizes_p)\n transform_corners[-1].append(transform_corners_p)\n torch.cuda.synchronize()\n t_cum_labels += time.time() - t_start_labels\n\n if not feature_maps.requires_grad:\n # explicitly remove a possibly large chunk of GPU memory\n del feature_maps\n\n batch_images_pyramid.append(batch_images)\n\n timing_str = \"Feature time: {0}, Label time: {1}, \".format(time_for_printing(t_cum_features, mode=\"s\"),\n time_for_printing(t_cum_labels, mode=\"s\"))\n\n # loc_scores, class_scores: pyramid_level x class_batch x image_in_batch x\n for i_image_in_batch, image_id in enumerate(batch_ids):\n # get scores from all pyramid levels\n image_loc_scores_p, image_class_scores_p, image_fm_sizes_p = [], [], []\n transform_corners_p = []\n for i_p in range(num_pyramid_levels):\n if loc_scores is not None and loc_scores[0] is not None and loc_scores[0][0] is not None:\n image_loc_scores_p.append(torch.cat([s[i_image_in_batch] for s in loc_scores[i_p]], 0))\n else:\n image_loc_scores_p.append(None)\n image_class_scores_p.append(torch.cat([s[i_image_in_batch] for s in class_scores[i_p]], 0))\n\n if transform_corners is not None and transform_corners[0] 
is not None and transform_corners[0][0] is not None:\n transform_corners_p.append(torch.cat([s[i_image_in_batch] for s in transform_corners[i_p]], 0))\n else:\n transform_corners_p.append(None)\n\n image_fm_sizes_p.append(fm_sizes[i_p][0])\n\n # get a pyramid of one image[i_p]\n one_image_pyramid = [p[i_image_in_batch] for p in batch_images_pyramid]\n\n # extract the box transformations\n box_reverse_transforms = box_transforms_batch[i_image_in_batch]\n\n logger.info(timing_str + \"Net time: {0}\".format(time_since(t_start_batch)))\n yield image_id, image_loc_scores_p, image_class_scores_p, one_image_pyramid,\\\n batch_query_img_sizes, batch_class_ids, box_reverse_transforms, image_fm_sizes_p, transform_corners_p", "def preprocess_(image, input_shape, keep_ap = True, rectangle=None, pad_center=False, gray_scale=False, **kwargs):\n\n if isinstance(image, str):\n try:\n image = kneron_preprocessing.API.load_image(image)\n except:\n try:\n image = kneron_preprocessing.API.load_bin(image, **kwargs)\n except:\n print('input format error')\n assert 0\n else:\n assert isinstance(image, np.ndarray)\n\n # only do part of the image\n '''\n if rectangle is not None:\n left, top, width, height = rectangle[:4]\n x1, y1, x2, y2 = left, top, left+width, top+height\n image = kneron_preprocessing.API.crop(image, box=(x1, y1, x2, y2))\n '''\n # get image original shape\n h_ori, w_ori = image.shape[:2]\n h, w = input_shape\n scale = [1, 1]\n '''\n if keep_ap:\n if w_ori > h or h_ori > w:\n scale = max(1.0*w_ori / w, 1.0*h_ori / h)\n scale = [scale, scale]\n image = kneron_preprocessing.API.resize(image, size=(w, h), keep_ratio=keep_ap, type='bilinear')\n else:\n scale = [1, 1]\n else:\n if w_ori > h or h_ori > w:\n scale = [1.0*w_ori / w, 1.0*h_ori / h]\n image = kneron_preprocessing.API.resize(image, size=(w, h), keep_ratio=keep_ap, type='bilinear')\n else:\n scale = [1, 1]\n\n if pad_center:\n image = kneron_preprocessing.API.pad_center(image, size=(w, h), pad_val=0)\n else:\n image = kneron_preprocessing.API.pad_corner(image, size=(w, h), pad_val=0)\n '''\n img_data = np.array(image).reshape((h, w, 3))\n if gray_scale:\n img_data = kneron_preprocessing.API.convert(image=img_data, out_fmt='NIR')\n img_data = img_data.reshape((h, w, 1))\n img_data = img_data[None]\n\n img_data = kneron_preprocessing.API.norm(img_data)\n \n return img_data, {'scale': scale, 'w_ori': w_ori, 'h_ori': h_ori}", "def smdm_normalize(images, window, padding, name=\"unnamed_smdm_normalize\"):\n\tMEDIAN_JITTER = tf.constant(1e-8)\n\t\n\tif window % 2 == 0:\n\t\traise ValueError(\"attempted to smdm_normalize() with even-sized window\")\n\n\timages = tf.cast(images, tf.float32)\n\tbatch_size, height, width, channels = tf.shape(images)[0], tf.shape(images)[1], tf.shape(images)[2], tf.shape(images)[3]\n\n\tspatial_last = tf.transpose(images, (0, 3, 1, 2))\n\tspatial_last_and_flat = tf.reshape(spatial_last, (batch_size, channels, -1))\n\tn = tf.multiply(height, width)\n\tk = tf.to_int32(tf.divide(n, 2)) + 1\n\ttop_k = tf.nn.top_k(spatial_last_and_flat, k, name=name + \"_top_half_of_images\")[0]\n\tmedians_spatial_last_and_flat = tf.cond(\n\t\ttf.equal(tf.mod(n, 2), 0),\n\t\tlambda: tf.reduce_mean(top_k[:, :, k - 2: k], -1, keep_dims=True),\n\t\tlambda: top_k[:, :, k - 1]\n\t)\n\tmedians_spatial_last_and_flat = tf.add(\n\t\tmedians_spatial_last_and_flat,\n\t\ttf.fill(tf.shape(medians_spatial_last_and_flat), MEDIAN_JITTER)\n\t)\n\tmedians_spatial_last = tf.expand_dims(medians_spatial_last_and_flat, 3)\n\tmedians = 
tf.transpose(medians_spatial_last, (0, 2, 3, 1))\n\timages = tf.divide(images, medians, name=name + \"_divide_images_by_medians\")\n\n\tpadding_amount = int((window - 1) / 2)\n\tpadding_amounts = ((0, 0), (padding_amount, padding_amount), (padding_amount, padding_amount), (0, 0))\n\timages_padded = tf.pad(images, padding_amounts, padding)\n\tlocal_means = tf.nn.pool(images_padded, (window, window), \"AVG\", \"VALID\", name=name + \"_local_means_of_images\")\n\timages = tf.subtract(images, local_means, name=name + \"_subtract_local_means_from_images\")\n\n\treturn images", "def process_images(image, label):\n # Normalize images to have a mean of 0 and standard deviation of 1\n # per_image_standardization is preferred, which normalize the entire image to mean zero and std 1.\n # It also make learning fast.\n image = tf.image.per_image_standardization(image)\n # Resize images from 32x32 to 277x277\n image = tf.image.resize(image, (227,227))\n return image, label", "def run(self):\n self.run_tasks()\n self.images = np.array(self.images)\n self.shapes.extend(self.images.shape[-2:])\n\n self.images = np.reshape(self.images, self.shapes)", "def process_images(self):\n self.processed_content_image = tf.keras.applications.vgg19.preprocess_input(\n self.content_image)\n self.processed_style_image = tf.keras.applications.vgg19.preprocess_input(\n self.style_image)", "def normalize_batch(batch, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]):\n # normalize using imagenet mean and std\n batch = batch.clone()\n mean = torch.tensor(mean).view(-1, 1, 1)\n std = torch.tensor(std).view(-1, 1, 1)\n # if your image data is scaled to scale 0-255, uncomment the line below\n # batch.div_(255.0)\n return (batch - mean) / std", "def preprocess(self, x: paddle.Tensor) -> paddle.Tensor:\n # Normalize colors\n x = (x - self.pixel_mean) / self.pixel_std\n\n # Pad\n h, w = x.shape[-2:]\n padh = self.image_encoder.img_size - h\n padw = self.image_encoder.img_size - w\n x = F.pad(x, (0, padw, 0, padh))\n return x", "def batch_norm(self, inputs):\n x = inputs\n x = self.bn(x)\n return x", "def batch_norm(inputs, training, data_format):\n # We set fused=True for a significant performance boost. 
See\n # https://www.tensorflow.org/performance/performance_guide#common_fused_ops\n return tf.compat.v1.layers.batch_normalization(\n inputs=inputs, axis=1 if data_format == 'channels_first' else 3,\n momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True,\n scale=True, training=training, fused=True)", "def make_flat_avg(images, out):\n image = Image(avg_images(images, out))\n image.normalise()\n return out", "def batch_normalization(input_var=None):\n\n # Hyperparameters\n hp = Hyperparameters()\n hp('batch_size', 30)\n hp('n_epochs', 1000)\n hp('learning_rate', 0.01)\n hp('l1_reg', 0.00)\n hp('l2_reg', 0.0001)\n hp('patience', 5000)\n\n # Create connected layers\n # Input layer\n l_in = InputLayer(input_shape=(hp.batch_size, 28 * 28), input_var=input_var, name='Input')\n # Batch Normalization\n l_bn1 = BatchNormalization(incoming=l_in, name='Batch Normalization 1')\n # Dense Layer\n l_hid1 = DenseLayer(incoming=l_bn1, n_units=500, W=glorot_uniform, l1=hp.l1_reg,\n l2=hp.l2_reg, activation=relu, name='Hidden layer 1')\n # Batch Normalization\n l_bn2 = BatchNormalization(incoming=l_hid1, name='Batch Normalization 2')\n # Dense Layer\n l_hid2 = DenseLayer(incoming=l_bn2, n_units=500, W=glorot_uniform, l1=hp.l1_reg,\n l2=hp.l2_reg, activation=relu, name='Hidden layer 2')\n # Batch Normalization\n l_bn3 = BatchNormalization(incoming=l_hid2, name='Batch Normalization 3')\n # Logistic regression Layer\n l_out = LogisticRegression(incoming=l_bn3, n_class=10, l1=hp.l1_reg,\n l2=hp.l2_reg, name='Logistic regression')\n\n # Create network and add layers\n net = Network('mlp with batch normalization')\n net.add(l_in)\n net.add(l_bn1)\n net.add(l_hid1)\n net.add(l_bn2)\n net.add(l_hid2)\n net.add(l_bn3)\n net.add(l_out)\n\n return net, hp", "def preprocess_image(images,\n height=INCEPTION_DEFAULT_IMAGE_SIZE,\n width=INCEPTION_DEFAULT_IMAGE_SIZE,\n scope=None):\n is_single = images.shape.ndims == 3\n with ops.name_scope(scope, 'preprocess', [images, height, width]):\n if not images.dtype.is_floating:\n images = math_ops.to_float(images)\n if is_single:\n images = array_ops.expand_dims(images, axis=0)\n resized = image_ops.resize_bilinear(images, [height, width])\n resized = (resized - 128.0) / 128.0\n if is_single:\n resized = array_ops.squeeze(resized, axis=0)\n return resized", "def _data_generation(self, batch_data):\n # Initialization\n batch_x = []\n batch_y = defaultdict(list)\n\n for ind, item_data in batch_data.iterrows():\n img_path = os.path.join(self.img_dir, \"images\", \"rgb\", item_data[\"name\"])\n img = cv2.imread(img_path)\n try:\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n except Exception as error:\n print(img_path)\n print(error)\n not_valid_mask = self.read_masks_borders(item_data[\"name\"])\n img[not_valid_mask] = 0\n\n # getmasks\n targets = np.zeros((img.shape[0], img.shape[1], len(self.classes)))\n for i, c in enumerate(self.classes):\n mask_path = os.path.join(self.img_dir, \"labels\", c, item_data[\"name\"])\n mask = cv2.imread(\n mask_path.replace(\".jpg\", \".png\"), cv2.IMREAD_GRAYSCALE\n )\n mask[not_valid_mask[:, :, 0]] = 0\n mask = mask > 0\n targets[:, :, i] = mask\n\n res = self.reshape_func(image=img, mask=targets)\n img, targets = res['image'], res['mask']\n if self.do_aug:\n res = self.aug(image=img, mask=targets)\n img, targets = res['image'], res['mask']\n\n for i, c in enumerate(self.classes):\n batch_y[c].append(targets[:, :, i])\n\n batch_x.append(img)\n\n batch_x = np.array(batch_x, np.float32)\n batch_y = {k: np.array(v, np.float32) for k, v 
in batch_y.items()}\n batch_y = {k: np.expand_dims(v, axis=-1) for k, v in batch_y.items()}\n\n return (\n imagenet_utils.preprocess_input(batch_x, \"channels_last\", mode=\"tf\"),\n batch_y\n )", "def batch_generate(self, inputs, labels, batch_size=64):\n inputs_image, inputs, labels = check_inputs_labels(inputs, labels)\n arr_x = inputs\n arr_y = labels\n len_x = inputs_image.shape[0]\n batch_size = check_int_positive('batch_size', batch_size)\n batches = int(len_x / batch_size)\n rest = len_x - batches*batch_size\n res = []\n for i in range(batches):\n if isinstance(arr_x, tuple):\n x_batch = tuple([sub_items[i*batch_size: (i + 1)*batch_size] for sub_items in arr_x])\n else:\n x_batch = arr_x[i*batch_size: (i + 1)*batch_size]\n if isinstance(arr_y, tuple):\n y_batch = tuple([sub_labels[i*batch_size: (i + 1)*batch_size] for sub_labels in arr_y])\n else:\n y_batch = arr_y[i*batch_size: (i + 1)*batch_size]\n adv_x = self.generate(x_batch, y_batch)\n # Black-attack methods will return 3 values, just get the second.\n res.append(adv_x[1] if isinstance(adv_x, tuple) else adv_x)\n\n if rest != 0:\n if isinstance(arr_x, tuple):\n x_batch = tuple([sub_items[batches*batch_size:] for sub_items in arr_x])\n else:\n x_batch = arr_x[batches*batch_size:]\n if isinstance(arr_y, tuple):\n y_batch = tuple([sub_labels[batches*batch_size:] for sub_labels in arr_y])\n else:\n y_batch = arr_y[batches*batch_size:]\n adv_x = self.generate(x_batch, y_batch)\n # Black-attack methods will return 3 values, just get the second.\n res.append(adv_x[1] if isinstance(adv_x, tuple) else adv_x)\n\n adv_x = np.concatenate(res, axis=0)\n return adv_x", "def __call__(self, results):\n self._pad_img(results)\n self._pad_masks(results)\n self._pad_seg(results)\n return results", "def pad_all_images(image_dir, width, height, pad_type, value=(0, 0, 0)):\n pool = Pool(1)\n pool.starmap(pad_image, zip(\n image_dir, itertools.repeat(width), itertools.repeat(height), itertools.repeat(pad_type),\n itertools.repeat(value)))\n pool.close()\n pool.join()", "def adapt_batch(batch):\n image_arrays, labellings = batch\n\n current_batch_size = len(labellings)\n\n images = np.array(image_arrays).reshape(current_batch_size, *image_arrays[0].shape)\n\n padded_labellings = pad_labellings(labellings)\n\n labels = np.array(padded_labellings, dtype=np.int32).reshape(current_batch_size, -1)\n\n input_lengths = compute_input_lengths(image_arrays)\n\n label_lengths = np.array([len(labelling) for labelling in labellings],\n dtype=np.int32).reshape(current_batch_size, 1)\n\n return [images, labels, input_lengths, label_lengths], labels", "def normalize_images(image_sitk):\n\n max = 400\n min = -1000\n\n image_np = sitk.GetArrayFromImage(image_sitk)\n\n # Normalization\n image_np = (image_np - min)/(max - min)\n image_np[image_np > 1] = 1\n image_np[image_np < 0] = 0\n\n # Convert back to SITK\n out_image_sitk = sitk.GetImageFromArray(image_np)\n out_image_sitk.CopyInformation(image_sitk)\n\n return out_image_sitk", "def normalize_image(self):\n # The image normalization is identical to Cloud TPU ResNet.\n self._image = tf.image.convert_image_dtype(self._image, dtype=tf.float32)\n offset = tf.constant(DATASET_MEAN)\n offset = tf.expand_dims(offset, axis=0)\n offset = tf.expand_dims(offset, axis=0)\n self._image -= offset\n\n scale = tf.constant(DATASET_VAR)\n scale = tf.expand_dims(scale, axis=0)\n scale = tf.expand_dims(scale, axis=0)\n self._image /= scale", "def batch_norm(inputs, training, data_format):\n # We set fused=True for a significant 
performance boost. See\n # https://www.tensorflow.org/performance/performance_guide#common_fused_ops\n return tf.layers.batch_normalization(\n inputs=inputs, axis=1 if data_format == 'channels_first' else 3,\n momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True,\n scale=True, training=training, fused=True)", "def forward(\n self,\n batched_input: List[Dict[str, Any]],\n multimask_output: bool, ) -> List[Dict[str, paddle.Tensor]]:\n input_images = paddle.stack(\n [self.preprocess(x[\"image\"]) for x in batched_input], axis=0)\n image_embeddings = self.image_encoder(input_images)\n\n outputs = []\n for image_record, curr_embedding in zip(batched_input,\n image_embeddings):\n if \"point_coords\" in image_record:\n points = (image_record[\"point_coords\"],\n image_record[\"point_labels\"])\n else:\n points = None\n sparse_embeddings, dense_embeddings = self.prompt_encoder(\n points=points,\n boxes=image_record.get(\"boxes\", None),\n masks=image_record.get(\"mask_inputs\", None), )\n low_res_masks, iou_predictions = self.mask_decoder(\n image_embeddings=curr_embedding.unsqueeze(0),\n image_pe=self.prompt_encoder.get_dense_pe(),\n sparse_prompt_embeddings=sparse_embeddings,\n dense_prompt_embeddings=dense_embeddings,\n multimask_output=multimask_output, )\n masks = self.postprocess_masks(\n low_res_masks,\n input_size=image_record[\"image\"].shape[-2:],\n original_size=image_record[\"original_size\"], )\n masks = masks > self.mask_threshold\n outputs.append({\n \"masks\": masks,\n \"iou_predictions\": iou_predictions,\n \"low_res_logits\": low_res_masks,\n })\n return outputs", "def normalize(\n self,\n image: np.ndarray,\n data_format: Optional[Union[str, ChannelDimension]] = None,\n input_data_format: Optional[Union[str, ChannelDimension]] = None,\n ) -> np.ndarray:\n image = rescale(image=image, scale=1 / 127.5, data_format=data_format, input_data_format=input_data_format)\n image = image - 1\n return image", "def inv_preprocess(imgs, num_images, img_mean):\n n, h, w, c = imgs.shape\n assert (n >= num_images), 'Batch size %d should be greater or equal than number of images to save %d.' 
% (\n n, num_images)\n outputs = np.zeros((num_images, h, w, c), dtype=np.uint8)\n for i in range(num_images):\n outputs[i] = (imgs[i] + img_mean)[:, :, ::-1].astype(np.uint8)\n return outputs", "def preprocess_train_keep_aspect_ratio(im, boxes, classes, inst_masks, mask, min_side, max_side,\n canvas_height, canvas_width,\n use_augment=False, training_scale=[0.3, 0.5, 0.7, 1.0]):\n im, inst_masks, mask, boxes, classes, im_scale = resize_as_min_side(im, inst_masks, mask, boxes, classes,\n min_side=min_side, max_side=max_side)\n\n im, inst_masks, mask, boxes, classes = random_flip(im, inst_masks, mask, boxes, classes)\n if use_augment:\n if np.random.choice([0, 1]) != 0:\n scale = np.random.choice(training_scale) # adding more small objects\n im, inst_masks, mask, boxes, classes = random_scale(im, inst_masks, mask, boxes, classes, scale=scale)\n\n im, inst_masks, mask, boxes, classes = pad_to_canvas(im, inst_masks, mask, boxes, classes,\n canvas_width=canvas_width,\n canvas_height=canvas_height)\n\n # im = distort_color(im)\n im = imcv2_recolor(im)\n\n boxes = np.asarray(boxes, dtype=np.float32)\n inst_masks = np.zeros([1, im.shape[0], im.shape[1]], dtype=inst_masks.dtype) if inst_masks.size == 0 else inst_masks\n return im, boxes, classes, inst_masks, mask, im_scale", "def preprocess(img, min_size, max_size):\n if(min_size > max_size):\n raise Exception('min_size should not exceed max_size')\n \n width, height = img.size\n minDim = min(width,height)\n maxDim = max(width,height)\n scale_shorter_side = min_size/minDim\n scale_longer_side = maxDim * scale_shorter_side\n if(scale_longer_side > max_size):\n scale = max_size/maxDim\n else:\n scale = scale_shorter_side\n transform = transforms.Compose([\n transforms.Resize((round(img.height*scale),round(img.width * scale))),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n ])\n img = transform(img)\n return scale,img", "def image_preprocess(image, image_size, mean_rgb, stddev_rgb):\n input_processor = dataloader.DetectionInputProcessor(image, image_size)\n input_processor.normalize_image(mean_rgb, stddev_rgb)\n input_processor.set_scale_factors_to_output_size()\n image = input_processor.resize_and_crop_image()\n image_scale = input_processor.image_scale_to_original\n return image, image_scale", "def _images_and_boxes_preprocessing(self, imgs, boxes):\r\n # Image [0, 255] -> [0, 1].\r\n imgs = imgs.float()\r\n imgs = imgs / 255.0\r\n\r\n height, width = imgs.shape[2], imgs.shape[3]\r\n # The format of boxes is [x1, y1, x2, y2]. The input boxes are in the\r\n # range of [0, 1].\r\n boxes[:, [0, 2]] *= width\r\n boxes[:, [1, 3]] *= height\r\n boxes = transform.clip_boxes_to_image(boxes, height, width)\r\n\r\n if self._split == \"train\":\r\n # Train split\r\n imgs, boxes = transform.random_short_side_scale_jitter(\r\n imgs,\r\n min_size=self._jitter_min_scale,\r\n max_size=self._jitter_max_scale,\r\n boxes=boxes,\r\n )\r\n imgs, boxes = transform.random_crop(imgs, self._crop_size, boxes=boxes)\r\n\r\n # Random flip.\r\n imgs, boxes = transform.horizontal_flip(0.5, imgs, boxes=boxes)\r\n elif self._split == \"val\":\r\n # Val split\r\n # Resize short side to crop_size. 
Non-local and STRG uses 256.\r\n imgs, boxes = transform.random_short_side_scale_jitter(\r\n imgs, min_size=self._crop_size, max_size=self._crop_size, boxes=boxes\r\n )\r\n\r\n # Apply center crop for val split\r\n imgs, boxes = transform.uniform_crop(\r\n imgs, size=self._crop_size, spatial_idx=1, boxes=boxes\r\n )\r\n\r\n if self._test_force_flip:\r\n imgs, boxes = transform.horizontal_flip(1, imgs, boxes=boxes)\r\n elif self._split == \"test\":\r\n # Test split\r\n # Resize short side to crop_size. Non-local and STRG uses 256.\r\n imgs, boxes = transform.random_short_side_scale_jitter(\r\n imgs, min_size=self._crop_size, max_size=self._crop_size, boxes=boxes\r\n )\r\n\r\n if self._test_force_flip:\r\n imgs, boxes = transform.horizontal_flip(1, imgs, boxes=boxes)\r\n else:\r\n raise NotImplementedError(\"{} split not supported yet!\".format(self._split))\r\n\r\n # Do color augmentation (after divided by 255.0).\r\n if self._split == \"train\" and self._use_color_augmentation:\r\n if not self._pca_jitter_only:\r\n imgs = transform.color_jitter(\r\n imgs, img_brightness=0.4, img_contrast=0.4, img_saturation=0.4\r\n )\r\n\r\n imgs = transform.lighting_jitter(\r\n imgs,\r\n alphastd=0.1,\r\n eigval=np.array(self._pca_eigval).astype(np.float32),\r\n eigvec=np.array(self._pca_eigvec).astype(np.float32),\r\n )\r\n\r\n # Normalize images by mean and std.\r\n imgs = transform.color_normalization(\r\n imgs,\r\n np.array(self._data_mean, dtype=np.float32),\r\n np.array(self._data_std, dtype=np.float32),\r\n )\r\n\r\n if self._use_bgr:\r\n # Convert image format from RGB to BGR.\r\n # Note that Kinetics pre-training uses RGB!\r\n imgs = imgs[:, [2, 1, 0], ...]\r\n\r\n boxes = transform.clip_boxes_to_image(boxes, self._crop_size, self._crop_size)\r\n\r\n return imgs, boxes", "def test_normalize(dummy_input):\n # Test the 2D image: H, W, C\n image, label = dummy_input(image_size=(512, 512, 3),\n label_size=(512, 512, 1))\n transform = Normalize(means=None, stds=None)\n _image, _label = transform(image, label, normalize_tags=[True, False])\n assert not (image == _image).all()\n assert (label == _label).all()\n\n # Test the 3D image: H, W, D, C\n image, label = dummy_input(image_size=(512, 512, 20, 3),\n label_size=(512, 512, 20, 1))\n transform = Normalize(means=None, stds=None)\n _image, _label = transform(image, label, normalize_tags=[True, False])\n assert not (image == _image).all()\n assert (label == _label).all()\n assert np.abs(np.mean(_image)-0) < 1e-8\n assert np.abs(np.std(_image)-1) < 1e-8", "def _images_and_boxes_preprocessing(self, imgs, boxes, gt_boxes=None):\n # Image [0, 255] -> [0, 1].\n imgs = imgs.float()\n imgs = imgs / 255.0\n\n height, width = imgs.shape[2], imgs.shape[3]\n # The format of boxes is [x1, y1, x2, y2]. The input boxes are in the\n # range of [0, 1].\n # boxes[:, [0, 2]] *= width\n # boxes[:, [1, 3]] *= height\n boxes = transform.clip_boxes_to_image(boxes, height, width)\n\n if self._split == \"train\":\n # Train split\n imgs, boxes = transform.random_short_side_scale_jitter(\n imgs,\n min_size=self._jitter_min_scale,\n max_size=self._jitter_max_scale,\n boxes=boxes,\n )\n imgs, boxes = transform.random_crop(\n imgs, self._crop_size, boxes=boxes\n )\n\n # Random flip.\n imgs, boxes = transform.horizontal_flip(0.5, imgs, boxes=boxes)\n elif self._split == \"val\":\n # Val split\n # Resize short side to crop_size. 
Non-local and STRG uses 256.\n imgs, boxes = transform.random_short_side_scale_jitter(\n imgs,\n min_size=self._crop_size,\n max_size=self._crop_size,\n boxes=boxes,\n )\n\n # Apply center crop for val split\n imgs, boxes = transform.uniform_crop(\n imgs, size=self._crop_size, spatial_idx=1, boxes=boxes\n )\n\n if self._test_force_flip:\n imgs, boxes = transform.horizontal_flip(1, imgs, boxes=boxes)\n elif self._split == \"test\":\n # Test split\n # Resize short side to crop_size. Non-local and STRG uses 256.\n imgs, boxes = transform.random_short_side_scale_jitter(\n imgs,\n min_size=self._crop_size,\n max_size=self._crop_size,\n boxes=boxes,\n )\n\n if self._test_force_flip:\n imgs, boxes = transform.horizontal_flip(1, imgs, boxes=boxes)\n else:\n raise NotImplementedError(\n \"{} split not supported yet!\".format(self._split)\n )\n\n # Do color augmentation (after divided by 255.0).\n if self._split == \"train\" and self._use_color_augmentation:\n if not self._pca_jitter_only:\n imgs = transform.color_jitter(\n imgs,\n img_brightness=0.4,\n img_contrast=0.4,\n img_saturation=0.4,\n )\n\n imgs = transform.lighting_jitter(\n imgs,\n alphastd=0.1,\n eigval=np.array(self._pca_eigval).astype(np.float32),\n eigvec=np.array(self._pca_eigvec).astype(np.float32),\n )\n\n # Normalize images by mean and std.\n imgs = transform.color_normalization(\n imgs,\n np.array(self._data_mean, dtype=np.float32),\n np.array(self._data_std, dtype=np.float32),\n )\n\n if not self._use_bgr:\n # Convert image format from BGR to RGB.\n # Note that Kinetics pre-training uses RGB!\n imgs = imgs[:, [2, 1, 0], ...]\n\n boxes = transform.clip_boxes_to_image(\n boxes, self._crop_size, self._crop_size\n )\n\n return imgs, boxes", "def normalise(dataset):\n # Scale images to the [0, 1] range\n dataset = dataset.astype(\"float32\") / 255\n # Make sure images have shape (28, 28, 1)\n return np.expand_dims(dataset, -1)", "def _train_aug(self, results):\n img = results['img']\n h, w, c = img.shape\n boxes = results['gt_bboxes']\n while True:\n scale = random.choice(self.ratios)\n new_h = int(self.crop_size[0] * scale)\n new_w = int(self.crop_size[1] * scale)\n h_border = self._get_border(self.border, h)\n w_border = self._get_border(self.border, w)\n\n for i in range(50):\n center_x = random.randint(low=w_border, high=w - w_border)\n center_y = random.randint(low=h_border, high=h - h_border)\n\n cropped_img, border, patch = self._crop_image_and_paste(\n img, [center_y, center_x], [new_h, new_w])\n\n mask = self._filter_boxes(patch, boxes)\n # if image do not have valid bbox, any crop patch is valid.\n if not mask.any() and len(boxes) > 0:\n continue\n\n results['img'] = cropped_img\n results['img_shape'] = cropped_img.shape\n results['pad_shape'] = cropped_img.shape\n\n x0, y0, x1, y1 = patch\n\n left_w, top_h = center_x - x0, center_y - y0\n cropped_center_x, cropped_center_y = new_w // 2, new_h // 2\n\n # crop bboxes accordingly and clip to the image boundary\n for key in results.get('bbox_fields', []):\n mask = self._filter_boxes(patch, results[key])\n bboxes = results[key][mask]\n bboxes[:, 0:4:2] += cropped_center_x - left_w - x0\n bboxes[:, 1:4:2] += cropped_center_y - top_h - y0\n if self.bbox_clip_border:\n bboxes[:, 0:4:2] = np.clip(bboxes[:, 0:4:2], 0, new_w)\n bboxes[:, 1:4:2] = np.clip(bboxes[:, 1:4:2], 0, new_h)\n keep = (bboxes[:, 2] > bboxes[:, 0]) & (\n bboxes[:, 3] > bboxes[:, 1])\n bboxes = bboxes[keep]\n results[key] = bboxes\n if key in ['gt_bboxes']:\n if 'gt_labels' in results:\n labels = 
results['gt_labels'][mask]\n labels = labels[keep]\n results['gt_labels'] = labels\n if 'gt_masks' in results:\n raise NotImplementedError(\n 'RandomCenterCropPad only supports bbox.')\n\n # crop semantic seg\n for key in results.get('seg_fields', []):\n raise NotImplementedError(\n 'RandomCenterCropPad only supports bbox.')\n return results", "def resize_and_pad_images(images, min_dim, max_dim):\n height, width = images.shape[-2:]\n resize_width, resize_height, _ = compute_resize_params_2((width, height), min_dim, max_dim)\n\n # make width and height a multiple of 32\n pad_right = (int(math.ceil(resize_width / 32)) * 32) - resize_width\n pad_bottom = (int(math.ceil(resize_height / 32)) * 32) - resize_height\n\n images = F.interpolate(images, (resize_width, resize_height), mode=\"bilinear\", align_corners=False)\n return F.pad(images, (0, pad_right, 0, pad_bottom))", "def _process_batch(sess, original_images, semantic_predictions, image_names,\n image_heights, image_widths, image_id_offset, save_dir,\n raw_save_dir, train_id_to_eval_id=None):\n (original_images,\n semantic_predictions,\n image_names,\n image_heights,\n image_widths) = sess.run([original_images, semantic_predictions,\n image_names, image_heights, image_widths])\n\n num_image = semantic_predictions.shape[0]\n for i in range(num_image):\n image_height = np.squeeze(image_heights[i])\n image_width = np.squeeze(image_widths[i])\n original_image = np.squeeze(original_images[i])\n semantic_prediction = np.squeeze(semantic_predictions[i])\n crop_semantic_prediction = semantic_prediction[:image_height, :image_width]\n\n # Save image.\n save_annotation.save_annotation(\n original_image, save_dir, _IMAGE_FORMAT % (image_id_offset + i),\n add_colormap=False)\n\n # Save prediction.\n save_annotation.save_annotation(\n crop_semantic_prediction, save_dir,\n _PREDICTION_FORMAT % (image_id_offset + i), add_colormap=True,\n colormap_type=FLAGS.colormap_type)\n\n if FLAGS.also_save_raw_predictions:\n image_filename = os.path.basename(image_names[i])\n\n if train_id_to_eval_id is not None:\n crop_semantic_prediction = _convert_train_id_to_eval_id(\n crop_semantic_prediction,\n train_id_to_eval_id)\n save_annotation.save_annotation(\n crop_semantic_prediction, raw_save_dir, image_filename,\n add_colormap=False)", "def pad_images(_input_image_paths : list[str], _output_image_dir : str, \\\n _pad_colour : tuple[int,int,int]) -> None:\n for image in _input_image_paths:\n with Image.open(image) as image_object:\n\n #Rotate the image based on the EXIF data's orientation tag.\n #Ensures that images taller than they are wide are kept as such when padding\n image_object = PIL.ImageOps.exif_transpose(image_object)\n\n old_x,old_y = image_object.size\n bigger_dimension = max(old_x,old_y)\n\n #Figure out how much extra should be added to each of the four sides\n x_additive = y_additive = 0\n if old_x > old_y:\n y_additive = (old_x - old_y)//2\n\n elif old_y > old_x:\n x_additive = (old_y - old_x)//2\n\n #Create a new, larger image with the requested padding colour,\n # and then paste the original image overtop in the correct position\n new_canvas = Image.new(\"RGB\", (bigger_dimension,bigger_dimension), _pad_colour)\n new_canvas.paste(image_object, (x_additive, y_additive))\n new_canvas.save(_output_image_dir + os.path.basename(image))", "def run_padding(self):\n\n image_padded, mask, self.pad_to_right, self.pad_to_bottom = gen_padded_image_and_mask (os.path.join('utils_dfn/temp', self.file_name_with_ext),\n self.new_height, self.new_width)\n 
cv2.imwrite(os.path.join('utils_dfn/img', self.file_name + '_padded_resized.png'), image_padded)\n cv2.imwrite(os.path.join('utils_dfn/mask', self.file_name + '_mask.png'), mask)", "def next_batch(self):\n next_train_index = self.curr_train_index + self.hparams.batch_size\n if next_train_index > self.num_train:\n # Increase epoch number\n epoch = self.epochs + 1\n self.reset()\n self.epochs = epoch\n batched_data = (\n self.train_images[self.curr_train_index:self.curr_train_index +\n self.hparams.batch_size],\n self.train_labels[self.curr_train_index:self.curr_train_index +\n self.hparams.batch_size])\n final_imgs = []\n images, labels = batched_data\n if self.hparams.augment_type == 'mixup':\n images, labels = augmentation_transforms.mixup_batch(\n images, labels, self.hparams.mixup_alpha)\n elif self.hparams.augment_type == 'image_freq':\n images, labels = augmentation_transforms.freq_augment(\n images,\n labels,\n amplitude=self.hparams.freq_augment_amplitude,\n magnitude=self.hparams.augmentation_magnitude,\n proportion_f=self.hparams.freq_augment_ffrac,\n probability=self.hparams.augmentation_probability)\n for data in images:\n if self.hparams.augment_type == 'autoaugment':\n epoch_policy = self.good_policies[np.random.choice(\n len(self.good_policies))]\n final_img = augmentation_transforms.apply_policy(epoch_policy, data)\n elif self.hparams.augment_type == 'random':\n epoch_policy = found_policies.random_policy(\n self.hparams.num_augmentation_layers,\n self.hparams.augmentation_magnitude,\n self.hparams.augmentation_probability)\n final_img = augmentation_transforms.apply_policy(epoch_policy, data)\n else:\n final_img = np.copy(data)\n if self.hparams.apply_flip_crop:\n final_img = augmentation_transforms.random_flip(\n augmentation_transforms.zero_pad_and_crop(data, 4))\n # Apply cutout\n if self.hparams.apply_cutout:\n final_img = augmentation_transforms.cutout_numpy(final_img)\n\n final_imgs.append(final_img)\n final_imgs = np.array(final_imgs, np.float32)\n if self.hparams.noise_type == 'radial':\n labels = augmentation_transforms.add_radial_noise(\n final_imgs, labels, self.hparams.frequency, self.hparams.amplitude,\n self.hparams.noise_class, self.hparams.normalize_amplitude)\n elif self.hparams.noise_type == 'random' or self.hparams.noise_type == 'fourier' or self.hparams.noise_type == 'f' or self.hparams.noise_type == '1/f':\n labels = augmentation_transforms.add_sinusoidal_noise(\n final_imgs, labels, self.hparams.frequency, self.hparams.amplitude,\n self.direction, self.hparams.noise_class,\n self.hparams.normalize_amplitude)\n elif self.hparams.noise_type == 'uniform':\n labels = augmentation_transforms.add_uniform_noise(\n labels, self.hparams.amplitude, self.hparams.noise_class)\n\n batched_data = (final_imgs, labels)\n self.curr_train_index += self.hparams.batch_size\n return batched_data", "def __resize_to_32x(input_data):\n rate = 1\n while min(input_data['img'].shape[:2])>736:\n test_size = 736\n if input_data['img'].shape[0] > input_data['img'].shape[1]:\n if True: # input_data['img'].shape[1] < 512:\n rate = test_size / input_data['img'].shape[1]\n seq = iaa.Sequential([\n iaa.Scale({'height': \"keep-aspect-ratio\", 'width': test_size}, 'cubic')\n ])\n input_data['img'] = seq.augment_image(input_data['img'])\n else:\n if True: # input_data['img'].shape[0] < 512:\n rate = test_size / input_data['img'].shape[0]\n seq = iaa.Sequential([\n iaa.Scale({'height': test_size, 'width': \"keep-aspect-ratio\"}, 'cubic')\n ])\n input_data['img'] = 
seq.augment_image(input_data['img'])\n\n if DataAugmentor._is_synthtext(input_data):\n input_data['contour'] = [[np.cast['int32'](contour * rate) for contour in contours] for contours in\n input_data['contour']]\n else:\n input_data['contour'] = [np.cast['int32'](contour * rate) for contour in input_data['contour']]\n input_data['center_point'] = [(np.cast['int32'](point[0] * rate),\n np.cast['int32'](point[1] * rate)) for point in input_data['center_point']]\n p_row = 0\n while True:\n if (input_data['img'].shape[0] + p_row) % 32 == 0:\n break\n p_row += 1\n p_col = 0\n while True:\n if (input_data['img'].shape[1] + p_col) % 32 == 0:\n break\n p_col += 1\n\n input_data['img'] = np.pad(input_data['img'], ((0, p_row), (0, p_col), (0, 0)), mode='constant')\n\n return input_data", "def normalize_other_inputs(X, Args):\n other_keys = list(X.keys())\n other_keys.remove(\"blend_image\")\n for key in other_keys:\n X[key] = (X[key] - np.mean(X[key])) / np.std(X[key])\n if Args.model == \"orchid\":\n loc_im = np.zeros_like(X[other_keys[0]])\n for i, key in enumerate(other_keys):\n im = X.pop(key)\n maximum = np.min((im.max(axis=2).max(axis=1)))\n im[im < maximum / 1.5] = 0\n im[im >= maximum / 1.5] = i + 1\n loc_im += im\n X['loc_im'] = loc_im\n return X", "def transform_images(img, size):\n return tf.image.resize(img, (size, size)) / 255", "def call(self, inputs):\r\n outputs = K.spatial_2d_padding(inputs,\r\n padding=self.padding,\r\n data_format=self.data_format)\r\n\r\n p00, p01 = self.padding[0][0], self.padding[0][1]\r\n p10, p11 = self.padding[1][0], self.padding[1][1]\r\n if self.data_format == \"channels_last\":\r\n\r\n row0 = K.concatenate([inputs[:, p00:0:-1, p10:0:-1, :],\r\n inputs[:, p00:0:-1, :, :],\r\n inputs[:, p00:0:-1, -2:-2-p11:-1, :]],\r\n axis=2)\r\n row1 = K.concatenate([inputs[:, :, p10:0:-1, :],\r\n inputs,\r\n inputs[:, :, -2:-2-p11:-1, :]],\r\n axis=2)\r\n row2 = K.concatenate([inputs[:, -2:-2-p01:-1, p10:0:-1, :],\r\n inputs[:, -2:-2-p01:-1, :, :],\r\n inputs[:, -2:-2-p01:-1, -2:-2-p11:-1, :]],\r\n axis=2)\r\n\r\n outputs = K.concatenate([row0, row1, row2], axis=1)\r\n\r\n else: # self.data_format == \"channels_first\"\r\n\r\n row0 = K.concatenate([inputs[:, :, p00:0:-1, p10:0:-1],\r\n inputs[:, :, p00:0:-1, :],\r\n inputs[:, :, p00:0:-1, -2:-2-p11:-1]],\r\n axis=3)\r\n row1 = K.concatenate([inputs[:, :, :, p10:0:-1],\r\n inputs,\r\n inputs[:, :, :, -2:-2-p11:-1]],\r\n axis=3)\r\n row2 = K.concatenate([inputs[:, :, -2:-2-p01:-1, p10:0:-1],\r\n inputs[:, :, -2:-2-p01:-1, :],\r\n inputs[:, :, -2:-2-p01:-1, -2:-2-p11:-1]],\r\n axis=3)\r\n\r\n outputs = K.concatenate([row0, row1, row2], axis=2)\r\n\r\n return outputs", "def preprocess(images, target_height, target_width):\n # Scale to [0, 1].\n images = tf.image.convert_image_dtype(images, dtype=tf.float32)\n\n # Stack images channel-wise.\n batch_size = tf.shape(images)[0]\n images = stack_images_channelwise(images, batch_size)\n\n # Resize to target height and width.\n images = tf.image.resize(images, [target_height, target_width])\n return images", "def pad_images_to_match_shapes(self):\n # Force all the images to have the same shape\n imageShapes = np.array([img.shape for img in self.imageList])\n ny, nx = imageShapes.max(axis=0)\n\n # Loop through each image and add padding if necessary\n for ny1nx1 in imageShapes:\n ny1, nx1 = ny1nx1\n padY = ny - ny1 if ny1 < ny else 0\n padX = nx - nx1 if nx1 < nx else 0\n\n # Extract the first image in the imageList\n thisImg = self.pop_image(0)\n\n if padX > 0 or padY > 0:\n # Pad 
the image as necessary\n thisImg = thisImg.pad(((0, padY), (0, padX)), 'constant')\n\n # Return the image to the imageList (at the END of the list)\n self.add_image(thisImg)\n\n # Hand the padded ImageStack back to the user\n return self", "def image_preprocessing(image_buffer, bbox, image_size, is_training):\n if is_training:\n image = _decode_and_random_crop(image_buffer, bbox, image_size)\n image = _normalize(image)\n image = tf.image.random_flip_left_right(image)\n else:\n image = _decode_and_center_crop(image_buffer, image_size)\n image = _normalize(image)\n image = tf.reshape(image, [image_size, image_size, 3])\n return image", "def flow(self, batch_size=32):\n nb_batches = int(len(self.image_ids_in_subset) / batch_size) + 1\n while True:\n # Before each epoch we shuffle the images' ids\n random.shuffle(self.image_ids_in_subset)\n\n for i in range(nb_batches):\n # We first get all the image ids for the next batch\n current_bach = self.image_ids_in_subset[i*batch_size:(i+1)*batch_size]\n X_batch = []\n Y_batch = []\n\n for image_id in current_bach:\n # Load the image and resize it. We get a PIL Image object\n img = image.load_img(self.get_img_path(int(image_id)), grayscale=False, target_size=(cfg.IMAGE.IMG_SIZE, cfg.IMAGE.IMG_SIZE))\n # Cast the Image object to a numpy array and put the channel has the last dimension\n img_arr = image.img_to_array(img, data_format='channels_last')\n X_batch.append(img_arr)\n # Y_batch.append(self.id_to_label[image_id])\n Y_batch.append(self.get_labels(image_id))\n\n # resize X_batch in (batch_size, IMG_HEIGHT, IMG_WIDTH, 3)\n X_batch = np.reshape(X_batch, (-1, cfg.IMAGE.IMG_SIZE, cfg.IMAGE.IMG_SIZE, 3))\n # resize Y_batch in (None, nb_classes)\n Y_batch = np.reshape(Y_batch, (-1, self.nb_classes))\n\n # substract mean values from imagenet\n X_batch = preprocess_input(X_batch, data_format='channels_last')\n yield(X_batch, Y_batch)", "def collate(self, batch):\n \n images = []\n indices = []\n roi_size = 5 if self.Train else 4\n rois = torch.zeros((len(batch), 20, roi_size), dtype=torch.float32)\n rois = rois.to(batch[0][1].device)\n \n for _b in range(len(batch)):\n # Accumulate patches:\n images.append(batch[_b][0].to(torch.float32))\n indices.append(batch[_b][2])\n \n # Accumulate ROI:\n \"\"\"\n image_num = torch.Tensor([_b]).expand(batch[_b][1].size(0))\n image_num = image_num.type(batch[_b][1].dtype).view(-1,1)\n image_num = image_num.to(batch[_b][1].device)\n _roi = torch.cat([image_num, batch[_b][1]], dim=1)\n rois = torch.cat([rois, _roi], dim=0)\n \"\"\"\n num_boxes = batch[_b][1].size(0)\n rois[_b,:num_boxes,:] = batch[_b][1]\n \n \n # Stack outputs and return\n batch = [torch.stack(images, dim=0), rois, torch.Tensor(indices)]\n return batch", "def preprocess(self, inputs, color_aug):\n for k in list(inputs):\n if \"color\" in k:\n n, im, i = k\n inputs[(n, im, 0)] = self.resize(inputs[(n, im, - 1)])\n\n for k in list(inputs):\n if \"color\" in k:\n f = inputs[k]\n n, im, i = k\n inputs[(n, im, i)] = self.to_tensor(f)\n if i == 0:\n inputs[(n + \"_aug\", im, i)] = self.to_tensor(color_aug(f))", "def preprocess_image(image, model_image_size):\n #resized_image = cv2.resize(image, tuple(reversed(model_image_size)), cv2.INTER_AREA)\n resized_image = letterbox_resize(image, tuple(reversed(model_image_size)))\n image_data = np.asarray(resized_image).astype('float32')\n image_data = normalize_image(image_data)\n image_data = np.expand_dims(image_data, 0) # Add batch dimension.\n return image_data", "def img_preprocess(self, img, output_size=64):\n 
w, h = img.size\n if w > h:\n box_param = (int(w * 0.5 - h * 0.5), 0, int(w * 0.5 + h * 0.5), h)\n cropped = img.crop(box_param)\n else: # w < h\n box_param = (0, int(h * 0.5 - w * 0.5), w, int(h * 0.5 + w * 0.5))\n cropped = img.crop(box_param)\n\n resized = cropped.resize((output_size, output_size))\n resized = np.asarray(resized)\n\n return resized", "def run(images_padded, dictionary, codes, kernel_stride, padding_dims,\n stepsize=0.001, num_iters=1, normalize_dictionary=True):\n reconstruction_mask = create_mask(images_padded, padding_dims)\n codes_temp_transposed = codes.transpose(dim0=1, dim1=0)\n # TODO: Figure out if I can remove the double-transpose in gradient comp\n for iter_idx in range(num_iters):\n # WARNING: this gradient computation can overflow, adjusting the stepsize\n # or the scale of the data are typical remedies\n gradient = (torch.nn.functional.conv2d(\n (reconstruction_mask * (\n torch.nn.functional.conv_transpose2d(codes, dictionary,\n stride=kernel_stride) - images_padded)).transpose(dim0=1, dim1=0),\n codes_temp_transposed, dilation=kernel_stride) /\n images_padded.shape[0]).transpose(dim0=1, dim1=0)\n # it makes sense to put this update on the same scale as the dictionary\n # so that stepsize is effectively dimensionless\n gradient.mul_(dictionary.norm(p=2) / gradient.norm(p=2))\n dictionary.sub_(stepsize * gradient)\n if normalize_dictionary:\n dictionary.div_(torch.squeeze(dictionary.norm(\n p=2, dim=(1, 2, 3)))[:, None, None, None])", "def preprocess_batch(self,image_batch):\n if len(image_batch.shape) == 4 and image_batch.shape[1:] != (32,32,3):\n assert (False),'batch shape wrong'\n\n if len(image_batch.shape) == 3 and image_batch.shape != (32,32,3):\n assert(False),'batch shape wrong'\n\n return image_batch - self.mean", "def preprocess_example_input(input_config):\n\n input_path = input_config[\"input_path\"]\n input_shape = input_config[\"input_shape\"]\n one_img = imread(input_path)\n if \"normalize_cfg\" in input_config.keys():\n normalize_cfg = input_config[\"normalize_cfg\"]\n mean = np.array(normalize_cfg[\"mean\"], dtype=np.float32)\n std = np.array(normalize_cfg[\"std\"], dtype=np.float32)\n one_img = imnormalize(one_img, mean, std)\n one_img = imresize(one_img, input_shape[2:][::-1]).transpose(2, 0, 1)\n one_img = torch.from_numpy(one_img).unsqueeze(0).float().requires_grad_(True)\n (_, C, H, W) = input_shape\n one_meta = {\n \"img_shape\": (H, W, C),\n \"ori_shape\": (H, W, C),\n \"pad_shape\": (H, W, C),\n \"filename\": \"<demo>.png\",\n \"scale_factor\": 1.0,\n \"flip\": False,\n }\n\n return one_img, one_meta", "def aug_batch(self, x_batch):\n rand_batch_seed = np.random.randint(1000)\n for i_seq in range(x_batch.shape[0]):\n ia.seed(rand_batch_seed + i_seq)\n # for j_img in range(x_batch.shape[1]):\n # single_image = x_batch[i_seq, j_img, ...]\n # x_batch[i_seq, j_img, ...] = self.aug_pipe.augment_image(single_image)\n\n batch = x_batch[i_seq, ...]\n x_batch[i_seq, ...] 
= self.aug_pipe.augment_images(batch)\n return x_batch", "def reshape_normalise(img):\n\t# The image shape is expected to match the input of VGG19\n\timg = np.resize(img, (1, CONFIG.IMAGE_HEIGHT, CONFIG.IMAGE_WIDTH, CONFIG.COLOR_CHANNELS)).astype('float32')\n\timg -= CONFIG.MEAN_PIXEL\n\treturn img", "def preprocess(\n self,\n images: ImageInput,\n do_resize: bool = None,\n size: Dict[str, int] = None,\n resample: PILImageResampling = None,\n do_normalize: bool = None,\n do_color_quantize: Optional[bool] = None,\n clusters: Optional[Union[List[List[int]], np.ndarray]] = None,\n return_tensors: Optional[Union[str, TensorType]] = None,\n data_format: Optional[Union[str, ChannelDimension]] = ChannelDimension.FIRST,\n input_data_format: Optional[Union[str, ChannelDimension]] = None,\n **kwargs,\n ) -> PIL.Image.Image:\n do_resize = do_resize if do_resize is not None else self.do_resize\n size = size if size is not None else self.size\n size = get_size_dict(size)\n resample = resample if resample is not None else self.resample\n do_normalize = do_normalize if do_normalize is not None else self.do_normalize\n do_color_quantize = do_color_quantize if do_color_quantize is not None else self.do_color_quantize\n clusters = clusters if clusters is not None else self.clusters\n clusters = np.array(clusters)\n\n images = make_list_of_images(images)\n\n if not valid_images(images):\n raise ValueError(\n \"Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, \"\n \"torch.Tensor, tf.Tensor or jax.ndarray.\"\n )\n\n if do_resize and size is None or resample is None:\n raise ValueError(\"Size and resample must be specified if do_resize is True.\")\n\n if do_color_quantize and clusters is None:\n raise ValueError(\"Clusters must be specified if do_color_quantize is True.\")\n\n # All transformations expect numpy arrays.\n images = [to_numpy_array(image) for image in images]\n\n if is_scaled_image(images[0]) and do_normalize:\n logger.warning_once(\n \"It looks like you are trying to rescale already rescaled images. 
If you wish to do this, \"\n \"make sure to set `do_normalize` to `False` and that pixel values are between [-1, 1].\",\n )\n\n if input_data_format is None:\n # We assume that all images have the same channel dimension format.\n input_data_format = infer_channel_dimension_format(images[0])\n\n if do_resize:\n images = [\n self.resize(image=image, size=size, resample=resample, input_data_format=input_data_format)\n for image in images\n ]\n\n if do_normalize:\n images = [self.normalize(image=image, input_data_format=input_data_format) for image in images]\n\n if do_color_quantize:\n images = [to_channel_dimension_format(image, ChannelDimension.LAST, input_data_format) for image in images]\n # color quantize from (batch_size, height, width, 3) to (batch_size, height, width)\n images = np.array(images)\n images = color_quantize(images, clusters).reshape(images.shape[:-1])\n\n # flatten to (batch_size, height*width)\n batch_size = images.shape[0]\n images = images.reshape(batch_size, -1)\n\n # We need to convert back to a list of images to keep consistent behaviour across processors.\n images = list(images)\n else:\n images = [\n to_channel_dimension_format(image, data_format, input_channel_dim=input_data_format)\n for image in images\n ]\n\n data = {\"input_ids\": images}\n return BatchFeature(data=data, tensor_type=return_tensors)", "def __padding(self, image, boxes, height, width):\n temp = boxes[:, :4].astype(np.int)\n y1 = np.where(temp[:, 0] < 0)[0]\n if len(y1) > 0:\n temp[y1, 0] = 0\n x1 = np.where(temp[:, 1] < 0)[0]\n if len(x1) > 0:\n temp[x1, 0] = 0\n y2 = np.where(temp[:, 2] > image.shape[0] - 1)[0]\n if len(y2) > 0:\n temp[y2, 0] = image.shape[0] - 1\n x2 = np.where(temp[:, 3] > image.shape[1] - 1)[0]\n if len(x2) > 0:\n temp[x2, 0] = image.shape[1] - 1\n pad_top = np.abs(temp[:, 0] - boxes[:, 0]).astype(np.int)\n pad_left = np.abs(temp[:, 1] - boxes[:, 1]).astype(np.int)\n pad_bottom = np.abs(temp[:, 2] - boxes[:, 2]).astype(np.int)\n pad_right = np.abs(temp[:, 3] - boxes[:, 3]).astype(np.int)\n input_data = np.empty([boxes.shape[0], 3, height, width], dtype=np.float32)\n for i in range(boxes.shape[0]):\n crop_img = image[temp[i, 0]:temp[i, 2] + 1, temp[i, 1]:temp[i, 3] + 1, :]\n crop_img = cv2.copyMakeBorder(crop_img, pad_top[i], pad_bottom[i], \\\n pad_left[i], pad_right[i], cv2.BORDER_CONSTANT, value=0)\n if crop_img is None:\n continue\n crop_img = cv2.resize(crop_img, (width, height)).astype(np.float32)\n crop_img[:, :, 0] -= self.mean[0]\n crop_img[:, :, 1] -= self.mean[1]\n crop_img[:, :, 2] -= self.mean[2]\n crop_img *= self.scale_factor\n crop_img = np.transpose(crop_img, (2, 0, 1))\n input_data[i] = crop_img.copy()\n return input_data", "def process_batch(self, inputs):\n # Otherwise, we only feed the image with frame_id 0 through the depth encoder\n features, raw_hrnet_features = self.models[\"encoder\"](inputs[\"color_aug\", 0, 0])\n outputs = self.models[\"depth_decoder\"](features)\n\n if self.opt.use_dc:\n lambda_ = 1.0\n outputs['domain_classifier'] = self.models['domain_classifier'](raw_hrnet_features, lambda_)\n\n if self.opt.use_pose_net and \"real\" in self.syn_or_real:\n outputs.update(self.predict_poses(inputs, features))\n\n # convert estimated disparity from neural network to depth\n self.generate_images_pred_local(inputs, outputs)\n\n # loss functions\n losses = self.compute_losses_local(inputs, outputs)\n\n return outputs, losses", "def img_normalize(image, label):\n mean, std = ds_stats\n image -= tf.constant(mean, shape=[1, 1, num_channels], 
dtype=image.dtype)\n image /= tf.constant(std, shape=[1, 1, num_channels], dtype=image.dtype)\n return image, label", "def resize_batch(images : List[np.ndarray], size : Tuple[int,int,int,int], resize_kind='stretch') :\n assert resize_kind in ['stretch'] and len(size) == 4\n n, w, h, c = size if size[-1]==3 else tuple(size[i] for i in [0,3,1,2])\n resize = lambda x: BaseRuntime.resize_stretch(x, (h,w))\n dtype = images[0].dtype\n n_pad = n - len(images)\n batch_pad = [np.zeros((h,w,c),dtype=dtype)] * n_pad\n batch_image = list(map(resize, images))\n batch_image = batch_image + batch_pad\n return np.stack(batch_image)", "def image_augmentations(\n image,\n data_augmentations,\n model_input_image_size,\n label=None):\n if image.get_shape() == None:\n im_size = model_input_image_size\n else:\n im_size = image.get_shape().as_list()\n im_size_check = True # np.any(\n # np.less_equal(\n # model_input_image_size[:2],\n # im_size[:2]))\n if data_augmentations is not None:\n for aug in data_augmentations:\n # Pixel/image-level augmentations\n if aug == 'image_float32':\n image = tf.cast(image, tf.float32)\n if aug == 'label_float32':\n label = tf.cast(label, tf.float32)\n if aug == 'bfloat16':\n image = tf.cast(image, tf.bfloat16)\n if aug == 'singleton':\n image = tf.expand_dims(image, axis=-1)\n print 'Adding singleton dimension to image.'\n if aug == 'sgl_label' or aug == 'singleton_label':\n label = tf.expand_dims(label, axis=-1)\n print 'Adding singleton dimension to label.'\n if aug == 'coco_labels':\n label = tf.nn.relu(label - 91)\n if aug == 'contrastive_loss':\n label = tf.stack(\n [tf.ones_like(label), tf.zeros_like(label)], -1)\n if aug == 'bsds_normalize':\n data = np.load(\n '/media/data_cifs/image_datasets/BSDS500/images/train/file_paths.npz')\n mean = data['mean'].squeeze(0)\n stds = data['stds'].squeeze(0)\n image = (image - mean) / stds\n if aug == 'bsds_crop' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n scale_choices = tf.convert_to_tensor(\n # [1. / 2., 1.1 / 2., 1.2 / 2.])\n [1., 1, 1.1, 1.2])\n samples = tf.multinomial(\n tf.log([tf.ones_like(scale_choices)]), 1)\n image_shape = image.get_shape().as_list()\n scale = scale_choices[tf.cast(samples[0][0], tf.int32)]\n scale_tf = tf.cast(\n tf.round(\n np.asarray(\n image_shape[:2]).astype(\n np.float32) * scale),\n tf.int32)\n combined = tf.concat([image, label], axis=-1)\n combo_shape = combined.get_shape().as_list()\n combined_resize = tf.squeeze(\n tf.image.resize_nearest_neighbor(\n tf.expand_dims(combined, axis=0),\n scale_tf,\n align_corners=True),\n axis=0)\n combined_crop = tf.random_crop(\n combined_resize,\n tf.concat(\n [model_input_image_size[:2], [combo_shape[-1]]], 0))\n image = combined_crop[:, :, :image_shape[-1]]\n label = combined_crop[:, :, image_shape[-1]:]\n image.set_shape(model_input_image_size)\n label.set_shape(\n model_input_image_size[:2] + [\n combo_shape[-1] - model_input_image_size[-1]])\n print 'Applying BSDS crop.'\n if aug == 'hed_resize' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n scale_choices = tf.convert_to_tensor(\n # [1. 
/ 2., 1.1 / 2., 1.2 / 2.])\n np.arange(1, 1.51, 0.1)) # 0.7, 1.5\n samples = tf.multinomial(\n tf.log([tf.ones_like(scale_choices)]), 1)\n image_shape = image.get_shape().as_list()\n scale = scale_choices[tf.cast(samples[0][0], tf.int32)]\n scale_tf = tf.cast(\n tf.round(\n np.asarray(\n image_shape[:2]).astype(\n np.float32) * scale),\n tf.int32)\n combined = tf.concat([image, label], axis=-1)\n combo_shape = combined.get_shape().as_list()\n combined_resize = tf.squeeze(\n tf.image.resize_bilinear(\n tf.expand_dims(combined, axis=0),\n scale_tf,\n align_corners=True),\n axis=0)\n print 'Applying HED resize.'\n if aug == 'uint8_rescale':\n image = tf.cast(image, tf.float32) / 255.\n print 'Applying uint8 rescale to the image.'\n if aug == 'cube_plus_rescale':\n image = tf.cast(image, tf.float32) / 13273.\n print 'Applying uint8 rescale to the image.'\n if aug == 'uint8_rescale_label':\n label = tf.cast(label, tf.float32) / 255.\n print 'Applying uint8 rescale to the label.'\n if aug == 'uint8_rescale_-1_1':\n image = 2 * (tf.cast(image, tf.float32) / 255.) - 1\n print 'Applying uint8 rescale.'\n if aug == 'image_to_bgr':\n image = tf.stack(\n [image[..., 2], image[..., 1], image[..., 0]], axis=-1)\n if aug == 'pascal_normalize':\n image = image - [123.68, 116.78, 103.94]\n if aug == 'ilsvrc12_normalize':\n MEAN_RGB = [0.485 * 255, 0.456 * 255, 0.406 * 255]\n STDDEV_RGB = [0.229 * 255, 0.224 * 255, 0.225 * 255]\n image = (image - MEAN_RGB) / STDDEV_RGB\n if aug == 'random_contrast':\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image = tf.image.random_contrast(image, lower=0.2, upper=1.8)\n print 'Applying random contrast.'\n if aug == 'random_brightness':\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image = tf.image.random_brightness(image, max_delta=63.)\n print 'Applying random brightness.'\n if aug == 'grayscale' and im_size_check:\n # image = tf.image.rgb_to_grayscale(image)\n if len(image.get_shape().as_list()) == 2:\n image = tf.expand_dims(image, axis=-1)\n else:\n image = tf.expand_dims(image[..., 0], axis=-1)\n print 'Converting to grayscale.'\n if aug == 'rgb2gray' and im_size_check:\n image = tf.image.rgb_to_grayscale(image)\n print 'Converting rgb2gray.'\n if aug == 'clip_uint8' and im_size_check:\n image = tf.minimum(image, 255.)\n image = tf.maximum(image, 0.)\n if aug == 'cube_plus_crop':\n image = cube_plus_crop(image, model_input_image_size)\n # Affine augmentations\n if aug == 'rotate' and im_size_check:\n max_theta = 22.\n angle_rad = (max_theta / 180.) 
* math.pi\n angles = tf.random_uniform([], -angle_rad, angle_rad)\n transform = tf.contrib.image.angles_to_projective_transforms(\n angles,\n im_size[0],\n im_size[1])\n image = tf.contrib.image.transform(\n image,\n tf.contrib.image.compose_transforms(transform),\n interpolation='BILINEAR') # or 'NEAREST'\n print 'Applying random rotate.'\n if aug == 'rotate90' and im_size_check:\n image = tf.image.rot90(\n image,\n tf.random_uniform(\n shape=[],\n minval=0,\n maxval=4,\n dtype=tf.int32))\n print 'Applying random 90 degree rotate.'\n if aug == 'rotate90_image_label' and im_size_check:\n concat = tf.image.rot90(\n tf.concat([image, label], -1),\n tf.random_uniform(\n shape=[],\n minval=0,\n maxval=4,\n dtype=tf.int32))\n image = concat[..., :im_size[-1]]\n label = concat[..., im_size[-1]:]\n print 'Applying random 90 degree rotate to images and labels.'\n if aug == 'stack3d':\n image = tf.concat([image, image, image], axis=-1)\n if aug == 'rot_image_label' and im_size_check:\n max_theta = 30.\n angle_rad = (max_theta / 180.) * math.pi\n angles = tf.random_uniform([], -angle_rad, angle_rad)\n transform = tf.contrib.image.angles_to_projective_transforms(\n angles,\n im_size[0],\n im_size[1])\n image = tf.contrib.image.transform(\n image,\n tf.contrib.image.compose_transforms(transform),\n interpolation='BILINEAR') # or 'NEAREST'\n label = tf.contrib.image.transform(\n label,\n tf.contrib.image.compose_transforms(transform),\n interpolation='BILINEAR') # or 'NEAREST'\n print 'Applying random rotate.'\n if aug == 'random_scale_crop_image_label'\\\n and im_size_check:\n scale_choices = tf.convert_to_tensor(\n [1., 1.04, 1.08, 1.12, 1.16])\n samples = tf.multinomial(\n tf.log([tf.ones_like(scale_choices)]), 1)\n image_shape = image.get_shape().as_list()\n scale = scale_choices[tf.cast(samples[0][0], tf.int32)]\n scale_tf = tf.cast(\n tf.round(\n np.asarray(\n model_input_image_size[:2]).astype(\n np.float32) * scale),\n tf.int32)\n combined = tf.concat([image, label], axis=-1)\n combo_shape = combined.get_shape().as_list()\n combined_resize = tf.squeeze(\n tf.image.resize_bicubic(\n tf.expand_dims(combined, axis=0),\n scale_tf,\n align_corners=True),\n axis=0)\n combined_crop = tf.random_crop(\n combined_resize, tf.concat(\n [model_input_image_size[:2], [combo_shape[-1]]], 0))\n image = combined_crop[:, :, :image_shape[-1]]\n label = combined_crop[:, :, image_shape[-1]:]\n image.set_shape(model_input_image_size)\n label.set_shape(\n model_input_image_size[:2] + [\n combo_shape[-1] - model_input_image_size[-1]])\n if aug == 'rc_res' and im_size_check:\n image = random_crop(image, model_input_image_size)\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n ms = [x // 2 for x in model_input_image_size]\n image = resize_image_label(\n im=image,\n model_input_image_size=ms,\n f='bicubic')\n print 'Applying random crop and resize.'\n if aug == 'cc_res' and im_size_check:\n image = center_crop(image, model_input_image_size)\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n ms = [x // 2 for x in model_input_image_size]\n image = resize_image_label(\n im=image,\n model_input_image_size=ms,\n f='bicubic')\n print 'Applying center crop and resize.'\n if aug == 'random_crop' and im_size_check:\n image = random_crop(image, model_input_image_size)\n print 'Applying random crop.'\n if aug == 'center_crop' and im_size_check:\n image = center_crop(image, model_input_image_size)\n print 'Applying center crop.'\n if aug == 
'rc_image_label' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image, label = crop_image_label(\n image=image,\n label=label,\n size=model_input_image_size,\n crop='random')\n if aug == 'cc_image_label' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image, label = crop_image_label(\n image=image,\n label=label,\n size=model_input_image_size,\n crop='center')\n if aug == 'resize' and im_size_check:\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n image = resize_image_label(\n im=image,\n model_input_image_size=model_input_image_size,\n f='bicubic')\n print 'Applying area resize.'\n if aug == 'jk_resize' and im_size_check:\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n image = tf.image.resize_image_with_crop_or_pad(\n image,\n model_input_image_size[0],\n model_input_image_size[1])\n print 'Applying area resize.'\n if aug == 'random_crop_and_res_cube_plus' and im_size_check:\n im_shape = image.get_shape().as_list()\n im_shape[0] /= 4\n im_shape[1] /= 4\n image = resize_image_label(\n im=image,\n model_input_image_size=im_shape[:2],\n f='bicubic')\n image = random_crop(image, model_input_image_size)\n if aug == 'center_crop_and_res_cube_plus' and im_size_check:\n im_shape = image.get_shape().as_list()\n im_shape[0] /= 4\n im_shape[1] /= 4\n image = resize_image_label(\n im=image,\n model_input_image_size=im_shape[:2],\n f='bicubic')\n image = center_crop(image, model_input_image_size)\n if aug == 'res_and_crop' and im_size_check:\n model_input_image_size_1 = np.asarray(\n model_input_image_size[:2]) + 28\n image = resize_image_label(\n im=image,\n model_input_image_size=model_input_image_size_1,\n f='area')\n image = center_crop(image, model_input_image_size)\n print 'Applying area resize.'\n if aug == 'res_nn' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n image = resize_image_label(\n im=image,\n model_input_image_size=model_input_image_size,\n f='nearest')\n print 'Applying nearest resize.'\n if aug == 'res_image_label' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n image = resize_image_label(\n im=image,\n model_input_image_size=model_input_image_size,\n f='bicubic')\n label = resize_image_label(\n im=label,\n model_input_image_size=model_input_image_size,\n f='bicubic')\n print 'Applying bilinear resize.'\n if aug == 'res_nn_image_label' and im_size_check:\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n if len(model_input_image_size) > 2:\n model_input_image_size = model_input_image_size[:2]\n image = resize_image_label(\n im=image,\n model_input_image_size=model_input_image_size,\n f='nearest')\n label = resize_image_label(\n im=label,\n model_input_image_size=model_input_image_size,\n f='nearest')\n print 'Applying nearest resize.'\n if aug == 'left_right':\n image = image_flip(image, direction='left_right')\n print 'Applying random flip left-right.'\n if aug == 'up_down':\n image = image_flip(image, direction='up_down')\n print 'Applying random flip up-down.'\n if aug == 'lr_viz_flip':\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image, label = lr_viz_flip(image, label)\n image, label = ud_viz_flip(image, label)\n if aug == 
'lr_flip_image_label':\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image, label = lr_flip_image_label(image, label)\n if aug == 'ud_flip_image_label':\n assert len(image.get_shape()) == 3, '4D not implemented yet.'\n image, label = ud_flip_image_label(image, label)\n if aug == 'gratings_modulate':\n modulate = 10\n image //= modulate\n offset = (255 / 2) - ((255 / modulate) / 2)\n image += offset\n if aug == 'gaussian_noise':\n im_shape = image.get_shape().as_list()\n assert len(im_shape) == 3, '4D not implemented yet.'\n sigma = 1. / 10.\n mu = 0.\n image = image + tf.random_normal(\n im_shape,\n mean=mu,\n stddev=sigma)\n print 'Applying gaussian noise.'\n if aug == 'gaussian_noise_small':\n im_shape = image.get_shape().as_list()\n assert len(im_shape) == 3, '4D not implemented yet.'\n sigma = 1. / 20.\n mu = 0.\n image = image + tf.random_normal(\n im_shape,\n mean=mu,\n stddev=sigma)\n print 'Applying gaussian noise.'\n if aug == 'mixup':\n raise RuntimeError('Mixup not properly implemented yet.')\n alpha = 0.4\n dist = tf.distributions.Beta(alpha, alpha)\n image = image * dist + (1 - dist) * tf.roll(image, 0, 1)\n label = label * dist + (1 - dist) * tf.roll(label, 0, 1)\n if aug == 'hed_brightness':\n image = tf.image.random_brightness(image, 63)\n if aug == 'hed_contrast':\n image = tf.image.random_contrast(image, lower=0.4, upper=1.5)\n if aug == 'blur_labels':\n label = tf_blur(\n image=label,\n kernel_size=3, # extent\n name='label_blur',\n normalize=True,\n sigma=1.)\n if aug == 'calculate_rate_time_crop':\n im_shape = image.get_shape().as_list()\n minval = im_shape[0] // 3\n time_crop = tf.random_uniform(\n [],\n minval=minval,\n maxval=im_shape[0],\n dtype=tf.int32)\n\n # For now always pull from the beginning\n indices = tf.range(0, time_crop, dtype=tf.int32)\n selected_image = tf.gather(image, indices)\n padded_image = tf.zeros(\n [im_shape[0] - time_crop] + im_shape[1:],\n dtype=selected_image.dtype)\n\n # Randomly concatenate pad to front or back\n image = tf.cond(\n pred=tf.greater(\n tf.random_uniform(\n [],\n minval=0,\n maxval=1,\n dtype=tf.float32),\n 0.5),\n true_fn=lambda: tf.concat(\n [selected_image, padded_image], axis=0),\n false_fn=lambda: tf.concat(\n [padded_image, selected_image], axis=0)\n )\n image.set_shape(im_shape)\n\n # Convert label to rate\n label = label / im_shape[0]\n if aug == 'calculate_rate':\n label = label / image.get_shape().as_list()[0]\n print 'Applying rate transformation.'\n if aug == 'threshold':\n image = tf.cast(tf.greater(image, 0.1), tf.float32)\n print 'Applying threshold.'\n if aug == 'nonzero_label':\n label = tf.cast(tf.greater(label, 0.2), tf.float32)\n print 'Applying threshold.'\n if aug == 'zero_one':\n image = tf.minimum(tf.maximum(image, 0.), 1.)\n print 'Applying threshold.'\n if aug == 'timestep_duplication':\n image = tf.stack([image for iid in range(7)])\n print 'Applying timestep duplication.'\n if aug == 'per_image_standardization':\n image = tf.image.per_image_standardization(image)\n print 'Applying per-image zscore.'\n if aug == 'flip_image_polarity':\n image = tf.abs(image - 1.)\n if aug == 'flip_label_polarity':\n label = tf.abs(label - 1.)\n if aug == 'NCHW':\n image = tf.transpose(image, (2, 0, 1))\n if aug == 'bfloat16_image':\n image = tf.cast(image, tf.bfloat16)\n if aug == 'bfloat16_label':\n label = tf.cast(label, tf.bfloat16)\n if aug == 'hfloat16_image':\n image = tf.cast(image, tf.float16)\n if aug == 'hfloat16_label':\n label = tf.cast(label, tf.float16)\n if aug == 
'threshold_label':\n label = tf.cast(tf.greater(label, 0.999), tf.float32)\n print 'Applying threshold of 0.999 to the label.'\n if aug == 'threshold_label_255':\n # cABC label = tf.cast(tf.greater(label, 200), tf.float32)\n label = tf.cast(tf.greater(label, 10), tf.float32)\n print 'Applying threshold of 127.5 to the label.'\n if aug == 'normalize_label':\n label = tf.cast(label, tf.float32)\n label = label / tf.reduce_max(label) # tf.cast(tf.greater(label, 25), tf.float32)\n print 'Normalizing label to [0, 1].'\n if aug == 'scale_to_255':\n image = image * 255.\n if aug == 'clip_255':\n image = tf.maximum(tf.minimum(255., image), 0.)\n # else:\n # assert len(image.get_shape()) == 3, '4D not implemented yet.'\n # image = tf.image.resize_image_with_crop_or_pad(\n # image, model_input_image_size[0], model_input_image_size[1])\n return image, label", "def preprocess_image(self, inputs):\n return utils.preprocess_image(inputs, mode='custom_tf')", "def call(self, inputs):\n squash = utils.BatchSquash(len(inputs.shape) - 2)\n\n # Reshape into single batch dim.\n result = squash.flatten(inputs)\n for layer in self._cnn_layers:\n result = layer(result)\n\n # Restore squashed batch dimensions.\n return squash.unflatten(result)", "def __call__(self, results):\n for key in results.get('img_fields', ['img']):\n results[key] = general_ocr.imnormalize(results[key], self.mean, self.std,\n self.to_rgb)\n results['img_norm_cfg'] = dict(\n mean=self.mean, std=self.std, to_rgb=self.to_rgb)\n return results", "def norm_input(image, label):\n cropped_image = tf.image.resize_image_with_crop_or_pad(image, FLAGS.image_size, FLAGS.image_size)\n\n norm_image = tf.image.per_image_standardization(cropped_image)\n\n return norm_image, label", "def Batch_Preprocessing(batch, word2idx, batch_size, batch_first=True, pad_token=\"<pad>\"):\n pad_token = word2idx[pad_token]\n batch_idx = Batch2Idx(batch, word2idx)\n\n batch_idx.sort(key = lambda s: len(s), reverse=True) # sort the batch in the descending order\n\n batch_len = Batch_Length_Calculator(batch_idx)\n batch_idx = Batch_Size_Normalization(batch_idx, batch_len, pad_token, batch_size)\n padded_batch = Batch_Padding(batch_idx, batch_first=batch_first, pad_token=pad_token)\n return padded_batch, batch_len", "def normalize(img):\n # TODO: implement this function.\n min_img = min([min(i) for i in img])\n max_img = max([max(i) for i in img])\n\n for i in range(len(img)):\n \tfor j in range(len(img[0])):\n \t\timg[i][j] = ((img[i][j] - min_img) / (max_img - min_img))\n #raise NotImplementedError\n return img", "def preprocess(self, img):\n return img - np.mean(img)" ]
[ "0.7097169", "0.66408646", "0.65812963", "0.6465365", "0.6464898", "0.6406874", "0.63755596", "0.63661265", "0.6324502", "0.6318984", "0.63017005", "0.6297598", "0.6261444", "0.62284863", "0.6226007", "0.6213169", "0.6194187", "0.6189247", "0.61866045", "0.6185736", "0.6183467", "0.6160369", "0.6142805", "0.6132306", "0.61175466", "0.61103964", "0.61103964", "0.609539", "0.60671926", "0.6028339", "0.60206604", "0.60205185", "0.6017841", "0.601765", "0.6010428", "0.59826297", "0.5980465", "0.59593976", "0.59489226", "0.59444714", "0.5937271", "0.5929325", "0.5925083", "0.5912933", "0.5901047", "0.5898357", "0.58935285", "0.5872156", "0.5871004", "0.5870802", "0.5867303", "0.58653903", "0.58647025", "0.58594", "0.5857643", "0.5854025", "0.5852768", "0.58403504", "0.5837714", "0.5827739", "0.58226585", "0.5821461", "0.58210945", "0.58199334", "0.5817141", "0.58039856", "0.57974565", "0.5790613", "0.57853377", "0.57801163", "0.57773453", "0.57706624", "0.5768793", "0.5757613", "0.5749063", "0.574674", "0.57463986", "0.574241", "0.5739295", "0.573714", "0.57309353", "0.5730093", "0.5727306", "0.5725084", "0.57211393", "0.57185066", "0.5716352", "0.5703853", "0.56950784", "0.5691718", "0.56865036", "0.56834877", "0.5682258", "0.56813717", "0.5676079", "0.56726605", "0.5672005", "0.566925", "0.56687266", "0.56662726" ]
0.67390656
1
Read a key file; if the key file does not exist, create one
def keys(self) -> None:
    path = Path('./config/key')
    global key
    # If the file path does not exist, create one
    if not path.exists():
        os.makedirs(path)
    while True:
        # read key.key file
        try:
            file = open(path / 'key.key', 'rb')
            key = file.read()
            file.close
        # when key.key file does not exist. Create one
        except FileNotFoundError:
            key = Fernet.generate_key()
            file = open(path / 'key.key', 'wb')
            file.write(key)
            file.close()
            continue
        break
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_key(key_name):\n if not p.exists(key_name):\n write_key(key_name)\n\n return open(key_name, \"rb\").read()", "def read_key(self, keyfile_name):\n\n with open(keyfile_name, 'rb') as f:\n self.key = f.read()\n self.cryptor = Fernet(self.key)", "def readKey(self, keyPath):\n\t\ttry:", "def load_key():\r\n\r\n key_dir = os.path.join(os.path.dirname(__file__), \"resources/key\")\r\n\r\n try:\r\n return open(key_dir, \"rb\").read()\r\n except:\r\n return None", "def test_create_keypair_from_file(self):\n keys = RSA.generate(1024)\n nova_utils.save_keys_to_files(keys=keys, pub_file_path=pub_file_path)\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name,\n public_filepath=pub_file_path))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)\n\n file_key = open(os.path.expanduser(pub_file_path)).read()\n self.assertEquals(self.keypair_creator.keypair.public_key, file_key)", "def load_or_create_client_key(key_file):\n # this is based on txacme.endpoint.load_or_create_client_key, but doesn't\n # hardcode the 'client.key' filename\n acme_key_file = FilePath(key_file)\n if acme_key_file.exists():\n logger.info(\"Loading ACME account key from '%s'\", acme_key_file)\n key = serialization.load_pem_private_key(\n acme_key_file.getContent(), password=None, backend=default_backend()\n )\n else:\n logger.info(\"Saving new ACME account key to '%s'\", acme_key_file)\n key = generate_private_key(\"rsa\")\n acme_key_file.setContent(\n key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n )\n return JWKRSA(key=key)", "def read_key():\n path = os.path.join(os.path.dirname(__file__), 'data')\n f = open(os.path.join(path, 'credential.txt'), 'r')\n key = f.read()\n f.close()\n return key", "def load_key():\n return open(\"Secret.key\",\"rb\").read()", "def load_device_key(self, filename):\n pass", "def load_key():\n return open(\"pass.key\", \"rb\").read()", "def load_key():\n return open(\"secret.key\", \"rb\").read()", "def randomKeyFile(file_name):\n\twith open(file_name, \"w\") as kfile:\n\t\tkey = stringGen(256)\n\t\tkfile.write(key)\n\t\tkfile.close()", "def getKey(filename):\n try:\n fh = open(filename, 'rb')\n except IOError:\n logging.debug(\"getKey(): Creating new secret key.\")\n key = OpenSSL.rand.bytes(32)\n writeKeyToFile(key, filename)\n else:\n logging.debug(\"getKey(): Secret key file found. Loading...\")\n key = fh.read()\n fh.close()\n return key", "def load_key(self):\n\t return open(\"key.key\", \"rb\").read()", "def _load_key(self, path):\n with open(path, 'r') as f:\n self._key = f.readline().strip()\n self._secret = f.readline().strip()", "def CreateKeyFile():\n keyfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'genrsa',\n '-out', keyfile,\n '2048'\n ]\n _RunCommand(cmd)\n return keyfile", "def _search_for_key_file(path_to_key_file):\n\n return True if os.path.exists(path_to_key_file) else False", "def test_getKey_keyexists(self):\n filename = self.mktemp()\n with open(filename, 'wb') as fh:\n fh.write(SEKRIT_KEY)\n fh.flush()\n\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! 
type=%r\" % type(key))\n self.assertEqual(SEKRIT_KEY, key,\n \"\"\"The example key and the one read from file differ!\n key (in hex): %s\n SEKRIT_KEY (in hex): %s\"\"\"\n % (key.encode('hex'), SEKRIT_KEY.encode('hex')))", "def open(self, key):\n try:\n return open(self._filename(key), \"rb\")\n except FileNotFoundError:\n raise KeyError(key)", "def generate_key():\r\n # generating key\r\n key = Fernet.generate_key()\r\n\r\n key_dir = os.path.join(os.path.dirname(__file__), \"resources/key\")\r\n\r\n # writing key in file\r\n with open(key_dir, \"wb\") as keyFile:\r\n keyFile.write(key)", "def load_key():", "def import_key(self, filename):\n fields = self.input_file(filename)\n\n if (\"Description\" not in fields or \"Method\" not in fields or\n \"Key length\" not in fields or \n \"Secret key\" not in fields or\n fields[\"Method\"] != \"AES\"):\n raise Exception(\"Error reading AES key file.\")\n # print (fields)\n key = fields['Secret key']\n key = binascii.unhexlify(key)\n key_len = int(fields[\"Key length\"], 16)\n if len(key) != key_len:\n raise Exception(\"AES key file contains false information.\")\n \n return key", "def new_key(self, key_name=None, key_type=Key.KEY_REGULAR_FILE):\r\n if key_name == '-':\r\n return Key(self.name, '-', key_type=Key.KEY_STREAM_WRITABLE)\r\n else:\r\n dir_name = os.path.dirname(key_name)\r\n if dir_name and not os.path.exists(dir_name):\r\n os.makedirs(dir_name)\r\n fp = open(key_name, 'wb')\r\n return Key(self.name, key_name, fp)", "def load_Fernet_key(filename):\n\tfich = open(str(filename) +'.key', 'rb')\n\tkey = fich.read() # The key will be type bytes\n\tfich.close()\n\treturn key", "def create_file(self, key=None):\n self.make_directory()\n open(self.file_path(key), 'w').close()", "def create_keypair(key_name):\n if os.path.isfile(SSH_FOLDER + key_name + \".pem\"):\n return # Key already created\n ec2 = boto.ec2.connect_to_region(AWS_REGION)\n key = ec2.create_key_pair(key_name)\n key.save(SSH_FOLDER)", "def setup_keys():\n if os.path.isfile(\"key.txt\"):\n message = \"Key already generated\"\n else:\n secret = secrets.token_urlsafe(64)\n message = \"Secret generated and saved in key.txt\"\n with open(\"key.txt\", \"w\") as fd:\n fd.write(secret)\n return json.dumps({'message': message})", "def create_key(self):\r\n dialog = QtWidgets.QFileDialog(self)\r\n dialog.setFileMode(QtWidgets.QFileDialog.AnyFile)\r\n dialog.setNameFilter(\"Any files (*.key)\")\r\n if dialog.exec_():\r\n key_file = dialog.selectedFiles()[0]\r\n self.encryptor.generate_key_file(\"{}.key\".format(key_file))\r\n QtWidgets.QMessageBox.information(self, \"Key File Generation\", \r\n (\"Your key file has been successfully generated.\\n\\n\"\r\n \"You can load it to encrypt / decrypt.\"))", "def new_private_key(self):\n option = 'new_private_key'\n _file = self.__get_option(option)\n\n if _file and not os.path.exists(_file) and not os.path.isfile(_file):\n self.log.error(\"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n raise ConfigError('File Error', \"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n else:\n return None", "def _create_external_keypair():\n\n if not utils.use_external_resource(ctx.node.properties):\n return False\n\n key_pair_name = ctx.node.properties['resource_id']\n key_pair_in_account = _get_key_pair_by_id(key_pair_name)\n key_path_in_filesystem = _get_path_to_key_file()\n ctx.logger.debug(\n 'Path to key file: {0}.'.format(key_path_in_filesystem))\n if not key_pair_in_account:\n raise 
NonRecoverableError(\n 'External resource, but the key pair is not in the account.')\n if not _search_for_key_file(key_path_in_filesystem):\n raise NonRecoverableError(\n 'External resource, but the key file does not exist.')\n utils.set_external_resource_id(key_pair_name, ctx.instance)\n return True", "def get_access_key(self, keyfile):\n my_key = AccessKey.create_key_from_file(keyfile)\n my_key.store_keys()\n return my_key.key", "def verify_key_data_exists(key, file_name):\n try:\n with open(file_name, 'r') as file:\n lines = file.readlines()\n for line in lines:\n row = [r.strip() for r in line.split(',')]\n if row[0] == key:\n # row[3] has file name\n with open(row[3], 'r') as rfile:\n if rfile.read():\n return True\n return False\n except Exception as file_error:\n raise file_error", "def generate_key():\n key = Fernet.generate_key()\n with open(\"Secret.key\",\"wb\")as key_file:\n key_file.write(key)", "def create_api_key(filename):\n if filename is not None:\n with open(filename) as json_data:\n try:\n params = json.load(json_data)\n except ValueError as error:\n sys.stderr.write(error.message + '\\n')\n sys.exit(1)\n\n api.create(params)\n else:\n click.echo('Example usage: lecli create apikey path_to_file.json')", "def generate_keyfile(csrf_key, session_key):\n output = file_template.safe_substitute(dict(\n csrf_key=csrf_key, session_key=session_key\n ))\n if os.path.exists(file_name):\n if options.force is None:\n print \"Warning: secret_keys.py file exists. Use '-f' flag to force overwrite.\"\n else:\n write_file(output)\n else:\n write_file(output)", "def __loadKey(self, key_image_file_name: str):\n # get the key name from the file name e.g. ${key_name}.png\n key_name = key_image_file_name.split('.')[0]\n\n self.maple_logger.debug(\"Loading key: {0}\", key_name)\n\n self.key_locations[key_name] = self.__getKeyLocation(key_image_file_name)", "def writeKeyToFile(key, filename):\n logging.info(\"Writing key to file: %r\" % filename)\n flags = os.O_WRONLY | os.O_TRUNC | os.O_CREAT | getattr(os, \"O_BIN\", 0)\n fd = os.open(filename, flags, 0400)\n os.write(fd, key)\n os.fsync(fd)\n os.close(fd)", "def read_keypair(priv_key_file, public_key_file):\n key_pair = {}\n with open(priv_key_file) as f:\n key_data = f.read()\n f.close()\n key_pair[\"key\"] = key_data\n with open(public_key_file) as f:\n pub_data = f.read()\n f.close()\n key_pair[\"pub\"] = pub_data\n for i in [priv_key_file, public_key_file]:\n os.remove(i)\n return key_pair", "def generate_key():\n key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(key)", "def generate_key():\n key = Fernet.generate_key()\n with open(\"pass.key\", \"wb\") as key_file:\n key_file.write(key)", "def create_key ():", "def create_user_key_file(username: str):\n\n user: User = UserModel().get_user(username=username)\n user_key: Key = user.public_key\n\n public_key: bytes = user_key.public_key\n\n if not os.path.exists(\"./ssh_ca\"):\n os.mkdir(\"./ssh_ca\")\n\n with open(f\"./ssh_ca/{username}.pub\") as public_key_file:\n public_key_file.write(public_key.decode())", "def read_api_key(path):\n path = os.path.abspath(path)\n if not os.path.exists(path):\n raise ValueError(\"no key found at given path: \" + path)\n with open(path) as f:\n return f.readline().strip()", "def gen_keys_old(name):\n d = 'keys'\n if not os.path.isdir(d):\n os.mkdir(d)\n if not os.path.isfile('%s/%s.pem'%(d,name)):\n open('%s/%s.pem'%(d,name),'w').write(Crypto.PublicKey.RSA.generate(1024,os.urandom).exportKey('PEM'))", "async def 
add_key(request: web.Request) -> web.Response:\n if not request.can_read_body:\n return web.json_response({'message': \"Must upload key file\"},\n status=400)\n data = await request.post()\n keyfile = data.get('key')\n if not keyfile:\n return web.json_response(\n {'message': \"No key 'key' in request\"}, status=400)\n\n add_key_result = wifi.add_key(keyfile.filename, keyfile.file.read())\n\n response_body = {\n 'uri': '/wifi/keys/{}'.format(add_key_result.key.directory),\n 'id': add_key_result.key.directory,\n 'name': os.path.basename(add_key_result.key.file)\n }\n if add_key_result.created:\n return web.json_response(response_body, status=201)\n else:\n response_body['message'] = 'Key file already present'\n return web.json_response(response_body, status=200)", "def test_rcreate_refuse_to_overwrite_keyfile(self):\n keyfile = os.path.join(self.tmpdir, \"keyfile\")\n with environment_variable(BORG_KEY_FILE=keyfile):\n self.cmd(f\"--repo={self.repository_location}0\", \"rcreate\", KF_ENCRYPTION)\n with open(keyfile) as file:\n before = file.read()\n arg = (f\"--repo={self.repository_location}1\", \"rcreate\", KF_ENCRYPTION)\n if self.FORK_DEFAULT:\n self.cmd(*arg, exit_code=2)\n else:\n with pytest.raises(Error):\n self.cmd(*arg)\n with open(keyfile) as file:\n after = file.read()\n assert before == after", "def create_key(key_name, save_path, region=None, key=None, keyid=None, profile=None):\n conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)\n\n try:\n key = conn.create_key_pair(key_name)\n log.debug(\"the key to return is : %s\", key)\n key.save(save_path)\n return key.material\n except boto.exception.BotoServerError as e:\n log.debug(e)\n return False", "def _delete_key_file():\n\n key_path = _get_path_to_key_file()\n\n if _search_for_key_file(key_path):\n try:\n os.remove(key_path)\n except OSError as e:\n raise NonRecoverableError(\n 'Unable to delete key pair: {0}.'\n .format(str(e)))", "def read_key(path_to: str) -> str:\n m_type, _ = guess_type(path_to)\n if m_type == types_map['.txt']:\n with open(path_to, 'r') as api_token_file:\n return api_token_file.read().strip()\n\n else:\n return path_to", "def test_getKey_tmpfile(self):\n filename = self.mktemp()\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! 
type=%r\" % type(key))", "def create_keypair(econfig_file=None, region=None, keyname=\"bcbio\"):\n import boto\n import boto.ec2\n if econfig_file:\n keypair_dir = os.path.dirname(econfig_file).replace(\"elasticluster\", \"aws_keypairs\")\n else:\n keypair_dir = os.path.join(os.getcwd(), \"aws_keypairs\")\n if not os.path.exists(keypair_dir):\n os.makedirs(keypair_dir)\n private_key = os.path.join(os.path.join(keypair_dir, keyname))\n new_key = not os.path.exists(private_key)\n if new_key:\n cmd = [\"ssh-keygen\", \"-t\", \"rsa\", \"-N\", \"\", \"-f\", private_key, \"-C\", \"bcbio_aws_keypair\"]\n subprocess.check_call(cmd)\n public_key = private_key + \".pub\"\n if region:\n ec2 = boto.ec2.connect_to_region(region)\n else:\n ec2 = boto.connect_ec2()\n key = ec2.get_key_pair(keyname)\n if key and new_key:\n print(\"Non matching key %s found in AWS, removing.\" % keyname)\n ec2.delete_key_pair(keyname)\n key = None\n if not key:\n print(\"Key %s not found in AWS, importing created key\" % keyname)\n with open(public_key) as in_handle:\n body = in_handle.read()\n try:\n ec2.import_key_pair(keyname, body)\n except TypeError as e:\n body = body.encode('utf-8')\n ec2.import_key_pair(keyname, body)\n return {\"user_key_name\": keyname, \"user_key_private\": private_key,\n \"user_key_public\": public_key}", "def load_key(fn, psw=None):\n if not fn:\n die(\"Need private key\")\n if psw:\n psw = as_bytes(psw)\n data = load_gpg_file(fn)\n key = load_pem_private_key(data, password=psw, backend=get_backend())\n return key", "def write_key():\n key = fernet.Fernet.generate_key()\n keyfile = open(KEY_PATH,'wb')\n keyfile.write(key)\n keyfile.close()", "def write_key(key_name):\n key = Fernet.generate_key()\n with open(key_name, \"wb\") as key_file:\n key_file.write(key)", "def loadKeys( ):\n\n log.debug(\"Reading session ticket keys from file.\")\n\n global HMACKey\n global AESKey\n global creationTime\n\n if not os.path.exists(const.DATA_DIRECTORY + const.KEY_STORE):\n rotateKeys()\n return\n\n try:\n with open(const.DATA_DIRECTORY + const.KEY_STORE, \"rb\") as fd:\n creationTime, HMACKey, AESKey = pickle.load(fd)\n fd.close()\n except IOError as e:\n log.error(\"Error opening ticket key file: %s.\" % e)", "def __init__(self, file_name, key):\n try:\n self._file_name = file_name\n self._encryptor = AES(key.encode())\n self._document = open(self._file_name, \"rb+\")\n except Exception as error:\n print(error)\n sys.exit(1)", "def create_keypair(address_type, addresses_path, address_prefix, name):\n vkey_file = get_vkey_file(addresses_path, address_prefix, name)\n skey_file = get_skey_file(addresses_path, address_prefix, name)\n\n if(path.exists(vkey_file)) :\n print(address_prefix, \"key pair already exists for\", name)\n return\n \n makedirs(path.dirname(vkey_file), mode=0o777, exist_ok=True)\n\n run_params = ['cardano-cli', address_type, 'key-gen', '--verification-key-file', vkey_file, '--signing-key-file', skey_file]\n subprocess_run(run_params, capture_output=False, text=True)\n return", "def test_get_read_fail(self):\n file_handler = open(self.test_key_filename, 'w')\n file_handler.write('Mock corrupt data')\n file_handler.close()\n os.chmod(self.test_key_filename, 000)\n\n self.assertRaises(IOError, self.key_gen.get)", "def init_key(key_size, key_dir):\n try:\n key_pem = crypto_util.make_key(key_size)\n except ValueError as err:\n logging.fatal(str(err))\n sys.exit(1)\n\n # Save file\n le_util.make_or_verify_dir(key_dir, 0o700)\n key_f, key_filename = le_util.unique_file(\n os.path.join(key_dir, 
\"key-letsencrypt.pem\"), 0o600)\n key_f.write(key_pem)\n key_f.close()\n\n logging.info(\"Generating key (%d bits): %s\", key_size, key_filename)\n\n return le_util.Key(key_filename, key_pem)", "def tmp_key(filename):\n return TMP_PREFIX + filename", "def get_key_data_filepath():\n global key_filepath, directory\n filename = 'key.csv'\n key_filepath = os.path.join(directory, filename)", "def ifExist(file_name, key):\n\tif exists(file_name) and exists(key):\n\t\treturn True\n\telse:\n\t\treturn False", "def new_public_key(self):\n\n option = 'new_public_key'\n _file = self.__get_option(option)\n\n if _file and not os.path.exists(_file) and not os.path.isfile(_file):\n self.log.error(\"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n raise ConfigError('File Error', \"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n else:\n return None", "def store_Fernet_key(key,filename):\n\tfich = open(str(filename) + '.key', 'wb')\n\tfich.write(key) # The key is type bytes still\n\tfich.close()", "def get_key(self, key_name, headers=None, version_id=None,\r\n key_type=Key.KEY_REGULAR_FILE):\r\n if key_name == '-':\r\n return Key(self.name, '-', key_type=Key.KEY_STREAM_READABLE)\r\n else:\r\n fp = open(key_name, 'rb')\r\n return Key(self.name, key_name, fp)", "def get_creds_file(self):\n filename = self.filename\n\n home = str(Path.home())\n filepath = home + os.sep + filename\n self.path = filepath\n if not os.path.isfile(filepath):\n return False\n\n j = json.load(open(filepath))\n self.keys = j\n return j", "def file_key(filename):\n return FILE_PREFIX + filename", "def test_create_and_import_encrypted_rsa(self):\n name = \"key_encrypted\"\n password = \"123456\"\n bits= 3072\n generate_and_write_rsa_keypair(name, bits, password)\n private_key = import_rsa_key_from_file(name, password)\n public_key = import_rsa_key_from_file(name + \".pub\")\n\n securesystemslib.formats.KEY_SCHEMA.check_match(private_key)\n self.assertTrue(private_key[\"keyval\"].get(\"private\"))\n self.assertTrue(\n securesystemslib.formats.PUBLIC_KEY_SCHEMA.matches(public_key))", "def get_key_from_file():\n json_data = request.get_json()\n \n is_reference = json_data['is_reference']\n filename = json_data['filename']\n key_name = json_data['key_name']\n\n \n settings.setOptionsFile(get_info('uid'))\n f = ROOT.TFile(filename)\n\n d = eval(cppyy.gbl.getDictionary(f,key_name))\n \n f.Close()\n return jsonify(d)", "def read_keyname(self):\n self.show(f'cat {self.keyname_file}')\n with open(self.keyname_file) as f:\n keyname = f.readline().strip()\n self.report('Using key:', keyname)\n return keyname", "def read_standard_file(pickle_file_name):\n\n pickle_file_handle = open(pickle_file_name, 'rb')\n saliency_dict = pickle.load(pickle_file_handle)\n pickle_file_handle.close()\n\n missing_keys = list(set(STANDARD_FILE_KEYS) - set(saliency_dict.keys()))\n if len(missing_keys) == 0:\n return saliency_dict\n\n error_string = (\n '\\n{0:s}\\nKeys listed above were expected, but not found, in file '\n '\"{1:s}\".'\n ).format(str(missing_keys), pickle_file_name)\n\n raise ValueError(error_string)", "def _handle_existing_agent_key_path(restored_key_path,\n db_key_path):\n with open(db_key_path) as key_file:\n content_1 = key_file.read()\n with open(restored_key_path) as key_file:\n content_2 = key_file.read()\n if content_1 != content_2:\n raise NonRecoverableError(\n 'Agent key path already taken: {0}'.format(db_key_path)\n )\n ctx.logger.debug('Agent key path already exist: '\n 
'{0}'.format(db_key_path))", "def write_key(self, keyfile_name):\n\n print(self.key)\n with open(keyfile_name, 'wb') as f:\n f.write(self.key)", "def install_secret_key(app, filename='secret_key'):\n filename = os.path.join(app.instance_path, filename)\n\n try:\n app.config['SECRET_KEY'] = open(filename, 'rb').read()\n except IOError:\n print('Error: No secret key. Create it with:')\n full_path = os.path.dirname(filename)\n if not os.path.isdir(full_path):\n print('mkdir -p {filename}'.format(filename=full_path))\n print('head -c 24 /dev/urandom > {filename}'.format(filename=filename))\n sys.exit(1)", "def test_create_keypair_save_both(self):\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name,\n public_filepath=pub_file_path,\n private_filepath=priv_file_path))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)\n\n file_key = open(os.path.expanduser(pub_file_path)).read()\n self.assertEquals(self.keypair_creator.keypair.public_key, file_key)\n\n self.assertTrue(os.path.isfile(priv_file_path))", "def test_set_key_filename_missing(self):\n command_line = self._MENU + [self._KEYNAME, \"--keyfile-path\", \"/bogus\"]\n self.check_error(StratisCliKeyfileNotFoundError, command_line, _ERROR)", "def read_in_xforce_keys(file):\n key = file.readline().strip()\n password = file.readline().strip()\n if validate_api_creds(key) and validate_api_creds(password):\n return key, password\n else:\n print(\"API credentials invalid. Please check your key and password. Exiting...\")\n sys.exit(1)", "def get_or_create_dmcrypt_key(\n _uuid,\n key_dir,\n ):\n path = os.path.join(key_dir, _uuid)\n\n # already have it?\n if os.path.exists(path):\n return path\n\n # make a new key\n try:\n if not os.path.exists(key_dir):\n os.makedirs(key_dir, stat.S_IRUSR|stat.S_IWUSR|stat.S_IXUSR)\n with file('/dev/urandom', 'rb') as i:\n key = i.read(256)\n fd = os.open(path, os.O_WRONLY|os.O_CREAT,\n stat.S_IRUSR|stat.S_IWUSR)\n assert os.write(fd, key) == len(key)\n os.close(fd)\n return path\n except:\n raise Error('unable to read or create dm-crypt key', path)", "def test_getKey_nokey(self):\n filename = os.path.join(os.getcwd(), 'sekrit')\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! type=%r\" % type(key))", "def key_file(self):\n return self._get('key_file')", "def create_token(filename):\n\n try:\n os.makedirs(os.path.dirname(filename))\n except Exception:\n pass\n\n sk = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p)\n vk = sk.verifying_key\n if vk is not None:\n line = encode_line(\"signing-key\", sk.to_der(), vk.to_der())\n\n with open(filename, \"w\") as f:\n f.write(line)", "def __set_or_create_key_if_not_exist(self):\n\n # instantiate PKI class:\n pki = PKI(username=self.username, password=self.password)\n\n # load private key into object. 
key is ready to be used to sign already imported\n privkey = pki.load_priv_key()\n\n # if it is an empty list then no key created and saved on username so generate new key\n if not privkey:\n pki.generate_pub_priv_key()\n privkey = pki.load_priv_key()\n\n # set self.privkey to privkey\n self.privkey = privkey", "def main(key_file: Optional[str]) -> None:\n # Generate a new 256-bit private key if no key is specified.\n if not key_file:\n customer_key_bytes = os.urandom(32)\n else:\n with open(key_file, \"rb\") as f:\n customer_key_bytes = f.read()\n\n google_public_key = get_google_public_cert_key()\n wrapped_rsa_key = wrap_rsa_key(google_public_key, customer_key_bytes)\n\n b64_key = base64.b64encode(customer_key_bytes).decode(\"utf-8\")\n\n print(f\"Base-64 encoded private key: {b64_key}\")\n print(f\"Wrapped RSA key: {wrapped_rsa_key.decode('utf-8')}\")", "def sync_keys(keys_path: Path) -> None:\n file_lines = []\n with keys_path.open(\"r\") as fkeys:\n for line in fkeys:\n line = line.strip()\n if not line or line.startswith(\"#\"):\n # Keep comments and empty lines\n file_lines.append(line)\n continue\n\n fields = line.split(\" \")\n if len(fields) < 2:\n raise ValueError(f\"Unexpected line: {line!r}\")\n current_key_id = fields[0]\n email = fields[1]\n raw_key = None\n wkd_url = None\n key_comment = None\n if \"@\" in email:\n email = email.lower()\n\n # Download the key using WKD\n wkd_url = get_wkd_advanced_url(email)\n try:\n with urllib.request.urlopen(wkd_url) as response:\n raw_key = response.read()\n except urllib.error.URLError:\n pass\n else:\n print(f\"Downloaded key for {email} from {wkd_url}\")\n key_comment = wkd_url\n\n # Try the direct method when the advanced one failed\n # Ignore domains which have issues in their configuration\n if raw_key is None and not email.endswith(\"@att.net\"):\n wkd_url = get_wkd_direct_url(email)\n raw_key = None\n try:\n with urllib.request.urlopen(wkd_url) as response:\n raw_key = response.read()\n except urllib.error.URLError:\n pass\n else:\n print(f\"Downloaded key for {email} from {wkd_url}\")\n key_comment = wkd_url\n\n for url in fields[2:]:\n # Check URL, and only keep the first valid key\n with urllib.request.urlopen(url) as response:\n armored_key = response.read()\n try:\n new_raw_key = unarmor_gpg(armored_key)\n except ValueError as exc:\n raise ValueError(f\"Error in {url!r}: {exc}\")\n\n if new_raw_key == b\"\":\n print(f\"Downloaded empty key from {url}\")\n continue\n if raw_key is None:\n raw_key = new_raw_key\n key_comment = url\n print(f\"Downloaded key from {url}\")\n\n # Try using GnuPG directly\n if raw_key is None:\n raw_key = gpg_recv_key(current_key_id)\n key_comment = \"received using GnuPG\"\n\n # Save the key using the key ID\n key_id = get_pgp_key_id(raw_key)\n file_name = email.replace(\"@\", \"_\").replace(\"+\", \"_\") + \"_\" + key_id + \".asc\"\n assert re.match(\n r\"^[A-Za-z][-0-9A-Za-z._]+$\", file_name\n ), f\"Unexpected characters in file name {file_name!r}\"\n print(f\"Saving key for {email!r} in {'all_keys/' + file_name!r}\")\n b64_key = base64.b64encode(raw_key).decode(\"ascii\")\n with (ALL_KEYS_PATH / file_name).open(\"w\") as fkey:\n print(\"-----BEGIN PGP PUBLIC KEY BLOCK-----\", file=fkey)\n print(f\"Comment: {key_comment}\", file=fkey)\n print(\"\", file=fkey)\n for offset in range(0, len(b64_key), 64):\n print(b64_key[offset:offset + 64], file=fkey)\n print(opgp_crc24_b64(raw_key), file=fkey)\n print(\"-----END PGP PUBLIC KEY BLOCK-----\", file=fkey)\n\n # Write the key ID in the file\n 
new_line = f\"0x{key_id} {email}\"\n if len(fields) > 2:\n new_line += \" \" + \" \".join(fields[2:])\n file_lines.append(new_line)\n\n # Refresh the file\n with keys_path.open(\"w\") as fout:\n print(\"\\n\".join(file_lines), file=fout)", "def loadKey (self, filename=\"pub.key\"):\n try:\n key_file = open(filename, \"r\")\n data = key_file.read()\n aux = data.split(\";\")\n self.n = int(aux[0])\n self.n_sq = int(aux[1])\n self.g = int(aux[2])\n except:\n raise Exception(\"could not load key from file: \" + filename)", "def grab_or_generate_secret_key(secret_file_path):\n try:\n secret_key = open(secret_file_path).read().strip()\n except IOError:\n try:\n from random import SystemRandom\n valid_chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'\n secret_key_as_list = [SystemRandom().choice(valid_chars) for i in range(50)]\n secret_key = ''.join(secret_key_as_list)\n secret = file(secret_file_path, 'w')\n secret.write(secret_key)\n secret.close()\n except IOError:\n Exception('Please create a %s file with random characters \\\n to generate your secret key!' % secret_file_path)\n\n return secret_key", "def create_key(name):\n\tinput_data = GPG.gen_key_input(\n\t\tkey_type='RSA',\n\t\tkey_length='1024',\n\t\tname_real='PGP File System',\n\t\tname_comment=create_comment(name),\n\t\tname_email='[email protected]'\n\t)\n\treturn GPG.gen_key(input_data)", "def create_key(name):\n input_data = GPG.gen_key_input(\n key_type='RSA',\n key_length='1024',\n name_real='PGP File System',\n name_comment=create_comment(name),\n name_email='[email protected]'\n )\n return GPG.gen_key(input_data)", "def baca_kunci_rsa():\n filename = ambil_file(['key'])\n if filename.endswith('.key'):\n with open(filename,\"rb\") as f:\n kunci = f.readlines()\n return kunci\n else:\n return False", "def test_create_keypair_save_pub_only(self):\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name,\n public_filepath=pub_file_path))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)\n\n file_key = open(os.path.expanduser(pub_file_path)).read()\n self.assertEquals(self.keypair_creator.keypair.public_key, file_key)", "def get_existing_filename_or_die(self, key) -> str:\n filename = self.get_or_default(key, None)\n if filename is None:\n print(\"Error, '\" + key + \"' is required.\")\n sys.exit(1)\n elif not os.path.isfile(filename):\n print(\"'\" + str(filename) + \"' is not a file.\")\n sys.exit(1)\n else:\n return filename", "def rsa_privatekey_to_file(key,filename):\r\n \r\n if not rsa_is_valid_privatekey(key):\r\n raise ValueError, \"Invalid private key\"\r\n\r\n fileobject = file(filename,\"w\")\r\n fileobject.write(rsa_privatekey_to_string(key))\r\n fileobject.close()", "def open_file():\n fp = open('dictionary.txt', 'r')\n return fp", "def get_credential_storage_custom_key(filename, key_dict,\n warn_on_readonly=True):\n multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)\n key = util.dict_to_tuple_key(key_dict)\n return multistore._get_storage(key)", "def fetch_file(index_file, filename):\n with open(index_file, 'r') as index, open(filename, 'w+') as download:\n print 'Fetching keys from ', KEYSERVER, ' to create ', filename\n fetched_file = ''\n index_length = len(index.readlines())\n index.seek(0) # because python is stupid\n counter = 0\n for key in index.readlines():\n print 
'Fetching key ', counter, ' of ', index_length\n counter = counter + 1\n fetched_file = fetched_file + parse_key(key.rstrip('\\n'))\n print 'All keys have been downloaded'\n download.write(base64.b64decode(fetched_file))\n print 'File has been decoded and saved as ', filename", "def loadKey(self, filename=\"priv.key\"):\n try:\n key_file = open(filename, \"r\")\n data = key_file.read()\n aux = data.split(\";\")\n self.lamb = int(aux[0])\n self.mu = int(aux[1])\n except:\n raise Exception(\"could not load key from file: \" + filename)", "def generate_key(self):\n self.key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(self.key)", "def apikey(self,filename='apikey'):\n f = open(filename)\n line = f.readline()\n f.close()\n return line.strip()", "def _load_key(self):\n try:\n with open(self.gmaps_key_file) as fid:\n key = fid.read().strip()\n except FileNotFoundError:\n logging.warning(\"Failed to load Google Maps API key from '%s' - you \"\n \"will not be able to make new queries to the Google Maps API!\",\n self.gmaps_key_file)\n return None\n return key", "def load_private(file):\n with open(file, \"rb\") as pemfile:\n key = jwk.JWK.from_pem(pemfile.read())\n\n logging.info('Loaded private key from {}'.format(file))\n return key" ]
[ "0.75079066", "0.70167714", "0.6591259", "0.6573006", "0.65389", "0.6534434", "0.6523295", "0.6516931", "0.6431083", "0.64273334", "0.64232206", "0.6402406", "0.6346148", "0.63321847", "0.62482345", "0.62318903", "0.6229613", "0.61933273", "0.6170967", "0.6168514", "0.616238", "0.61296433", "0.61296123", "0.61107147", "0.6102883", "0.6084103", "0.60645616", "0.6044928", "0.60164917", "0.5985161", "0.5984452", "0.58974266", "0.5886547", "0.58664966", "0.58611506", "0.58578426", "0.5853269", "0.58372223", "0.58354306", "0.58171356", "0.5815621", "0.5779573", "0.57757545", "0.57688326", "0.5745626", "0.5732927", "0.57083064", "0.5707542", "0.5699841", "0.56838727", "0.5679736", "0.56737196", "0.5671568", "0.5669389", "0.56672615", "0.5658375", "0.56280816", "0.5619518", "0.56179076", "0.56155634", "0.5615446", "0.56065106", "0.5604134", "0.55855906", "0.55814934", "0.5576545", "0.5543319", "0.55371964", "0.5535697", "0.55315095", "0.55026805", "0.5498447", "0.54953015", "0.54804486", "0.54794556", "0.5457333", "0.54512495", "0.5444464", "0.54418135", "0.5431296", "0.5422626", "0.54193395", "0.54137945", "0.5407118", "0.5405549", "0.53852177", "0.53698486", "0.5367545", "0.5364726", "0.53613865", "0.53606826", "0.5354906", "0.53539383", "0.53524846", "0.5347948", "0.53465056", "0.53344494", "0.53316915", "0.5331669", "0.5322126" ]
0.60398936
28
Ensure that apigateway v1 and apigateway v2 actions are both present in the apigateway namespace
def test_services_with_multiple_pages_apigateway(self): # API Gateway Management V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html self.assertTrue("apigateway:AddCertificateToDomain" in self.all_actions) self.assertTrue("apigateway:RemoveCertificateFromDomain" in self.all_actions) self.assertTrue("apigateway:SetWebACL" in self.all_actions) # API Gateway Management V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html # API Gateway V2 doesn't have any unique actions in but it does have some unique resource types. Let's make sure those resource types are in the IAM Definition. # Resource types unique to API Gateway V2: resource_types = get_arn_types_for_service("apigateway") resource_types = list(resource_types.keys()) self.assertTrue("AccessLogSettings" in resource_types) # Resource types unique to API Gateway V1: self.assertTrue("RestApi" in resource_types)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def test_services_with_multiple_pages_ses(self):\n # SES V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonses.html\n self.assertTrue(\"ses:PutIdentityPolicy\" in self.all_actions)\n # SES V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonsimpleemailservicev2.html\n self.assertTrue(\"ses:ListImportJobs\" in self.all_actions)\n\n results = get_actions_for_service(\"ses\")\n actions = [\n \"ses:CloneReceiptRuleSet\",\n \"ses:CreateConfigurationSetTrackingOptions\",\n \"ses:CreateReceiptFilter\",\n \"ses:CreateReceiptRule\",\n \"ses:CreateReceiptRuleSet\",\n \"ses:CreateTemplate\",\n \"ses:DeleteConfigurationSetTrackingOptions\",\n \"ses:DeleteIdentity\",\n \"ses:DeleteIdentityPolicy\",\n \"ses:DeleteReceiptFilter\",\n \"ses:DeleteReceiptRule\",\n \"ses:DeleteReceiptRuleSet\",\n \"ses:DeleteTemplate\",\n \"ses:DeleteVerifiedEmailAddress\",\n \"ses:DescribeActiveReceiptRuleSet\",\n \"ses:DescribeConfigurationSet\",\n \"ses:DescribeReceiptRule\",\n \"ses:DescribeReceiptRuleSet\",\n \"ses:GetAccountSendingEnabled\",\n \"ses:GetIdentityDkimAttributes\",\n \"ses:GetIdentityMailFromDomainAttributes\",\n \"ses:GetIdentityNotificationAttributes\",\n \"ses:GetIdentityPolicies\",\n \"ses:GetIdentityVerificationAttributes\",\n \"ses:GetSendQuota\",\n \"ses:GetSendStatistics\",\n \"ses:GetTemplate\",\n \"ses:ListIdentities\",\n \"ses:ListIdentityPolicies\",\n \"ses:ListReceiptFilters\",\n \"ses:ListReceiptRuleSets\",\n \"ses:ListTemplates\",\n \"ses:ListVerifiedEmailAddresses\",\n \"ses:PutIdentityPolicy\",\n \"ses:ReorderReceiptRuleSet\",\n \"ses:SendBounce\",\n \"ses:SendBulkTemplatedEmail\",\n \"ses:SendRawEmail\",\n \"ses:SendTemplatedEmail\",\n \"ses:SetActiveReceiptRuleSet\",\n \"ses:SetIdentityDkimEnabled\",\n \"ses:SetIdentityFeedbackForwardingEnabled\",\n \"ses:SetIdentityHeadersInNotificationsEnabled\",\n \"ses:SetIdentityMailFromDomain\",\n \"ses:SetIdentityNotificationTopic\",\n \"ses:SetReceiptRulePosition\",\n \"ses:TestRenderTemplate\",\n \"ses:UpdateAccountSendingEnabled\",\n \"ses:UpdateConfigurationSetReputationMetricsEnabled\",\n \"ses:UpdateConfigurationSetSendingEnabled\",\n \"ses:UpdateConfigurationSetTrackingOptions\",\n \"ses:UpdateReceiptRule\",\n \"ses:UpdateTemplate\",\n \"ses:VerifyDomainDkim\",\n \"ses:VerifyDomainIdentity\",\n \"ses:VerifyEmailAddress\",\n \"ses:VerifyEmailIdentity\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_subscriber_access_for_two_vsg_services(self):", "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. 
This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def test_request_with_two_bundles(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n self.assertTrue(validate_request(request, self.policy))", "def test_services_with_multiple_pages_aws_marketplace(self):\n # Overlap: AWS Marketplace, Marketplace Catalog, and AWS Marketplace Entitlement service, AWS Marketplace Image Building Service, AWS Marketplace Metering Service, AWS Marketplace Private Marketplace, and AWS Marketplace Procurement Systems\n # AWS Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplace.html\n self.assertTrue(\"aws-marketplace:AcceptAgreementApprovalRequest\" in self.all_actions)\n # AWS Marketplace Catalog: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacecatalog.html\n self.assertTrue(\"aws-marketplace:CancelChangeSet\" in self.all_actions)\n # AWS Marketplace Entitlement Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceentitlementservice.html\n self.assertTrue(\"aws-marketplace:GetEntitlements\" in self.all_actions)\n # AWS Marketplace Image Building Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceimagebuildingservice.html\n self.assertTrue(\"aws-marketplace:DescribeBuilds\" in self.all_actions)\n # AWS Marketplace Metering Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacemeteringservice.html\n self.assertTrue(\"aws-marketplace:BatchMeterUsage\" in self.all_actions)\n # AWS Marketplace Private Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprivatemarketplace.html\n self.assertTrue(\"aws-marketplace:AssociateProductsWithPrivateMarketplace\" in self.all_actions)\n # AWS Marketplace Procurement Systems: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprocurementsystemsintegration.html\n self.assertTrue(\"aws-marketplace:DescribeProcurementSystemConfiguration\" in self.all_actions)\n\n results = get_actions_for_service(\"aws-marketplace\")\n actions = [\n \"aws-marketplace:AcceptAgreementApprovalRequest\",\n \"aws-marketplace:BatchMeterUsage\",\n \"aws-marketplace:CancelAgreementRequest\",\n \"aws-marketplace:CancelChangeSet\",\n \"aws-marketplace:CompleteTask\",\n \"aws-marketplace:DescribeAgreement\",\n \"aws-marketplace:DescribeBuilds\",\n \"aws-marketplace:DescribeChangeSet\",\n \"aws-marketplace:DescribeEntity\",\n \"aws-marketplace:DescribeProcurementSystemConfiguration\",\n \"aws-marketplace:DescribeTask\",\n \"aws-marketplace:GetAgreementApprovalRequest\",\n \"aws-marketplace:GetAgreementRequest\",\n \"aws-marketplace:GetAgreementTerms\",\n \"aws-marketplace:GetEntitlements\",\n \"aws-marketplace:ListAgreementApprovalRequests\",\n \"aws-marketplace:ListAgreementRequests\",\n 
\"aws-marketplace:ListBuilds\",\n \"aws-marketplace:ListChangeSets\",\n \"aws-marketplace:ListEntities\",\n \"aws-marketplace:ListTasks\",\n \"aws-marketplace:MeterUsage\",\n \"aws-marketplace:PutProcurementSystemConfiguration\",\n \"aws-marketplace:RegisterUsage\",\n \"aws-marketplace:RejectAgreementApprovalRequest\",\n \"aws-marketplace:ResolveCustomer\",\n \"aws-marketplace:SearchAgreements\",\n \"aws-marketplace:StartBuild\",\n \"aws-marketplace:StartChangeSet\",\n \"aws-marketplace:Subscribe\",\n \"aws-marketplace:Unsubscribe\",\n \"aws-marketplace:UpdateAgreementApprovalRequest\",\n \"aws-marketplace:UpdateTask\",\n \"aws-marketplace:ViewSubscriptions\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_subscriber_access_if_vsg2_goes_down(self):", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def test_aws_service_api_validate_subscription_post(self):\n pass", "def lambda_handler(event, context):\n set_logging(level=logging.DEBUG)\n\n try:\n payload = json.loads(event[\"Records\"][0][\"Sns\"][\"Message\"])\n account_id = payload['account_id']\n account_name = payload['account_name']\n # get the last region from the list to process\n region = payload['regions'].pop()\n # region = payload['region']\n # if request_id is present in payload, it means this lambda was called from the API\n request_id = payload.get('request_id', None)\n except Exception:\n logging.exception(f\"Failed to parse event\\n{event}\")\n return\n\n try:\n config = Config()\n\n main_account = Account(region=config.aws.region)\n ddb_table = main_account.resource(\"dynamodb\").Table(config.sqspolicy.ddb_table_name)\n\n account = Account(id=account_id,\n name=account_name,\n region=region,\n role_name=config.aws.role_name_identification)\n if account.session is None:\n return\n\n logging.debug(f\"Checking for public SQS policies in {account}\")\n\n # existing open issues for account to check if resolved\n open_issues = IssueOperations.get_account_open_issues(ddb_table, account_id, SQSPolicyIssue)\n # make dictionary for fast search by id\n # and filter by current region\n open_issues = {issue.issue_id: issue for issue in open_issues if issue.issue_details.region == region}\n logging.debug(f\"SQS in DDB:\\n{open_issues.keys()}\")\n\n checker = SQSPolicyChecker(account=account)\n if checker.check():\n for queue in checker.queues:\n logging.debug(f\"Checking {queue.name}\")\n if queue.public:\n issue = SQSPolicyIssue(account_id, queue.url)\n issue.issue_details.tags = queue.tags\n issue.issue_details.name = queue.name\n issue.issue_details.region = queue.account.region\n issue.issue_details.policy = queue.policy\n if config.sqspolicy.in_whitelist(account_id, queue.url):\n issue.status = IssueStatus.Whitelisted\n else:\n issue.status = IssueStatus.Open\n logging.debug(f\"Setting {queue.name} status {issue.status}\")\n IssueOperations.update(ddb_table, issue)\n # remove issue id from issues_list_from_db (if exists)\n # as we already checked it\n open_issues.pop(queue.url, None)\n\n logging.debug(f\"SQS in DDB:\\n{open_issues.keys()}\")\n # all other unresolved issues in DDB are for removed/remediated queues\n for issue in open_issues.values():\n IssueOperations.set_status_resolved(ddb_table, 
issue)\n if request_id:\n api_table = main_account.resource(\"dynamodb\").Table(config.api.ddb_table_name)\n DDB.track_progress(api_table, request_id)\n except Exception:\n logging.exception(f\"Failed to check SQS policies for '{account_id} ({account_name})'\")\n return\n\n # push SNS messages until the list with regions to check is empty\n if len(payload['regions']) > 0:\n try:\n Sns.publish(payload[\"sns_arn\"], payload)\n except Exception:\n logging.exception(\"Failed to chain insecure services checking\")\n\n logging.debug(f\"Checked SQS policies for '{account_id} ({account_name})'\")", "def verifyActionCenterRts():\n pass", "def test_connect_post_namespaced_status_webhooks(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def test_replace_namespaced_route_status(self):\n pass", "def check_script(vouts):\n for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:\n verb = BlockchainSpider.decode_op_return(vout['hex'])\n action = Spoolverb.from_verb(verb).action\n if action in Spoolverb.supported_actions:\n return verb\n raise Exception(\"Invalid ascribe transaction\")", "def test_get_snsname_arn_auth_exception_handling(self, aws_res_mock):\n # local imports of code-under-test ensure moto has mocks\n # registered before any possible calls out to AWS\n from awstools.awstools import get_snsname_arn\n\n # create a mock SNS client that returns what we tell it to\n client = boto3.client('sns')\n stub = Stubber(client)\n stub.add_client_error('create_topic', service_error_code='AuthorizationError')\n stub.activate()\n\n\n # since firesim manager code doesn't take clients as method parameters\n # now we mock boto3.client to return our stubbed client\n with patch.object(boto3._get_default_session(), 'client', return_value=client) as mock_session:\n topic_arn = get_snsname_arn()\n\n stub.assert_no_pending_responses()\n topic_arn.should.be.none\n\n # TODO we could mock rootLogger.critical to capture it's calls and args and validate that we're seeing the correct \"nice\" message\n\n # make sure get_snsname_arn() actually called out to get a sns\n # client, otherwise we aren't testing what we think we are\n mock_session.assert_called_once_with('sns')\n\n aws_res_mock.assert_called_once()", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def test_subscriber_access_if_vsg1_goes_down(self):", "def enforce(context, action, target, do_raise=True):\n \"\"\"\n ======================================================================================\n context = <xdrs.context.RequestContext object at 0x6dcf050>\n target = {'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_id': u'91d732b65831491d8bd952b3111e62dd'}\n action = xdrs:get_algorithms\n ======================================================================================\n \"\"\"\n init()\n \n credentials = context.to_dict()\n \"\"\"\n ======================================================================================\n credentials = {'project_name': u'admin', 'user_id': u'91d732b65831491d8bd952b3111e62dd', 'roles': [u'heat_stack_owner', u'_member_', u'admin'], 'timestamp': '2015-03-10T06:48:40.110653', 'auth_token': 
'MIIT9wYJKoZIhvcNAQcCoIIT6DCCE+QCAQExCTAHBgUrDgMCGjCCEk0GCSqGSIb3DQEHAaCCEj4EghI6eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNS0wMy0xMFQwNjo0ODozOS41MzU2NjEiLCAiZXhwaXJlcyI6ICIyMDE1LTAzLTEwVDA3OjQ4OjM5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogImFkbWluIHRlbmFudCIsICJlbmFibGVkIjogdHJ1ZSwgImlkIjogIjQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgIm5hbWUiOiAiYWRtaW4ifX0sICJzZXJ2aWNlQ2F0YWxvZyI6IFt7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjIvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMTZiMTVjYzVmZjUwNGNiODlmNTg2NjRlMjdhNjljNjkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGUiLCAibmFtZSI6ICJub3ZhIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgImlkIjogIjFiMjkzYTgxNjk2YjRiN2Y4OTZlYWQ0NjIyYTFjMmExIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6OTY5Ni8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibmV0d29yayIsICJuYW1lIjogIm5ldXRyb24ifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhNzY3OWNjZTdkZjRhY2ZhMTZiM2NhNTJkZGNmYzgyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlcnYyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NC92MyIsICJpZCI6ICIwYmIxZDFiODhhZmU0MGRhOTNiY2IxNTg0Y2ExN2ZiOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZXYzIiwgIm5hbWUiOiAibm92YXYzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MCIsICJpZCI6ICIxZTMyZTE3MmU3OWM0YzVhYTZiNWM3ZjhkNzVhZjRmYiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiczMiLCAibmFtZSI6ICJzd2lmdF9zMyJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjkyOTIiLCAiaWQiOiAiM2QxYzc5MjY1MWEwNDljNWE2MWUzNWJmZWZjNGM4OGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImltYWdlIiwgIm5hbWUiOiAiZ2xhbmNlIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzciLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NyIsICJpZCI6ICIzOWE0YzA2NDIzYTg0OTNjOTI4ZGExOGY0YTVjY2MxZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzcifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibWV0ZXJpbmciLCAibmFtZSI6ICJjZWlsb21ldGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgImlkIjogIjU1NzBiOGY4MTE0OTRlMWI5NTVkYjZlNTAzZGYyYWZkIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwMC92MS8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY2xvdWRmb3JtYX
Rpb24iLCAibmFtZSI6ICJoZWF0LWNmbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzYvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMGExYzhkYTRmMTU2NDk1YWFkMjEzMGUyYzA2OTE5ODIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZSIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiMDMzZjY3ZTk1MDBjNDljYThmOGIxODkzZTJhN2VkYWYiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0Nsb3VkIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImVjMiIsICJuYW1lIjogIm5vdmFfZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwNC92MS80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJpZCI6ICI0YmViNjQ0MjUzYWU0NzdmOWU5NDk2ZWVkZDEwOTNhNSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC8iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhMTA2MzU0MjYxMDQzMjk5YTVkMjQ3ZTVmMjU5NGQyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogIm9iamVjdC1zdG9yZSIsICJuYW1lIjogInN3aWZ0In0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIiwgImlkIjogIjVjNGVlN2MzMTE4NDQyNGM5NDJhMWM1MjgxODU3MmZiIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImlkZW50aXR5IiwgIm5hbWUiOiAia2V5c3RvbmUifV0sICJ1c2VyIjogeyJ1c2VybmFtZSI6ICJhZG1pbiIsICJyb2xlc19saW5rcyI6IFtdLCAiaWQiOiAiOTFkNzMyYjY1ODMxNDkxZDhiZDk1MmIzMTExZTYyZGQiLCAicm9sZXMiOiBbeyJuYW1lIjogImhlYXRfc3RhY2tfb3duZXIifSwgeyJuYW1lIjogIl9tZW1iZXJfIn0sIHsibmFtZSI6ICJhZG1pbiJ9XSwgIm5hbWUiOiAiYWRtaW4ifSwgIm1ldGFkYXRhIjogeyJpc19hZG1pbiI6IDAsICJyb2xlcyI6IFsiZDlmZGVlODI1NjE3NGJlNWE3MmFjZGZmNDNkM2VkZDMiLCAiOWZlMmZmOWVlNDM4NGIxODk0YTkwODc4ZDNlOTJiYWIiLCAiN2E1ZTg5MmFiYTE5NDI3NWI3ZjQxZWM4Njg2ZDUwOGYiXX19fTGCAYEwggF9AgEBMFwwVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVVuc2V0MQ4wDAYDVQQHDAVVbnNldDEOMAwGA1UECgwFVW5zZXQxGDAWBgNVBAMMD3d3dy5leGFtcGxlLmNvbQIBATAHBgUrDgMCGjANBgkqhkiG9w0BAQEFAASCAQBkwVlwVgYM+mCIXICViGPgW+AZ--Y3NfWjW92GTBqW4keVrPosYxz--b2SVSGqwOHI1xFPqIx1+fzBCcilE5rIuJ3gxAc2VEWl4whMkriqWo6M8YY+GxGJ07h1NZ3Jc9Mrk7RTWPwU9YPilWPSU9sRx4bv+y7XpL8EIEvi+0dvHKgGI+nvqEYVFIf1vYQN5bvSnAgC1rZ9oB0M4Pg1wd47xQcenZL+XOWb8uxUReAvT-lfjXav7DhwUzPgmlY2XpN+9yfhAXAFF0GkokwjncvC5YTILOa41eMUg8ip47+rijNpQ2FuxVpRhQ-xL9it8+vAYkGLqe7eaQylsf0Nu6JJ', 'remote_address': '172.21.7.40', 'quota_class': None, 'is_admin': True, 'tenant': u'4537aca4a4a4462fa4c59ad5b5581f00', 'service_catalog': [{u'endpoints_links': [], u'endpoints': [{u'adminURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'region': u'RegionOne', 
u'publicURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'id': u'0a1c8da4f156495aad2130e2c0691982', u'internalURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00'}], u'type': u'volume', u'name': u'cinder'}], 'request_id': 'req-c0439276-3600-49cb-8de5-680b3f7d735c', 'instance_lock_checked': False, 'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_name': u'admin', 'read_deleted': 'no', 'user': u'91d732b65831491d8bd952b3111e62dd'}\n ======================================================================================\n \"\"\"\n\n # Add the exception arguments if asked to do a raise\n extra = {}\n if do_raise:\n extra.update(exc=exception.PolicyNotAuthorized, action=action)\n\n \"\"\"\n ======================================================================================\n action = xdrs:get_algorithms\n target = <xdrs.objects.instance.Instance object at 0x62b4a50>\n credentials = {'project_name': u'admin', 'user_id': u'91d732b65831491d8bd952b3111e62dd', 'roles': [u'heat_stack_owner', u'_member_', u'admin'], 'timestamp': '2015-03-10T06:48:40.110653', 'auth_token': 'MIIT9wYJKoZIhvcNAQcCoIIT6DCCE+QCAQExCTAHBgUrDgMCGjCCEk0GCSqGSIb3DQEHAaCCEj4EghI6eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNS0wMy0xMFQwNjo0ODozOS41MzU2NjEiLCAiZXhwaXJlcyI6ICIyMDE1LTAzLTEwVDA3OjQ4OjM5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogImFkbWluIHRlbmFudCIsICJlbmFibGVkIjogdHJ1ZSwgImlkIjogIjQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgIm5hbWUiOiAiYWRtaW4ifX0sICJzZXJ2aWNlQ2F0YWxvZyI6IFt7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjIvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMTZiMTVjYzVmZjUwNGNiODlmNTg2NjRlMjdhNjljNjkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGUiLCAibmFtZSI6ICJub3ZhIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgImlkIjogIjFiMjkzYTgxNjk2YjRiN2Y4OTZlYWQ0NjIyYTFjMmExIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6OTY5Ni8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibmV0d29yayIsICJuYW1lIjogIm5ldXRyb24ifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhNzY3OWNjZTdkZjRhY2ZhMTZiM2NhNTJkZGNmYzgyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlcnYyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NC92MyIsICJpZCI6ICIwYmIxZDFiODhhZmU0MGRhOTNiY2IxNTg0Y2ExN2ZiOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZXYzIiwgIm5hbWUiOiAibm92YXYzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MCIsICJpZCI6ICIxZTMyZTE3MmU3OWM0YzVhYTZiNWM3ZjhkNzVhZjRmYiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAifV0sICJlbmRwb2l
udHNfbGlua3MiOiBbXSwgInR5cGUiOiAiczMiLCAibmFtZSI6ICJzd2lmdF9zMyJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjkyOTIiLCAiaWQiOiAiM2QxYzc5MjY1MWEwNDljNWE2MWUzNWJmZWZjNGM4OGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImltYWdlIiwgIm5hbWUiOiAiZ2xhbmNlIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzciLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NyIsICJpZCI6ICIzOWE0YzA2NDIzYTg0OTNjOTI4ZGExOGY0YTVjY2MxZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzcifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibWV0ZXJpbmciLCAibmFtZSI6ICJjZWlsb21ldGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgImlkIjogIjU1NzBiOGY4MTE0OTRlMWI5NTVkYjZlNTAzZGYyYWZkIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwMC92MS8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY2xvdWRmb3JtYXRpb24iLCAibmFtZSI6ICJoZWF0LWNmbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzYvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMGExYzhkYTRmMTU2NDk1YWFkMjEzMGUyYzA2OTE5ODIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZSIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiMDMzZjY3ZTk1MDBjNDljYThmOGIxODkzZTJhN2VkYWYiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0Nsb3VkIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImVjMiIsICJuYW1lIjogIm5vdmFfZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwNC92MS80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJpZCI6ICI0YmViNjQ0MjUzYWU0NzdmOWU5NDk2ZWVkZDEwOTNhNSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC8iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhMTA2MzU0MjYxMDQzMjk5YTVkMjQ3ZTVmMjU5NGQyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogIm9iamVjdC1zdG9yZSIsICJuYW1lIjogInN3aWZ0In0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIiwgImlkIjogIjVjNGVlN2MzMTE4NDQyNGM5NDJhMWM1MjgxODU3MmZiIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImlkZW50aXR5IiwgIm5hbWUiOiAia2V5c3RvbmUifV0sICJ1c2VyIjogeyJ1c2VybmFtZSI6ICJhZG1pbiIsICJyb2xlc19saW5rcyI6IFtdLCAiaWQiOiAiOTFkNzMyYjY1ODMxNDkxZDhiZDk1MmIzMTExZTYyZGQiLCAicm9sZXMiOiBbey
JuYW1lIjogImhlYXRfc3RhY2tfb3duZXIifSwgeyJuYW1lIjogIl9tZW1iZXJfIn0sIHsibmFtZSI6ICJhZG1pbiJ9XSwgIm5hbWUiOiAiYWRtaW4ifSwgIm1ldGFkYXRhIjogeyJpc19hZG1pbiI6IDAsICJyb2xlcyI6IFsiZDlmZGVlODI1NjE3NGJlNWE3MmFjZGZmNDNkM2VkZDMiLCAiOWZlMmZmOWVlNDM4NGIxODk0YTkwODc4ZDNlOTJiYWIiLCAiN2E1ZTg5MmFiYTE5NDI3NWI3ZjQxZWM4Njg2ZDUwOGYiXX19fTGCAYEwggF9AgEBMFwwVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVVuc2V0MQ4wDAYDVQQHDAVVbnNldDEOMAwGA1UECgwFVW5zZXQxGDAWBgNVBAMMD3d3dy5leGFtcGxlLmNvbQIBATAHBgUrDgMCGjANBgkqhkiG9w0BAQEFAASCAQBkwVlwVgYM+mCIXICViGPgW+AZ--Y3NfWjW92GTBqW4keVrPosYxz--b2SVSGqwOHI1xFPqIx1+fzBCcilE5rIuJ3gxAc2VEWl4whMkriqWo6M8YY+GxGJ07h1NZ3Jc9Mrk7RTWPwU9YPilWPSU9sRx4bv+y7XpL8EIEvi+0dvHKgGI+nvqEYVFIf1vYQN5bvSnAgC1rZ9oB0M4Pg1wd47xQcenZL+XOWb8uxUReAvT-lfjXav7DhwUzPgmlY2XpN+9yfhAXAFF0GkokwjncvC5YTILOa41eMUg8ip47+rijNpQ2FuxVpRhQ-xL9it8+vAYkGLqe7eaQylsf0Nu6JJ', 'remote_address': '172.21.7.40', 'quota_class': None, 'is_admin': True, 'tenant': u'4537aca4a4a4462fa4c59ad5b5581f00', 'service_catalog': [{u'endpoints_links': [], u'endpoints': [{u'adminURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'region': u'RegionOne', u'publicURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'id': u'0a1c8da4f156495aad2130e2c0691982', u'internalURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00'}], u'type': u'volume', u'name': u'cinder'}], 'request_id': 'req-c0439276-3600-49cb-8de5-680b3f7d735c', 'instance_lock_checked': False, 'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_name': u'admin', 'read_deleted': 'no', 'user': u'91d732b65831491d8bd952b3111e62dd'}\n extra = {'action': 'xdrs:get_algorithms', 'exc': <class 'xdrs.exception.PolicyNotAuthorized'>}\n ======================================================================================\n \"\"\"\n return policy.check(action, target, credentials, **extra)", "def test_must_be_associated(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n assoc.release()\n assert assoc.is_released\n assert not assoc.is_established\n with pytest.raises(RuntimeError):\n assoc.send_n_action(None, None, None, None)\n\n scp.shutdown()", "def has_action2(self, feature):\n return feature in self._action2", "def test_zsk_policy_no_bundle_overlap(self):\n signature_algorithm = self._make_signature_algorithm()\n request_policy = f\"\"\"\n <RequestPolicy>\n <ZSK>\n <PublishSafety>P10D</PublishSafety>\n <RetireSafety>P10D</RetireSafety>\n <MaxSignatureValidity>P21D</MaxSignatureValidity>\n <MinSignatureValidity>P21D</MinSignatureValidity>\n <MaxValidityOverlap>P12D</MaxValidityOverlap>\n <MinValidityOverlap>P9D</MinValidityOverlap>\n {signature_algorithm}\n </ZSK>\n </RequestPolicy>\n \"\"\"\n\n bundle1, bundle2, = self._get_two_bundles()\n xml = self._make_request(\n request_policy=request_policy, bundle1=bundle1, bundle2=bundle2\n )\n request = request_from_xml(xml)\n policy = replace(\n self.policy,\n check_bundle_intervals=False, # want to test against ZSK policy, not KSK policy\n check_cycle_length=False, # want to test against ZSK policy, not KSK policy\n )\n with self.assertRaises(KSR_POLICY_SIG_OVERLAP_Violation) as exc:\n validate_request(request, 
policy)\n self.assertEqual(\n 'Bundle \"test-2\" does not overlap with previous bundle \"test-1\" (2019-02-01 00:00:00+00:00 > '\n \"2019-01-22 00:00:00+00:00)\",\n str(exc.exception),\n )", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # 
Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_acknowledge_hmac_validation_failed(client):\n res = client.get(\n \"/v0/acknowledge?fp=splunk_82998ef6bb3db9dff3dsfdsfsdc\" \"&t=97244b15a21f45e002b2e913866ff7545510f9b08dea5241f\"\n )\n assert res.status == \"500 INTERNAL SERVER ERROR\"", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_services_with_multiple_pages_kinesis_analytics(self):\n # Kinesis Analytics V1\n results = get_actions_for_service(\"kinesisanalytics\")\n actions = [\n \"kinesisanalytics:GetApplicationState\", # Only in v1, not v2\n \"kinesisanalytics:ListApplications\", # In both\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action_spaces(self):\n pass", "def test_services_with_multiple_pages_greengrass(self):\n # Greengrass V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiotgreengrass.html\n self.assertTrue(\"greengrass:CreateResourceDefinition\" in self.all_actions)\n # Greengrass V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiotgreengrassv2.html\n self.assertTrue(\"greengrass:CreateComponentVersion\" in self.all_actions)\n results = get_actions_for_service(\"greengrass\")\n actions = [\n \"greengrass:AssociateRoleToGroup\",\n 
\"greengrass:CreateConnectorDefinition\",\n \"greengrass:CreateConnectorDefinitionVersion\",\n \"greengrass:CreateCoreDefinition\",\n \"greengrass:CreateCoreDefinitionVersion\",\n \"greengrass:CreateDeviceDefinition\",\n \"greengrass:CreateDeviceDefinitionVersion\",\n \"greengrass:CreateFunctionDefinition\",\n \"greengrass:CreateFunctionDefinitionVersion\",\n \"greengrass:CreateGroup\",\n \"greengrass:CreateGroupCertificateAuthority\",\n \"greengrass:CreateGroupVersion\",\n \"greengrass:CreateLoggerDefinition\",\n \"greengrass:CreateLoggerDefinitionVersion\",\n \"greengrass:CreateResourceDefinition\",\n \"greengrass:CreateResourceDefinitionVersion\",\n \"greengrass:CreateSoftwareUpdateJob\",\n \"greengrass:CreateSubscriptionDefinition\",\n \"greengrass:CreateSubscriptionDefinitionVersion\",\n \"greengrass:DeleteConnectorDefinition\",\n \"greengrass:DeleteCoreDefinition\",\n \"greengrass:DeleteDeviceDefinition\",\n \"greengrass:DeleteFunctionDefinition\",\n \"greengrass:DeleteGroup\",\n \"greengrass:DeleteLoggerDefinition\",\n \"greengrass:DeleteResourceDefinition\",\n \"greengrass:DeleteSubscriptionDefinition\",\n \"greengrass:DisassociateRoleFromGroup\",\n \"greengrass:Discover\",\n \"greengrass:GetAssociatedRole\",\n \"greengrass:GetBulkDeploymentStatus\",\n \"greengrass:GetConnectorDefinition\",\n \"greengrass:GetConnectorDefinitionVersion\",\n \"greengrass:GetCoreDefinition\",\n \"greengrass:GetCoreDefinitionVersion\",\n \"greengrass:GetDeploymentStatus\",\n \"greengrass:GetDeviceDefinition\",\n \"greengrass:GetDeviceDefinitionVersion\",\n \"greengrass:GetFunctionDefinition\",\n \"greengrass:GetFunctionDefinitionVersion\",\n \"greengrass:GetGroup\",\n \"greengrass:GetGroupCertificateAuthority\",\n \"greengrass:GetGroupCertificateConfiguration\",\n \"greengrass:GetGroupVersion\",\n \"greengrass:GetLoggerDefinition\",\n \"greengrass:GetLoggerDefinitionVersion\",\n \"greengrass:GetResourceDefinition\",\n \"greengrass:GetResourceDefinitionVersion\",\n \"greengrass:GetSubscriptionDefinition\",\n \"greengrass:GetSubscriptionDefinitionVersion\",\n \"greengrass:GetThingRuntimeConfiguration\",\n \"greengrass:ListBulkDeploymentDetailedReports\",\n \"greengrass:ListBulkDeployments\",\n \"greengrass:ListConnectorDefinitionVersions\",\n \"greengrass:ListConnectorDefinitions\",\n \"greengrass:ListCoreDefinitionVersions\",\n \"greengrass:ListCoreDefinitions\",\n \"greengrass:ListDeviceDefinitionVersions\",\n \"greengrass:ListDeviceDefinitions\",\n \"greengrass:ListFunctionDefinitionVersions\",\n \"greengrass:ListFunctionDefinitions\",\n \"greengrass:ListGroupCertificateAuthorities\",\n \"greengrass:ListGroupVersions\",\n \"greengrass:ListGroups\",\n \"greengrass:ListLoggerDefinitionVersions\",\n \"greengrass:ListLoggerDefinitions\",\n \"greengrass:ListResourceDefinitionVersions\",\n \"greengrass:ListResourceDefinitions\",\n \"greengrass:ListSubscriptionDefinitionVersions\",\n \"greengrass:ListSubscriptionDefinitions\",\n \"greengrass:ResetDeployments\",\n \"greengrass:StartBulkDeployment\",\n \"greengrass:StopBulkDeployment\",\n \"greengrass:UpdateConnectorDefinition\",\n \"greengrass:UpdateCoreDefinition\",\n \"greengrass:UpdateDeviceDefinition\",\n \"greengrass:UpdateFunctionDefinition\",\n \"greengrass:UpdateGroup\",\n \"greengrass:UpdateGroupCertificateConfiguration\",\n \"greengrass:UpdateLoggerDefinition\",\n \"greengrass:UpdateResourceDefinition\",\n \"greengrass:UpdateSubscriptionDefinition\",\n \"greengrass:UpdateThingRuntimeConfiguration\"\n ]\n for action in actions:\n 
self.assertTrue(action in results)\n # if action not in results:\n # print(action)", "def test_ingress_returns_envelope_unchanged():\n plugin_instance = PluginVipCustomisation()\n assert plugin_instance.ingress('envelope', 'http_headers', 'operation') == ('envelope', 'http_headers')", "def test_create_policy_for_all_namespaces(self):\n pass", "def _authenticate(self, reqs, session=None):\n if not isinstance(reqs[0], dict):\n raise TypeError('The input \"req\" is not typeof dict.')\n if not isinstance(reqs[1], dict):\n raise TypeError('The input \"req\" is not typeof dict.')\n\n auth_response = {}\n req = reqs[0]\n cacert = req.get('cacert')\n endpoint_type = req.get('endpoint_type', 'publicURL')\n insecure = req.get('insecure')\n mistral_url = req.get('mistral_url')\n region_name = req.get('region_name')\n service_type = req.get('service_type', 'workflowv2')\n\n verify = self._verification_needed(cacert, insecure)\n\n if not session:\n auth = self._get_auth(**req)\n\n if auth:\n session = ks_session.Session(auth=auth, verify=verify)\n\n if session:\n if not mistral_url:\n try:\n mistral_url = session.get_endpoint(\n service_type=service_type,\n interface=endpoint_type,\n region_name=region_name\n )\n except Exception:\n mistral_url = None\n\n auth_response['mistral_url'] = mistral_url\n auth_response['session'] = session\n\n target_req = reqs[1]\n\n if \"auth_url\" in target_req:\n target_auth = self._get_auth(**target_req)\n\n if target_auth:\n\n # target cacert and insecure\n cacert = target_req.get('cacert')\n insecure = target_req.get('insecure')\n\n verify = self._verification_needed(cacert, insecure)\n\n target_session = ks_session.Session(\n auth=target_auth,\n verify=verify\n )\n\n target_auth_headers = target_session.get_auth_headers() or {}\n\n target_auth_token = target_auth_headers.get('X-Auth-Token')\n\n auth_response.update({\n api.TARGET_AUTH_TOKEN: target_auth_token,\n api.TARGET_PROJECT_ID: target_session.get_project_id(),\n api.TARGET_USER_ID: target_session.get_user_id(),\n api.TARGET_AUTH_URI: target_auth._plugin.auth_url,\n })\n\n access = target_auth.get_access(target_session)\n service_catalog = access.service_catalog\n\n if self._is_service_catalog_v2(service_catalog):\n access_data = access._data[\"access\"]\n if not len(access_data['serviceCatalog']):\n LOG.warning(\n \"Service Catalog empty, some authentication\"\n \"credentials may be missing. This can cause\"\n \"malfunction in the Mistral action executions.\")\n sc_json = jsonutils.dumps(access_data)\n auth_response[api.TARGET_SERVICE_CATALOG] = sc_json\n\n if not auth_response:\n LOG.debug(\"No valid token or password + user provided. 
\"\n \"Continuing without authentication\")\n return {}\n\n return auth_response", "def _action(self, action, data=None, api=\"signin\"):\n if not data:\n data = {}\n\n data['action'] = action\n # data['redirect_uri'] = self._REDIRECT_URL\n data['csrf'] = self._csrf_token()\n\n print(data)\n\n r = self.session()._post(\n \"https://signin.aws.amazon.com/{0}\".format(api),\n data=data,\n )\n\n if r.status_code != 200:\n print(r.text)\n raise Exception(\"failed action {0}\".format(action))\n\n out = json.loads(r.text)\n if out['state'].lower() != 'success':\n if 'Message' in out['properties']:\n raise Exception(\"failed action {0}: {1}\".format(action, out['properties']['Message']))\n else:\n raise Exception(\"failed action {0}\".format(action))\n\n return out['properties']", "def test_create_route_for_all_namespaces(self):\n pass", "def test_patch_namespaced_route_status(self):\n pass", "def test_aws_service_api_interfaces_get(self):\n pass", "def can_send(self, s_params: SendParams) -> Squonk2AgentRv:\n assert s_params\n assert isinstance(s_params, SendParams)\n\n if _TEST_MODE:\n msg: str = 'Squonk2Agent is in TEST mode'\n _LOGGER.warning(msg)\n\n # Every public API **MUST** call ping().\n # This ensures Squonk2 is available and gets suitable API tokens...\n if not self.ping():\n msg = 'Squonk2 ping failed.'\\\n ' Are we configured properly and is Squonk2 alive?'\n _LOGGER.error(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n return self._verify_access(c_params=s_params.common)", "def _enforce(self, req, action):\n try:\n self.policy.enforce(req.context, action, {})\n except exception.Forbidden:\n raise HTTPForbidden()", "def __init__(self, session, assoc_type):\n super(AssociateRequest, self).__init__()\n self.session = session\n self.assoc_type = assoc_type\n self.namespace = OPENID2_NS", "def _policy_action_validate(policy_domain, midToken, mc_id, assetId, action, application, returnKey):\n logger = logging.getLogger(LogDefaults.default_log_name)\n\n logger.debug(\"midToken:%s, mc_id:%s, assetId:%s, action:%s, app:%s\",\n midToken, mc_id, assetId, action, application)\n\n partial_event = None\n\n client_uuid = str(mc_id)\n assetId = str(assetId)\n\n policy_session_dict = MongoPolicySessionApi.get_policy_session(mc_id)\n\n # TODO IF client does not exist, PI should not exist. There is no need\n # TODO to check for client. 
Move to PI API file.\n if not policy_session_dict:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause='client does not exist'\n )\n return new_response, partial_event\n\n p_ingest_svc = PlibIngestSvc()\n resp_obj, asset_data = p_ingest_svc.lookup_by_assetId(assetId, midToken)\n json_response = resp_obj.json_body if resp_obj else dict()\n in_response = json_response.get('response', None)\n cause = in_response['cause'] if in_response else None\n\n if not resp_obj.success or resp_obj.http_status == HTTPStatus.NOT_FOUND:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause=cause\n )\n return new_response, partial_event\n\n filter_by = dict(\n action=action,\n application=application)\n found, pi_list = PolicyValidationApi._get_matching_policy_instances_v2(\n policy_domain, midToken, client_uuid, filter_by)\n\n logger.debug(\"get_matching_pis: filter_by:%s, found:%s pi_list:%s\",\n filter_by, found, pi_list)\n\n if not found:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause='no client policy for assetId'\n )\n return new_response, partial_event\n\n authorization_granted_list = list()\n authorization_denied_list = list()\n policy_names = dict()\n for pi in pi_list:\n pi_ok, new_response = PolicyValidationApi._pi_validate(\n pi, client_uuid, policy_session_dict)\n if pi_ok:\n authorization_granted_list.append(new_response)\n policy_names['granted'] = pi['name']\n else:\n authorization_denied_list.append(new_response)\n policy_names['denied'] = pi['name']\n # END: FOR PI\n is_granted = len(authorization_granted_list) > 0\n\n logger.debug(\"granted list=%s\", authorization_granted_list)\n logger.debug(\"denied list=%s\", authorization_denied_list)\n\n policy_name = policy_names['granted'] if is_granted else policy_names.get('denied', \"\")\n\n partial_event = partial(\n DDPolicyEventsWrapper.construct_event, policy=policy_name)\n\n if not is_granted:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n # FIXME: inconsistent with new_response format\n cause_list=authorization_denied_list\n )\n logger.debug(\"authorization denied: %s\", new_response)\n return new_response, partial_event\n\n # Key service lookup\n key_dict = None\n if returnKey:\n p_key_svc = PlibKeySvc()\n get_resp_obj = p_key_svc.lookup_by_assetId(assetId)\n keysvc_success = (\n get_resp_obj.success and\n get_resp_obj.json_body['status'] == HTTPStatus.OK)\n if not keysvc_success:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause=get_resp_obj.json_body\n )\n return new_response, partial_event\n key_dict = get_resp_obj.json_body['response']['key']\n\n # For now return head of authorization granted list if not empty\n new_response = dict(\n access='granted',\n uuid=client_uuid,\n cause=authorization_granted_list[0]['cause']\n )\n if returnKey:\n new_response['key'] = key_dict\n logger.debug(\"authorization granted: %s\", new_response)\n return new_response, partial_event", "def _setup_ses(self):\n print(\"\\n ** Setting up SES mocking\")\n ses = boto3.client('ses', region_name=\"us-east-1\")\n ses.verify_domain_identity(Domain='donatemates.com')\n #response = ses.verify_email_address(EmailAddress='[email protected]')", "def _validate_capabilities_oidc(\n src_client: CogniteClient,\n dst_client: CogniteClient,\n needed_capabilities: [str],\n src_api_authentication: str,\n dst_api_authentication: str,\n) -> bool:\n\n # print(needed_capabilities) # check which resources are going to be replicated - need read access for source and write access for 
destination\n\n # which capabilities to check for in the capabilities list\n check_for_capabilities = []\n\n # creating the list of which capabilities to search for in the cognite client object\n if \"assets\" in needed_capabilities:\n check_for_capabilities.append(\"assetsAcl\")\n if \"events\" in needed_capabilities:\n check_for_capabilities.append(\"eventsAcl\")\n if \"timeseries\" in needed_capabilities:\n check_for_capabilities.append(\"timeSeriesAcl\")\n if \"sequences\" in needed_capabilities:\n check_for_capabilities.append(\"sequencesAcl\")\n if \"relationships\" in needed_capabilities:\n check_for_capabilities.append(\"relationshipsAcl\")\n if \"files\" in needed_capabilities:\n check_for_capabilities.append(\"filesAcl\")\n if \"raw\" in needed_capabilities:\n check_for_capabilities.append(\"rawAcl\")\n if \"datasets\" in needed_capabilities:\n check_for_capabilities.append(\"datasetsAcl\")\n\n # if 'labels' in needed_capabilities:\n # check_for_capabilities.append('labelsAcl')\n # if 'types' in needed_capabilities:\n # check_for_capabilities.append('typesAcl')\n\n try:\n if src_api_authentication:\n # check that src_capabilities are read for all the mentioned resources\n src_capabilities = src_client.iam.token.inspect().capabilities\n for check_capability in check_for_capabilities:\n for capability in src_capabilities:\n if check_capability in capability.keys():\n if not \"READ\" in capability[check_capability][\"actions\"]:\n return False\n if dst_api_authentication:\n dst_capabilities = dst_client.iam.token.inspect().capabilities\n for check_capability in check_for_capabilities:\n for capability in dst_capabilities:\n if check_capability in capability.keys():\n if not \"WRITE\" in capability[check_capability][\"actions\"]:\n return False\n\n except CogniteAPIError as exc:\n logging.fatal(\n \"Mismatch in needed capabilities with project capabilities with the following message: \".format(exc)\n )\n return False\n return True", "def test_apis_wo_auth(self):\n\n # Order list API\n url = reverse('orders-list')\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n # Order summary API\n url = reverse('order-summary-list')\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n # Order create API\n url = reverse('orders-list')\n response = self.client.post(url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n # Shares list/summary API\n url = reverse('shares-list', args=['summary'])\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n url = reverse('shares-list', args=['all'])\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)", "def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)", "def test_create_namespaced_policy(self):\n pass", "def test_aws_service_api_keypair_get(self):\n pass", "def test_accepted(self):\n actions = signoff_actions(appversions={\"code\": \"fx1.0\"},\n locales={\"code\": \"de\"})\n actions = list(actions)\n eq_(len(actions), 1)\n so = Signoff.objects.get(action=actions[0][0])\n eq_(so.push.tip.shortrev, \"l10n de 0002\")\n eq_(so.locale.code, \"de\")\n 
eq_(so.action_set.count(), 2)", "def verifyActionCenterFirewall():\n pass", "def test_multiple_aclhooks_2(self):\n self._test_hook_approval_sequence([True, None], True)", "def test_aws_service_api_keypair_delete(self):\n pass", "def check_api_keys(self, request):\n app_id, api_obj = request.META.get(\"HTTP_APP_ID\"), None\n api_secret_key = request.META.get(\"HTTP_API_SECRET_KEY\")\n if app_id and api_secret_key:\n # validate app_id and api_secret_key\n app_id_bool = self._validate_app_id(app_id)\n if not app_id_bool:\n return False, self.app_id_message\n api_secret_key_bool = self._validate_api_secret_key(api_secret_key)\n if not api_secret_key:\n return False, self.api_secret_key_message\n try:\n api_obj = ApiApp.objects.get(app_id=app_id, api_secret_key=api_secret_key, active=True)\n if api_obj:\n self.app(request, api_obj)\n return True, ''\n except ApiApp.DoesNotExist:\n self.app(request, api_obj)\n return False, self.message\n else:\n self.app(request, api_obj)\n return False, self.message", "def test_get_actions_with_arn_type_and_access_level_case_3(self):\n desired_output = [\n 's3:PutAccountPublicAccessBlock',\n 's3:PutAccessPointPublicAccessBlock'\n ]\n output = get_actions_with_arn_type_and_access_level(\n # \"ram\", \"resource-share\", \"Write\"\n \"s3\", \"*\", \"Permissions management\"\n )\n print(output)\n for item in desired_output:\n self.assertTrue(item in output)\n # self.assertListEqual(desired_output, output)", "def get_actions(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetActions.create(\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def test_wsgi_script_on_cognito_event_request(self):\n lh = LambdaHandler(\"tests.test_wsgi_script_name_settings\")\n\n event = {\n \"version\": \"1\",\n \"region\": \"eu-west-1\",\n \"userPoolId\": \"region_poolID\",\n \"userName\": \"uuu-id-here\",\n \"callerContext\": {\n \"awsSdkVersion\": \"aws-sdk-js-2.149.0\",\n \"clientId\": \"client-id-here\",\n },\n \"triggerSource\": \"PreSignUp_SignUp\",\n \"request\": {\n \"userAttributes\": {\"email\": \"[email protected]\"},\n \"validationData\": None,\n },\n \"response\": {\n \"autoConfirmUser\": False,\n \"autoVerifyEmail\": False,\n \"autoVerifyPhone\": False,\n },\n }\n\n response = lh.handler(event, None)\n\n self.assertEqual(response[\"response\"][\"autoConfirmUser\"], False)", "def can_i_afford_intent_handler(handler_input):\n # type: (HandlerInput) -> Response\n\n slots = handler_input.request_envelope.request.intent.slots\n print(f\"Slots: {slots}\")\n purchase = slots['purchase'].value.lower()\n print(f\"purchase: {purchase}\")\n\n monzo = MonzoGetter(ACCESS_TOKEN)\n monthly_spend = monzo.get_monthly_spend_pounds()\n\n try:\n price = price_lookup_pounds[purchase]\n if price > (MONTHLY_BUDGET - monthly_spend):\n speech_text = f\"Sorry, you can't afford this. A {purchase} \" \\\n f\"costs about {price} pounds. You've already spent \" \\\n f\"{monthly_spend} pounds this month.\"\n else:\n remaining = MONTHLY_BUDGET - monthly_spend - price\n speech_text = f\"You can afford that. A {purchase} costs about \" \\\n f\"{price} pounds. 
If you buy it your remaining \" \\\n f\"monthly budget will be {remaining}\"\n except KeyError:\n # Just in case....\n speech_text = \"Sorry, we couldn't find a price for that product.\" \\\n f\"You have {MONTHLY_BUDGET - monthly_spend} pounds\" \\\n \" left to spend this month\"\n\n handler_input.response_builder.speak(speech_text).set_card(\n SimpleCard(\"Hello World\", speech_text)).set_should_end_session(\n False)\n return handler_input.response_builder.response", "def __call__(self, env, start_response):\n s3 = env.get('HTTP_AUTHORIZATION')\n token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN'))\n if s3 or (token and token.startswith(self.reseller_prefix)):\n # Note: Empty reseller_prefix will match all tokens.\n # Attempt to auth my token with my auth server\n groups = self.get_groups(env, token,\n memcache_client=cache_from_env(env))\n if groups:\n env['REMOTE_USER'] = groups\n user = groups and groups.split(',', 1)[0] or ''\n # We know the proxy logs the token, so we augment it just a bit\n # to also log the authenticated user.\n env['HTTP_X_AUTH_TOKEN'] = '%s,%s' % (user, token)\n env['swift.authorize'] = self.authorize\n env['swift.clean_acl'] = clean_acl\n else:\n # Unauthorized token\n if self.reseller_prefix:\n # Because I know I'm the definitive auth for this token, I\n # can deny it outright.\n return HTTPUnauthorized()(env, start_response)\n # Because I'm not certain if I'm the definitive auth for empty\n # reseller_prefixed tokens, I won't overwrite swift.authorize.\n elif 'swift.authorize' not in env:\n env['swift.authorize'] = self.denied_response\n else:\n if self.reseller_prefix:\n # With a non-empty reseller_prefix, I would like to be called\n # back for anonymous access to accounts I know I'm the\n # definitive auth for.\n try:\n version, rest = split_path(env.get('PATH_INFO', ''),\n 1, 2, True)\n except ValueError:\n return HTTPNotFound()(env, start_response)\n if rest and rest.startswith(self.reseller_prefix):\n # Handle anonymous access to accounts I'm the definitive\n # auth for.\n env['swift.authorize'] = self.authorize\n env['swift.clean_acl'] = clean_acl\n # Not my token, not my account, I can't authorize this request,\n # deny all is a good idea if not already set...\n elif 'swift.authorize' not in env:\n env['swift.authorize'] = self.denied_response\n # Because I'm not certain if I'm the definitive auth for empty\n # reseller_prefixed accounts, I won't overwrite swift.authorize.\n elif 'swift.authorize' not in env:\n env['swift.authorize'] = self.authorize\n env['swift.clean_acl'] = clean_acl\n return self.app(env, start_response)", "def test_parse_request_type_2a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.SESSION_INIT)", "def test_basic_api(self):\n self.create_and_verify_stack(\"single/basic_api\")\n\n first_dep_ids = self.get_stack_deployment_ids()\n self.assertEqual(len(first_dep_ids), 1)\n\n self.set_template_resource_property(\"MyApi\", \"DefinitionUri\", self.get_s3_uri(\"swagger2.json\"))\n self.update_stack()\n\n second_dep_ids = self.get_stack_deployment_ids()\n self.assertEqual(len(second_dep_ids), 1)\n\n self.assertEqual(len(set(first_dep_ids).intersection(second_dep_ids)), 0)", "def test_verbs_not_allowed(self, verb, public_omis_api_client):\n order = OrderPaidFactory()\n\n url = reverse(\n 'api-v3:public-omis:payment:collection',\n kwargs={'public_token': order.public_token},\n )\n 
response = getattr(public_omis_api_client, verb)(url, json_={})\n assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED", "def check_snstopicpolicy_crossaccount(self, snsitem):\n #(region, account, arn, aws_object) = audit_object\n #\"Principal\": { \"AWS\": \"*\" }\n # \"AWS\": \"arn:aws:iam::027213240437:root\"\n policy = snsitem.config.get('SNSPolicy', {})\n for statement in policy.get(\"Statement\", []):\n account_numbers = []\n account_number = ''\n princ_aws = statement.get(\"Principal\", {}) \\\n .get(\"AWS\", \"error\")\n if princ_aws == \"*\":\n account_number = statement.get(\"Condition\", {}) \\\n .get(\"StringEquals\", {}) \\\n .get(\"AWS:SourceOwner\", None)\n if not account_number:\n tag = \"SNS Topic open to everyone\"\n notes = \"An SNS policy where { 'Principal': { 'AWS': '*' } } must also have\"\n notes += \" a {'Condition': {'StringEquals': { 'AWS:SourceOwner': '<ACCOUNT_NUMBER>' } } }\"\n notes += \" or it is open to the world. In this case, anyone is allowed to perform \"\n notes += \" this action(s): {}\".format(statement.get(\"Action\"))\n self.add_issue(10, tag, snsitem, notes=notes)\n continue\n else:\n try:\n account_numbers.append(str(account_number))\n except ValueError:\n raise InvalidSourceOwner(account_number)\n else:\n if isinstance(princ_aws, list):\n for entry in princ_aws:\n account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', entry).group(1)))\n else:\n try:\n account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', princ_aws).group(1)))\n except:\n import json\n print json.dumps(snsitem.config, indent=4)\n raise InvalidARN(princ_aws)\n\n for account_number in account_numbers:\n account = Account.query.filter(Account.number == account_number).first()\n account_name = None\n if account is not None:\n account_name = account.name\n\n if not account_name:\n tag = \"Unknown Cross Account Access\"\n notes = \"from {} to {}\".format(account_number, snsitem.account)\n self.add_issue(10, tag, snsitem, notes=notes)\n elif account_name != snsitem.account:\n tag = \"Friendly Cross Account Access\"\n notes = \"from {} to {}\".format(account_name, snsitem.account)\n self.add_issue(0, tag, snsitem, notes=notes)", "def test_create_policy_binding_for_all_namespaces(self):\n pass", "def test_read_namespaced_route_status(self):\n pass", "def test_create_namespaced_route(self):\n pass", "def test_dashboards_v2_request_access(self):\n pass", "def enforce_policy(self, method_name, request):\n context_dict = request.context.to_dict()\n if method_name in [\"detail\", \"get_all\", \"get_one\"]:\n policy.authorize(event_log_policy.POLICY_ROOT % \"get\", {},\n context_dict)\n else:\n raise exceptions.PolicyNotFound()", "def test_replace_namespaced_policy(self):\n pass", "def aws_es_os_coginto_authentication_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n # ISO Time\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for response in describe_es_os_domains(cache, session):\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(response,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n esDomainName = response[\"DomainStatus\"][\"DomainName\"]\n esVersion = response[\"DomainStatus\"][\"ElasticsearchVersion\"]\n domainId = response[\"DomainStatus\"][\"DomainId\"]\n domainArn = response[\"DomainStatus\"][\"ARN\"]\n try:\n cognitoEnabledCheck = response[\"DomainStatus\"][\"CognitoOptions\"][\"Enabled\"]\n except:\n 
cognitoEnabledCheck = False\n # this is a failing check\n if cognitoEnabledCheck is False:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-cognito-auth-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.2] OpenSearch/AWS ElasticSearch Service domains should use Cognito authentication for Kibana\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" does not use Cognito authentication for Kibana. Refer to the remediation instructions if this configuration is not intended\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"If your domain should use Cognito authentication for Kibana refer to the Amazon Cognito Authentication for Kibana section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion,\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 IA-1\",\n \"NIST SP 800-53 Rev. 4 IA-2\",\n \"NIST SP 800-53 Rev. 4 IA-4\",\n \"NIST SP 800-53 Rev. 4 IA-5\",\n \"NIST SP 800-53 Rev. 4 IA-8\",\n \"NIST SP 800-53 Rev. 4 PE-2\",\n \"NIST SP 800-53 Rev. 
4 PS-3\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.9.2.1\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-cognito-auth-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.2] OpenSearch/AWS ElasticSearch Service domains should use Cognito authentication for Kibana\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" uses Cognito authentication for Kibana.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"If your domain should use Cognito authentication for Kibana refer to the Amazon Cognito Authentication for Kibana section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion,\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 IA-1\",\n \"NIST SP 800-53 Rev. 4 IA-2\",\n \"NIST SP 800-53 Rev. 4 IA-4\",\n \"NIST SP 800-53 Rev. 4 IA-5\",\n \"NIST SP 800-53 Rev. 4 IA-8\",\n \"NIST SP 800-53 Rev. 4 PE-2\",\n \"NIST SP 800-53 Rev. 
4 PS-3\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.9.2.1\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def test_intent_support(self):\n dispatcher = self.get_dispatcher()\n for intent in self.get_intents():\n self.assertIsNot(dispatcher(intent), None)", "def rule_40_can_create_sg(session):\n\n def try_create(session, side):\n res, conn_vpc = session[\"config\"][side][\"res\"], session[\"conn\"][side](\"vpc\")\n subnet = conn_vpc.get_all_subnets([res[\"subnet_id\"]])[0]\n\n try:\n conn_vpc.create_security_group(\n \"foo\", \"bar\", vpc_id = subnet.vpc_id, dry_run = True)\n except EC2ResponseError as e:\n if 412 != e.status:\n raise e\n\n try_create(session, \"server\")\n try_create(session, \"client\")\n\n return True", "def test_aws_service_api_keypairs_get(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self):\n pass", "def test_validate_put_existing(client):\n response = client.put(\n '/user/1',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n 'email': '[email protected]',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def SoapAction(self) -> str:", "def test_ocsp():\n ocsp_pyasn1.OCSP_VALIDATION_CACHE = {} # reset the memory cache\n ocsp = ocsp_pyasn1.SnowflakeOCSP()\n for url in URLS:\n connection = _openssl_connect(url)\n assert ocsp.validate(url, connection), \\\n 'Failed to validate: {0}'.format(url)", "def test_service_status(self, api_instance):\n params = api_instance.get_service_status()\n # Only key we care about here is GetServiceStatus\n assert params[\"Action\"] == \"GetServiceStatus\"", "def test_replace_namespaced_route(self):\n pass", "def azs_lookup(session, lambda_compatible_only=False):\n if session is None:\n return []\n\n client = session.client('ec2')\n response = client.describe_availability_zones()\n # SH Removing Hack as subnet A is already in Production and causes issues trying to delete\n # We will strip out subnets A and C when creating the lambdas.\n #rtn = [(z[\"ZoneName\"], z[\"ZoneName\"][-1]) for z in response[\"AvailabilityZones\"] if z['ZoneName'] != 'us-east-1a']\n rtn = [(z[\"ZoneName\"], z[\"ZoneName\"][-1]) for z in response[\"AvailabilityZones\"]]\n\n if lambda_compatible_only:\n current_account = get_account_id_from_session(session)\n for az in rtn.copy():\n if az[1] == 'c' and current_account == hosts.PROD_ACCOUNT:\n rtn.remove(az)\n if az[1] == 'a' and current_account == hosts.DEV_ACCOUNT:\n rtn.remove(az)\n return rtn", "def aws_es_os_tls_policy_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n # ISO Time\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for response in describe_es_os_domains(cache, session):\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(response,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n esDomainName = response[\"DomainStatus\"][\"DomainName\"]\n esVersion = response[\"DomainStatus\"][\"ElasticsearchVersion\"]\n domainId = response[\"DomainStatus\"][\"DomainId\"]\n domainArn = response[\"DomainStatus\"][\"ARN\"]\n if response[\"DomainStatus\"][\"DomainEndpointOptions\"][\"EnforceHTTPS\"] is True:\n tlsPolicyCheck = str(\n response[\"DomainStatus\"][\"DomainEndpointOptions\"][\"TLSSecurityPolicy\"]\n )\n if 
response[\"DomainStatus\"][\"DomainEndpointOptions\"][\"TLSSecurityPolicy\"] != \"Policy-Min-TLS-1-2-2019-07\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-tls-1-2-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Data Exposure\",\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"HIGH\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.6] OpenSearch/AWS ElasticSearch Service domains that enforce HTTPS-only communications should use a TLS 1.2 security policy\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" does not use a TLS 1.2 security policy. Refer to the remediation instructions if this configuration is not intended\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"If your domain should use a TLS 1.2 security policy refer to the About Configuration Changes section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-configuration-changes\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion,\n \"DomainEndpointOptions\": {\n \"EnforceHTTPS\": True,\n \"TLSSecurityPolicy\": tlsPolicyCheck,\n },\n }\n },\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.DS-2\",\n \"NIST SP 800-53 Rev. 4 SC-8\",\n \"NIST SP 800-53 Rev. 4 SC-11\",\n \"NIST SP 800-53 Rev. 
4 SC-12\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.8.2.3\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.13.2.1\",\n \"ISO 27001:2013 A.13.2.3\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n ],\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\",\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-tls-1-2-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Data Exposure\",\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.6] OpenSearch/AWS ElasticSearch Service domains that enforce HTTPS-only communications should use a TLS 1.2 security policy\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" uses a TLS 1.2 security policy.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"If your domain should use a TLS 1.2 security policy refer to the About Configuration Changes section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-configuration-changes\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion,\n \"DomainEndpointOptions\": {\n \"EnforceHTTPS\": True,\n \"TLSSecurityPolicy\": tlsPolicyCheck,\n },\n }\n },\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.DS-2\",\n \"NIST SP 800-53 Rev. 4 SC-8\",\n \"NIST SP 800-53 Rev. 4 SC-11\",\n \"NIST SP 800-53 Rev. 
4 SC-12\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.8.2.3\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.13.2.1\",\n \"ISO 27001:2013 A.13.2.3\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n ],\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\",\n }\n yield finding\n else:\n continue", "def test_claim_resources_success_force_evacuate_no_shared(self):\n # the source allocation is also held by the instance_uuid so report\n # client will see it.\n current_allocs = {\n 'allocations': {\n uuids.source_host: {\n 'generation': 42,\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n },\n \"consumer_generation\": 1,\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n\n self.ks_adap_mock.get.return_value = fake_requests.FakeResponse(\n status_code=200,\n content=jsonutils.dumps(current_allocs))\n self.ks_adap_mock.put.return_value = fake_requests.FakeResponse(\n status_code=204)\n consumer_uuid = uuids.consumer_uuid\n # this is an evacuate so we have the same resources request towards the\n # dest host\n alloc_req = {\n 'allocations': {\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # this allocation request comes from the conductor that read the\n # allocation from placement therefore it has consumer_generation in\n # it.\n \"consumer_generation\": 1,\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n\n project_id = uuids.project_id\n user_id = uuids.user_id\n res = self.client.claim_resources(self.context, consumer_uuid,\n alloc_req, project_id, user_id,\n allocation_request_version='1.28')\n\n expected_url = \"/allocations/%s\" % consumer_uuid\n # we expect that both the source and dest allocations are here\n expected_payload = {\n 'allocations': {\n uuids.source_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # report client uses the consumer_generation that it got in the\n # allocation request\n 'consumer_generation': 1,\n 'project_id': project_id,\n 'user_id': user_id}\n self.ks_adap_mock.put.assert_called_once_with(\n expected_url, microversion='1.28', json=mock.ANY,\n global_request_id=self.context.global_id)\n # We have to pull the json body from the mock call_args to validate\n # it separately otherwise hash seed issues get in the way.\n actual_payload = self.ks_adap_mock.put.call_args[1]['json']\n self.assertEqual(expected_payload, actual_payload)\n\n self.assertTrue(res)", "def test_aws_service_api_keypair_generate_post(self):\n pass", "def test_cmd_cs_subscription_bad_action(self):\n bad_action = 'blahblah'\n\n result = self.runner.invoke(cli, ['subscription', bad_action])\n assert f\"invalid choice: {bad_action}\" in result.output\n assert result.exception", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self):\n pass", "def rule_20_connect(session):\n\n c, my_id = session[\"config\"], session[\"config\"][\"tags\"][\"instavpn\"]\n\n session[\"conn\"].update({\n \"server\": AWSConn(c[\"server\"][\"identity\"], my_id),\n \"client\": AWSConn(c[\"client\"][\"identity\"], my_id),\n })\n\n return True", "def test_services_with_multiple_pages_lex(self):\n # Lex V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlex.html\n self.assertTrue(\"lex:DeleteUtterances\" in 
self.all_actions)\n # Lex V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlexv2.html\n self.assertTrue(\"lex:ListBotLocales\" in self.all_actions)\n results = get_actions_for_service(\"lex\")\n actions = [\n \"lex:CreateIntentVersion\",\n \"lex:CreateSlotTypeVersion\",\n \"lex:DeleteBotChannelAssociation\",\n \"lex:DeleteIntentVersion\",\n \"lex:DeleteSlotTypeVersion\",\n \"lex:GetBot\",\n \"lex:GetBotAlias\",\n \"lex:GetBotAliases\",\n \"lex:GetBotChannelAssociation\",\n \"lex:GetBotChannelAssociations\",\n \"lex:GetBotVersions\",\n \"lex:GetBots\",\n \"lex:GetBuiltinIntent\",\n \"lex:GetBuiltinIntents\",\n \"lex:GetBuiltinSlotTypes\",\n \"lex:GetExport\",\n \"lex:GetImport\",\n \"lex:GetIntent\",\n \"lex:GetIntentVersions\",\n \"lex:GetIntents\",\n \"lex:GetMigration\",\n \"lex:GetMigrations\",\n \"lex:GetSlotType\",\n \"lex:GetSlotTypeVersions\",\n \"lex:GetSlotTypes\",\n \"lex:GetUtterancesView\",\n \"lex:PostContent\",\n \"lex:PostText\",\n \"lex:PutBot\",\n \"lex:PutBotAlias\",\n \"lex:PutIntent\",\n \"lex:PutSlotType\",\n \"lex:StartMigration\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_validate_bookstore_endpoint():\n expected = {\n \"bookstore_valid\": False,\n \"publish_valid\": False,\n \"archive_valid\": False,\n \"clone_valid\": True,\n }\n settings = BookstoreSettings(s3_endpoint_url=\"\")\n assert validate_bookstore(settings) == expected", "def test_claim_resources_success_evacuate_no_shared(self):\n # the source allocation is also held by the instance_uuid so report\n # client will see it.\n current_allocs = {\n 'allocations': {\n uuids.source_host: {\n 'generation': 42,\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n },\n \"consumer_generation\": 1,\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n self.ks_adap_mock.get.return_value = fake_requests.FakeResponse(\n status_code=200,\n content=jsonutils.dumps(current_allocs))\n put_allocations_resp_mock = fake_requests.FakeResponse(status_code=204)\n self.ks_adap_mock.put.return_value = put_allocations_resp_mock\n consumer_uuid = uuids.consumer_uuid\n # this is an evacuate so we have the same resources request towards the\n # dest host\n alloc_req = {\n 'allocations': {\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # this allocation request comes from the scheduler therefore it\n # does not have consumer_generation in it.\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n\n project_id = uuids.project_id\n user_id = uuids.user_id\n res = self.client.claim_resources(self.context, consumer_uuid,\n alloc_req, project_id, user_id,\n allocation_request_version='1.28')\n\n expected_url = \"/allocations/%s\" % consumer_uuid\n # we expect that both the source and dest allocations are here\n expected_payload = {\n 'allocations': {\n uuids.source_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # report client uses the consumer_generation that it got from\n # placement when asked for the existing allocations\n 'consumer_generation': 1,\n 'project_id': project_id,\n 'user_id': user_id}\n self.ks_adap_mock.put.assert_called_once_with(\n expected_url, microversion='1.28', json=mock.ANY,\n global_request_id=self.context.global_id)\n # We have to pull the json body from the mock call_args to validate\n # 
it separately otherwise hash seed issues get in the way.\n actual_payload = self.ks_adap_mock.put.call_args[1]['json']\n self.assertEqual(expected_payload, actual_payload)\n\n self.assertTrue(res)", "def check_ocsp_response_status(single_response_map, ocsp_response):\n ret = []\n for hkey, data in single_response_map.items():\n if data['status'] == 'good':\n ret.append(_process_good_status(\n hkey, data, ocsp_response))\n elif data['status'] == 'revoked': # revoked\n _process_revoked_status(hkey, data)\n else: # unknown\n _process_unknown_status(hkey)\n if len(ret) != len(single_response_map):\n raise OperationalError(\n msg=u\"Not all OCSP Response was returned\",\n errno=ER_INVALID_OCSP_RESPONSE,\n )", "def requires_asap(issuers=None, subject_should_match_issuer=None, func=None):\n return with_asap(func=func,\n required=True,\n issuers=issuers,\n subject_should_match_issuer=subject_should_match_issuer)", "def test_138_service_catalog(self):\n u.log.debug('Checking keystone service catalog...')\n self.set_api_version(2)\n endpoint_check = {\n 'adminURL': u.valid_url,\n 'id': u.not_null,\n 'region': 'RegionOne',\n 'publicURL': u.valid_url,\n 'internalURL': u.valid_url\n }\n expected = {\n 'volume': [endpoint_check],\n 'identity': [endpoint_check]\n }\n actual = self.keystone_v2.service_catalog.get_endpoints()\n\n ret = u.validate_svc_catalog_endpoint_data(expected, actual)\n if ret:\n amulet.raise_status(amulet.FAIL, msg=ret)", "def validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id, aws_secret_access_key):\n if not aws_default_region:\n raise DemistoException('You must specify AWS default region.')\n\n if bool(aws_access_key_id) != bool(aws_secret_access_key):\n raise DemistoException('You must provide Access Key id and Secret key id to configure the instance with '\n 'credentials.')\n if bool(aws_role_arn) != bool(aws_role_session_name):\n raise DemistoException('Role session name is required when using role ARN.')", "def VerifyAWSAccountAccess(self, parameters, body, ids):\n # [POST] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/cloud-connect-aws/VerifyAWSAccountAccess\n ID_LIST = str(ids).replace(\",\",\"&ids=\")\n FULL_URL = self.base_url+'/cloud-connect-aws/entities/verify-account-access/v1?ids={}'.format(ID_LIST)\n HEADERS = self.headers\n PARAMS = parameters\n BODY = body #payload does not appear to be required\n result = self.Result()\n try:\n response = requests.request(\"POST\", FULL_URL, json=BODY, params=PARAMS, headers=HEADERS, verify=False)\n returned = result(response.status_code, response.headers, response.json())\n except Exception as e:\n returned = result(500, {}, str(e))\n \n return returned", "def test_lti20_rest_good_dispatch(self):\r\n for ginput, expected in self.GOOD_DISPATCH_INPUTS:\r\n self.assertEquals(self.xmodule.parse_lti_2_0_handler_suffix(ginput), expected)", "def test_add_hmac_signature_get(self):\n resp = self.client.get(\"/\")\n self.assertTrue(resp.status_code == 405)", "def test_get_actions_with_arn_type_and_access_level_case_4(self):\n desired_output = [\n 'secretsmanager:ListSecrets'\n ]\n output = get_actions_with_arn_type_and_access_level(\n \"secretsmanager\", \"*\", \"List\"\n )\n self.assertListEqual(desired_output, output)", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n 
headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(self):\n pass", "def test_patch_namespaced_policy(self):\n pass", "def _ValidRequest(request):\n if not request.json:\n abort(400)\n sessId = request.json['sessionId']\n sessKey = request.json['sessionKey']\n # Check if it is active and correct key\n return database.SessionActive(sessId) and database.CorrectSessionKey(sessId, sessKey)", "def test_create_pod_security_policy_review_for_all_namespaces(self):\n pass", "def test_create_pod_security_policy_self_subject_review_for_all_namespaces(self):\n pass" ]
[ "0.59183055", "0.57958144", "0.5341116", "0.51459986", "0.5056637", "0.5025083", "0.49718618", "0.49709255", "0.49639255", "0.4939532", "0.48318732", "0.47402886", "0.47391623", "0.47184974", "0.4717983", "0.47155645", "0.47113252", "0.4703302", "0.46926475", "0.46769983", "0.46675658", "0.46649936", "0.46610305", "0.46535316", "0.4649807", "0.4649175", "0.46461895", "0.46326602", "0.46007538", "0.45988023", "0.4595491", "0.45910946", "0.4585959", "0.45720324", "0.4568081", "0.45465842", "0.4543321", "0.45391455", "0.45342746", "0.4522167", "0.4509795", "0.45055035", "0.45002982", "0.4497225", "0.44918138", "0.44917035", "0.44834575", "0.4470137", "0.44642225", "0.4446611", "0.4438906", "0.4437704", "0.4436723", "0.443578", "0.44314194", "0.4422564", "0.442091", "0.4419995", "0.44151378", "0.44133982", "0.44063944", "0.44045165", "0.4400183", "0.43974635", "0.4395876", "0.43895504", "0.43878233", "0.43856162", "0.4383167", "0.43781072", "0.43671125", "0.43661574", "0.43631327", "0.43630078", "0.43598348", "0.43562198", "0.43506777", "0.43506536", "0.4342857", "0.4342591", "0.43348327", "0.4333588", "0.4327427", "0.4325071", "0.43186253", "0.43181232", "0.4316093", "0.43141344", "0.43138638", "0.43012282", "0.430024", "0.42995536", "0.42982036", "0.429649", "0.429344", "0.42876944", "0.42869776", "0.42865795", "0.4280835", "0.42736754" ]
0.5989848
0
Ensure that AWS Marketplace actions from all of the different AWS Marketplace SAR pages are present in the IAM definition.
def test_services_with_multiple_pages_aws_marketplace(self): # Overlap: AWS Marketplace, Marketplace Catalog, and AWS Marketplace Entitlement service, AWS Marketplace Image Building Service, AWS Marketplace Metering Service, AWS Marketplace Private Marketplace, and AWS Marketplace Procurement Systems # AWS Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplace.html self.assertTrue("aws-marketplace:AcceptAgreementApprovalRequest" in self.all_actions) # AWS Marketplace Catalog: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacecatalog.html self.assertTrue("aws-marketplace:CancelChangeSet" in self.all_actions) # AWS Marketplace Entitlement Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceentitlementservice.html self.assertTrue("aws-marketplace:GetEntitlements" in self.all_actions) # AWS Marketplace Image Building Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceimagebuildingservice.html self.assertTrue("aws-marketplace:DescribeBuilds" in self.all_actions) # AWS Marketplace Metering Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacemeteringservice.html self.assertTrue("aws-marketplace:BatchMeterUsage" in self.all_actions) # AWS Marketplace Private Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprivatemarketplace.html self.assertTrue("aws-marketplace:AssociateProductsWithPrivateMarketplace" in self.all_actions) # AWS Marketplace Procurement Systems: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprocurementsystemsintegration.html self.assertTrue("aws-marketplace:DescribeProcurementSystemConfiguration" in self.all_actions) results = get_actions_for_service("aws-marketplace") actions = [ "aws-marketplace:AcceptAgreementApprovalRequest", "aws-marketplace:BatchMeterUsage", "aws-marketplace:CancelAgreementRequest", "aws-marketplace:CancelChangeSet", "aws-marketplace:CompleteTask", "aws-marketplace:DescribeAgreement", "aws-marketplace:DescribeBuilds", "aws-marketplace:DescribeChangeSet", "aws-marketplace:DescribeEntity", "aws-marketplace:DescribeProcurementSystemConfiguration", "aws-marketplace:DescribeTask", "aws-marketplace:GetAgreementApprovalRequest", "aws-marketplace:GetAgreementRequest", "aws-marketplace:GetAgreementTerms", "aws-marketplace:GetEntitlements", "aws-marketplace:ListAgreementApprovalRequests", "aws-marketplace:ListAgreementRequests", "aws-marketplace:ListBuilds", "aws-marketplace:ListChangeSets", "aws-marketplace:ListEntities", "aws-marketplace:ListTasks", "aws-marketplace:MeterUsage", "aws-marketplace:PutProcurementSystemConfiguration", "aws-marketplace:RegisterUsage", "aws-marketplace:RejectAgreementApprovalRequest", "aws-marketplace:ResolveCustomer", "aws-marketplace:SearchAgreements", "aws-marketplace:StartBuild", "aws-marketplace:StartChangeSet", "aws-marketplace:Subscribe", "aws-marketplace:Unsubscribe", "aws-marketplace:UpdateAgreementApprovalRequest", "aws-marketplace:UpdateTask", "aws-marketplace:ViewSubscriptions", ] for action in actions: self.assertTrue(action in results)
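A minimal sketch of how the per-page coverage check above could be made table-driven, assuming the same get_actions_for_service helper and unittest harness used throughout these tests; the import path and the page-to-sample-action mapping are illustrative assumptions (the sample actions themselves are taken from the test above), not part of the original suite:

import unittest

# Assumed import path -- the surrounding tests only reference the helper by name.
from policy_sentry.querying.actions import get_actions_for_service

# One representative action per AWS Marketplace SAR page (illustrative subset
# drawn from the action list asserted in the test above).
MARKETPLACE_PAGE_SAMPLES = {
    "awsmarketplace": "aws-marketplace:AcceptAgreementApprovalRequest",
    "awsmarketplacecatalog": "aws-marketplace:CancelChangeSet",
    "awsmarketplaceentitlementservice": "aws-marketplace:GetEntitlements",
    "awsmarketplaceimagebuildingservice": "aws-marketplace:DescribeBuilds",
    "awsmarketplacemeteringservice": "aws-marketplace:BatchMeterUsage",
    "awsmarketplaceprivatemarketplace": "aws-marketplace:AssociateProductsWithPrivateMarketplace",
    "awsmarketplaceprocurementsystemsintegration": "aws-marketplace:DescribeProcurementSystemConfiguration",
}


class MarketplaceSarPagesTestCase(unittest.TestCase):
    def test_every_marketplace_sar_page_is_represented(self):
        # All seven SAR pages share the "aws-marketplace" prefix, so a single
        # lookup covers every page at once.
        results = get_actions_for_service("aws-marketplace")
        for page, sample_action in MARKETPLACE_PAGE_SAMPLES.items():
            # subTest reports exactly which SAR page is missing on failure.
            with self.subTest(sar_page=page):
                self.assertIn(sample_action, results)


if __name__ == "__main__":
    unittest.main()

Using subTest keeps the page-level diagnostics of the hand-written assertions while collapsing the repeated assertTrue loops into one table-driven check.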
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_actions_with_arn_type_and_access_level_case_3(self):\n desired_output = [\n 's3:PutAccountPublicAccessBlock',\n 's3:PutAccessPointPublicAccessBlock'\n ]\n output = get_actions_with_arn_type_and_access_level(\n # \"ram\", \"resource-share\", \"Write\"\n \"s3\", \"*\", \"Permissions management\"\n )\n print(output)\n for item in desired_output:\n self.assertTrue(item in output)\n # self.assertListEqual(desired_output, output)", "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def test_services_with_multiple_pages_apigateway(self):\n # API Gateway Management V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html\n self.assertTrue(\"apigateway:AddCertificateToDomain\" in self.all_actions)\n self.assertTrue(\"apigateway:RemoveCertificateFromDomain\" in self.all_actions)\n self.assertTrue(\"apigateway:SetWebACL\" in self.all_actions)\n # API Gateway Management V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html\n # API Gateway V2 doesn't have any unique actions in but it does have some unique resource types. 
Let's make sure those resource types are in the IAM Definition.\n # Resource types unique to API Gateway V2:\n resource_types = get_arn_types_for_service(\"apigateway\")\n resource_types = list(resource_types.keys())\n self.assertTrue(\"AccessLogSettings\" in resource_types)\n # Resource types unique to API Gateway V1:\n self.assertTrue(\"RestApi\" in resource_types)", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n 
\"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_get_actions_with_arn_type_and_access_level_case_5(self):\n\n output = get_actions_with_arn_type_and_access_level(\n \"s3\", \"object\", \"List\"\n )\n self.assertTrue(\"s3:ListMultipartUploadParts\" in output)", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def legal_actions(self):\n raise NotImplementedError", "def get_legal_actions(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action_spaces(self):\n pass", "def test_actions(self, actions):\n try:\n for action in actions:\n self.get_action(action['type'])(**action)\n except Exception as e:\n print('Exception: {}'.format(str(e)))", "def test_get_actions_with_arn_type_and_access_level_case_4(self):\n desired_output = [\n 'secretsmanager:ListSecrets'\n ]\n output = 
get_actions_with_arn_type_and_access_level(\n \"secretsmanager\", \"*\", \"List\"\n )\n self.assertListEqual(desired_output, output)", "def _get_legal_actions(self):\n raise NotImplementedError", "def decide_place(self, action):\n pass", "def action_space(self, curr_state):\n # Action space - allowed (position, value) combinations for the agent and environment given the current state\n\n agent_actions = list(product(self.allowed_positions(curr_state), self.allowed_values(curr_state)[0]))\n env_actions = list(product(self.allowed_positions(curr_state), self.allowed_values(curr_state)[1]))\n return (agent_actions, env_actions)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self):\n pass", "def valid_actions(self) -> List[str]:\n return list(self.action_map().keys())", "def test_services_with_multiple_pages_kinesis_analytics(self):\n # Kinesis Analytics V1\n results = get_actions_for_service(\"kinesisanalytics\")\n actions = [\n \"kinesisanalytics:GetApplicationState\", # Only in v1, not v2\n \"kinesisanalytics:ListApplications\", # In both\n ]\n for action in actions:\n self.assertTrue(action in results)", "def action_space(self, curr_state):\n\n agent_actions = product(self.allowed_positions(curr_state), self.allowed_values(curr_state)[0])\n env_actions = product(self.allowed_positions(curr_state), self.allowed_values(curr_state)[1])\n return (agent_actions, env_actions)", "def action_space(self, curr_state):\n\n agent_actions = product(self.allowed_positions(curr_state), self.allowed_values(curr_state)[0])\n env_actions = product(self.allowed_positions(curr_state), self.allowed_values(curr_state)[1])\n return (agent_actions, env_actions)", "def get_actions(self, request):\n return super(OrganizationAdmin, self).get_actions(request)", "def get_legal_actions(self, block_=None):\n return self._get_move_actions(block_) + self._get_mine_actions() + \\\n self._get_placement_actions(block_)", "def get_action_meanings(self) -> list[str]:\n keys = ale_py.Action.__members__.values()\n values = ale_py.Action.__members__.keys()\n mapping = dict(zip(keys, values))\n return [mapping[action] for action in self._action_set]", "def _get_placement_actions(self, exclude=None):\n if not self._has_blocks_to_place(exclude=exclude):\n return []\n\n dirs = [_Vec3(0, 2, 0)]\n for dir_ in _adj_dirs():\n dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)])\n if self._get_block(self._pos + dir_) in [_AIR, _WATER]:\n dirs.append(dir_ + _Vec3(0, -1, 0))\n\n rtn = []\n for dir_ in dirs:\n pos = self._pos + dir_\n if self._can_place(pos):\n rtn.append({\n 'func': '_place',\n 'args': (pos,),\n 'kwargs': {'exclude': exclude}\n })\n\n return rtn", "def test_autocomplete_locations_urls(self):\n r = self.base_check_request(\"get\", \"autocomplete/locations/\")\n self.assertIsInstance(r, list)\n self.assertEqual(len(r), 10, \"Invalid default count\")\n\n ac_keys = ['ancestors', 'id', 'is_region', 'name', 'prepositional_name',\n 'slug', 'text_for_apartments_search',\n 'text_for_complexes_search', 'type_name']\n # ac_keys_full = ac_keys + [\"metro_stations\"]\n for ac in r:\n # check response objects structure\n self.assertListEqual(sorted(list(ac.keys())), ac_keys)\n\n # check response types\n # self.check_list_item_keys(ac[\"ancestors\"], ac_keys_full)\n self.assertIsInstance(ac['id'], int)\n self.assertIsInstance(ac['is_region'], bool)\n self.assertIsInstance(ac['name'], str)\n self.assertIsInstance(ac['prepositional_name'], str)\n self.assertIsInstance(ac['slug'], str)\n 
self.assertIsInstance(ac['text_for_apartments_search'], (str, type(None)))\n self.assertIsInstance(ac['text_for_complexes_search'], (str, type(None)))\n self.assertIsInstance(ac['type_name'], str)", "def test_intent_support(self):\n dispatcher = self.get_dispatcher()\n for intent in self.get_intents():\n self.assertIsNot(dispatcher(intent), None)", "def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:\n super().__init__(scope, construct_id, **kwargs)\n\n # -------------------------------\n # S3 Bucket for Manifests\n # -------------------------------\n\n qs_gov_bucket = s3.Bucket(\n self,\n id=f\"{cf.PROJECT}-ManifestBucket\",\n )\n bucket_name = qs_gov_bucket.bucket_name\n\n # -------------------------------\n # IAM\n # -------------------------------\n\n list_roles_policy = iam.ManagedPolicy(\n self,\n id=f\"{cf.PROJECT}-ListRolesPolicy\",\n description=None,\n managed_policy_name=None,\n path=\"/\",\n statements=[\n iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n resources=[\"*\"],\n actions=[\"iam:ListRoles\", \"iam:ListAccountAliases\"],\n )\n ],\n )\n\n federated_quicksight_policy = iam.ManagedPolicy(\n self,\n id=f\"{cf.PROJECT}-FederatedQuickSightPolicy\",\n managed_policy_name=f\"{cf.PROJECT}-FederatedQuickSightPolicy\",\n statements=[\n iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n resources=[\n f\"arn:aws:iam::{cf.ACCOUNT}:saml-provider/{cf.OKTA_IDP_NAME}\"\n ],\n actions=[\"sts:AssumeRoleWithSAML\"],\n conditions={\n \"StringEquals\": {\n \"saml:aud\": \"https://signin.aws.amazon.com/saml\"\n }\n },\n )\n ],\n )\n\n okta_federated_principal = iam.FederatedPrincipal(\n federated=f\"arn:aws:iam::{cf.ACCOUNT}:saml-provider/{cf.OKTA_IDP_NAME}\",\n assume_role_action=\"sts:AssumeRoleWithSAML\",\n conditions={\n \"StringEquals\": {\"SAML:aud\": \"https://signin.aws.amazon.com/saml\"}\n },\n )\n\n federated_quicksight_role = iam.Role(\n self,\n id=f\"{cf.PROJECT}-{cf.OKTA_ROLE_NAME}\",\n role_name=f\"{cf.PROJECT}-{cf.OKTA_ROLE_NAME}\",\n assumed_by=okta_federated_principal,\n description=\"Allow Okta to Federate Login & User Creation to QuickSight\",\n managed_policies=[federated_quicksight_policy],\n )\n\n\n iam.User(\n self,\n id=f\"{cf.PROJECT}-OktaSSOUser\",\n user_name=f\"{cf.PROJECT}-OktaSSOUser\",\n managed_policies=[list_roles_policy],\n )\n\n\n # -------------------------------\n # Lambda Functions\n # -------------------------------\n\n # iam role for Lambdas\n\n qs_governance_policy = iam.ManagedPolicy(\n self,\n id=f\"{cf.PROJECT}-QuickSightGovernancePolicy\",\n managed_policy_name=f\"{cf.PROJECT}-QuickSightGovernancePolicy\",\n statements=[\n iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n resources=[\n f\"arn:aws:secretsmanager:{cf.REGION}:{cf.ACCOUNT}:secret:{cf.OKTA_SECRET}*\"\n ],\n actions=[\n \"secretsmanager:GetSecretValue\",\n ],\n ),\n iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n resources=[\"*\"],\n actions=[\"quicksight:*\", \"ds:*\"],\n ),\n iam.PolicyStatement(\n effect=iam.Effect.ALLOW,\n resources=[f\"arn:aws:s3:::{bucket_name}/*\"],\n actions=[\"s3:Get*\", \"s3:Put*\"],\n ),\n ],\n )\n\n quicksight_permission_mapping_role = iam.Role(\n self,\n id=f\"{cf.PROJECT}-QuickSightPermissionMappingRole\",\n assumed_by=iam.ServicePrincipal(\"lambda.amazonaws.com\"),\n managed_policies=[\n iam.ManagedPolicy.from_aws_managed_policy_name(\n \"service-role/AWSLambdaBasicExecutionRole\"\n ),\n qs_governance_policy,\n ],\n )\n\n # Lambdas\n\n get_okta_info_lambda = _lambda.Function(\n self,\n id=f\"{cf.PROJECT}-GetOktaInfo\",\n 
handler=\"get_okta_info.handler\",\n role=quicksight_permission_mapping_role,\n runtime=_lambda.Runtime.PYTHON_3_8,\n code=_lambda.Code.asset(os.path.join(cf.PATH_SRC, \"pkg\")),\n function_name=f\"{cf.PROJECT}-GetOktaInfo\",\n environment={\n \"OKTA_SECRET\": cf.OKTA_SECRET,\n \"OKTA_ROLE_NAME\": cf.OKTA_ROLE_NAME,\n \"QS_GOVERNANCE_BUCKET\": bucket_name,\n \"QS_USER_GOVERNANCE_KEY\": cf.QS_USER_GOVERNANCE_KEY,\n },\n memory_size=256,\n timeout=core.Duration.seconds(180),\n )\n\n # Lamda Okta to QuickSight Mappers\n\n qs_user_governance_lambda = _lambda.Function(\n self,\n id=f\"{cf.PROJECT}-QSUserGovernance\",\n handler=\"qs_user_gov.handler\",\n role=quicksight_permission_mapping_role,\n runtime=_lambda.Runtime.PYTHON_3_8,\n code=_lambda.Code.asset(os.path.join(cf.PATH_SRC, \"pkg\")),\n function_name=f\"{cf.PROJECT}-QSUserGovernance\",\n environment={\n \"OKTA_ROLE_NAME\": f\"{cf.PROJECT}-{cf.OKTA_ROLE_NAME}\",\n \"QS_GOVERNANCE_BUCKET\": bucket_name,\n \"QS_USER_GOVERNANCE_KEY\": cf.QS_USER_GOVERNANCE_KEY,\n \"OKTA_GROUP_QS_PREFIX\": cf.OKTA_GROUP_QS_PREFIX,\n \"QS_ADMIN_OKTA_GROUP\": cf.QS_ADMIN_OKTA_GROUP,\n \"QS_AUTHOR_OKTA_GROUP\": cf.QS_AUTHOR_OKTA_GROUP,\n \"QS_READER_OKTA_GROUP\": cf.QS_READER_OKTA_GROUP\n },\n memory_size=256,\n timeout=core.Duration.seconds(180),\n )\n\n qs_asset_governance_lambda = _lambda.Function(\n self,\n id=f\"{cf.PROJECT}-QSAssetGovernance\",\n handler=\"qs_asset_gov.handler\",\n role=quicksight_permission_mapping_role,\n runtime=_lambda.Runtime.PYTHON_3_8,\n code=_lambda.Code.asset(os.path.join(cf.PATH_SRC, \"pkg\")),\n function_name=f\"{cf.PROJECT}-QSAssetGovernance\",\n environment={\n \"QS_GOVERNANCE_BUCKET\": bucket_name,\n \"QS_ASSET_GOVERNANCE_KEY\": cf.QS_ASSET_GOVERNANCE_KEY,\n },\n memory_size=256,\n timeout=core.Duration.seconds(180),\n )\n\n # -------------------------------\n # Events\n # -------------------------------\n\n qs_user_governance_lambda.add_event_source(\n lambda_event_sources.S3EventSource(\n bucket=qs_gov_bucket,\n events=[s3.EventType.OBJECT_CREATED],\n filters=[s3.NotificationKeyFilter(prefix=cf.QS_USER_GOVERNANCE_KEY)],\n )\n )\n\n qs_asset_governance_lambda.add_event_source(\n lambda_event_sources.S3EventSource(\n bucket=qs_gov_bucket,\n events=[s3.EventType.OBJECT_CREATED],\n filters=[s3.NotificationKeyFilter(prefix=cf.QS_ASSET_GOVERNANCE_KEY)],\n )\n )\n\n lambda_schedule = events.Schedule.rate(core.Duration.days(1))\n get_okta_info_target = events_targets.LambdaFunction(\n handler=get_okta_info_lambda\n )\n events.Rule(\n self,\n id=f\"{cf.PROJECT}-GetOktaInfoScheduledEvent\",\n description=\"The once per day CloudWatch event trigger for the Lambda\",\n enabled=True,\n schedule=lambda_schedule,\n targets=[get_okta_info_target],\n )\n\n # -------------------------------\n # S3 Object Deployment - QS Asset Manifest\n # -------------------------------\n\n asset_manifest_deploy = s3_deploy.BucketDeployment(\n self,\n id=f\"{cf.PROJECT}-AssetManifestDeploy\",\n sources=[s3_deploy.Source.asset(\n os.path.join(cf.PATH_ROOT, 'qs_config')\n )],\n destination_bucket=qs_gov_bucket\n )", "def get_actions(self, request):\n actions = super().get_actions(request)\n if not settings.PUBLISHER_CODE:\n del actions['create_cwr']\n if 'delete_selected' in actions:\n del actions['delete_selected']\n return actions", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n 
assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def test_services_with_multiple_pages_lex(self):\n # Lex V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlex.html\n self.assertTrue(\"lex:DeleteUtterances\" in self.all_actions)\n # Lex V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlexv2.html\n self.assertTrue(\"lex:ListBotLocales\" in self.all_actions)\n results = get_actions_for_service(\"lex\")\n actions = [\n \"lex:CreateIntentVersion\",\n \"lex:CreateSlotTypeVersion\",\n \"lex:DeleteBotChannelAssociation\",\n \"lex:DeleteIntentVersion\",\n \"lex:DeleteSlotTypeVersion\",\n \"lex:GetBot\",\n \"lex:GetBotAlias\",\n \"lex:GetBotAliases\",\n \"lex:GetBotChannelAssociation\",\n \"lex:GetBotChannelAssociations\",\n \"lex:GetBotVersions\",\n \"lex:GetBots\",\n \"lex:GetBuiltinIntent\",\n \"lex:GetBuiltinIntents\",\n \"lex:GetBuiltinSlotTypes\",\n \"lex:GetExport\",\n \"lex:GetImport\",\n \"lex:GetIntent\",\n \"lex:GetIntentVersions\",\n \"lex:GetIntents\",\n \"lex:GetMigration\",\n \"lex:GetMigrations\",\n \"lex:GetSlotType\",\n \"lex:GetSlotTypeVersions\",\n \"lex:GetSlotTypes\",\n \"lex:GetUtterancesView\",\n \"lex:PostContent\",\n \"lex:PostText\",\n \"lex:PutBot\",\n \"lex:PutBotAlias\",\n \"lex:PutIntent\",\n \"lex:PutSlotType\",\n \"lex:StartMigration\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def get_available_actions(self, state):\n pass", "def test_correct_data_under_places(self):\n load_to_datastore(self.places_sofia, self.metadata_sofia)\n CommonAssertions.check_correct_data_under_places(tester=self, places=self.places_sofia,\n metadata=self.metadata_sofia)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(self):\n pass", "def test_required_unknown():\n parser=argparse.ArgumentParser()\n parser.add_argument('--region',\n help='Enter a region like us-east-2.',\n 
dest=\"region\",\n action=ValidateRegion,\n required=True)\n parser.add_argument('--output',\n help='pretty, json, yaml',\n dest=\"output\",\n action=Validateoutput,\n nargs=\"?\",\n default=\"yaml\"\n )\n parser.add_argument('--filter-types',\n help='eg: AWS::IAM::Role or AWS::EC2::Instance. Using \"ALL\" with no quotes and we will run it for all current supported resource types',\n nargs='+',\n dest=\"types\",\n action=Validatefilter,\n required=True)\n parser.add_argument('--tag_keys',\n help='Allows you to exclude particular AWS Resources based on the presence of a particular tag key on the resource. This will only be applied to AWS Resources that support tagging. Valid values: any string that is a valid tag - multiple values can be supplied.',\n dest=\"tags\")\n \n #This should raise an error since this will cause a SystemExit since bad params were passed in \n args = [\"--region\", \"NADA\",'--output', \"NADA\",'--filter-types',\"NADA\"]\n with pytest.raises(SystemExit):\n parser.parse_args(args)\n \n \n \n \n #This should NOT raise an error since good params were passed into the parser\n args = [\"--region\", \"us-east-1\",'--output', \"yaml\",'--filter-types',\"AWS::EC2::Instance\"] \n with not_raises(SystemExit):\n parser.parse_args(args)", "def test_undefined_action_is_logged(self):\n create_file(self.authz_file, textwrap.dedent(\"\"\"\\\n [groups]\n administrators = éat\n [wiki:WikiStart]\n änon = UNKNOWN_VIEW, TEST_CREATE, !TEST_MODIFY\n [milestone:milestone1]\n * = UNKNOWN_MODIFY, !TEST_VIEW\n \"\"\"))\n authz_policy = AuthzPolicy(self.env)\n authz_policy.parse_authz()\n\n self.assertEqual(2, len(self.env.log_messages))\n self.assertIn(('WARNING',\n 'The action UNKNOWN_VIEW in the [wiki:WikiStart] '\n 'section of trac-authz-policy is not a valid action.'),\n self.env.log_messages)\n self.assertIn(('WARNING',\n 'The action UNKNOWN_MODIFY in the [milestone:milestone1] '\n 'section of trac-authz-policy is not a valid action.'),\n self.env.log_messages)", "def test_search_form_apartments_urls(self):\n r_keys = ['balcony_types', 'bathroom_type', 'building_floors_max',\n 'building_floors_min', 'building_type', 'decoration',\n 'elevators_type', 'floor_max', 'floor_min', 'infrastructure',\n 'living_area_max', 'living_area_min', 'metro_stations',\n 'price_per_m_max', 'price_per_m_min', 'regions', 'rooms_count',\n 'total_area_max', 'total_area_min']\n r = self.check_request_keys(\"get\", \"search-forms/apartments/\", r_keys)\n\n self.check_list_item_keys(r[\"balcony_types\"], ['id', 'name'])\n self.check_list_item_keys(r[\"bathroom_type\"], ['id', 'name'])\n self.assertIsInstance(r['building_floors_max'], int)\n self.assertIsInstance(r['building_floors_min'], int)\n self.check_list_item_keys(r[\"building_type\"], ['id', 'name'])\n self.assertIsInstance(r['decoration'], list)\n self.assertEqual(r['decoration'], [])\n self.check_list_item_keys(r[\"elevators_type\"], ['id', 'name'])\n self.assertIsInstance(r['floor_max'], int)\n self.assertIsInstance(r['floor_min'], int)\n self.assertIsInstance(r['infrastructure'], list)\n self.assertEqual(r['infrastructure'], [])\n self.assertIsInstance(r['living_area_max'], int)\n self.assertIsInstance(r['living_area_min'], int)\n self.check_list_item_keys(r[\"metro_stations\"], ['id', 'name'])\n self.assertIsInstance(r['price_per_m_max'], int)\n self.assertIsInstance(r['price_per_m_min'], int)\n self.check_list_item_keys(r[\"regions\"], ['format', 'id', 'locations', 'name', 'slug', 'typeBeforeLocation',\n 'typeName', 'typePrepositionalShortName', 
'typeShortName'])\n self.check_list_items_type(r['rooms_count'], int)\n self.assertIsInstance(r['total_area_max'], int)\n self.assertIsInstance(r['total_area_min'], int)", "def test_cohorts_management_a11y(self):\n self.cohort_management_page.a11y_audit.config.set_rules({\n \"ignore\": [\n 'aria-valid-attr', # TODO: LEARNER-6611 & LEARNER-6865\n 'region', # TODO: AC-932\n ]\n })\n self.cohort_management_page.a11y_audit.check_for_accessibility_errors()", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_library_variable_set_usage_list_action_spaces(self):\n pass", "def InitActionCheck(initActionList, init):\n for actions in initActionList:\n action_class = getNameFromIRI(actions.is_a[0].iri)\n # if the action is a SpeedAction class\n if action_class == \"SpeedAction\":\n action_entity_ref = getNameFromIRI(actions.has_entity_ref[0].iri)\n target_speed = actions.has_target_speed[0]\n ontology_transition_dynamics = actions.has_transition_dynamics[0]\n xosc_transition_dynamics = checkTransitionDynamics(ontology_transition_dynamics)\n init.add_init_action(action_entity_ref, xosc.AbsoluteSpeedAction(target_speed, xosc_transition_dynamics))\n continue\n #if the action is TeleportAction\n if action_class == \"TeleportAction\":\n action_entity_ref = getNameFromIRI(actions.has_entity_ref[0].iri)\n # if the action has position as parameter set\n s: int = 0\n offset = 0\n lane_id = 0\n road_id = 0\n if len(actions.has_position) != 0:\n position = actions.has_position[0]\n if len(position.has_s) != 0:\n s = position.has_s[0]\n\n if len(position.has_offset) != 0:\n offset = position.has_offset[0]\n\n if len(position.has_lane_id) != 0:\n lane_id = position.has_lane_id[0]\n\n if len(position.has_road_id) != 0:\n road_id = position.has_road_id[0]\n\n init.add_init_action(action_entity_ref, xosc.TeleportAction(xosc.LanePosition(s, offset, lane_id, road_id)))\n continue\n if action_class == \"EnvironmentAction\": # if the action is an EnvironmentAction\n xosc_environment_action = checkEnvironmentAction(actions)\n init.add_global_action(xosc_environment_action)\n return init", "def aws_elasticsearch_public_access_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n # ISO Time\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for response in describe_es_os_domains(cache, session):\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(response,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n esDomainName = response[\"DomainStatus\"][\"DomainName\"]\n esVersion = response[\"DomainStatus\"][\"ElasticsearchVersion\"]\n domainId = response[\"DomainStatus\"][\"DomainId\"]\n domainArn = response[\"DomainStatus\"][\"ARN\"]\n # Determine if ES has Cognito Enabled\n try:\n cognitoEnabledCheck = str(response[\"DomainStatus\"][\"CognitoOptions\"][\"Enabled\"])\n except KeyError:\n cognitoEnabledCheck = False\n # Determine if ES is in a VPC\n try:\n vpcId = str(response[\"VPCOptions\"][\"VPCId\"])\n except KeyError:\n vpcId = \"NO_VPC\"\n # Determine if there is a policy and then parse through it. 
If the \"AWS\": \"*\" principal is allowed (anonymous access) without\n # any conditions we can assume there is not anything else to stop them\n try:\n policyDoc = response[\"AccessPolicies\"]\n policyJson = json.loads(policyDoc.encode().decode(\"unicode_escape\"))\n hasPolicy = True\n for sid in policyJson[\"Statement\"]:\n try:\n conditionCheck = str(sid[\"Condition\"])\n hasCondition = True\n except:\n conditionCheck = \"\"\n hasCondition = False\n if str(sid[\"Principal\"]) == '{\"AWS\": \"*\"}' and hasCondition is False:\n policyAllowAnon = True\n else:\n policyAllowAnon = False\n except KeyError or ValueError:\n policyDoc = \"\"\n policyJson = \"NO_POLICY\"\n policyAllowAnon = \"NO_POLICY\"\n hasPolicy = False\n # Full Public Check\n if policyAllowAnon is True and vpcId == \"NO_VPC\" and cognitoEnabledCheck is False:\n fullPublic = True\n else:\n fullPublic = False\n # This is a failing check\n if fullPublic is True:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-public-access-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Data Exposure\"\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"CRITICAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.9] OpenSearch/AWS ElasticSearch Service domains should not be exposed to the public\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" is open to public due to not using a VPC, Cognito, or any additional conditions within the resource policy. Public access will allow malicious actors to attack the confidentiality, integrity or availability of documents indexed in your Domain. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information on protecting Domains with a Resource-based Policy refer to the Identity and Access Management in Amazon Elasticsearch Service section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-3\",\n \"NIST CSF V1.1 PR.AC-4\",\n \"NIST CSF V1.1 PR.DS-5\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 4 AC-4\",\n \"NIST SP 800-53 Rev. 4 AC-5\",\n \"NIST SP 800-53 Rev. 4 AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-14\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-17\",\n \"NIST SP 800-53 Rev. 
4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-20\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 PE-19\",\n \"NIST SP 800-53 Rev. 4 PS-3\",\n \"NIST SP 800-53 Rev. 4 PS-6\",\n \"NIST SP 800-53 Rev. 4 SC-7\",\n \"NIST SP 800-53 Rev. 4 SC-8\",\n \"NIST SP 800-53 Rev. 4 SC-13\",\n \"NIST SP 800-53 Rev. 4 SC-15\",\n \"NIST SP 800-53 Rev. 4 SC-31\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC6.3\",\n \"AICPA TSC CC6.6\",\n \"AICPA TSC CC7.2\",\n \"ISO 27001:2013 A.6.1.2\",\n \"ISO 27001:2013 A.6.2.1\",\n \"ISO 27001:2013 A.6.2.2\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.7.1.2\",\n \"ISO 27001:2013 A.7.3.1\",\n \"ISO 27001:2013 A.8.2.2\",\n \"ISO 27001:2013 A.8.2.3\",\n \"ISO 27001:2013 A.9.1.1\",\n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.2.3\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.10.1.1\",\n \"ISO 27001:2013 A.11.1.4\",\n \"ISO 27001:2013 A.11.1.5\",\n \"ISO 27001:2013 A.11.2.1\",\n \"ISO 27001:2013 A.11.2.6\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.13.1.3\",\n \"ISO 27001:2013 A.13.2.1\",\n \"ISO 27001:2013 A.13.2.3\",\n \"ISO 27001:2013 A.13.2.4\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-public-access-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Data Exposure\"\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"CRITICAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.9] OpenSearch/AWS ElasticSearch Service domains should not be exposed to the public\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" is not to the public due to using a VPC, Cognito, or any additional conditions within the resource policy.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information on protecting Domains with a Resource-based Policy refer to the Identity and Access Management in Amazon Elasticsearch Service section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-3\",\n \"NIST CSF V1.1 PR.AC-4\",\n \"NIST CSF V1.1 PR.DS-5\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 AC-4\",\n \"NIST SP 800-53 Rev. 4 AC-5\",\n \"NIST SP 800-53 Rev. 4 AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-14\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-17\",\n \"NIST SP 800-53 Rev. 4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-20\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 PE-19\",\n \"NIST SP 800-53 Rev. 4 PS-3\",\n \"NIST SP 800-53 Rev. 4 PS-6\",\n \"NIST SP 800-53 Rev. 4 SC-7\",\n \"NIST SP 800-53 Rev. 4 SC-8\",\n \"NIST SP 800-53 Rev. 4 SC-13\",\n \"NIST SP 800-53 Rev. 4 SC-15\",\n \"NIST SP 800-53 Rev. 4 SC-31\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC6.3\",\n \"AICPA TSC CC6.6\",\n \"AICPA TSC CC7.2\",\n \"ISO 27001:2013 A.6.1.2\",\n \"ISO 27001:2013 A.6.2.1\",\n \"ISO 27001:2013 A.6.2.2\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.7.1.2\",\n \"ISO 27001:2013 A.7.3.1\",\n \"ISO 27001:2013 A.8.2.2\",\n \"ISO 27001:2013 A.8.2.3\",\n \"ISO 27001:2013 A.9.1.1\",\n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.2.3\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.10.1.1\",\n \"ISO 27001:2013 A.11.1.4\",\n \"ISO 27001:2013 A.11.1.5\",\n \"ISO 27001:2013 A.11.2.1\",\n \"ISO 27001:2013 A.11.2.6\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.13.1.3\",\n \"ISO 27001:2013 A.13.2.1\",\n \"ISO 27001:2013 A.13.2.3\",\n \"ISO 27001:2013 A.13.2.4\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def provision_create(ec2_conn, iam_conn, interana_account_id, s3_bucket_path, interana_user):\n try:\n user, all_policies = check_account_setup(iam_conn, interana_user)\n except Exception, e:\n print \"Warning could not verify user interana_user {} because {}\".format(interana_user, e)\n\n infile = 's3_bucket_list.policy.template'\n outfile = 's3_bucket_list.policy'\n\n bucket_name, bucket_prefix = get_bucket_name_prefix(s3_bucket_path)\n\n all_lines = ''\n with open(infile, 'r') as tmp_fh, open(outfile, 'w') as out_fh:\n for line in tmp_fh:\n re_proxy = re.compile('<INTERANA_ACCOUNT_ID>')\n translate = re_proxy.sub(interana_account_id, line)\n\n re_proxy = re.compile('<BUCKET_NAME>')\n translate = re_proxy.sub(bucket_name, translate)\n\n re_proxy = re.compile('<BUCKET_PREFIX>')\n translate = re_proxy.sub(bucket_prefix, translate)\n\n out_fh.write(translate)\n all_lines += translate.strip()\n\n if len(bucket_prefix) < 1:\n with open(outfile, 'r') as in_fh:\n policy = json.load(in_fh)\n del policy['Statement'][1]['Condition']\n all_lines = json.dumps(policy)\n print \"Download file to check GetObject Access {}\".format(outfile)\n with open(outfile, 'w') as out_fh:\n json.dump(policy, out_fh, indent=4)\n\n print \"****policy file {}***\".format(outfile)\n\n print json.dumps(json.loads(all_lines), indent=True)", "def test_all_actions_setup(self, mocked_find):\n\n setup_identity_cache()\n\n mocked_find.side_effect = KeyError(\"Error forced for testing\")\n\n url = \"/v1/actions/CreateProjectAndUser\"\n data = {\"project_name\": \"test_project\", \"email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)\n\n new_task = Task.objects.all()[0]\n\n class_conf = new_task.config\n expected_action_names = CreateProjectAndUser.default_actions[:]\n expected_action_names += class_conf.additional_actions\n\n actions = new_task.actions\n observed_action_names = 
[a.action_name for a in actions]\n self.assertEqual(observed_action_names, expected_action_names)", "def testAmenities(self):\n place = Place()\n self.assertTrue(hasattr(place, \"amenity_ids\"))\n self.assertEqual(type(place.amenity_ids), list)\n self.assertEqual(len(place.amenity_ids), 0)", "def getActions(self, state): \n util.raiseNotDefined()", "def get_dropbox_policy ( bucket_name, requires_aspera = False ) :\n if requires_aspera :\n return \"\"\"{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"GrantUploadDownloadPermissionsToBucket\",\n \"Effect\": \"Allow\",\n \"Action\": [\n \"s3:AbortMultipartUpload\",\n \"s3:DeleteObject\",\n \"s3:DeleteObjectVersion\",\n \"s3:GetBucketAcl\",\n \"s3:GetBucketLocation\",\n \"s3:GetBucketLogging\",\n \"s3:GetBucketNotification\",\n \"s3:GetBucketPolicy\",\n \"s3:GetBucketRequestPayment\",\n \"s3:GetBucketTagging\",\n \"s3:GetBucketVersioning\",\n \"s3:GetBucketWebsite\",\n \"s3:GetLifecycleConfiguration\",\n \"s3:GetObject\",\n \"s3:GetObjectAcl\",\n \"s3:GetObjectTorrent\",\n \"s3:GetObjectVersion\",\n \"s3:GetObjectVersionAcl\",\n \"s3:GetObjectVersionTorrent\",\n \"s3:ListAllMyBuckets\",\n \"s3:ListBucket\",\n \"s3:ListBucketMultipartUploads\",\n \"s3:ListBucketVersions\",\n \"s3:ListMultipartUploadParts\",\n \"s3:PutBucketVersioning\",\n \"s3:PutObject\"\n ],\n \"Resource\": [\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"\",\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"/*\"\n ]\n },\n {\n \"Sid\": \"AllowAsperaRootLevelListingOfTheBucket\",\n \"Action\": [\"s3:ListBucket\"],\n \"Effect\": \"Allow\",\n \"Resource\": [\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"\"\n ],\n \"Condition\":{\n \"StringEquals\":{\n \"s3:prefix\":[\"\"], \"s3:delimiter\":[\"/\"]\n }\n }\n },\n {\n \"Sid\" : \"AllowGroupToSeeBucketListInAsperaConsole\",\n \"Action\" : [\n \"s3:ListAllMyBuckets\",\n \"s3:GetBucketLocation\"\n ],\n \"Effect\" : \"Allow\",\n \"Resource\" : [ \"arn:aws:s3:::\" ]\n }\n ]\n}\"\"\"\n\n else :\n return \"\"\"{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"GrantUploadDownloadPermissionsToBucket\",\n \"Effect\": \"Allow\",\n \"Action\": [\n \"s3:AbortMultipartUpload\",\n \"s3:DeleteObject\",\n \"s3:DeleteObjectVersion\",\n \"s3:GetBucketAcl\",\n \"s3:GetBucketLocation\",\n \"s3:GetBucketLogging\",\n \"s3:GetBucketNotification\",\n \"s3:GetBucketPolicy\",\n \"s3:GetBucketRequestPayment\",\n \"s3:GetBucketTagging\",\n \"s3:GetBucketVersioning\",\n \"s3:GetBucketWebsite\",\n \"s3:GetLifecycleConfiguration\",\n \"s3:GetObject\",\n \"s3:GetObjectAcl\",\n \"s3:GetObjectTorrent\",\n \"s3:GetObjectVersion\",\n \"s3:GetObjectVersionAcl\",\n \"s3:GetObjectVersionTorrent\",\n \"s3:ListAllMyBuckets\",\n \"s3:ListBucket\",\n \"s3:ListBucketMultipartUploads\",\n \"s3:ListBucketVersions\",\n \"s3:ListMultipartUploadParts\",\n \"s3:PutBucketVersioning\",\n \"s3:PutObject\"\n ],\n \"Resource\": [\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"\",\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"/*\"\n ]\n }\n ]\n}\"\"\"", "def exists_intent_action(self, intent_keyword):\n pass", "def actions() -> None:\n pass", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, 
\"actions_required\")", "def take_actions(self, actions: MultiAgentDict):\n\n # 1. - 4.\n wage_increases, demand = self.parse_actions(actions)\n wages = {agent.agent_id: agent.wage * (1 + wage_increases[agent.agent_id]) for agent in self.agents.values()}\n self.clear_labor_market(wages)\n self.clear_goods_market(demand)\n\n # 5. - 7.\n self.clear_dividends(self.firm.profit)\n self.clear_capital_market()\n\n return wage_increases, demand", "def test_ami_exists(self) -> None:\n owner = self.sts.get_caller_identity().get('Account')\n amis = self.ec2_client.describe_images(\n Owners=[owner],\n Filters=[{\n 'Name': 'name',\n 'Values': ['saints-xctf-web-server*']\n }]\n )\n self.assertTrue(len(amis.get('Images')) > 0)", "def validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id, aws_secret_access_key):\n if not aws_default_region:\n raise DemistoException('You must specify AWS default region.')\n\n if bool(aws_access_key_id) != bool(aws_secret_access_key):\n raise DemistoException('You must provide Access Key id and Secret key id to configure the instance with '\n 'credentials.')\n if bool(aws_role_arn) != bool(aws_role_session_name):\n raise DemistoException('Role session name is required when using role ARN.')", "def _build_action_space(self):\n feasible_actions = []\n # Adding the inform actions and request actions.\n for slot in sorted(self.slot_set.keys()):\n feasible_actions.append({'action': 'request', 'inform_slots': {}, 'request_slots': {slot: dialogue_configuration.VALUE_UNKNOWN},\"explicit_inform_slots\":{}, \"implicit_inform_slots\":{}})\n # Diseases as actions.\n for disease in sorted(self.disease_symptom.keys()):\n feasible_actions.append({'action': 'inform', 'inform_slots': {\"disease\":disease}, 'request_slots': {},\"explicit_inform_slots\":{}, \"implicit_inform_slots\":{}})\n\n return feasible_actions", "def actions(self):\n raise NotImplementedError", "def parse_exception(arg_action, arg_exception):\n # print(arg_exception)\n if arg_exception.response['Error']['Code'] == 'NoSuchBucket':\n print(f'BotoBucket() Error: \"NoSuchBucket\". '\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]\n\n elif arg_exception.response['Error']['Code'] == 'BucketAlreadyExists':\n print(f'BotoBucket() Error: \"Bucket Already Exists\". '\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]\n\n elif arg_exception.response['Error']['Code'] == 'IllegalLocationConstraintException':\n print(f'BotoBucket() Error: \"Wrong Region\". '\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]\n\n elif arg_exception.response['Error']['Code'] == 'BucketAlreadyOwnedByYou':\n print(f'BotoBucket() Error: \"Bucket Already Owned By You\". 
'\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]\n\n elif arg_exception.response['Error']['Code'] == 'InvalidBucketName':\n print(f'BotoBucket() Error: \"The specified bucket name is not valid.\". '\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]\n\n elif arg_exception.response['Error']['Code'] == 'BucketNotEmpty':\n print(f'BotoBucket() Error: \"The bucket you tried to delete is not empty.\". '\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]\n\n else:\n print(f'BotoBucket() Error: \"Other Exception. Please parse the response.\". '\n f'Class Method: {arg_action}, aws operation: {arg_exception.operation_name}')\n print(arg_exception.response)\n return [{arg_exception.operation_name}, arg_exception.response['Error']['Code'],\n arg_exception.response['Error']['Message'], arg_exception.response]", "def clean_ami_public_access(self):\n main_account = Account(region=config.aws.region)\n ddb_table = main_account.resource(\"dynamodb\").Table(self.config.publicAMIs.ddb_table_name)\n\n retention_period = self.config.publicAMIs.remediation_retention_period\n\n jira = JiraReporting(self.config)\n slack = SlackNotification(self.config)\n\n for account_id, account_name in self.config.aws.accounts.items():\n logging.debug(f\"Checking '{account_name} / {account_id}'\")\n issues = IssueOperations.get_account_open_issues(ddb_table, account_id, PublicAMIIssue)\n for issue in issues:\n ami_id = issue.issue_id\n\n in_whitelist = self.config.publicAMIs.in_whitelist(account_id, ami_id)\n\n if in_whitelist:\n logging.debug(f\"Skipping {ami_id} (in whitelist)\")\n\n # Adding label with \"whitelisted\" to jira ticket.\n jira.add_label(\n ticket_id=issue.jira_details.ticket,\n label=IssueStatus.Whitelisted.value\n )\n continue\n\n if issue.timestamps.reported is None:\n logging.debug(f\"Skipping '{ami_id}' (was not reported)\")\n continue\n\n if issue.timestamps.remediated is not None:\n logging.debug(f\"Skipping {ami_id} (has been already remediated)\")\n continue\n\n updated_date = issue.timestamp_as_datetime\n no_of_days_issue_created = (self.config.now - updated_date).days\n\n if no_of_days_issue_created >= retention_period:\n owner = issue.jira_details.owner\n bu = issue.jira_details.business_unit\n product = issue.jira_details.product\n\n try:\n account = Account(id=account_id,\n name=account_name,\n region=issue.issue_details.region,\n role_name=self.config.aws.role_name_reporting)\n if account.session is None:\n continue\n\n checker = PublicAMIChecker(account=account)\n checker.check(amis_to_check=[ami_id])\n ami = checker.get_ami(ami_id)\n if ami is None:\n logging.debug(f\"AMI {ami_id} was removed by user\")\n elif not ami.public_access:\n logging.debug(f\"AMI {ami.name} public access issue was remediated by user\")\n else:\n logging.debug(f\"Remediating '{ami.name}' \")\n\n remediation_succeed = True\n if ami.modify_image_attribute():\n comment = (f\"AMI '{ami.name}' public access issue \"\n f\"in '{account_name} / {account_id}' account \"\n f\"was remediated 
by hammer\")\n else:\n remediation_succeed = False\n comment = (f\"Failed to remediate AMI '{ami.name}' public access issue \"\n f\"in '{account_name} / {account_id}' account \"\n f\"due to some limitations. Please, check manually\")\n\n jira.remediate_issue(\n ticket_id=issue.jira_details.ticket,\n comment=comment,\n reassign=remediation_succeed,\n )\n slack.report_issue(\n msg=f\"{comment}\"\n f\"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}\",\n owner=owner,\n account_id=account_id,\n bu=bu, product=product,\n )\n IssueOperations.set_status_remediated(ddb_table, issue)\n except Exception:\n logging.exception(f\"Error occurred while updating AMI '{ami_id}' access \"\n f\"in '{account_name} / {account_id}'\")\n else:\n logging.debug(f\"Skipping '{ami_id}' \"\n f\"({retention_period - no_of_days_issue_created} days before remediation)\")", "def actions_required(self) -> Optional[str]:\n return pulumi.get(self, \"actions_required\")", "def get_actions(\n self, observations: Observations, action_space: gym.Space\n ) -> Actions:\n return super().get_actions(observations, action_space)", "def test_services_with_multiple_pages_ses(self):\n # SES V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonses.html\n self.assertTrue(\"ses:PutIdentityPolicy\" in self.all_actions)\n # SES V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonsimpleemailservicev2.html\n self.assertTrue(\"ses:ListImportJobs\" in self.all_actions)\n\n results = get_actions_for_service(\"ses\")\n actions = [\n \"ses:CloneReceiptRuleSet\",\n \"ses:CreateConfigurationSetTrackingOptions\",\n \"ses:CreateReceiptFilter\",\n \"ses:CreateReceiptRule\",\n \"ses:CreateReceiptRuleSet\",\n \"ses:CreateTemplate\",\n \"ses:DeleteConfigurationSetTrackingOptions\",\n \"ses:DeleteIdentity\",\n \"ses:DeleteIdentityPolicy\",\n \"ses:DeleteReceiptFilter\",\n \"ses:DeleteReceiptRule\",\n \"ses:DeleteReceiptRuleSet\",\n \"ses:DeleteTemplate\",\n \"ses:DeleteVerifiedEmailAddress\",\n \"ses:DescribeActiveReceiptRuleSet\",\n \"ses:DescribeConfigurationSet\",\n \"ses:DescribeReceiptRule\",\n \"ses:DescribeReceiptRuleSet\",\n \"ses:GetAccountSendingEnabled\",\n \"ses:GetIdentityDkimAttributes\",\n \"ses:GetIdentityMailFromDomainAttributes\",\n \"ses:GetIdentityNotificationAttributes\",\n \"ses:GetIdentityPolicies\",\n \"ses:GetIdentityVerificationAttributes\",\n \"ses:GetSendQuota\",\n \"ses:GetSendStatistics\",\n \"ses:GetTemplate\",\n \"ses:ListIdentities\",\n \"ses:ListIdentityPolicies\",\n \"ses:ListReceiptFilters\",\n \"ses:ListReceiptRuleSets\",\n \"ses:ListTemplates\",\n \"ses:ListVerifiedEmailAddresses\",\n \"ses:PutIdentityPolicy\",\n \"ses:ReorderReceiptRuleSet\",\n \"ses:SendBounce\",\n \"ses:SendBulkTemplatedEmail\",\n \"ses:SendRawEmail\",\n \"ses:SendTemplatedEmail\",\n \"ses:SetActiveReceiptRuleSet\",\n \"ses:SetIdentityDkimEnabled\",\n \"ses:SetIdentityFeedbackForwardingEnabled\",\n \"ses:SetIdentityHeadersInNotificationsEnabled\",\n \"ses:SetIdentityMailFromDomain\",\n \"ses:SetIdentityNotificationTopic\",\n \"ses:SetReceiptRulePosition\",\n \"ses:TestRenderTemplate\",\n \"ses:UpdateAccountSendingEnabled\",\n \"ses:UpdateConfigurationSetReputationMetricsEnabled\",\n \"ses:UpdateConfigurationSetSendingEnabled\",\n \"ses:UpdateConfigurationSetTrackingOptions\",\n \"ses:UpdateReceiptRule\",\n \"ses:UpdateTemplate\",\n \"ses:VerifyDomainDkim\",\n \"ses:VerifyDomainIdentity\",\n \"ses:VerifyEmailAddress\",\n 
\"ses:VerifyEmailIdentity\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_make_compatible_taxa_summaries_sample_id_map_incomplete_map(self):\r\n self.assertRaises(ValueError, _make_compatible_taxa_summaries,\r\n self.taxa_summary3, self.taxa_summary4, self.sample_id_map3)", "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def test_get_actions_for_invalid_service(self):\n output = get_actions_for_service(\"invalid_service\")\n self.assertListEqual([], output)", "def test_was_produced_by_action(self):\n\n test_content = {\n AbstractAction.ACTION: WordExtraction.__name__,\n AbstractAction.RESULT: ['One', 'Two']\n }\n\n assert WordExtraction.produced(test_content)\n\n test_content[AbstractAction.ACTION] = ''\n\n assert not WordExtraction.produced(test_content)", "def test_location(self, all_metars):\n expected = [\"KIAH\", 'KGNV', 'KNID', 'KTPA', 'KP60']\n for metar, expected_val in zip(all_metars, expected):\n parser = Parser(metar)\n actual = parser.parse()\n assert expected_val == actual['location']", "def _check_market_place_in_range(self):\n\t\tfor building in self.get_buildings_in_range():\n\t\t\tif building.id == BUILDINGS.MARKET_PLACE_CLASS:\n\t\t\t\tif StaticPather.get_path_on_roads(self.island, self, building) is not None:\n\t\t\t\t\t# a market place is in range\n\t\t\t\t\treturn\n\t\t# no market place found\n\t\tself.session.ingame_gui.message_widget.add(self.position.origin.x, self.position.origin.y, \\\n\t\t 'NO_MARKET_PLACE_IN_RANGE')", "def test_create_resource_access_review_for_all_namespaces(self):\n pass", "def test_police_abbreviations(self):\n for word in self.report.get_words():\n for uword in self.rules.police_abbreviations:\n if uword[\"word\"] == word.text.lower():\n self.add_error(\n f\"{word.text} är en intern förkortning. 
\"\n f\"Använd {uword['means']} istället.\",\n word=word,\n )", "def test_non_additive_requires_tags(self):\n\n # local imports of code-under-test ensure moto has mocks\n # registered before any possible calls out to AWS\n from awstools.awstools import launch_instances, run_block_device_dict, farm_security_group_setup\n\n # launch_instances requires vpc setup as done by firesim/scripts/setup_firesim.py\n from awstools.aws_setup import aws_setup\n aws_setup()\n farm_security_group_setup()\n\n type = 'f1.2xlarge'\n\n with pytest.raises(ValueError):\n launch_instances(type, 1,\n instancemarket=\"ondemand\", spotinterruptionbehavior=None, spotmaxprice=None,\n blockdevices=run_block_device_dict(),\n always_expand=False)", "def get_available_actions(self):\n return self.actions", "def test_assessor_access_normal(self):\n assessor = get_or_create_default_assessor()\n self.client.login(assessor.email)\n # This assessor doesn't belong to a group\n self.assertTrue(is_assessor(assessor))\n # add the assessor to the assessment group\n self.assertTrue(Assessment.objects.filter(application=self.application).count() > 0)\n for assessment in Assessment.objects.filter(application=self.application):\n add_assessor_to_assessor_group(assessor, assessment.assessor_group)\n\n # forbidden\n urls_get_forbidden = [\n reverse('wl_applications:enter_conditions', args=[self.application.pk]),\n ]\n urls_post_forbidden = [\n {\n 'url': reverse('wl_applications:create_condition', args=[self.application.pk]),\n 'data': {\n 'code': '123488374',\n 'text': 'condition text'\n }\n },\n {\n 'url': reverse('wl_applications:set_assessment_condition_state'),\n 'data': {\n 'assessmentConditionID': self.assessment_condition.pk,\n 'acceptanceStatus': 'accepted',\n }\n },\n {\n 'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),\n 'data': {\n 'conditionID': [self.condition.pk],\n }\n },\n ]\n # Allowed\n urls_get_allowed = [\n reverse('wl_applications:search_conditions'),\n reverse('wl_applications:enter_conditions_assessor', args=[self.application.pk, self.assessment.pk]),\n ]\n urls_post_allowed = [\n {\n 'url': reverse('wl_applications:enter_conditions_assessor',\n args=[self.application.pk, self.assessment.pk]),\n 'data': {\n 'conditionID': [self.condition.pk],\n }\n },\n ]\n for url in urls_get_forbidden:\n response = self.client.get(url, follow=True)\n if response.status_code != 403:\n self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,\n target_status_code=200)\n for url in urls_post_forbidden:\n response = self.client.post(url['url'], url['data'], follow=True)\n if response.status_code != 403:\n self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,\n target_status_code=200)\n for url in urls_get_allowed:\n response = self.client.get(url, follow=True)\n self.assertEqual(200, response.status_code)\n\n for url in urls_post_allowed:\n response = self.client.post(url['url'], url['data'], follow=True)\n self.assertEqual(200, response.status_code)", "def _generate_actions(self) -> list:\n pass", "def deploy_handler_with_advance_op(self):\n bucket = [\"src_bucket\", \"metadata\",\"advance_op\"]\n self.check_for_buckets(bucket)\n self.__deploy_function(EXPORTED_FUNCTION.ADVANCE_OP)", "def test_emtpy_conflict_places(conflict_places):\n assert conflict_places.named_place(\"Woodshop\") == None", "def clean_up(exc):\n\n FORBIDDEN_FIELDS_TECH = [\n \"categories\",\n ]\n\n FORBIDDEN_FIELDS_BIO = [\"location\", \"product\"]\n\n for field in 
list(exc.keys()):\n if exc[field] is None or exc[field] == \"None\":\n del exc[field]\n continue\n\n if exc[\"type\"] == \"biosphere\" and field in FORBIDDEN_FIELDS_BIO:\n del exc[field]\n if exc[\"type\"] == \"technosphere\" and field in FORBIDDEN_FIELDS_TECH:\n del exc[field]\n\n return exc", "def check(self):\r\n for action in self._actions:\r\n action.check()", "def corporate_action_restricted_assets(self) -> Tuple[str, ...]:\n return self.__corporate_action_restricted_assets", "def step_impl(context, objects_type):\n\n log.info(\"=====> From the STB verify that the HTTP Cache is built and the objects from the nsa file are available to query\")\n\n if objects_type == \"ObjectsTest1\":\n object_list = resourceset_parameters.ObjectsTest1\n elif objects_type == \"ObjectsTest2\":\n object_list = resourceset_parameters.ObjectsTest2\n elif objects_type == \"ObjectsTest4\":\n object_list = resourceset_parameters.ObjectsTest4\n else:\n assert False, \" ****> Failed: No objects_type parameter while posting. Got: {objects_type}\".format(objects_type=objects_type)\n\n for i in object_list:\n verify_object_available_in_stb(context, resourceset_parameters.Object_names[i])", "def soft_assert_bulk_verify_filter_functionality(page, modal, exp_asmt,\n soft_assert):\n filter_section_element = modal.filter_section.expand()\n if not isinstance(page, dashboard.MyAssessments):\n soft_assert.expect(\n filter_section_element.get_mapped_to_audit_filter() == exp_asmt.audit,\n \"'Filter by Mapping' section should contain title of opened audit.\")\n else:\n filter_section_element.add_mapping_filter(\n objects.get_singular(objects.AUDITS, title=True),\n element.Common.TITLE, exp_asmt.audit)\n filter_section_element.apply()\n base.Test.general_equal_soft_assert(\n soft_assert, [exp_asmt],\n webui_service.AssessmentsService().get_objs_from_bulk_update_modal(\n modal, with_second_tier_info=True),\n *exp_asmt.bulk_update_modal_tree_view_attrs_to_exclude)", "def get_mapping_actions(image=None, imageId=None, in_digests=[], bundle={}):\n\n if not image or not bundle:\n raise Exception(\"input error\")\n\n if not verify_policy_bundle(bundle=bundle):\n raise Exception(\"input bundle does not conform to bundle schema\")\n\n ret = []\n \n image_infos = []\n\n image_info = anchore_utils.get_all_image_info(image)\n if image_info and image_info not in image_infos:\n image_infos.append(image_info)\n\n for m in bundle['mappings']:\n polname = m['policy_id']\n wlnames = m['whitelist_ids']\n\n for image_info in image_infos:\n #_logger.info(\"IMAGE INFO: \" + str(image_info))\n ii = {}\n ii.update(image_info)\n registry = ii.pop('registry', \"N/A\")\n repo = ii.pop('repo', \"N/A\")\n\n tags = []\n fulltag = ii.pop('fulltag', \"N/A\")\n if fulltag != 'N/A':\n tinfo = anchore_utils.parse_dockerimage_string(fulltag)\n if 'tag' in tinfo and tinfo['tag']:\n tag = tinfo['tag']\n\n for t in [image, fulltag]:\n tinfo = anchore_utils.parse_dockerimage_string(t)\n if 'tag' in tinfo and tinfo['tag'] and tinfo['tag'] not in tags:\n tags.append(tinfo['tag'])\n\n digest = ii.pop('digest', \"N/A\")\n digests = [digest]\n for d in image_info['digests']:\n dinfo = anchore_utils.parse_dockerimage_string(d)\n if 'digest' in dinfo and dinfo['digest']:\n digests.append(dinfo['digest'])\n \n p_ids = []\n p_names = []\n for p in bundle['policies']:\n p_ids.append(p['id'])\n p_names.append(p['name'])\n\n wl_ids = []\n wl_names = []\n for wl in bundle['whitelists']:\n wl_ids.append(wl['id'])\n wl_names.append(wl['name'])\n \n if polname not in p_ids:\n 
_logger.info(\"policy not in bundle: \" + str(polname))\n continue\n\n skip=False\n for wlname in wlnames:\n if wlname not in wl_ids:\n _logger.info(\"whitelist not in bundle\" + str(wlname))\n skip=True\n if skip:\n continue\n\n mname = m['name']\n mregistry = m['registry']\n mrepo = m['repository']\n if m['image']['type'] == 'tag':\n mtag = m['image']['value']\n mdigest = None\n mimageId = None\n elif m['image']['type'] == 'digest':\n mdigest = m['image']['value']\n mtag = None\n mimageId = None\n elif m['image']['type'] == 'id':\n mimageId = m['image']['value']\n mtag = None\n mdigest = None\n else:\n mtag = mdigest = mimageId = None\n\n if registry == mregistry or mregistry == '*':\n _logger.debug(\"checking mapping for image (\"+str(image_info)+\") match.\")\n\n if repo == mrepo or mrepo == '*':\n doit = False\n matchstring = mname + \": N/A\"\n if tag and (mtag == '*' or mtag == tag or mtag in tags):\n matchstring = mname + \":\" + ','.join([mregistry, mrepo, mtag])\n doit = True\n elif digest and (mdigest == digest or mdigest in in_digests or mdigest in digests):\n matchstring = mname + \":\" + ','.join([mregistry, mrepo, mdigest])\n doit = True\n elif imageId and (mimageId == imageId):\n matchstring = mname + \":\" + ','.join([mregistry, mrepo, mimageId])\n doit = True\n\n matchstring = matchstring.encode('utf8')\n if doit:\n _logger.debug(\"match found for image (\"+str(matchstring)+\")\")\n\n wldata = []\n wldataset = set()\n for wlname in wlnames:\n wldataset = set(list(wldataset) + extract_whitelist_data(bundle, wlname))\n wldata = list(wldataset)\n\n poldata = extract_policy_data(bundle, polname)\n \n wlnames.sort()\n evalstr = ','.join([polname] + wlnames)\n evalhash = hashlib.md5(evalstr).hexdigest()\n ret.append( ( poldata, wldata, polname,wlnames, matchstring, m, evalhash) )\n return(ret)\n else:\n _logger.debug(\"no match found for image (\"+str(image_info)+\") match.\")\n else:\n _logger.debug(\"no match found for image (\"+str(image_info)+\") match.\")\n\n return(ret)", "def aws_permissions(self, perms):\n for perm in perms:\n group = perm.get(\"Group\")\n if group:\n self.allowed_groups.append(group)\n\n user = perm.get(\"UserId\")\n if user:\n self.allowed_users.append(user)", "def get_legal_actions(self, index):\n actions = []\n agent = self.agent_states[index]\n for action in ACTIONS:\n pos = agent.pos[0] + action[0], agent.pos[1] + action[1]\n if MAP[pos[0]][pos[1]] not in WALL:\n actions.append(action)\n return actions", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action(self):\n pass", "def test_view_acls(self):\n v1, v2, v3 = set_resources_and_sync([\n make_video(\n media_id='123', acl=['USER_spqr1', 'USER_abcd1', 'INST_botolph', 'GROUP_1234']),\n make_video(media_id='456', acl=['WORLD']),\n make_video(media_id='789', acl=['CAM']),\n ])\n i1 = mpmodels.MediaItem.objects.get(jwp__key=v1.key)\n i2 = mpmodels.MediaItem.objects.get(jwp__key=v2.key)\n i3 = mpmodels.MediaItem.objects.get(jwp__key=v3.key)\n\n self.assertEqual(i1.view_permission.crsids, ['spqr1', 'abcd1'])\n self.assertEqual(i1.view_permission.lookup_groups, ['1234'])\n self.assertEqual(i1.view_permission.lookup_insts, ['botolph'])\n self.assertFalse(i1.view_permission.is_public)\n self.assertFalse(i1.view_permission.is_signed_in)\n\n self.assertEqual(i2.view_permission.crsids, [])\n self.assertEqual(i2.view_permission.lookup_groups, [])\n self.assertEqual(i2.view_permission.lookup_insts, [])\n self.assertTrue(i2.view_permission.is_public)\n 
self.assertFalse(i2.view_permission.is_signed_in)\n\n self.assertEqual(i3.view_permission.crsids, [])\n self.assertEqual(i3.view_permission.lookup_groups, [])\n self.assertEqual(i3.view_permission.lookup_insts, [])\n self.assertFalse(i3.view_permission.is_public)\n self.assertTrue(i3.view_permission.is_signed_in)", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def pre_check():\n try:\n x = os.environ['AWS_DEFAULT_REGION']\n except KeyError:\n print(\"FATAL ERROR:\")\n traceback.print_exc(file=sys.stdout)\n sys.exit(\"Please set your shell variables for AWS access\")\n del x", "def get_actions(self, request):\n actions = super(RateLimitedIPAdmin, self).get_actions(request)\n del actions['delete_selected']\n return actions", "def test_services_with_multiple_pages_elb(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n actions = [\n \"elasticloadbalancing:ApplySecurityGroupsToLoadBalancer\",\n \"elasticloadbalancing:AttachLoadBalancerToSubnets\",\n \"elasticloadbalancing:ConfigureHealthCheck\",\n \"elasticloadbalancing:CreateAppCookieStickinessPolicy\",\n \"elasticloadbalancing:CreateLBCookieStickinessPolicy\",\n \"elasticloadbalancing:CreateLoadBalancerListeners\",\n \"elasticloadbalancing:CreateLoadBalancerPolicy\",\n \"elasticloadbalancing:DeleteLoadBalancerListeners\",\n \"elasticloadbalancing:DeleteLoadBalancerPolicy\",\n \"elasticloadbalancing:DeregisterInstancesFromLoadBalancer\",\n \"elasticloadbalancing:DescribeInstanceHealth\",\n \"elasticloadbalancing:DescribeLoadBalancerPolicies\",\n \"elasticloadbalancing:DescribeLoadBalancerPolicyTypes\",\n 
\"elasticloadbalancing:DetachLoadBalancerFromSubnets\",\n \"elasticloadbalancing:DisableAvailabilityZonesForLoadBalancer\",\n \"elasticloadbalancing:EnableAvailabilityZonesForLoadBalancer\",\n \"elasticloadbalancing:RegisterInstancesWithLoadBalancer\",\n \"elasticloadbalancing:SetLoadBalancerListenerSSLCertificate\",\n \"elasticloadbalancing:SetLoadBalancerPoliciesForBackendServer\",\n \"elasticloadbalancing:SetLoadBalancerPoliciesOfListener\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self):\n pass", "def all_allowed_actions(self):\n actions = []\n for managed_policy in self.attached_managed_policies:\n actions.extend(managed_policy.policy_document.all_allowed_actions)\n for inline_policy in self.inline_policies:\n actions.extend(inline_policy.policy_document.all_allowed_actions)\n for group in self.groups:\n actions.extend(group.all_allowed_actions)\n actions = list(dict.fromkeys(actions))\n actions.sort()\n return actions", "def available_actions(cls, piles):\n actions = set()\n for i, pile in enumerate(piles):\n for j in range(1, piles[i] + 1):\n actions.add((i, j))\n return actions", "def actions():\n pass", "def describe_actions(self) -> Iterator[str]:\r\n yield \"Replace all DA attributes with anonymized values that precede the originals\"\r\n yield \"Replace all DT attributes with anonymized values that precede the originals\"\r\n yield \"Replace all TM attributes with anonymized values that precede the originals\"", "def getLegalMovingActions(state,agent):\n actions = state.getLegalActions(agent)\n # Removing 'Stop'\n if Directions.STOP in actions:\n actions.remove(Directions.STOP)\n return actions", "def test_add_azr_location(self):\n pass", "def actions(self, agent_state):\n raise NotImplementedError(\"Don't know what actions are available\")", "def test_box_actions_out_of_bound(env: gym.Env):\n env.reset(seed=42)\n\n oob_env = gym.make(env.spec.id, disable_env_checker=True)\n oob_env.reset(seed=42)\n\n assert isinstance(env.action_space, spaces.Box)\n dtype = env.action_space.dtype\n upper_bounds = env.action_space.high\n lower_bounds = env.action_space.low\n\n for i, (is_upper_bound, is_lower_bound) in enumerate(\n zip(env.action_space.bounded_above, env.action_space.bounded_below)\n ):\n if is_upper_bound:\n obs, _, _, _, _ = env.step(upper_bounds)\n oob_action = upper_bounds.copy()\n oob_action[i] += np.cast[dtype](OOB_VALUE)\n\n assert oob_action[i] > upper_bounds[i]\n oob_obs, _, _, _, _ = oob_env.step(oob_action)\n\n assert np.alltrue(obs == oob_obs)\n\n if is_lower_bound:\n obs, _, _, _, _ = env.step(\n lower_bounds\n ) # `env` is unwrapped, and in new step API\n oob_action = lower_bounds.copy()\n oob_action[i] -= np.cast[dtype](OOB_VALUE)\n\n assert oob_action[i] < lower_bounds[i]\n oob_obs, _, _, _, _ = oob_env.step(oob_action)\n\n assert np.alltrue(obs == oob_obs)\n\n env.close()", "def setUp(self):\n user = User.objects.create(email=\"[email protected]\", first_name=\"Test1\", last_name=\"User\")\n group = AnaGroup.objects.create(name=\"test group\")\n IAM.objects.create(user=user,\n aws_user=\"AWS user\",\n aws_access_key=\"AWS access key\",\n aws_secret_access_key=\"AWS secret key\",\n group=group)" ]
[ "0.58062184", "0.57554114", "0.5695702", "0.5673701", "0.5447731", "0.5414008", "0.53181934", "0.5090683", "0.5057209", "0.49688128", "0.49623215", "0.49491638", "0.49443293", "0.49354288", "0.4900723", "0.48934472", "0.48312107", "0.48288488", "0.47950754", "0.4768656", "0.4768656", "0.47588596", "0.47535774", "0.46872535", "0.46716824", "0.46657667", "0.46551266", "0.4650297", "0.46221343", "0.46129328", "0.46048793", "0.46030843", "0.4589791", "0.4587225", "0.45796263", "0.45755762", "0.45709822", "0.45583615", "0.45398676", "0.45375967", "0.45359737", "0.45192328", "0.45069426", "0.4497698", "0.4493605", "0.4488601", "0.44852486", "0.44758165", "0.4468897", "0.4468897", "0.4468897", "0.4468897", "0.44616726", "0.4461071", "0.44579223", "0.44554195", "0.44473514", "0.44452655", "0.444227", "0.4440376", "0.44328898", "0.44327727", "0.44325766", "0.44269857", "0.44258615", "0.44230312", "0.4422442", "0.4415729", "0.4394283", "0.43914425", "0.43861824", "0.43773124", "0.43732613", "0.43678373", "0.43650365", "0.4363115", "0.43604678", "0.43601972", "0.43586403", "0.4354752", "0.43527237", "0.43474957", "0.43405613", "0.4334881", "0.4330424", "0.43295276", "0.4323804", "0.4323179", "0.43227136", "0.43159863", "0.43155038", "0.43144587", "0.43103507", "0.43075073", "0.43047482", "0.430064", "0.42993727", "0.42972803", "0.42953792", "0.42911258" ]
0.66148233
0
Ensure that greengrass v1 and greengrass v2 actions are both present in the greengrass namespace
def test_services_with_multiple_pages_greengrass(self):
        # Greengrass V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiotgreengrass.html
        self.assertTrue("greengrass:CreateResourceDefinition" in self.all_actions)
        # Greengrass V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiotgreengrassv2.html
        self.assertTrue("greengrass:CreateComponentVersion" in self.all_actions)
        results = get_actions_for_service("greengrass")
        actions = [
            "greengrass:AssociateRoleToGroup",
            "greengrass:CreateConnectorDefinition",
            "greengrass:CreateConnectorDefinitionVersion",
            "greengrass:CreateCoreDefinition",
            "greengrass:CreateCoreDefinitionVersion",
            "greengrass:CreateDeviceDefinition",
            "greengrass:CreateDeviceDefinitionVersion",
            "greengrass:CreateFunctionDefinition",
            "greengrass:CreateFunctionDefinitionVersion",
            "greengrass:CreateGroup",
            "greengrass:CreateGroupCertificateAuthority",
            "greengrass:CreateGroupVersion",
            "greengrass:CreateLoggerDefinition",
            "greengrass:CreateLoggerDefinitionVersion",
            "greengrass:CreateResourceDefinition",
            "greengrass:CreateResourceDefinitionVersion",
            "greengrass:CreateSoftwareUpdateJob",
            "greengrass:CreateSubscriptionDefinition",
            "greengrass:CreateSubscriptionDefinitionVersion",
            "greengrass:DeleteConnectorDefinition",
            "greengrass:DeleteCoreDefinition",
            "greengrass:DeleteDeviceDefinition",
            "greengrass:DeleteFunctionDefinition",
            "greengrass:DeleteGroup",
            "greengrass:DeleteLoggerDefinition",
            "greengrass:DeleteResourceDefinition",
            "greengrass:DeleteSubscriptionDefinition",
            "greengrass:DisassociateRoleFromGroup",
            "greengrass:Discover",
            "greengrass:GetAssociatedRole",
            "greengrass:GetBulkDeploymentStatus",
            "greengrass:GetConnectorDefinition",
            "greengrass:GetConnectorDefinitionVersion",
            "greengrass:GetCoreDefinition",
            "greengrass:GetCoreDefinitionVersion",
            "greengrass:GetDeploymentStatus",
            "greengrass:GetDeviceDefinition",
            "greengrass:GetDeviceDefinitionVersion",
            "greengrass:GetFunctionDefinition",
            "greengrass:GetFunctionDefinitionVersion",
            "greengrass:GetGroup",
            "greengrass:GetGroupCertificateAuthority",
            "greengrass:GetGroupCertificateConfiguration",
            "greengrass:GetGroupVersion",
            "greengrass:GetLoggerDefinition",
            "greengrass:GetLoggerDefinitionVersion",
            "greengrass:GetResourceDefinition",
            "greengrass:GetResourceDefinitionVersion",
            "greengrass:GetSubscriptionDefinition",
            "greengrass:GetSubscriptionDefinitionVersion",
            "greengrass:GetThingRuntimeConfiguration",
            "greengrass:ListBulkDeploymentDetailedReports",
            "greengrass:ListBulkDeployments",
            "greengrass:ListConnectorDefinitionVersions",
            "greengrass:ListConnectorDefinitions",
            "greengrass:ListCoreDefinitionVersions",
            "greengrass:ListCoreDefinitions",
            "greengrass:ListDeviceDefinitionVersions",
            "greengrass:ListDeviceDefinitions",
            "greengrass:ListFunctionDefinitionVersions",
            "greengrass:ListFunctionDefinitions",
            "greengrass:ListGroupCertificateAuthorities",
            "greengrass:ListGroupVersions",
            "greengrass:ListGroups",
            "greengrass:ListLoggerDefinitionVersions",
            "greengrass:ListLoggerDefinitions",
            "greengrass:ListResourceDefinitionVersions",
            "greengrass:ListResourceDefinitions",
            "greengrass:ListSubscriptionDefinitionVersions",
            "greengrass:ListSubscriptionDefinitions",
            "greengrass:ResetDeployments",
            "greengrass:StartBulkDeployment",
            "greengrass:StopBulkDeployment",
            "greengrass:UpdateConnectorDefinition",
            "greengrass:UpdateCoreDefinition",
            "greengrass:UpdateDeviceDefinition",
            "greengrass:UpdateFunctionDefinition",
            "greengrass:UpdateGroup",
            "greengrass:UpdateGroupCertificateConfiguration",
            "greengrass:UpdateLoggerDefinition",
            "greengrass:UpdateResourceDefinition",
            "greengrass:UpdateSubscriptionDefinition",
            "greengrass:UpdateThingRuntimeConfiguration"
        ]
        for action in actions:
            self.assertTrue(action in results)
            # if action not in results:
            #     print(action)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def has_action2(self, feature):\n return feature in self._action2", "def greengrass_v2(self) -> Optional[pulumi.Input['GatewayGreengrassV2Args']]:\n return pulumi.get(self, \"greengrass_v2\")", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. 
This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def verifyActionCenterRts():\n pass", "def test_10_unsupported_actions(self):\n\n def __count_pulled_packages(pth):\n self.pkgrepo(\"list -F tsv -H -s {0}\".format(pth))\n return len(self.output.splitlines())\n\n def __check_errout(pfmri):\n s1 = \"invalid action in package {0}\".format(pfmri)\n s2 = \"Malformed action in package '{0}'\".format(pfmri)\n self.assert_(s1 in self.errout or s2 in self.errout,\n \"{0} not in error\".format(pfmri))\n\n def __empty_repo(uri, arg_string):\n if uri.startswith(\"http://\"):\n rurl = self.dcs[4].get_repo_url()\n self.pkgrepo(\"remove -s {0} '*'\".format(rurl))\n # Refresh the depot to get it to realize that\n # the catalog has changed.\n self.dcs[4].refresh()\n elif arg_string:\n portable.remove(uri)\n else:\n self.pkgrepo(\"remove -s {0} '*'\".format(uri))\n\n\n def __test_rec(duri, arg_string, pfmris):\n self.debug(\"\\n\\nNow pkgrecv'ing to {0}\".format(duri))\n\n # It's necessary to use the -D option below because\n # otherwise pkgrecv will fail because the manifest\n # doesn't validate.\n\n novalidate = \"-D manifest_validate=Never \"\n # Check that invalid action attributes don't cause\n # tracebacks.\n self.pkgrecv(self.durl1, novalidate +\n \"-d {0} {1} {2}\".format(duri, arg_string,\n \" \".join(pfmris)), exit=pkgdefs.EXIT_OOPS)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri), 0)\n if arg_string:\n portable.remove(duri)\n\n self.pkgrecv(self.rurl1, novalidate +\n \"-d {0} {1} {2}\".format(duri, arg_string,\n \" \".join(pfmris)), exit=pkgdefs.EXIT_OOPS)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri), 0)\n if arg_string:\n portable.remove(duri)\n\n # Check that other packages are retrieved and the exit\n # code reflects partial success.\n self.pkgrecv(self.durl1, novalidate +\n \"-d {0} {1} -m all-timestamps '*'\".format(\n duri, arg_string), exit=pkgdefs.EXIT_PARTIAL)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri),\n len(self.published) - len(pfmris))\n __empty_repo(duri, arg_string)\n\n self.pkgrecv(self.rurl1, novalidate +\n \"-d {0} {1} -m all-timestamps '*'\".format(\n duri, arg_string), exit=pkgdefs.EXIT_PARTIAL)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri),\n len(self.published) - len(pfmris))\n __empty_repo(duri, arg_string)\n\n self.rurl1 = self.dcs[1].get_repo_url()\n repo = self.dcs[1].get_repo()\n rd = repo.get_pub_rstore()\n pfmri = fmri.PkgFmri(self.published[4])\n mp = rd.manifest(pfmri)\n\n with open(mp, \"rb\") as fh:\n original_txt = fh.read()\n txt = original_txt.replace(\"type=require\", \"type=foo\")\n with open(mp, \"wb\") as fh:\n fh.write(txt)\n\n rpth = tempfile.mkdtemp(dir=self.test_root)\n self.pkgrepo(\"create {0}\".format(rpth))\n adir = tempfile.mkdtemp(dir=self.test_root)\n\n # The 
__empty repo function above assumes that the only http uri\n # used is the one for depot number 4.\n dest_uris = ((rpth, \"\"), (self.durl4, \"\"),\n (os.path.join(adir, \"archive.p5p\"), \"-a\"))\n for duri, arg_string in dest_uris:\n __test_rec(duri, arg_string, [self.published[4]])\n\n # Test that multiple packages failing are handled correctly.\n for i in range(5, 7):\n pfmri = fmri.PkgFmri(self.published[i])\n mp = rd.manifest(pfmri)\n with open(mp, \"rb\") as fh:\n original_txt = fh.read()\n txt = \"foop\\n\" + original_txt\n with open(mp, \"wb\") as fh:\n fh.write(txt)\n\n for duri, arg_string, in dest_uris:\n __test_rec(duri, arg_string, self.published[4:7])", "def check_type(self):\n if self.action < 0 or self.action >= len(_action_args_dict):\n raise GameActionError('Invalid action type ({0})'.format(self.action))", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n 
\"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(self):\n pass", "def check(self):\n # get the data from shotgun\n app = self.parent.app\n context = app.context\n # get asset type\n filters = [[\"id\", \"is\", context.entity[\"id\"]]]\n fields = [\"sg_asset_type\"]\n assetType = app.shotgun.find_one(\n \"Asset\", filters=filters, fields=fields)[\"sg_asset_type\"]\n # get step short name\n filters = [[\"id\", \"is\", context.step[\"id\"]]]\n fields = [\"short_name\"]\n stepShortName = app.shotgun.find_one(\n \"Step\", filters=filters, fields=fields)[\"short_name\"]\n\n try:\n assetNode = gNodes.getTopGNode()\n except:\n assetNode = None\n\n if assetNode:\n metadataCode = assetNode.grid_code.get()\n metadataAssetType = assetNode.grid_type.get(asString=True)\n metadataPipeStep = assetNode.grid_pipeStep.get(asString=True)\n if not 
(assetType == metadataAssetType and\n stepShortName == metadataPipeStep and\n context.entity[\"name\"] == metadataCode):\n self.status = self.errorMode\n self.addError(\"Context and asset node metadata don't match\")\n self.errorMessage = \"Context and asset node metadata don't match\"\n else:\n self.status = \"OK\"\n else:\n self.status = \"OK\"", "def __init__(__self__, *,\n greengrass: Optional[pulumi.Input['GatewayGreengrassArgs']] = None,\n greengrass_v2: Optional[pulumi.Input['GatewayGreengrassV2Args']] = None):\n if greengrass is not None:\n pulumi.set(__self__, \"greengrass\", greengrass)\n if greengrass_v2 is not None:\n pulumi.set(__self__, \"greengrass_v2\", greengrass_v2)", "def test_lti20_rest_good_dispatch(self):\r\n for ginput, expected in self.GOOD_DISPATCH_INPUTS:\r\n self.assertEquals(self.xmodule.parse_lti_2_0_handler_suffix(ginput), expected)", "def check_if_can_evolve(self):\n # This sounds similar to generate actions\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder(self):\n pass", "def is_valid_git_action(action):\n\n return action in GIT_ACTIONS", "def different_actions(old_action: PersistentAction, new_action: PersistentAction) -> bool:\n if Invocation.different_required(old_action.required, new_action.required):\n return True\n\n if old_action.command != new_action.command:\n if old_action.command is None:\n old_action_kind = \"a phony command\"\n else:\n old_action_kind = \"the command: \" + \" \".join(old_action.command)\n\n if new_action.command is None:\n new_action_kind = \"a phony command\"\n else:\n new_action_kind = \"the command: \" + \" \".join(new_action.command)\n\n Logger.why(f\"Must run actions because changed {old_action_kind} \" f\"into {new_action_kind}\")\n return True\n\n return False", "def test_mr_green_genes(self):\n self.validate_goal_for('game-20130104-075510-644e1cc8.html',\n u'cyncat',\n 'MrGreenGenes')", "def test_bad_action(self):\r\n action = 'robot-not-an-action'\r\n url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {'identifiers': self.beta_tester.email, 'action': action})\r\n self.assertEqual(response.status_code, 400)", "def legal_actions(self):\n raise NotImplementedError", "def test_request_with_two_bundles(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n self.assertTrue(validate_request(request, self.policy))", "def test_subscriber_access_if_vsg2_goes_down(self):", "def actions():\n pass", "def test_subscriber_access_for_two_vsg_services(self):", "def check_global_request(self, kind, msg):\n return False", "def check(self):\r\n for action in self._actions:\r\n action.check()", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def check_stability(self):", "def test_action_independence_multiple(self):\n DST1, DST2 = ('SET_FIELD', ('IPV4_DST', 0x1)), ('SET_FIELD', ('IPV4_DST', 0x2))\n SRC1, SRC2 = ('SET_FIELD', ('IPV4_SRC', 0x1)), ('SET_FIELD', ('IPV4_SRC', 0x2))\n OUT1, OUT2 = ('OUTPUT', 1), ('OUTPUT', 2)\n n1 = 
normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1, src:2 -> output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None),\n ('IPV4_SRC', 2, None)]),\n instructions=inst_from_acts([OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1 -> src:2, output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None)]),\n instructions=inst_from_acts([SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n\n self.assertTrue(check_equal(n1, n2))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))", "def on_action(atype, action):\n if atype == Actions.CREATE_OFFER:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n c.create_offer()\n return GstValidate.ActionReturn.OK\n elif atype == Actions.CREATE_ANSWER:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n c.create_answer()\n return GstValidate.ActionReturn.OK\n elif atype == Actions.WAIT_FOR_NEGOTIATION_STATE:\n states = [NegotiationState(action.structure[\"state\"]), NegotiationState.ERROR]\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n state = c.wait_for_negotiation_states(states)\n return GstValidate.ActionReturn.OK if state != NegotiationState.ERROR else GstValidate.ActionReturn.ERROR\n elif atype == Actions.ADD_STREAM:\n self.client.add_stream(action.structure[\"pipeline\"])\n return GstValidate.ActionReturn.OK\n elif atype == Actions.ADD_DATA_CHANNEL:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n c.add_data_channel(action.structure[\"id\"])\n return GstValidate.ActionReturn.OK\n elif atype == Actions.SEND_DATA_CHANNEL_STRING:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n channel = c.find_channel (action.structure[\"id\"])\n channel.send_string (action.structure[\"msg\"])\n return GstValidate.ActionReturn.OK\n elif atype == Actions.WAIT_FOR_DATA_CHANNEL_STATE:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n states = [DataChannelState(action.structure[\"state\"]), DataChannelState.ERROR]\n channel = c.find_channel (action.structure[\"id\"])\n state = channel.wait_for_states(states)\n return GstValidate.ActionReturn.OK if state != DataChannelState.ERROR else GstValidate.ActionReturn.ERROR\n elif atype == Actions.CLOSE_DATA_CHANNEL:\n assert action.structure[\"which\"] in (\"local\", 
\"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n channel = c.find_channel (action.structure[\"id\"])\n channel.close()\n return GstValidate.ActionReturn.OK\n elif atype == Actions.WAIT_FOR_DATA_CHANNEL:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n state = c.wait_for_data_channel(action.structure[\"id\"])\n return GstValidate.ActionReturn.OK\n elif atype == Actions.WAIT_FOR_DATA_CHANNEL_STRING:\n assert action.structure[\"which\"] in (\"local\", \"remote\")\n c = self.client if action.structure[\"which\"] == \"local\" else self.remote_client\n channel = c.find_channel (action.structure[\"id\"])\n channel.wait_for_message(action.structure[\"msg\"])\n return GstValidate.ActionReturn.OK\n elif atype == Actions.WAIT_FOR_NEGOTIATION_NEEDED:\n self.client.wait_for_negotiation_needed(action.structure[\"generation\"])\n return GstValidate.ActionReturn.OK\n elif atype == Actions.SET_WEBRTC_OPTIONS:\n self.client.set_options (action.structure)\n self.remote_client.set_options (action.structure)\n return GstValidate.ActionReturn.OK\n else:\n assert \"Not reached\" == \"\"", "def get_legal_actions(self):\n pass", "def isAuthorized(user, ress_name, action_name):\r\n #Import model that support PGSQL schema if difined\r\n if hasattr(settings, 'ACL_UTILS'):\r\n try:\r\n utils = __import__(settings.ACL_UTILS, fromlist=['*'])\r\n return utils.isAuthorized(user, ress_name, action_name)\r\n except ImportError:\r\n raise Exception(\"ACL UTILS import Error\")\r\n else:\r\n utils = __import__(\"geoprisma.acl.utils\", fromlist=['*'])\r\n return utils.isAuthorized(user, ress_name, action_name)", "def _get_legal_actions(self):\n raise NotImplementedError", "def get_gt_action_based(request):\n\n action = request.GET.get('action',None)\n ns = request.GET.get('annotation_mode',None)\n\n if ns == 'Manual':\n ns = 'Human'\n elif ns == 'Automatic':\n ns = 'Robot'\n gts = GroundTruthLogFile.objects.filter(gt_type=action)\n\n if ns is not None:\n ns_id = NameSpace.objects.get(ns_id = ns)\n gts = GroundTruthLogFile.objects.filter(ns_id = ns_id, gt_type = action)\n\n json_resp = {'count':gts.count()}\n return JsonResponse(json_resp)", "def verifyActionCenterFirewall():\n pass", "def check(self):\n illegalNamespaces = list()\n\n prog = re.compile(\"^[A-Z]{4}[0-9]{2}_[0-9]{3}:$\")\n\n for assetNode in pm.ls(type=\"gAsset\"):\n if assetNode.isReferenced() and not prog.match(assetNode.namespace()):\n illegalNamespaces.append(assetNode)\n\n if not illegalNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = illegalNamespaces\n for illegalNamespace in illegalNamespaces:\n self.addError(\"%s has a illegal namespace\" % illegalNamespace)\n self.errorMessage = \"%s asset(s) have a illegal namespace\" % (\n len(illegalNamespaces))", "def test_unsupported_action(self):\n lang, _ = Language.objects.get_or_create(name=\"Test Language\", code=\"text-x-lang\")\n\n status = notify_external_apps(instance=lang, action=\"TEST\")\n self.assertFalse(status.success)\n self.assertGreater(len(status.message), 0)\n\n status = notify_external_apps(instance=lang, action=\"DELETE\")\n self.assertFalse(status.success)\n self.assertGreater(len(status.message), 0)\n\n status = notify_external_apps(instance=lang, action=\"CREATE\")\n self.assertFalse(status.success)\n self.assertGreater(len(status.message), 0)", "def _check_regr(self, regr, new_reg):\n body = 
getattr(new_reg, 'body', new_reg)\n for k, v in body.items():\n if k == 'resource' or not v:\n continue\n if regr.body[k] != v:\n raise errors.UnexpectedUpdate(regr)\n if regr.body.key != self.key.public_key():\n raise errors.UnexpectedUpdate(regr)\n return regr", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def check(self):\n # get the data from shotgun\n app = self.parent.app\n context = app.context\n\n # get step short name\n filters = [[\"id\", \"is\", context.step[\"id\"]]]\n fields = [\"short_name\"]\n stepShortName = app.shotgun.find_one(\n \"Step\", filters=filters, fields=fields)[\"short_name\"]\n\n try:\n shotNode = gNodes.getTopGNode()\n except:\n shotNode = None\n\n if shotNode:\n metadataCode = shotNode.grid_code.get()\n metadataPipeStep = shotNode.grid_pipeStep.get(asString=True)\n if not (stepShortName == metadataPipeStep and\n context.entity[\"name\"] == metadataCode):\n self.status = self.errorMode\n self.addError(\"Context and shot node metadata don't match\")\n self.errorMessage = \"Context and shot node metadata don't match\"\n else:\n self.status = \"OK\"\n else:\n self.status = \"OK\"", "def test_subscriber_access_if_vsg1_goes_down(self):", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self):\n pass", "def check_peer_actions():\n restart = relation_get(attribute='restart-services-trigger')\n if restart and os.path.isdir(SYNC_FLAGS_DIR):\n for flagfile in glob.glob(os.path.join(SYNC_FLAGS_DIR, '*')):\n flag = os.path.basename(flagfile)\n key = re.compile(\"^(.+)?\\.(.+)?\\.(.+)\")\n res = re.search(key, flag)\n if res:\n source = res.group(1)\n service = res.group(2)\n action = res.group(3)\n else:\n key = re.compile(\"^(.+)?\\.(.+)?\")\n res = re.search(key, flag)\n source = res.group(1)\n action = res.group(2)\n\n # Don't execute actions requested by this unit.\n if local_unit().replace('.', '-') != source:\n if action == 'restart':\n log(\"Running action='%s' on service '%s'\" %\n (action, service), level=DEBUG)\n service_restart(service)\n elif action == 'start':\n log(\"Running action='%s' on service '%s'\" %\n (action, service), level=DEBUG)\n service_start(service)\n elif action == 'stop':\n log(\"Running action='%s' on service '%s'\" %\n (action, service), level=DEBUG)\n service_stop(service)\n elif action == 'update-ca-certificates':\n log(\"Running %s\" % (action), level=DEBUG)\n subprocess.check_call(['update-ca-certificates'])\n elif action == 'ensure-pki-permissions':\n log(\"Running %s\" % (action), level=DEBUG)\n ensure_pki_dir_permissions()\n else:\n log(\"Unknown action flag=%s\" % (flag), level=WARNING)\n\n try:\n os.remove(flagfile)\n except:\n pass", "def test_unsupported_action(self):\r\n self.xmodule.verify_oauth_body_sign = Mock()\r\n request = Request(self.environ)\r\n request.body = self.get_request_body({'action': 'wrongAction'})\r\n response = self.xmodule.grade_handler(request, '')\r\n real_response = self.get_response_values(response)\r\n expected_response = {\r\n 'action': None,\r\n 'code_major': 'unsupported',\r\n 'description': 'Target does not support the requested operation.',\r\n 'messageIdentifier': self.DEFAULTS['messageIdentifier'],\r\n }\r\n self.assertEqual(response.status_code, 200)\r\n self.assertDictEqual(expected_response, real_response)", "def checkRgfAbility(self, pair):\n l1, l2 = pair[0], pair[1]\n\n ## ... 
find out if they are already grafted with one another,\n # if yes jump to next pair\n # condition1 is met (i.e. true) if they are not already grafted\n condition1 = False if l2 in self._partner_indices[l1] else True\n ## ... find out if they are currently within the root graft formation\n # process, if yes jump to next pair\n condition2 = True if (self._rgf_counter[l1] == -1\n and self._rgf_counter[l2] == -1) else False\n # ... check if both plants have a certain size (i.e. DBH > 1.5 cm) in\n # order to avoid that freshly recruited plants start grafting\n condition3 = True if (self._r_stem[l1] > 0.0075\n and self._r_stem[l2] > 0.0075) else False\n\n # ... find out if the grafting conditions are met, if yes set rgf = 1\n start_rgf = True if ((condition1 and condition2 and condition3)\n == True) else False\n return start_rgf", "def _pre_flight_checks(self) -> Squonk2AgentRv:\n\n # If a Squonk2Org record exists its UUID cannot have changed.\n # We cannot change the organisation once deployed. The corresponding Units,\n # Products and Projects are organisation-specific. The Squonk2Org table\n # records the organisation ID and the Account Server URL where the ID\n # is valid. None of these values can change once deployed.\n\n assert self.__configuration_checked\n assert self.__configured\n\n if self.__org_record and self.__org_record.uuid != self.__CFG_SQUONK2_ORG_UUID:\n msg: str = f'Configured Squonk2 Organisation ({self.__CFG_SQUONK2_ORG_UUID})'\\\n f' does not match pre-existing record ({self.__org_record.uuid})'\n _LOGGER.error(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n # OK, so the ORG exists and its UUID has not changed.\n # Is it known to the configured AS?\n if not self._get_squonk2_owner_tokens():\n msg = 'Failed to get AS or DM token for organisation owner'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n _LOGGER.debug('Got Squonk2 API Access Tokens')\n\n # Get the ORG from the AS API.\n # If it knows the org the response will be successful,\n # and we'll also have the Org's name.\n as_o_rv = AsApi.get_organisation(self.__org_owner_as_token,\n org_id=self.__CFG_SQUONK2_ORG_UUID)\n if not as_o_rv.success:\n msg = 'Failed to get AS Organisation'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n # The org is known to the AS.\n # Get the AS API version (for reference)\n as_v_rv: AsApiRv = AsApi.get_version()\n if not as_v_rv.success:\n msg = 'Failed to get version from AS'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n as_version: str = as_v_rv.msg['version']\n _LOGGER.debug('Happy with Squonk2 Account Server (as_version=%s)', as_version)\n\n # Everything seems to be OK but we might not have an organisation in this\n # call (it may be the first call of the instance lifetime).\n # So, if there's no Squonk2Org record, create one,\n # recording the ORG ID and the AS and version we used.\n if not self.__org_record:\n assert self.__CFG_SQUONK2_ASAPI_URL\n _LOGGER.info('Creating NEW Squonk2Org record for %s.'\n ' as-url=%s as-org=\"%s\" as-version=%s',\n self.__CFG_SQUONK2_ORG_UUID,\n self.__CFG_SQUONK2_ASAPI_URL,\n as_o_rv.msg['name'],\n as_version)\n self.__org_record = Squonk2Org(uuid=self.__CFG_SQUONK2_ORG_UUID,\n name=as_o_rv.msg['name'],\n as_url=self.__CFG_SQUONK2_ASAPI_URL,\n as_version=as_version)\n self.__org_record.save()\n _LOGGER.info('Created Squonk2Org record for %s',\n self.__CFG_SQUONK2_ORG_UUID)\n else:\n _LOGGER.debug('Squonk2Org for %s \"%s\" already exists - nothing to do',\n 
self.__org_record.uuid,\n self.__org_record.name)\n\n # Organisation is known to AS, and it hasn't changed.\n _LOGGER.debug('Successful pre-flight checks')\n return SuccessRv", "def check_gs_name(self, name):\n if name in self.currentconfig.list_greyscales():\n QtWidgets.QMessageBox.warning(self, \"Name error\", \"Greyscale name\" + name + \" clashes with existing one\")\n return True\n return False", "def get_available_actions(self, state):\n pass", "def test_convert_to_existing_group2(self, inventoryloader):\n inventoryloader.convert_group('glance_api', 'glance_all')\n inventoryloader.convert_group('glance_registry', 'glance_all')\n assert 'glance_api' not in inventoryloader.groups\n assert 'glance_registry' not in inventoryloader.groups\n assert not inventoryloader.groups['glance_all'].has_group('glance_api')\n assert not inventoryloader.groups['glance_all'].has_group('glance_registry')\n assert inventoryloader.groups['glance_all'].has_host('localhost')\n assert inventoryloader.groups['glance_all'].has_host('localhost2')\n assert \"management_bridge\" in inventoryloader.groups['glance_all'].vars", "def check(self):\n illegalNamespaces = list()\n\n progStandard = re.compile(\"^[A-Z]{4}[0-9]{2}_[0-9]{3}$\")\n progShot = re.compile(\"^SH[0-9]{4}_[0-9]{3}$\")\n\n for namespaces in pm.namespaceInfo(listOnlyNamespaces=True, internal=False, recurse=True):\n for namespace in namespaces.split(\":\"):\n if not progStandard.match(namespace) and not progShot.match(namespace) not in [\"UI\", \"shared\"]:\n illegalNamespaces.append(namespace)\n\n if not illegalNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = illegalNamespaces\n for illegalNamespace in illegalNamespaces:\n self.addError(\"%s is a illegal namespace\" % illegalNamespace)\n self.errorMessage = \"%s illegal namespace\" % (\n len(illegalNamespaces))", "def action(self, gstate, actions=None):\n raise NotImplementedError", "def test_accepted(self):\n actions = signoff_actions(appversions={\"code\": \"fx1.0\"},\n locales={\"code\": \"de\"})\n actions = list(actions)\n eq_(len(actions), 1)\n so = Signoff.objects.get(action=actions[0][0])\n eq_(so.push.tip.shortrev, \"l10n de 0002\")\n eq_(so.locale.code, \"de\")\n eq_(so.action_set.count(), 2)", "def actions() -> None:\n pass", "def test_unsupported_action(self):\n self.xmodule.verify_oauth_body_sign = Mock()\n request = Request(self.environ)\n request.body = self.get_request_body({'action': 'wrongAction'})\n response = self.xmodule.grade_handler(request, '')\n real_response = self.get_response_values(response)\n expected_response = {\n 'action': None,\n 'code_major': 'unsupported',\n 'description': 'Target does not support the requested operation.',\n 'messageIdentifier': self.defaults['messageIdentifier'],\n }\n assert response.status_code == 200\n self.assertDictEqual(expected_response, real_response)", "def get_action2(self, feature):\n assert feature in self._action2\n return self._action2[feature]", "def check_action(self, action_cnfg, data):\n if data.get(\"result\") == \"ok\" and len(data.get(\"objects\",[])) > 0: \n if action_cnfg is None:\n return True\n else:\n tobe_detected = action_cnfg.objects\n tobe_excluded = action_cnfg.objects_exclude\n score_min = action_cnfg.score\n #score_min = 0 if score_min=='' else score_min\n area = action_cnfg.area\n found = False\n i = 0\n detected = data.get('objects')\n for obj in detected: \n if (not action_cnfg.use_memory or not ('memory_obj' in obj and 
obj['memory_obj'].is_action_triggered(action_cnfg.type))):\n if (len(tobe_detected) == 0 or obj['class'] in tobe_detected) and (not obj['class'] in tobe_excluded):\n found = True\n else:\n continue\n found = found and obj['score']*100 >= score_min\n # check if box points are inside detection polygon area\n if len(area) >= 3:\n pts = np.array(area, np.int32)\n bbPath = mplPath.Path(pts)\n results = bbPath.contains_points([\n (detected[i]['box'][1], detected[i]['box'][0]), \n (detected[i]['box'][3], detected[i]['box'][2]),\n (detected[i]['box'][3], detected[i]['box'][0]),\n (detected[i]['box'][1], detected[i]['box'][2]),\n ])\n edge_inside = False\n for inside in results:\n if inside:\n edge_inside = edge_inside or inside\n found = found and edge_inside\n if found:\n break\n i += 1\n return found\n return False", "def set_action2(self, action2, feature):\n assert feature not in self._action2\n self._action2[feature] = action2", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(self):\n pass", "def cluster_actions():\n request_debug(r, logger)\n action = request_get(r, \"action\")\n logger.info(\"cluster_op with action={}\".format(action))\n if action == \"apply\":\n return cluster_apply(r)\n elif action == \"release\":\n return cluster_release(r)\n elif action == \"start\":\n return cluster_start(r)\n elif action == \"stop\":\n return cluster_stop(r)\n elif action == \"restart\":\n return cluster_restart(r)\n else:\n return make_fail_response(\"Unknown action type\")", "def checkEnvironmentAction(ontology_environment_action):\n name = getNameFromIRI(ontology_environment_action.iri)\n environment = ontology_environment_action.has_environment[0]\n xosc_environment = checkEnvironment(environment)\n return xosc.EnvironmentAction(xosc_environment)", "def greengrass(self) -> Optional[pulumi.Input['GatewayGreengrassArgs']]:\n return pulumi.get(self, \"greengrass\")", "def check_args(self):\n spec = _action_args_dict[self.action]\n \n if not spec.has_extended and len(self.args) != spec.n_req_args:\n raise GameActionError(\n 'Number of args for {0} doesn\\'t match (args={1}, n_args must be {2})'\n .format(spec.name, self.args, spec.n_req_args))\n\n elif spec.has_extended and len(self.args) < spec.n_req_args:\n raise GameActionError(\n 'Number of args for {0} doesn\\'t match (args={1}, n_args must be >= {2})'\n .format(spec.name, self.args, n_args))\n\n # Regular args\n for i, arg, (_type,name) in izip(count(), self.args, spec.required_arg_specs):\n card_arg_match = _type is Card and type(arg) is int\n bad_arg_match = type(arg) is not _type\n arg_is_none = arg is None\n arg_invalid_bool = type(arg) is not bool and _type is bool\n str_unicode_error = type(arg) is str and _type is unicode \\\n or type(arg) is unicode and _type is str\n\n if bad_arg_match and not arg_is_none and not\\\n str_unicode_error and not card_arg_match:\n raise GameActionError(\n 'Argument {0} (\"{1}\"), {2} doesn\\'t match type ({3} != {4})'\n .format(i, name, str(arg), str(_type), str(type(arg))))\n\n if arg_invalid_bool:\n raise GameActionError(\n 'Argument {0} (\"{1}\") must be boolean (received {2})'\n .format(i, name, str(arg)))\n\n # Extended args\n for i, arg in izip(count(spec.n_req_args), self.args[spec.n_req_args:]):\n _type, name = spec.extended_arg_spec\n\n card_arg_match = _type is Card and type(arg) is int\n bad_arg_match = type(arg) is not _type\n arg_is_none = arg is None\n arg_invalid_bool = type(arg) is not bool and _type is bool\n str_unicode_error = 
type(arg) is str and _type is unicode \\\n or type(arg) is unicode and _type is str\n\n if bad_arg_match and not arg_is_none and not\\\n str_unicode_error and not card_arg_match\\\n and not arg_invalid_bool:\n raise GameActionError(\n 'Argument {0} (\"{1}\"), {2} doesn\\'t match type ({3} != {4})'\n .format(i, name, str(arg), str(_type), str(type(arg))))\n\n if arg_invalid_bool:\n raise GameActionError(\n 'Argument {0} (\"{1}\") must be boolean (received {2})'\n .format(i, name, str(arg)))", "def test_action_register_methods(self) -> None:\n with self.assertWarns(RemovedInReviewBoard70Warning,\n self.deprecation_message):\n foo_action = FooAction()\n foo_action.register()\n\n self.assertEqual(actions_registry.get('action_id', 'foo-action'),\n foo_action)\n\n foo_action.unregister()\n\n self.assertIsNone(actions_registry.get('action_id', 'foo-action'))", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self):\n pass", "def check_for_synk_url(context):\n json_data = context.response.json()\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for v in vulnerabilities:\n assert \"url\" in v, \"No snyk url found for vulnerablity\"\n assert \"cvss_v3\" in v, \"No CVSS_V3 found for vulnerablity\"\n assert \"title\" in v, \"No title found for vulnerablity\"\n assert \"severity\" in v, \"No severity found for vulnerablity\"", "def action(self, gstate, actions):\n self.log.debug(\"Picking among actions %s\" % actions)\n return actions[0]", "def check_script(vouts):\n for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:\n verb = BlockchainSpider.decode_op_return(vout['hex'])\n action = Spoolverb.from_verb(verb).action\n if action in Spoolverb.supported_actions:\n return verb\n raise Exception(\"Invalid ascribe transaction\")", "def on_status_change(self, aws_greengrass, new_status, old_status):\r\n raise NotImplementedError('You must implement \"on_status_change()\" to '\r\n 'use the \"AWSGreengrassListener\" class.')", "def test_create_resource_access_review_for_all_namespaces(self):\n pass", "def check(self):\n gAsset = cmds.ls(type='gAsset')\n\n render_geo = []\n if gAsset:\n trans = cmds.listRelatives(gAsset[0], p=True, f=True)\n meshes = cmds.listRelatives(trans, ad=True, type='mesh', f=True)\n if meshes:\n render_geo.extend(meshes)\n # for item in meshes:\n # trans = cmds.listRelatives(item, p=True, f=True)\n # render_geo.extend(trans)\n\n if not pm.ls(\"*.grid_renderGeo\"):\n self.status = self.errorMode\n self.addError(\"No geometry's are tagged as render geo\")\n self.errorMessage = \"No geometry is tagged as render geo\"\n elif not len(set(cmds.ls(\"*.grid_renderGeo\"))) == len(render_geo):\n self.status = self.errorMode\n self.addError(\"Not all Geo tags under gasset\")\n self.errorMessage = \"Not all Geo tags under gasset\"\n else:\n self.status = \"OK\"\n else:\n self.addError(\"No Gasset found\")\n self.errorMessage = \"No gasset found\"", "def test_user_grammar_actions():\n grammar = \"\"\"\n S: A B C;\n @nonterm_action\n C: A B;\n A: \"a\";\n @term_action\n B: \"b\";\n \"\"\"\n\n called = [False, False]\n\n def nonterm_action(_, __):\n called[0] = True\n\n def term_action(_, __):\n called[1] = True\n\n my_actions = {\n \"nonterm_action\": nonterm_action,\n \"term_action\": term_action,\n }\n\n g = Grammar.from_string(grammar)\n p = Parser(g, actions=my_actions)\n assert p.parse(\"a b a b\")\n assert all(called)", "def _check_BGS_targtype(targtype):\n 
targposs = ['faint', 'bright', 'wise']\n\n if targtype not in targposs:\n msg = 'targtype must be one of {} not {}'.format(targposs, targtype)\n log.critical(msg)\n raise ValueError(msg)", "def testGame2(hitServer):\n\n ###### LOCAL HELPERS ######\n def statusInfoCheck5player1(currentID, currentMessage, libCount=None, fasCount=None):\n check = a.client_call_for_json(f'client/client_status/{a.ids}')\n if check['name'] != 'a' or check['party'] != 'Liberal' or check['role'] != 'Liberal' or check['statusID'] != currentID or check['statusUpdate'] != currentMessage or \"Other Fascists\" in check:\n raise AssertionError(\"something wrong with get_status()\" + str(check) + \"\\n\" + currentMessage)\n check = b.client_call_for_json(f'client/client_status/{b.ids}')\n if check['name'] != 'b' or check['party'] != 'Liberal' or check['role'] != 'Liberal' or check['statusID'] != currentID or check['statusUpdate'] != currentMessage or \"Other Fascists\" in check:\n raise AssertionError(\"something wrong with get_status()\" + str(check))\n check = c.client_call_for_json(f'client/client_status/{c.ids}')\n if check['name'] != 'c' or check['party'] != 'Liberal' or check['role'] != 'Liberal' or check['statusID'] != currentID or check['statusUpdate'] != currentMessage or \"Other Fascists\" in check:\n raise AssertionError(\"something wrong with get_status()\" + str(check))\n check = d.client_call_for_json(f'client/client_status/{d.ids}')\n if check['name'] != 'd' or check['party'] != 'Fascist' or check['role'] != 'Fascist' or check['statusID'] != currentID or check['statusUpdate'] != currentMessage or check['Hitler'] != 'e' or check['Other Fascists'] != \"\":\n raise AssertionError(\"something wrong with get_status()\" + str(check))\n check = e.client_call_for_json(f'client/client_status/{e.ids}')\n if check['name'] != 'e' or check['party'] != 'Fascist' or check['role'] != 'Hitler' or check['statusID'] != currentID or check['statusUpdate'] != currentMessage or check['Other Fascists'] != \"d\":\n raise AssertionError(\"something wrong with get_status()\" + str(check))\n if libCount != None:\n if check[\"libPolicies\"] != libCount:\n raise AssertionError(\"something wrong in status, too few liberal policies on count\")\n if fasCount != None:\n if check[\"fasPolicies\"] != fasCount:\n raise AssertionError(\"something wrong in status, too few fascist policies on count\") \n return 'done'\n\n ####### THE MEAT & POTATOES ##########\n a = TestPlayer('a', hitServer)\n b = TestPlayer('b', hitServer)\n c = TestPlayer('c', hitServer)\n d = TestPlayer('d', hitServer)\n e = TestPlayer('e', hitServer)\n\n check = a.client_call_for_http('setup/1/5')\n if check != 'confirm':\n raise AssertionError('setup Error')\n\n waitForIt()\n a.ids = a.client_call_for_http(f'client/join_game/{a.name}')\n a.id = int(a.ids)\n waitForIt()\n b.ids = b.client_call_for_http(f'client/join_game/{b.name}')\n b.id = int(b.ids)\n waitForIt()\n c.ids = c.client_call_for_http(f'client/join_game/{c.name}')\n c.id = int(c.ids)\n waitForIt()\n d.ids = d.client_call_for_http(f'client/join_game/{d.name}')\n d.id = int(d.ids)\n waitForIt()\n e.ids = e.client_call_for_http(f'client/join_game/{e.name}')\n e.id = int(e.ids)\n waitForIt()\n\n x = TestPlayer('frank', hitServer)\n check = x.client_call_for_http(f'client/join_game/{x.name}')\n if \"Unable to join, game is full.\" != check:\n raise AssertionError(\"frank wasn't supposed to be able to join.\" + x.text)\n\n waitForIt()\n\n a.dryCall(\"debug/mockData3\")\n waitForIt()\n try:\n 
statusInfoCheck5player1(3, \"The current President is a. Waiting for the President to nominate a Chancellor.\")\n except AssertionError as theProblem:\n raise theProblem\n waitForIt()\n\n ###########START OF GAME LOOP##################\n # turn 1: a is president, a elections b, all vote ya, a gives b 1 lib/1 fas, b choses the fascist. 1 bad policy.\n check = a.client_call_for_json(f'client/nominate_chancellor/{a.ids}')\n others = [b, c, d, e]\n for i in others:\n if check[i.ids] != i.name:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem\")\n # TODO - Additional checks on received JSON should probably be made...\n check = b.client_call_for_json(f'client/nominate_chancellor/{b.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = c.client_call_for_json(f'client/nominate_chancellor/{c.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = d.client_call_for_json(f'client/nominate_chancellor/{d.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = e.client_call_for_json(f'client/nominate_chancellor/{e.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n waitForIt()\n check = a.client_call_for_http(f'client/nominate_chancellor_order/{a.ids}/{b.ids}')\n if check != \"confirm\":\n raise AssertionError('nomination order not confirmed')\n waitForIt()\n\n\n statusInfoCheck5player1(6, f\"{a.name} nominated {b.name} for Chancellor.\")\n # updateGStatusUpdate(f\"{findCurrentPresident().name} nominated {Player.objects.get(pk=selected).name} for Chancellor.\")\n\n waitForIt()\n a.client_call_for_http(f'client/submit_vote/{a.ids}/1')\n waitForIt()\n b.client_call_for_http(f'client/submit_vote/{b.ids}/1')\n waitForIt()\n c.client_call_for_http(f'client/submit_vote/{c.ids}/1')\n waitForIt()\n d.client_call_for_http(f'client/submit_vote/{d.ids}/1')\n waitForIt()\n e.client_call_for_http(f'client/submit_vote/{e.ids}/1')\n waitForIt()\n\n statusInfoCheck5player1(99, f\"Vote passed. President {a.name} and Chancellor {b.name} will now enact a policy decision.\")\n\n waitForIt()\n checkAgainst = {'a': True, 'b': True, \"c\": True, 'd': True, 'e': True, 'result': 'The vote passed.', \"number of votes\": 5}\n check = a.client_call_for_json(f'client/show_all_votes')\n if check != checkAgainst:\n raise AssertionError('vote check problem. ' + str(check))\n waitForIt()\n check = a.client_call_for_http(f'client/vote_show_confirmation/{a.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = b.client_call_for_http(f'client/vote_show_confirmation/{b.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = c.client_call_for_http(f'client/vote_show_confirmation/{c.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = d.client_call_for_http(f'client/vote_show_confirmation/{d.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = e.client_call_for_http(f'client/vote_show_confirmation/{e.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n\n statusInfoCheck5player1(9, f\"Vote passed. 
President {a.name} and Chancellor {b.name} will now enact a policy decision.\")\n\n waitForIt()\n check = a.client_call_for_json(f'client/president_draw/{a.ids}')\n if check['wait'] != \"\" or check['information'] != 'Select a policy card to discard. The remaining cards will be passed to the Chancellor.' or \\\n check[\"statusID\"] != 9 or not (check[\"1\"] == \"Liberal\" or check[\"1\"] == \"Fascist\") or \\\n not (check[\"2\"] == \"Liberal\" or check[\"2\"] == \"Fascist\") or \\\n not (check[\"3\"] == \"Liberal\" or check[\"3\"] == \"Fascist\"):\n print(check)\n raise AssertionError(\"check the president draw cards, something is probably wrong.\")\n waitForIt()\n check = b.client_call_for_json(f'client/president_draw/{b.ids}')\n if check != {'wait': f\"Waiting for President {a.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n check = c.client_call_for_json(f'client/president_draw/{c.ids}')\n if check != {'wait': f\"Waiting for President {a.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n check = d.client_call_for_json(f'client/president_draw/{d.ids}')\n if check != {'wait': f\"Waiting for President {a.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError() \n waitForIt()\n check = e.client_call_for_json(f'client/president_draw/{e.ids}')\n if check != {'wait': f\"Waiting for President {a.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n\n check = a.client_call_for_http(f'client/president_play/{a.ids}/Liberal/Fascist/Liberal')\n if check != \"confirm\":\n print(check)\n raise AssertionError('president card selection problem')\n waitForIt()\n\n statusInfoCheck5player1(10, 'Waiting for the Chancellor to select a policy.')\n\n check = b.client_call_for_json(f'client/chancellor_draw/{b.ids}')\n if check != {'1': \"Liberal\", \"2\": \"Fascist\", 'information': \"Select a policy card to enact.\", 'veto': False, 'vetoText': '', \"statusID\": 10, \"wait\": \"\"}:\n print(check)\n raise AssertionError('chancellor draw problem')\n check = a.client_call_for_json(f'client/chancellor_draw/{a.ids}')\n if check != {'wait': f\"Waiting for Chancellor {b.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError(str(check))\n waitForIt()\n check = c.client_call_for_json(f'client/chancellor_draw/{c.ids}')\n if check != {'wait': f\"Waiting for Chancellor {b.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n check = d.client_call_for_json(f'client/chancellor_draw/{d.ids}')\n if check != {'wait': f\"Waiting for Chancellor {b.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n check = e.client_call_for_json(f'client/chancellor_draw/{e.ids}')\n if check != {'wait': f\"Waiting for Chancellor {b.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n\n check = b.client_call_for_http(f'client/chancellor_play/{b.ids}/Fascist/Liberal')\n if check != 'confirm':\n print(\"error headsup\", check)\n raise AssertionError(\"something wrong with chancellor play\")\n\n statusInfoCheck5player1(97, \"A Fascist policy was enacted!\")\n\n waitForIt()\n if a.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if b.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if c.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise 
AssertionError('revolt')\n waitForIt()\n if d.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if e.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n\n a.client_call_for_http(f'client/policy_result_confirm/{a.ids}')\n b.client_call_for_http(f'client/policy_result_confirm/{b.ids}')\n c.client_call_for_http(f'client/policy_result_confirm/{c.ids}')\n d.client_call_for_http(f'client/policy_result_confirm/{d.ids}')\n e.client_call_for_http(f'client/policy_result_confirm/{e.ids}')\n waitForIt()\n\n waitForIt()\n statusInfoCheck5player1(3, f'The current President is {b.name}. Waiting for the President to nominate a Chancellor.')\n \n ###########START OF GAME LOOP##################\n # turn 2: b is president, b elections c, all vote ya, b gives c 1 lib/1 fas, c choses the fas. 2 bad policies.\n check = b.client_call_for_json(f'client/nominate_chancellor/{b.ids}')\n others = [a, c, d, e]\n for i in others:\n print(i.ids, i.id)\n if i.ids not in check:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem1\")\n if check[i.ids] != i.name:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem2\")\n # if a.ids in check:\n # raise AssertionError(\"former president should not be available for nomination.\")\n # TODO - Additional checks on received JSON should probably be made...\n check = a.client_call_for_json(f'client/nominate_chancellor/{a.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = c.client_call_for_json(f'client/nominate_chancellor/{c.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = d.client_call_for_json(f'client/nominate_chancellor/{d.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = e.client_call_for_json(f'client/nominate_chancellor/{e.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n waitForIt()\n check = b.client_call_for_http(f'client/nominate_chancellor_order/{b.ids}/{c.ids}')\n if check != \"confirm\":\n raise AssertionError('nomination order not confirmed')\n waitForIt()\n\n\n statusInfoCheck5player1(6, f\"{b.name} nominated {c.name} for Chancellor.\")\n # updateGStatusUpdate(f\"{findCurrentPresident().name} nominated {Player.objects.get(pk=selected).name} for Chancellor.\")\n\n waitForIt()\n a.client_call_for_http(f'client/submit_vote/{a.ids}/1')\n waitForIt()\n b.client_call_for_http(f'client/submit_vote/{b.ids}/1')\n waitForIt()\n c.client_call_for_http(f'client/submit_vote/{c.ids}/1')\n waitForIt()\n d.client_call_for_http(f'client/submit_vote/{d.ids}/1')\n waitForIt()\n e.client_call_for_http(f'client/submit_vote/{e.ids}/1')\n waitForIt()\n\n waitForIt()\n waitForIt()\n statusInfoCheck5player1(99, f\"Vote passed. President {b.name} and Chancellor {c.name} will now enact a policy decision.\", fasCount=1, libCount=0)\n\n waitForIt()\n checkAgainst = {'a': True, 'b': True, \"c\": True, 'd': True, 'e': True, 'result': 'The vote passed.', \"number of votes\": 5}\n check = a.client_call_for_json(f'client/show_all_votes')\n if check != checkAgainst:\n raise AssertionError('vote check problem. 
' + str(check))\n waitForIt()\n check = a.client_call_for_http(f'client/vote_show_confirmation/{a.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = b.client_call_for_http(f'client/vote_show_confirmation/{b.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = c.client_call_for_http(f'client/vote_show_confirmation/{c.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = d.client_call_for_http(f'client/vote_show_confirmation/{d.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = e.client_call_for_http(f'client/vote_show_confirmation/{e.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n\n statusInfoCheck5player1(9, f\"Vote passed. President {b.name} and Chancellor {c.name} will now enact a policy decision.\")\n\n waitForIt()\n check = b.client_call_for_json(f'client/president_draw/{b.ids}')\n if check['wait'] != \"\" or check['information'] != 'Select a policy card to discard. The remaining cards will be passed to the Chancellor.' or \\\n check[\"statusID\"] != 9 or not (check[\"1\"] == \"Liberal\" or check[\"1\"] == \"Fascist\") or \\\n not (check[\"2\"] == \"Liberal\" or check[\"2\"] == \"Fascist\") or \\\n not (check[\"3\"] == \"Liberal\" or check[\"3\"] == \"Fascist\"): \n print(check)\n raise AssertionError(\"check the president draw cards, something is probably wrong.\")\n waitForIt()\n check = a.client_call_for_json(f'client/president_draw/{a.ids}')\n if check != {'wait': f\"Waiting for President {b.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n check = c.client_call_for_json(f'client/president_draw/{c.ids}')\n if check != {'wait': f\"Waiting for President {b.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n check = d.client_call_for_json(f'client/president_draw/{d.ids}')\n if check != {'wait': f\"Waiting for President {b.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError() \n waitForIt()\n check = e.client_call_for_json(f'client/president_draw/{e.ids}')\n if check != {'wait': f\"Waiting for President {b.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n\n check = b.client_call_for_http(f'client/president_play/{b.ids}/Liberal/Fascist/Liberal')\n if check != \"confirm\":\n print(check)\n raise AssertionError('president card selection problem')\n waitForIt()\n\n statusInfoCheck5player1(10, 'Waiting for the Chancellor to select a policy.')\n\n check = c.client_call_for_json(f'client/chancellor_draw/{c.ids}')\n if check != {'1': \"Liberal\", \"2\": \"Fascist\", 'information': \"Select a policy card to enact.\", 'veto': False, 'vetoText': '', \"statusID\": 10, 'wait': \"\"}:\n print(check)\n raise AssertionError('chancellor draw problem')\n check = a.client_call_for_json(f'client/chancellor_draw/{a.ids}')\n if check != {'wait': f\"Waiting for Chancellor {c.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError(str(check))\n waitForIt()\n check = b.client_call_for_json(f'client/chancellor_draw/{b.ids}')\n if check != {'wait': f\"Waiting for Chancellor {c.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError(check)\n waitForIt()\n check = 
d.client_call_for_json(f'client/chancellor_draw/{d.ids}')\n if check != {'wait': f\"Waiting for Chancellor {c.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n check = e.client_call_for_json(f'client/chancellor_draw/{e.ids}')\n if check != {'wait': f\"Waiting for Chancellor {c.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n\n check = c.client_call_for_http(f'client/chancellor_play/{c.ids}/Fascist/Liberal')\n if check != 'confirm':\n print(\"error headsup\", check)\n raise AssertionError(\"something wrong with chancellor play\")\n\n statusInfoCheck5player1(97, \"A Fascist policy was enacted!\")\n\n waitForIt()\n if a.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if b.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if c.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if d.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if e.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n\n a.client_call_for_http(f'client/policy_result_confirm/{a.ids}')\n b.client_call_for_http(f'client/policy_result_confirm/{b.ids}')\n c.client_call_for_http(f'client/policy_result_confirm/{c.ids}')\n d.client_call_for_http(f'client/policy_result_confirm/{d.ids}')\n e.client_call_for_http(f'client/policy_result_confirm/{e.ids}')\n waitForIt()\n\n waitForIt()\n statusInfoCheck5player1(3, f'The current President is {c.name}. Waiting for the President to nominate a Chancellor.')\n \n ###########START OF GAME LOOP##################\n # turn 3: c is president, c elections d, all vote ya, c gives d 1 lib/1 fas, d choses the fas. 
c gets to policy peek.\n check = c.client_call_for_json(f'client/nominate_chancellor/{c.ids}')\n others = [a, b, d, e]\n for i in others:\n print(i.ids, i.id)\n if i.ids not in check:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem1\")\n if check[i.ids] != i.name:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem2\")\n # if b.ids in check: # b is term limited, previous president\n # raise AssertionError(\"former president should not be available for nomination.\")\n # TODO - Additional checks on received JSON should probably be made...\n check = a.client_call_for_json(f'client/nominate_chancellor/{a.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = b.client_call_for_json(f'client/nominate_chancellor/{b.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = d.client_call_for_json(f'client/nominate_chancellor/{d.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = e.client_call_for_json(f'client/nominate_chancellor/{e.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n waitForIt()\n check = c.client_call_for_http(f'client/nominate_chancellor_order/{c.ids}/{d.ids}')\n if check != \"confirm\":\n raise AssertionError('nomination order not confirmed')\n waitForIt()\n\n\n statusInfoCheck5player1(6, f\"{c.name} nominated {d.name} for Chancellor.\", libCount=0, fasCount=2)\n # updateGStatusUpdate(f\"{findCurrentPresident().name} nominated {Player.objects.get(pk=selected).name} for Chancellor.\")\n\n waitForIt()\n a.client_call_for_http(f'client/submit_vote/{a.ids}/1')\n waitForIt()\n b.client_call_for_http(f'client/submit_vote/{b.ids}/1')\n waitForIt()\n c.client_call_for_http(f'client/submit_vote/{c.ids}/1')\n waitForIt()\n d.client_call_for_http(f'client/submit_vote/{d.ids}/1')\n waitForIt()\n e.client_call_for_http(f'client/submit_vote/{e.ids}/1')\n waitForIt()\n\n waitForIt()\n waitForIt()\n statusInfoCheck5player1(99, f\"Vote passed. President {c.name} and Chancellor {d.name} will now enact a policy decision.\")\n\n waitForIt()\n checkAgainst = {'a': True, 'b': True, \"c\": True, 'd': True, 'e': True, 'result': 'The vote passed.', \"number of votes\": 5}\n check = a.client_call_for_json(f'client/show_all_votes')\n if check != checkAgainst:\n raise AssertionError('vote check problem. ' + str(check))\n waitForIt()\n check = a.client_call_for_http(f'client/vote_show_confirmation/{a.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = b.client_call_for_http(f'client/vote_show_confirmation/{b.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = c.client_call_for_http(f'client/vote_show_confirmation/{c.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = d.client_call_for_http(f'client/vote_show_confirmation/{d.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = e.client_call_for_http(f'client/vote_show_confirmation/{e.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. 
' + str(check))\n waitForIt()\n\n statusInfoCheck5player1(9, f\"Vote passed. President {c.name} and Chancellor {d.name} will now enact a policy decision.\")\n\n waitForIt()\n check = c.client_call_for_json(f'client/president_draw/{c.ids}')\n if check['wait'] != \"\" or check['information'] != 'Select a policy card to discard. The remaining cards will be passed to the Chancellor.' or \\\n check[\"statusID\"] != 9 or not (check[\"1\"] == \"Liberal\" or check[\"1\"] == \"Fascist\") or \\\n not (check[\"2\"] == \"Liberal\" or check[\"2\"] == \"Fascist\") or \\\n not (check[\"3\"] == \"Liberal\" or check[\"3\"] == \"Fascist\"): \n print(check)\n raise AssertionError(\"check the president draw cards, something is probably wrong.\")\n waitForIt()\n check = a.client_call_for_json(f'client/president_draw/{a.ids}')\n if check != {'wait': f\"Waiting for President {c.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n check = b.client_call_for_json(f'client/president_draw/{b.ids}')\n if check != {'wait': f\"Waiting for President {c.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n check = d.client_call_for_json(f'client/president_draw/{d.ids}')\n if check != {'wait': f\"Waiting for President {c.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError() \n waitForIt()\n check = e.client_call_for_json(f'client/president_draw/{e.ids}')\n if check != {'wait': f\"Waiting for President {c.name} to select policies.\", \"statusID\": 9}:\n raise AssertionError()\n waitForIt()\n\n check = c.client_call_for_http(f'client/president_play/{c.ids}/Liberal/Fascist/Liberal')\n if check != \"confirm\":\n print(check)\n raise AssertionError('president card selection problem')\n waitForIt()\n\n statusInfoCheck5player1(10, 'Waiting for the Chancellor to select a policy.')\n\n check = d.client_call_for_json(f'client/chancellor_draw/{d.ids}')\n if check != {'1': \"Liberal\", \"2\": \"Fascist\", 'information': \"Select a policy card to enact.\", 'veto': False, 'vetoText': '', \"statusID\": 10, 'wait': ''}:\n print(check)\n raise AssertionError('chancellor draw problem')\n check = a.client_call_for_json(f'client/chancellor_draw/{a.ids}')\n if check != {'wait': f\"Waiting for Chancellor {d.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError(str(check))\n waitForIt()\n check = b.client_call_for_json(f'client/chancellor_draw/{b.ids}')\n if check != {'wait': f\"Waiting for Chancellor {d.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError(check)\n waitForIt()\n check = c.client_call_for_json(f'client/chancellor_draw/{c.ids}')\n if check != {'wait': f\"Waiting for Chancellor {d.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n check = e.client_call_for_json(f'client/chancellor_draw/{e.ids}')\n if check != {'wait': f\"Waiting for Chancellor {d.name} to select a policy.\", \"statusID\": 10}:\n raise AssertionError()\n waitForIt()\n\n check = d.client_call_for_http(f'client/chancellor_play/{d.ids}/Fascist/Liberal')\n if check != 'confirm':\n print(\"error headsup\", check)\n raise AssertionError(\"something wrong with chancellor play\")\n\n statusInfoCheck5player1(97, \"A Fascist policy was enacted!\")\n\n waitForIt()\n if a.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if b.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if 
c.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if d.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n if e.client_call_for_http(f'client/policy_result_review') != 'confirm':\n raise AssertionError('revolt')\n waitForIt()\n\n check = b.client_call_for_http(f'client/policy_result_confirm/{b.ids}')\n if check != 'confirm':\n raise AssertionError('revolt: ' + check)\n if a.client_call_for_http(f'client/policy_result_confirm/{a.ids}') != 'confirm':\n raise AssertionError('revolt')\n if c.client_call_for_http(f'client/policy_result_confirm/{c.ids}') != 'confirm':\n raise AssertionError('revolt')\n if d.client_call_for_http(f'client/policy_result_confirm/{d.ids}') != 'confirm':\n raise AssertionError('revolt')\n if e.client_call_for_http(f'client/policy_result_confirm/{e.ids}') != 'moving to Executive Powers':\n raise AssertionError('revolt')\n waitForIt()\n\n waitForIt()\n statusInfoCheck5player1(16, \"The President will now look at the top 3 policy cards in the Policy Deck.\", fasCount=3, libCount=0)\n\n checkAgainst = {'information': \"The President will now look at the top 3 policy cards in the Policy Deck.\", \"wait\": \"1\"}\n waitForIt()\n if a.client_call_for_json(f'client/president_policy_peek/{a.ids}') != checkAgainst:\n raise AssertionError(checkAgainst)\n waitForIt()\n if b.client_call_for_json(f'client/president_policy_peek/{b.ids}') != checkAgainst:\n raise AssertionError(checkAgainst)\n waitForIt()\n try:\n check = c.client_call_for_json(f'client/president_policy_peek/{c.ids}')\n if not (check['1'] != \"Liberal\" or check['1'] != \"Fascist\") or not (check['2'] != \"Liberal\" or check['2'] != \"Fascist\") or not (check['3'] != \"Liberal\" or check['3'] != \"Fascist\") or check['information'] != \"You may now peek at the next three policy cards in the policy deck.\":\n raise AssertionError('policy peek not working, ' + str(check))\n except KeyError:\n raise AssertionError('not all cards present in president policy peek' + str(check))\n waitForIt()\n if d.client_call_for_json(f'client/president_policy_peek/{d.ids}') != checkAgainst:\n raise AssertionError(checkAgainst)\n waitForIt()\n if e.client_call_for_json(f'client/president_policy_peek/{e.ids}') != checkAgainst:\n raise AssertionError('revolt')\n waitForIt()\n\n waitForIt()\n check = c.client_call_for_http(f'client/president_policy_peek_confirm/{c.ids}')\n if check != 'confirm':\n raise AssertionError('something wrong with policy peek confirmation, ' + check)\n\n\n waitForIt()\n statusInfoCheck5player1(3, f'The current President is {d.name}. Waiting for the President to nominate a Chancellor.')\n\n ###########START OF GAME LOOP##################\n # turn 4: d is president, d elections e, all vote ya, e is Hilter and 3 fascists policies have been enacted. 
Game over.\n check = d.client_call_for_json(f'client/nominate_chancellor/{d.ids}')\n others = [a, b, c, e]\n for i in others:\n print(i.ids, i.id)\n if i.ids not in check:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem1\")\n if check[i.ids] != i.name:\n print('What was got', check)\n raise AssertionError(\"president's json nomination has problem2\")\n # if c.ids in check: # b is term limited, previous president\n # raise AssertionError(\"former president should not be available for nomination.\")\n # TODO - Additional checks on received JSON should probably be made...\n check = a.client_call_for_json(f'client/nominate_chancellor/{a.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = b.client_call_for_json(f'client/nominate_chancellor/{b.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = c.client_call_for_json(f'client/nominate_chancellor/{c.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n check = e.client_call_for_json(f'client/nominate_chancellor/{e.ids}')\n if check != {'wait': '1'}:\n raise AssertionError(\"problem with non-president json response.\")\n waitForIt()\n check = d.client_call_for_http(f'client/nominate_chancellor_order/{d.ids}/{e.ids}')\n if check != \"confirm\":\n raise AssertionError('nomination order not confirmed')\n waitForIt()\n\n\n statusInfoCheck5player1(6, f\"{d.name} nominated {e.name} for Chancellor.\", fasCount=3)\n # updateGStatusUpdate(f\"{findCurrentPresident().name} nominated {Player.objects.get(pk=selected).name} for Chancellor.\")\n\n waitForIt()\n a.client_call_for_http(f'client/submit_vote/{a.ids}/1')\n waitForIt()\n b.client_call_for_http(f'client/submit_vote/{b.ids}/1')\n waitForIt()\n c.client_call_for_http(f'client/submit_vote/{c.ids}/1')\n waitForIt()\n d.client_call_for_http(f'client/submit_vote/{d.ids}/1')\n waitForIt()\n e.client_call_for_http(f'client/submit_vote/{e.ids}/1')\n waitForIt()\n\n waitForIt()\n waitForIt()\n statusInfoCheck5player1(99, f\"Vote passed. President {d.name} and Chancellor {e.name} will now enact a policy decision.\")\n\n waitForIt()\n checkAgainst = {'a': True, 'b': True, \"c\": True, 'd': True, 'e': True, 'result': 'The vote passed.', \"number of votes\": 5}\n check = a.client_call_for_json(f'client/show_all_votes')\n if check != checkAgainst:\n raise AssertionError('vote check problem. ' + str(check))\n waitForIt()\n check = a.client_call_for_http(f'client/vote_show_confirmation/{a.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = b.client_call_for_http(f'client/vote_show_confirmation/{b.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = c.client_call_for_http(f'client/vote_show_confirmation/{c.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = d.client_call_for_http(f'client/vote_show_confirmation/{d.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. ' + str(check))\n waitForIt()\n check = e.client_call_for_http(f'client/vote_show_confirmation/{e.ids}')\n if check != 'confirm':\n raise AssertionError('vote check confirmation problem. 
' + str(check))\n waitForIt()\n\n statusInfoCheck5player1(100, f\"The Fascists have achieved victory by getting Hitler elected Chancellor.\", fasCount=3)\n\n check = a.client_call_for_json('client/game_over')\n checkAgainst = {'a': 'a : Liberal : Alive',\n 'b': 'b : Liberal : Alive',\n 'c': 'c : Liberal : Alive',\n 'd': 'd : Fascist : Alive',\n 'e': 'e : Hitler : Alive',\n 'fasPolicyCount': 3,\n 'libPolicyCount': 0,\n 'whatHappened': \"The Fascists have achieved victory by getting Hitler elected Chancellor.\"}\n if checkAgainst != check:\n raise AssertionError('end game text wrong: ' + str(check))\n print('done')", "def getLegalActions(self):\n return ['BOT', 'SLD']", "def _get_update_router_gw_actions(\n self,\n org_tier0_uuid, orgaddr, org_enable_snat,\n new_tier0_uuid, newaddr, new_enable_snat,\n lb_exist, fw_exist, sr_currently_exists):\n actions = {}\n # Remove router link port between tier1 and tier0 if tier0 router link\n # is removed or changed\n actions['remove_router_link_port'] = (\n org_tier0_uuid and\n (not new_tier0_uuid or org_tier0_uuid != new_tier0_uuid))\n\n # Remove SNAT rules for gw ip if gw ip is deleted/changed or\n # enable_snat is updated from True to False\n actions['remove_snat_rules'] = (\n org_enable_snat and orgaddr and\n (newaddr != orgaddr or not new_enable_snat))\n\n # Remove No-DNAT rules if GW was removed or snat was disabled\n actions['remove_no_dnat_rules'] = (\n orgaddr and org_enable_snat and\n (not newaddr or not new_enable_snat))\n\n # Revocate bgp announce for nonat subnets if tier0 router link is\n # changed or enable_snat is updated from False to True\n actions['revocate_bgp_announce'] = (\n not org_enable_snat and org_tier0_uuid and\n (new_tier0_uuid != org_tier0_uuid or new_enable_snat))\n\n # Add router link port between tier1 and tier0 if tier0 router link is\n # added or changed to a new one\n actions['add_router_link_port'] = (\n new_tier0_uuid and\n (not org_tier0_uuid or org_tier0_uuid != new_tier0_uuid))\n\n # Add SNAT rules for gw ip if gw ip is add/changed or\n # enable_snat is updated from False to True\n actions['add_snat_rules'] = (\n new_enable_snat and newaddr and\n (newaddr != orgaddr or not org_enable_snat))\n\n # Add No-DNAT rules if GW was added, and the router has SNAT enabled,\n # or if SNAT was enabled\n actions['add_no_dnat_rules'] = (\n new_enable_snat and newaddr and\n (not orgaddr or not org_enable_snat))\n\n # Bgp announce for nonat subnets if tier0 router link is changed or\n # enable_snat is updated from True to False\n actions['bgp_announce'] = (\n not new_enable_snat and new_tier0_uuid and\n (new_tier0_uuid != org_tier0_uuid or not org_enable_snat))\n\n # Advertise NAT routes if enable SNAT to support FIP. 
In the NoNAT\n # use case, only NSX connected routes need to be advertised.\n actions['advertise_route_nat_flag'] = (\n True if new_enable_snat else False)\n actions['advertise_route_connected_flag'] = (\n True if not new_enable_snat else False)\n\n # the purpose of this var is to be able to differ between\n # adding a gateway w/o snat and adding snat (when adding/removing gw\n # the snat option is on by default).\n new_with_snat = True if (new_enable_snat and newaddr) else False\n has_gw = True if newaddr else False\n\n if sr_currently_exists:\n # currently there is a service router on the backend\n actions['add_service_router'] = False\n # Should remove the service router if the GW was removed,\n # or no service needs it: SNAT, LBaaS or FWaaS\n actions['remove_service_router'] = (\n not has_gw or not (fw_exist or lb_exist or new_with_snat))\n if actions['remove_service_router']:\n LOG.info(\"Removing service router [has GW: %s, FW %s, LB %s, \"\n \"SNAT %s]\",\n has_gw, fw_exist, lb_exist, new_with_snat)\n else:\n # currently there is no service router on the backend\n actions['remove_service_router'] = False\n # Should add service router if there is a GW\n # and there is a service that needs it: SNAT, LB or FWaaS\n actions['add_service_router'] = (\n has_gw is not None and (new_with_snat or fw_exist or lb_exist))\n if actions['add_service_router']:\n LOG.info(\"Adding service router [has GW: %s, FW %s, LB %s, \"\n \"SNAT %s]\",\n has_gw, fw_exist, lb_exist, new_with_snat)\n\n return actions", "def check_invalid_actions(y):\n inval_actions = [\n [ 0.0, 1.0, 0.4], # ACCEL_BRAKE\n [ 1.0, 1.0, 0.4], # RIGHT_ACCEL_BRAKE\n [-1.0, 1.0, 0.4], # LEFT_ACCEL_BRAKE\n [ 1.0, 1.0, 0.0], # RIGHT_ACCEL\n [-1.0, 1.0, 0.0], # LEFT_ACCEL\n ]\n ia_count = 0\n for ia in inval_actions:\n ia_count += np.sum(np.all(y == ia, axis=1))\n if ia_count > 0:\n raise Exception('Invalid actions. 
Do something developer!')", "def test_cmd_cs_subscription_bad_action(self):\n bad_action = 'blahblah'\n\n result = self.runner.invoke(cli, ['subscription', bad_action])\n assert f\"invalid choice: {bad_action}\" in result.output\n assert result.exception", "def get_action_meaning(s1: int, s2: int) -> str:\r\n r1, g1, b1 = state_ball_mapper.get(s1)\r\n r2, g2, b2 = state_ball_mapper.get(s2)\r\n red_moved = r2 - r1 != 0\r\n green_moved = g2 - g1 != 0\r\n blue_moved = b2 - b1 != 0\r\n colour = ''\r\n rod_from = ''\r\n rod_to = ''\r\n \r\n if red_moved:\r\n colour = 'R'\r\n arr1, arr2 = r1 % 10, r2 % 10\r\n rod_from = get_rod_letter(r1)\r\n rod_to = get_rod_letter(r2)\r\n \r\n if green_moved:\r\n colour = 'G'\r\n arr1, arr2 = g1 % 10, g2 % 10\r\n rod_from = get_rod_letter(g1)\r\n rod_to = get_rod_letter(g2)\r\n \r\n if blue_moved:\r\n colour = 'B'\r\n arr1, arr2 = b1 % 10, b2 % 10\r\n rod_from = get_rod_letter(b1)\r\n rod_to = get_rod_letter(b2)\r\n \r\n action = f'[{colour} {rod_from}{arr1} -> {colour} {rod_to}{arr2}]'\r\n return action", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def test_api_gage(self):\n r = requests.get('{server}/api/0.1/'.format(\n server=self.get_server_url())).json()\n r = requests.get(r['gages']).json()\n r = requests.get(r['gages'][0]['url']).json()\n self.assertIn('name', r)\n self.assertIn('id', r)\n self.assertIn('location', r)\n self.assertIn('name', r)\n self.assertIn('regions', r)\n self.assertIn('sensors', r)\n self.assertIn('html', r)\n self.assertIn('url', r)", "def test_should_contain_badge_classes(self):\n\n badgr = self.get_badgr_setup()\n with 
vcr.use_cassette('tests/vcr_cassettes/badge_retrieval.yaml'):\n self.assertTrue(isinstance(badgr.badges[0], Badge))", "def check_yang_feature_usage(ctx, stmt):\n if stmt.keyword == \"choice\":\n err_add(ctx.errors, stmt.pos, \"OC_STYLE_AVOID_CHOICE\",\n (stmt.arg))\n elif stmt.keyword == \"presence\":\n err_add(ctx.errors, stmt.pos, \"OC_STYLE_AVOID_PRESENCE\",\n (stmt.parent.arg))\n elif stmt.keyword == \"feature\":\n err_add(ctx.errors, stmt.pos, \"OC_STYLE_AVOID_FEATURES\",\n (stmt.arg))\n elif stmt.keyword == \"if-feature\":\n err_add(ctx.errors, stmt.parent.pos, \"OC_STYLE_AVOID_FEATURES\",\n (stmt.parent.arg))", "def V_checkers_ablation(s_grid, s_n, g_n, s_others,\n f1=4, k1=[3,5], n_h1=128, n_h2=32):\n with tf.variable_scope(\"stage-2\"):\n conv = convnet_1(s_grid, f1=f1, k1=k1, s1=[1,1], scope='conv')\n concated = tf.concat( [conv, s_n, g_n, s_others], axis=1 )\n h1 = tf.layers.dense(inputs=concated, units=n_h1, activation=tf.nn.relu, use_bias=True, name='V_h1')\n h2 = tf.layers.dense(inputs=h1, units=n_h2, activation=tf.nn.relu, use_bias=True, name='V_h2')\n out = tf.layers.dense(inputs=h2, units=1, activation=None, use_bias=False, name='V_out')\n return out", "def health_check():\n return \"Comet-API\"", "def InitActionCheck(initActionList, init):\n for actions in initActionList:\n action_class = getNameFromIRI(actions.is_a[0].iri)\n # if the action is a SpeedAction class\n if action_class == \"SpeedAction\":\n action_entity_ref = getNameFromIRI(actions.has_entity_ref[0].iri)\n target_speed = actions.has_target_speed[0]\n ontology_transition_dynamics = actions.has_transition_dynamics[0]\n xosc_transition_dynamics = checkTransitionDynamics(ontology_transition_dynamics)\n init.add_init_action(action_entity_ref, xosc.AbsoluteSpeedAction(target_speed, xosc_transition_dynamics))\n continue\n #if the action is TeleportAction\n if action_class == \"TeleportAction\":\n action_entity_ref = getNameFromIRI(actions.has_entity_ref[0].iri)\n # if the action has position as parameter set\n s: int = 0\n offset = 0\n lane_id = 0\n road_id = 0\n if len(actions.has_position) != 0:\n position = actions.has_position[0]\n if len(position.has_s) != 0:\n s = position.has_s[0]\n\n if len(position.has_offset) != 0:\n offset = position.has_offset[0]\n\n if len(position.has_lane_id) != 0:\n lane_id = position.has_lane_id[0]\n\n if len(position.has_road_id) != 0:\n road_id = position.has_road_id[0]\n\n init.add_init_action(action_entity_ref, xosc.TeleportAction(xosc.LanePosition(s, offset, lane_id, road_id)))\n continue\n if action_class == \"EnvironmentAction\": # if the action is an EnvironmentAction\n xosc_environment_action = checkEnvironmentAction(actions)\n init.add_global_action(xosc_environment_action)\n return init", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def check_arguments_for_rescoring(usage_key):\r\n descriptor = modulestore().get_item(usage_key)\r\n if not hasattr(descriptor, 'module_class') or not hasattr(descriptor.module_class, 'rescore_problem'):\r\n msg = \"Specified module does not support rescoring.\"\r\n raise NotImplementedError(msg)", "def rule_40_can_create_sg(session):\n\n def try_create(session, side):\n res, conn_vpc = session[\"config\"][side][\"res\"], session[\"conn\"][side](\"vpc\")\n subnet = conn_vpc.get_all_subnets([res[\"subnet_id\"]])[0]\n\n try:\n conn_vpc.create_security_group(\n \"foo\", \"bar\", vpc_id = subnet.vpc_id, dry_run = True)\n except EC2ResponseError as e:\n if 412 != 
e.status:\n raise e\n\n try_create(session, \"server\")\n try_create(session, \"client\")\n\n return True", "def test_replace_namespaced_route_status(self):\n pass", "def test_action_independence_single(self):\n SF1, OUT = ('SET_FIELD', ('IPV4_DST', 0x01010101)), ('OUTPUT', 6)\n DEC_TTL = ('DEC_NW_TTL', None)\n # 0.1.1.0/30 -> ip:1.1.1.1, output:1\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.1/32 -> output:1\n # 1.1.1.0/31 -> ip:1.1.1.1, output:1\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.0/32 -> ip:1.1.1.1, output1\n # 1.1.1.0/31 -> output:1\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, None)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=0)\n ])\n n4 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DEC_TTL, SF1, OUT])),\n Rule(priority=0)\n ])\n self.assertTrue(check_equal(n1, n2))\n self.assertFalse(check_equal(n1, n4))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))", "def test_parglare_builtin_action_override_repetition():\n # B+ will product B_1 rule with `collect` common action\n grammar = \"\"\"\n S: B+;\n B: \"b\";\n \"\"\"\n\n called = [False]\n\n def my_collect(_, __):\n called[0] = True\n return \"pass\"\n\n my_actions = {\n \"collect\": my_collect,\n }\n\n g = Grammar.from_string(grammar)\n p = Parser(g, actions=my_actions)\n assert p.parse(\"b b\") == 'pass'\n assert called[0]", "def check(self):\n # get the data from shotgun\n app = self.parent.app\n context = app.context\n\n # get step short name\n filters = [[\"id\", \"is\", context.step[\"id\"]]]\n fields = [\"short_name\"]\n stepShortName = app.shotgun.find_one(\n \"Step\", filters=filters, fields=fields)[\"short_name\"]\n\n try:\n sequenceNode = gNodes.getTopGNode()\n except:\n sequenceNode = None\n\n if sequenceNode:\n metadataCode = sequenceNode.grid_code.get()\n metadataPipeStep = sequenceNode.grid_pipeStep.get(asString=True)\n if not (stepShortName == metadataPipeStep and\n context.entity[\"name\"] == metadataCode):\n self.status = self.errorMode\n self.addError(\"Context and sequence node metadata don't match\")\n self.errorMessage = \"Context and sequence node metadata don't match\"\n else:\n self.status = \"OK\"\n else:\n self.status = \"OK\"", "def is_heating(action_data):\n return (action_data == HEATING_ACTION) | (action_data == TWO_STAGE_HEATING_ACTION)", "def getAction(self, gameState):\r\n \"*** YOUR CODE HERE ***\"\r\n util.raiseNotDefined()", "def getAction(self, gameState):\r\n \"*** YOUR CODE HERE ***\"\r\n util.raiseNotDefined()", "def test_lti20_put_like_delete_success(self):\r\n self.setup_system_xmodule_mocks_for_lti20_request_test()\r\n SCORE = 0.55 # pylint: disable=invalid-name\r\n COMMENT = u\"ಠ益ಠ\" # pylint: disable=invalid-name\r\n self.xmodule.module_score = SCORE\r\n self.xmodule.score_comment = COMMENT\r\n mock_request = 
self.get_signed_lti20_mock_request(self.GOOD_JSON_PUT_LIKE_DELETE)\r\n # Now call the handler\r\n response = self.xmodule.lti_2_0_result_rest_handler(mock_request, \"user/abcd\")\r\n # Now assert there's no score\r\n self.assertEqual(response.status_code, 200)\r\n self.assertIsNone(self.xmodule.module_score)\r\n self.assertEqual(self.xmodule.score_comment, u\"\")\r\n (_, evt_type, called_grade_obj), _ = self.system.publish.call_args\r\n self.assertEqual(called_grade_obj, {'user_id': self.USER_STANDIN.id, 'value': None, 'max_value': None})\r\n self.assertEqual(evt_type, 'grade')", "def check_visibilities(pop1, pop2):\n grb1 = pop1.grb\n grb2 = pop2.grb\n\n namelist = set(grb1[\"name\"].values)\n count = {\"North\":0, \"South\":0, \"Both\":0}\n\n # print(\"check : \",name)\n for loc in [\"North\",\"South\",\"Both\"]:\n print(f\"{24*'-':24s}\")\n print(f\"{loc:10s} err1 err2\")\n for name in namelist:\n mask1 = (grb1[\"name\"] == name) & (grb1.loca == loc)\n mask2 = (grb2[\"name\"] == name) & (grb2.loca == loc)\n if grb1[mask1].err.values[0] != grb2[mask2].err.values[0] :\n print(\"{:10s} {:4d} {:4d}\"\n .format(name,grb1[mask1].err.values[0],\n grb2[mask2].err.values[0]))\n count[loc]+=1\n print(\" Differences between the 2 populations :\",count)", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()" ]
[ "0.62013024", "0.5185163", "0.51849663", "0.50805354", "0.50188154", "0.49678308", "0.49544063", "0.49272656", "0.49225903", "0.49052584", "0.4849131", "0.48472935", "0.47611517", "0.4737386", "0.47336262", "0.46869755", "0.46836528", "0.4678428", "0.46226096", "0.46140948", "0.4605737", "0.46050668", "0.45999947", "0.4576194", "0.45543835", "0.45410335", "0.45342276", "0.45339355", "0.4508958", "0.44931182", "0.44839182", "0.4481881", "0.4475272", "0.44722468", "0.4465613", "0.44509843", "0.44428772", "0.44391763", "0.44387177", "0.44345373", "0.44345373", "0.44336107", "0.44306493", "0.44234127", "0.44209185", "0.4405721", "0.439773", "0.43954933", "0.43883234", "0.4388136", "0.43808135", "0.43789512", "0.4375168", "0.43675536", "0.43665293", "0.4364637", "0.43637124", "0.43524644", "0.43490368", "0.4347793", "0.43471864", "0.4343654", "0.43419176", "0.43396664", "0.43375263", "0.43337232", "0.4332879", "0.43198374", "0.4309287", "0.43025306", "0.43024597", "0.43014607", "0.43001714", "0.42954734", "0.42947268", "0.42930898", "0.4289901", "0.42871273", "0.4285676", "0.42853063", "0.42817867", "0.42800874", "0.4279115", "0.42790675", "0.4277427", "0.42756757", "0.42750394", "0.42703897", "0.42652303", "0.4265021", "0.4263996", "0.426013", "0.42596483", "0.42580673", "0.4257692", "0.4241714", "0.4241714", "0.42396528", "0.42392674", "0.4237833" ]
0.6528508
0
Ensure that elb v1 and elb v2 actions are both present in the elasticloadbalancing namespace
def test_services_with_multiple_pages_elb(self): results = get_actions_for_service("elasticloadbalancing") actions = [ "elasticloadbalancing:ApplySecurityGroupsToLoadBalancer", "elasticloadbalancing:AttachLoadBalancerToSubnets", "elasticloadbalancing:ConfigureHealthCheck", "elasticloadbalancing:CreateAppCookieStickinessPolicy", "elasticloadbalancing:CreateLBCookieStickinessPolicy", "elasticloadbalancing:CreateLoadBalancerListeners", "elasticloadbalancing:CreateLoadBalancerPolicy", "elasticloadbalancing:DeleteLoadBalancerListeners", "elasticloadbalancing:DeleteLoadBalancerPolicy", "elasticloadbalancing:DeregisterInstancesFromLoadBalancer", "elasticloadbalancing:DescribeInstanceHealth", "elasticloadbalancing:DescribeLoadBalancerPolicies", "elasticloadbalancing:DescribeLoadBalancerPolicyTypes", "elasticloadbalancing:DetachLoadBalancerFromSubnets", "elasticloadbalancing:DisableAvailabilityZonesForLoadBalancer", "elasticloadbalancing:EnableAvailabilityZonesForLoadBalancer", "elasticloadbalancing:RegisterInstancesWithLoadBalancer", "elasticloadbalancing:SetLoadBalancerListenerSSLCertificate", "elasticloadbalancing:SetLoadBalancerPoliciesForBackendServer", "elasticloadbalancing:SetLoadBalancerPoliciesOfListener", ] for action in actions: self.assertTrue(action in results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def test_request_with_two_bundles(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n self.assertTrue(validate_request(request, self.policy))", "def test_match_endpoints():\n\n service_names = [\n \"iap-ingress-kfctl-8c9b.endpoints.kubeflow-ci-deployment.cloud.goog\",\n ]\n\n for s in service_names:\n assert cleanup_ci.is_match(s, patterns=cleanup_ci.E2E_PATTERNS)", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. 
entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def test_box_actions_out_of_bound(env: gym.Env):\n env.reset(seed=42)\n\n oob_env = gym.make(env.spec.id, disable_env_checker=True)\n oob_env.reset(seed=42)\n\n assert isinstance(env.action_space, spaces.Box)\n dtype = env.action_space.dtype\n upper_bounds = env.action_space.high\n lower_bounds = env.action_space.low\n\n for i, (is_upper_bound, is_lower_bound) in enumerate(\n zip(env.action_space.bounded_above, env.action_space.bounded_below)\n ):\n if is_upper_bound:\n obs, _, _, _, _ = env.step(upper_bounds)\n oob_action = upper_bounds.copy()\n oob_action[i] += np.cast[dtype](OOB_VALUE)\n\n assert oob_action[i] > upper_bounds[i]\n oob_obs, _, _, _, _ = oob_env.step(oob_action)\n\n assert np.alltrue(obs == oob_obs)\n\n if is_lower_bound:\n obs, _, _, _, _ = env.step(\n lower_bounds\n ) # `env` is unwrapped, and in new step API\n oob_action = lower_bounds.copy()\n oob_action[i] -= np.cast[dtype](OOB_VALUE)\n\n assert oob_action[i] < lower_bounds[i]\n oob_obs, _, _, _, _ = oob_env.step(oob_action)\n\n assert np.alltrue(obs == oob_obs)\n\n env.close()", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n 
\"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def check_deployment(version_stack_name, app_name):\n\n print(\"Polling Target Group ({}) until a successful state is reached...\".format(version_stack_name))\n elbv2 
= boto3.client('elbv2')\n waiter = elbv2.get_waiter('target_in_service')\n cloudformation = boto3.client('cloudformation')\n response = cloudformation.describe_stack_resources(\n StackName=version_stack_name,\n LogicalResourceId='ALBTargetGroup'\n )\n target_group = response['StackResources'][0]['PhysicalResourceId']\n start_time = datetime.datetime.now()\n try:\n waiter.wait(TargetGroupArn=target_group)\n except botocore.exceptions.WaiterError:\n print('Health check did not pass!')\n response = cloudformation.describe_stack_resources(\n StackName=version_stack_name,\n LogicalResourceId='ECSService'\n )\n service = response['StackResources'][0]['PhysicalResourceId']\n print('Outputting events for service {}:'.format(service))\n response = cloudformation.describe_stack_resources(\n StackName=\"ECS-{}\".format(app_name),\n LogicalResourceId='ECSCluster'\n )\n cluster = response['StackResources'][0]['PhysicalResourceId']\n ecs = boto3.client('ecs')\n response = ecs.describe_services(\n cluster=cluster,\n services=[service]\n )\n for event in [x['message'] for x in response['services'][0]['events']]:\n print(event)\n# print('Deleting CloudFormation stack...')\n# response = cloudformation.delete_stack(\n# StackName=\"MV-{realm}-{app_name}-{version}-{env}\".format(env=os.environ['ENV'], app_name=os.environ['ECS_APP_NAME'], version=os.environ['BUILD_VERSION'], realm=os.environ['REALM'])\n# )\n# waiter = cf.get_waiter('stack_delete_complete')\n# waiter.wait(\n# StackName=\"MV-{realm}-{app_name}-{version}-{env}\".format(env=os.environ['ENV'], app_name=os.environ['ECS_APP_NAME'], version=os.environ['BUILD_VERSION'], realm=os.environ['REALM'])\n# )\n# print('CloudFormation stack deleted.')\n elapsed_time = datetime.datetime.now() - start_time\n print('Health check passed in {}'.format(elapsed_time))\n print(\"Done.\")", "def test_redeploy_edges(self):\n pass", "def test_action_independence_multiple(self):\n DST1, DST2 = ('SET_FIELD', ('IPV4_DST', 0x1)), ('SET_FIELD', ('IPV4_DST', 0x2))\n SRC1, SRC2 = ('SET_FIELD', ('IPV4_SRC', 0x1)), ('SET_FIELD', ('IPV4_SRC', 0x2))\n OUT1, OUT2 = ('OUTPUT', 1), ('OUTPUT', 2)\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1, src:2 -> output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None),\n ('IPV4_SRC', 2, None)]),\n instructions=inst_from_acts([OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1 -> src:2, output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None)]),\n instructions=inst_from_acts([SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n\n self.assertTrue(check_equal(n1, n2))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))", "def validate_availability_zones(self, context, resource_type,\n availability_zones):", "def has_action2(self, feature):\n return feature in self._action2", 
"def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(self):\n pass", "def verifyActionCenterRts():\n pass", "def test_bad_action(self):\r\n action = 'robot-not-an-action'\r\n url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {'identifiers': self.beta_tester.email, 'action': action})\r\n self.assertEqual(response.status_code, 400)", "def test_services_with_multiple_pages_aws_marketplace(self):\n # Overlap: AWS Marketplace, Marketplace Catalog, and AWS Marketplace Entitlement service, AWS Marketplace Image Building Service, AWS Marketplace Metering Service, AWS Marketplace Private Marketplace, and AWS Marketplace Procurement Systems\n # AWS Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplace.html\n self.assertTrue(\"aws-marketplace:AcceptAgreementApprovalRequest\" in self.all_actions)\n # AWS Marketplace Catalog: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacecatalog.html\n self.assertTrue(\"aws-marketplace:CancelChangeSet\" in self.all_actions)\n # AWS Marketplace Entitlement Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceentitlementservice.html\n self.assertTrue(\"aws-marketplace:GetEntitlements\" in self.all_actions)\n # AWS Marketplace Image Building Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceimagebuildingservice.html\n self.assertTrue(\"aws-marketplace:DescribeBuilds\" in self.all_actions)\n # AWS Marketplace Metering Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacemeteringservice.html\n self.assertTrue(\"aws-marketplace:BatchMeterUsage\" in self.all_actions)\n # AWS Marketplace Private Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprivatemarketplace.html\n self.assertTrue(\"aws-marketplace:AssociateProductsWithPrivateMarketplace\" in self.all_actions)\n # AWS Marketplace Procurement Systems: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprocurementsystemsintegration.html\n self.assertTrue(\"aws-marketplace:DescribeProcurementSystemConfiguration\" in self.all_actions)\n\n results = get_actions_for_service(\"aws-marketplace\")\n actions = [\n \"aws-marketplace:AcceptAgreementApprovalRequest\",\n \"aws-marketplace:BatchMeterUsage\",\n \"aws-marketplace:CancelAgreementRequest\",\n \"aws-marketplace:CancelChangeSet\",\n \"aws-marketplace:CompleteTask\",\n \"aws-marketplace:DescribeAgreement\",\n \"aws-marketplace:DescribeBuilds\",\n \"aws-marketplace:DescribeChangeSet\",\n \"aws-marketplace:DescribeEntity\",\n \"aws-marketplace:DescribeProcurementSystemConfiguration\",\n \"aws-marketplace:DescribeTask\",\n \"aws-marketplace:GetAgreementApprovalRequest\",\n \"aws-marketplace:GetAgreementRequest\",\n \"aws-marketplace:GetAgreementTerms\",\n \"aws-marketplace:GetEntitlements\",\n \"aws-marketplace:ListAgreementApprovalRequests\",\n \"aws-marketplace:ListAgreementRequests\",\n \"aws-marketplace:ListBuilds\",\n \"aws-marketplace:ListChangeSets\",\n \"aws-marketplace:ListEntities\",\n \"aws-marketplace:ListTasks\",\n \"aws-marketplace:MeterUsage\",\n \"aws-marketplace:PutProcurementSystemConfiguration\",\n \"aws-marketplace:RegisterUsage\",\n \"aws-marketplace:RejectAgreementApprovalRequest\",\n 
\"aws-marketplace:ResolveCustomer\",\n \"aws-marketplace:SearchAgreements\",\n \"aws-marketplace:StartBuild\",\n \"aws-marketplace:StartChangeSet\",\n \"aws-marketplace:Subscribe\",\n \"aws-marketplace:Unsubscribe\",\n \"aws-marketplace:UpdateAgreementApprovalRequest\",\n \"aws-marketplace:UpdateTask\",\n \"aws-marketplace:ViewSubscriptions\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def check_eapi(self, eapi):\n\t\treturn True", "def test_basic_add_multiple_endpoint(self):\n mac1, ip1 = self.setup_with_endpoint()\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg2')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg2'))", "def test_add_or_update_tags(self):\n p = self.load_policy({\n 'name': 'test-azure-tag',\n 'resource': 'azure.resourcegroup',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'test_vm'}\n ],\n 'actions': [\n {'type': 'tag',\n 'tags': {'pre-existing-1': 'unmodified', 'pre-existing-2': 'unmodified'}},\n ],\n })\n p.run()\n\n # verify initial tag set\n s = Session()\n client = s.client('azure.mgmt.resource.ResourceManagementClient')\n rg = [rg for rg in client.resource_groups.list() if rg.name == 'test_vm'][0]\n self.assertEqual(rg.tags,\n {'pre-existing-1': 'unmodified', 'pre-existing-2': 'unmodified'})\n\n p = self.load_policy({\n 'name': 'test-azure-tag',\n 'resource': 'azure.resourcegroup',\n 'filters': [\n {'type': 'value',\n 'key': 'name',\n 'op': 'eq',\n 'value_type': 'normalize',\n 'value': 'test_vm'}\n ],\n 'actions': [\n {'type': 'tag',\n 'tags': {'tag1': 'value1', 'pre-existing-1': 'modified'}}\n ],\n })\n p.run()\n\n # verify modified tags\n rg = [rg for rg in client.resource_groups.list() if rg.name == 'test_vm'][0]\n self.assertEqual(rg.tags,\n {'tag1': 'value1', 'pre-existing-1': 'modified', 'pre-existing-2': 'unmodified'})", "def test_basic_add_multiple_endpoint(self):\n mac1, ip1 = self.setup_with_endpoint()\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg'))", "def check(self):\n illegalNamespaces = list()\n\n prog = re.compile(\"^[A-Z]{4}[0-9]{2}_[0-9]{3}:$\")\n\n for assetNode in pm.ls(type=\"gAsset\"):\n if assetNode.isReferenced() and not prog.match(assetNode.namespace()):\n illegalNamespaces.append(assetNode)\n\n if not illegalNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = illegalNamespaces\n for illegalNamespace in illegalNamespaces:\n self.addError(\"%s has a illegal namespace\" % illegalNamespace)\n self.errorMessage = \"%s asset(s) have a illegal namespace\" % (\n len(illegalNamespaces))", "def test_create_role_binding_restriction_for_all_namespaces(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_action_independence_single(self):\n SF1, OUT = ('SET_FIELD', ('IPV4_DST', 0x01010101)), ('OUTPUT', 
6)\n DEC_TTL = ('DEC_NW_TTL', None)\n # 0.1.1.0/30 -> ip:1.1.1.1, output:1\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.1/32 -> output:1\n # 1.1.1.0/31 -> ip:1.1.1.1, output:1\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.0/32 -> ip:1.1.1.1, output1\n # 1.1.1.0/31 -> output:1\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, None)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=0)\n ])\n n4 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DEC_TTL, SF1, OUT])),\n Rule(priority=0)\n ])\n self.assertTrue(check_equal(n1, n2))\n self.assertFalse(check_equal(n1, n4))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))", "def test_ec2_up(runner, ec2):\n result = runner.invoke(cli.cli, ['ec2', 'up', '-i', ec2['server'].id])\n assert result.exit_code == 0", "def check_host_activation(cls, ksm_merge_across_nodes):\n testflow.step(\"Deactivate the host %s\", sla_conf.HOSTS[0])\n assert ll_hosts.deactivate_host(\n positive=True,\n host=sla_conf.HOSTS[0],\n host_resource=sla_conf.VDS_HOSTS[0]\n )\n cls.update_merge_across_nodes_parameter(\n ksm_merge_across_nodes=ksm_merge_across_nodes\n )\n assert ll_hosts.activate_host(\n positive=True,\n host=sla_conf.HOSTS[0],\n host_resource=sla_conf.VDS_HOSTS[0]\n )\n testflow.step(\n \"%s: wait until KSM merge across nodes will be equal to %s\",\n sla_conf.VDS_HOSTS[0], ksm_merge_across_nodes\n )\n assert sla_helpers.wait_for_numa_aware_ksm_status(\n resource=sla_conf.VDS_HOSTS[0],\n expected_value=ksm_merge_across_nodes\n )", "def check(self):\n BadNamespaces = list()\n\n for namespace in pm.listNamespaces():\n BadNamespaces.append(namespace)\n\n if not BadNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = namespace\n for namespace in BadNamespaces:\n self.addError(\"namespace %s exist\" % namespace)\n self.errorMessage = \"%s namespace\" % (len(BadNamespaces))", "def test_subscriber_access_if_vsg2_goes_down(self):", "def test_validate_bookstore_endpoint():\n expected = {\n \"bookstore_valid\": False,\n \"publish_valid\": False,\n \"archive_valid\": False,\n \"clone_valid\": True,\n }\n settings = BookstoreSettings(s3_endpoint_url=\"\")\n assert validate_bookstore(settings) == expected", "def verifyActionCenterFirewall():\n pass", "def test_lti20_rest_good_dispatch(self):\r\n for ginput, expected in self.GOOD_DISPATCH_INPUTS:\r\n self.assertEquals(self.xmodule.parse_lti_2_0_handler_suffix(ginput), expected)", "def check_consistency(self, es):", "def test_create_deployment_config_rollback_for_all_namespaces(self):\n pass", "def ensure_security_groups_created(vpc, environment):\n conglomerate_name = environment + '-conglomerate'\n load_balancer_name = environment + '-load-balancer'\n\n existing = vpc.security_groups.filter(Filters=[\n { 'Name': 'group-name', 'Values': [ conglomerate_name, load_balancer_name ] }\n 
])\n ret = {}\n for security_group in existing:\n if security_group.group_name == conglomerate_name:\n ret['conglomerate'] = security_group\n elif security_group.group_name == load_balancer_name:\n ret['load-balancer'] = security_group\n else:\n raise Exception(\"Unexpected security group name: \" + security_group.group_name)\n\n if not ret['conglomerate']:\n # untested\n ret['conglomerate'] = vpc.create_security_group(\n GroupName=conglomerate_name,\n Description=conglomerate_name\n )\n if not ret['load-balancer']:\n # untested\n ret['load-balancer'] = vpc.create_security_group(\n GroupName=load_balancer_name,\n Description=load_balancer_name\n )\n\n try:\n ret['conglomerate'].authorize_ingress(IpPermissions=[\n { 'IpProtocol': 'icmp', 'FromPort': 0, 'ToPort': 255, 'IpRanges': [ { 'CidrIp': '0.0.0.0/0' } ] },\n { 'IpProtocol': 'tcp', 'FromPort': 9000, 'ToPort': 9000, 'UserIdGroupPairs': [ { 'GroupId': ret['load-balancer'].id } ] },\n ])\n except botocore.exceptions.ClientError as e:\n if e.response['Error']['Code'] != 'InvalidPermission.Duplicate':\n raise e\n\n try:\n ret['load-balancer'].authorize_ingress(IpPermissions=[\n { 'IpProtocol': 'tcp', 'FromPort': 80, 'ToPort': 80 },\n { 'IpProtocol': 'tcp', 'FromPort': 443, 'ToPort': 443 },\n { 'IpProtocol': 'icmp', 'FromPort': 0, 'ToPort': 255, 'IpRanges': [ { 'CidrIp': '0.0.0.0/0' } ] },\n { 'IpProtocol': 'tcp', 'FromPort': 1024, 'ToPort': 65535, 'IpRanges': [ { 'CidrIp': Constants['VpcCidr'] } ] },\n ])\n except botocore.exceptions.ClientError as e:\n if e.response['Error']['Code'] != 'InvalidPermission.Duplicate':\n raise e\n\n return ret", "def checkConflicts(self):\n\t\treturn", "def test_non_overlapping_similar_crud(self):\n\n # Stat validation reference variables\n verification_dict = dict()\n verification_dict[\"ops_create\"] = 0\n verification_dict[\"ops_update\"] = 0\n verification_dict[\"ops_delete\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"sync_write_committed_count\"] = 0\n\n for _, scope in self.bucket.scopes.items():\n for _, collection in scope.collections.items():\n verification_dict[\"ops_create\"] += collection.num_items\n if self.durability_level in self.supported_d_levels:\n verification_dict[\"sync_write_committed_count\"] \\\n += collection.num_items\n\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n doc_ops = self.input.param(\"doc_ops\", \"create\")\n # Reset initial doc_loading params to NO_OPS\n doc_load_template = \\\n self.bucket_util.get_crud_template_from_package(\"initial_load\")\n doc_load_template[MetaCrudParams.DURABILITY_LEVEL] = \"\"\n doc_load_template[MetaCrudParams.COLLECTIONS_CONSIDERED_FOR_CRUD] = 3\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 0\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n\n # Create required doc_generators for CRUD ops\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 25\n if DocLoading.Bucket.DocOps.CREATE in doc_ops:\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n elif DocLoading.Bucket.DocOps.UPDATE in doc_ops:\n doc_load_template[\"doc_crud\"][\n 
MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 50\n elif DocLoading.Bucket.DocOps.DELETE in doc_ops:\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 50\n\n async_write_crud_spec = deepcopy(doc_load_template)\n sync_write_crud_spec = deepcopy(doc_load_template)\n\n sync_write_crud_spec[MetaCrudParams.DURABILITY_LEVEL] = \\\n self.durability_level\n\n async_write_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n async_write_crud_spec,\n mutation_num=1,\n async_load=True)\n sync_write_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n sync_write_crud_spec,\n mutation_num=2,\n async_load=True)\n\n # Wait for all task to complete\n self.task.jython_task_manager.get_task_result(async_write_loading_task)\n self.task.jython_task_manager.get_task_result(sync_write_loading_task)\n\n # Validate CRUD loading results\n self.bucket_util.validate_doc_loading_results(async_write_loading_task)\n self.bucket_util.validate_doc_loading_results(sync_write_loading_task)\n\n if async_write_loading_task.result is False:\n self.log_failure(\"Doc_ops failed in async_write_task\")\n if sync_write_loading_task.result is False:\n self.log_failure(\"Doc_ops failed in sync_write_task\")\n\n # Verify doc count and other stats\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)", "def test_request_with_one_day_overlap_problem(self):\n bundle1, bundle2, = self._get_two_bundles(\n bundle1_inception=\"2019-01-01T00:00:00\",\n bundle1_expiration=\"2019-01-22T00:00:00\",\n bundle2_inception=\"2019-01-14T00:00:00\",\n bundle2_expiration=\"2019-02-04T00:00:00\",\n )\n xml = self._make_request(bundle1=bundle1, bundle2=bundle2)\n request = request_from_xml(xml)\n policy = replace(\n self.policy,\n check_bundle_intervals=False, # want to test against ZSK policy, not KSK policy\n check_cycle_length=False, # want to test against ZSK policy, not KSK policy\n )\n with self.assertRaises(KSR_POLICY_SIG_OVERLAP_Violation) as exc:\n validate_request(request, policy)\n self.assertEqual(\n 'Bundle \"id=test-2 2019-01-14->2019-02-04\" overlap 8 days with '\n '\"id=test-1 2019-01-01->2019-01-22\" is < claimed minimum 9 days',\n str(exc.exception),\n )\n\n # test that the check can be disabled\n policy = replace(\n self.policy,\n check_bundle_overlap=False,\n max_bundle_interval=duration_to_timedelta(\"P13D\"),\n )\n self.assertTrue(validate_request(request, policy))", "def test_load_from_v2(self) -> None:\n self.save_new_valid_exploration(\n 'Exp1', '[email protected]', end_state_name='End')\n collection = collection_domain.Collection.from_yaml(\n 'cid', self.YAML_CONTENT_V2)\n self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)", "def b3_correctness(el_a, el_b, system_el2kbid, gold_el2kbid):\n correct = False\n\n if(inSameSet(el_a, el_b, system_el2kbid) and \n inSameSet(el_a, el_b, gold_el2kbid) and\n sameLinking(el_a, el_b, system_el2kbid, gold_el2kbid) #THIS CONDITION DEPARTS FROM THE ORIGINAL BCUBED (extesion for the Entity Linking problem)\n ):\n correct = True\n\n return correct", "def tr_check_availability(agent_directory, agent_full_name, slot_range):\r\n tr_create_booking_register(agent_directory, agent_full_name) # CHANGE THIS WHEN POSSIBLE. IT IS ERRASING ALL BOOKINGS. 
NOW THE SYSTEM IS NOT CONSTRAINT IN TR RESOURCES.\r\n tr_booking_df = pd.read_csv(f'{agent_directory}''/'f'{agent_full_name}_booking.csv', header=0, delimiter=\",\", engine='python')\r\n tr_booking_df['booking_type'] = tr_booking_df['booking_type'].fillna(\"\")\r\n # Creates 2 lists: booked_slots_list & free_slots_list and checks availability.\r\n free_slots_list = []\r\n booked_slots_list = []\r\n prebooked_slots_list = []\r\n for x in slot_range:\r\n if tr_booking_df.loc[x - 1, 'booking_type'] == \"pre-book\":\r\n prebooked_slots_list.append(x)\r\n elif tr_booking_df.loc[x - 1, 'booking_type'] == \"booked\":\r\n booked_slots_list.append(x)\r\n else:\r\n free_slots_list.append(x)\r\n # Checks availability\r\n if len(booked_slots_list) >= 1:\r\n tr_msg_ca_body = \"negative\"\r\n else:\r\n tr_msg_ca_body = \"positive\"\r\n return tr_msg_ca_body", "def validate_collision(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def test_load_response_descriptor_events_event_event_resource_spaces(self):\n pass", "def update_target_groups():\n\n # detect which region the explorer(s) are located\n for j in range(NUM_OF_SHARDS):\n key_explorer = \"explorers_\" + str(j)\n array_instance_ip = parse_network_config(key_explorer)\n array_instance_id = retrieve_instance_id(array_instance_ip)\n\n reg = retrieve_instance_region(array_instance_ip[0])\n # all nodes registered for the same endpoints should be located in the same region, if not, exit\n verify_nodes_same_region(reg, array_instance_ip)\n\n elbv2_client = boto3.client('elbv2', region_name=reg)\n\n array_target_group = create_name_target_group(j, ID_DOMAIN_NAME)\n pp.pprint(array_target_group)\n\n # 1/3 - retrieve target group arn\n print(\"==== retrieve target group arn\")\n dict_tg_arn = dict()\n for tg in array_target_group:\n resp = elbv2_client.describe_target_groups(Names=[tg])\n tg_arn = resp[\"TargetGroups\"][0][\"TargetGroupArn\"]\n dict_tg_arn[tg] = tg_arn\n pp.pprint(dict_tg_arn)\n\n # 2/3 - find all the instances\n print(\"==== find all the instances current registered\")\n dict_tg_instanceid = defaultdict(list)\n for tg in array_target_group:\n resp = elbv2_client.describe_target_health(TargetGroupArn=dict_tg_arn[tg])\n num_of_targets = len(resp[\"TargetHealthDescriptions\"])\n for k in range(num_of_targets):\n instance_id = resp[\"TargetHealthDescriptions\"][k][\"Target\"][\"Id\"]\n dict_tg_instanceid[tg].append(instance_id)\n pp.pprint(dict_tg_instanceid)\n\n # 3/3 - deregister all instances, then we can have a clean and nice target group\n print(\"==== deregister all instances\")\n for tg in array_target_group:\n for instance_id in dict_tg_instanceid[tg]:\n try:\n resp = elbv2_client.deregister_targets(TargetGroupArn=dict_tg_arn[tg],\n Targets=[{'Id': instance_id}])\n except Exception as e:\n print(\"Unexpected error to deregister the instance: %s\" % e)\n\n # 3/3 - register instances into the tg\n print(\"==== register all instances\")\n # outer for loop: loop through 2 tg, https and wss\n # inner loop: add every single instance id into each tg\n for tg in array_target_group:\n for instance in array_instance_id:\n response = elbv2_client.register_targets(\n TargetGroupArn=dict_tg_arn[tg],\n Targets=[{'Id': instance, }, ]\n )", "def check_load_balancing(testlog):\n\n # Global Dictionary representing packet count received at each host\n # (Keys: Host Names, Values: Packet Count)\n global RECEIVED_PKT_DICT\n\n testlog.log_step(\"Final ECMP Load 
Balancing Results\")\n NVP1_count = RECEIVED_PKT_DICT[\"NVP1 (h3)\"]\n NVP2_count = RECEIVED_PKT_DICT[\"NVP2 (h4)\"]\n NVP1_percent = (NVP1_count/TOTAL_MACS)*100\n NVP2_percent = (NVP2_count/TOTAL_MACS)*100\n\n print(\"NVP1 was chosen {} out of {} times, which is {:.2f}% of the time\\n\".format(\n NVP1_count, TOTAL_MACS, NVP1_percent))\n print(\"NVP2 was chosen {} out of {} times, which is {:.2f}% of the time\\n\".format(\n NVP2_count, TOTAL_MACS, NVP2_percent))\n\n # Check ECMP Load Balancing if expected\n if (PKT_TYPE == \"l2_basic\"):\n # Check if at least 40% of total packets were received at one host\n # Check that total NVP1_count and NVP2_count equals the number of packets\n assert((NVP1_percent >= 0.4) or (NVP2_percent >= 0.4) and\n ((NVP1_count + NVP2_count) == TOTAL_MACS)),\\\n \"ECMP Load Balancing Failed for Known Unicast Traffic\"\n\n step_desc = \"ECMP Load Balancing check passed for Known Unicast traffic\"\n testlog.log_info(step_desc)\n else:\n # Check that all of the packets are only received at one NVP and\n # zero packets are received at the other NVP\n assert(((NVP1_count == 0) and (NVP2_count == TOTAL_MACS)) or\n (NVP2_count == 0) and (NVP1_count == TOTAL_MACS)), \\\n \"Load Balancing was not expected, but packets were not\" +\\\n \" received correctly for {} traffic\".format(PKT_TYPE)\n\n step_desc = \"Load Balancing is not expected for {} traffic. \".format(PKT_TYPE) + \\\n \"All traffic was directed to the BUM Next Hop\"\n testlog.log_info(step_desc)", "def test_auto_update_bs_invalid_alloys(self):\n with app.test_client() as client:\n self.login_client(client)\n\n # Missing carbon elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'].pop(0)\n\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Missing element error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Bad symbol elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'][0]['symbol'] = 'Cx'\n\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid element symbol error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Bad Carbon weight elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'][0]['weight'] = 0.9\n\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid element weight error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Bad symbol key elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'][0].pop('symbol')\n\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid element error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)", "def test_basic_add_multiple_endpoint(self):\n args = 
self.get_args()\n config = self.create_config_file()\n self.write_config_file(config, args)\n execute_tool(args, test_mode=True)\n\n time.sleep(2)\n mac1 = '00:11:22:33:33:34'\n ip1 = '3.4.3.5'\n self.add_endpoint(mac1, ip1, 'intersite-testsuite', 'app', 'epg')\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry_with_provided_contract(mac1, ip1, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg', 'contract-1'))\n self.assertTrue(self.verify_remote_site_has_entry_with_provided_contract(mac2, ip2, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg', 'contract-1'))", "def validate(self, namespace):\n pass", "def check_availability(self):\n pass", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def test_auto_update_ae_invalid_alloys(self):\n with app.test_client() as client:\n self.login_client(client)\n\n # Missing carbon elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'].pop(0)\n\n res = client.post(\n '/v1/sim/configs/ae',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Missing element error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Bad symbol elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'][0]['symbol'] = 'Cx'\n\n res = client.post(\n '/v1/sim/configs/ae',\n data=json.dumps(\n {\n 'method': 
'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid element symbol error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Bad Carbon weight elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'][0]['weight'] = 0.9\n\n res = client.post(\n '/v1/sim/configs/ae',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid element weight error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Bad symbol key elements\n alloy_store = deepcopy(ALLOY_STORE)\n alloy_store['alloys']['parent']['compositions'][0].pop('symbol')\n\n res = client.post(\n '/v1/sim/configs/ae',\n data=json.dumps(\n {\n 'method': 'Li98',\n 'alloy_store': alloy_store\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid element error.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)", "def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)", "def autoscaling_load_balancer_healthcheck_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n # ISO Time\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for asg in describe_auto_scaling_groups(cache, session)[\"AutoScalingGroups\"]:\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(asg,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n asgArn = asg[\"AutoScalingGroupARN\"]\n asgName = asg[\"AutoScalingGroupName\"]\n healthCheckType = asg[\"HealthCheckType\"]\n # Check specific metadata\n asgLbs = asg[\"LoadBalancerNames\"]\n asgTgs = asg[\"TargetGroupARNs\"]\n # If either list is empty it means there are no ELBs or ELBv2s associated with this ASG\n if not (asgLbs or asgTgs):\n # this is a passing check\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{asgArn}/asg-elb-asgs-elb-healthcheck-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": asgArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[Autoscaling.2] Autoscaling Groups with load balancer targets should use ELB health checks\",\n \"Description\": f\"Autoscaling group {asgName} does not have any ELB or Target Groups associated and is not in scope for this check.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information about enabling ELB health checks refer to the Add Elastic Load Balancing health checks to an Auto Scaling group section of the Amazon EC2 Auto Scaling User Guide\",\n \"Url\": \"https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-elb-healthcheck.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n 
\"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Compute\",\n \"AssetService\": \"AWS Auto Scaling\",\n \"AssetComponent\": \"Autoscaling Group\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsAutoScalingAutoScalingGroup\",\n \"Id\": asgArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsAutoScalingAutoScalingGroup\": {\n \"HealthCheckType\": healthCheckType\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 DE.AE-4\",\n \"NIST CSF V1.1 DE.DP-4\",\n \"NIST SP 800-53 Rev. 4 AU-6\",\n \"NIST SP 800-53 Rev. 4 CA-2\",\n \"NIST SP 800-53 Rev. 4 CA-7\",\n \"NIST SP 800-53 Rev. 4 CP-2\",\n \"NIST SP 800-53 Rev. 4 IR-4\",\n \"NIST SP 800-53 Rev. 4 RA-3\",\n \"NIST SP 800-53 Rev. 4 RA-5\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC7.2\",\n \"AICPA TSC CC7.3\",\n \"ISO 27001:2013 A.16.1.2\",\n \"ISO 27001:2013 A.16.1.3\",\n \"ISO 27001:2013 A.16.1.4\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding\n else:\n if healthCheckType != \"ELB\":\n # this is a failing check\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{asgArn}/asg-elb-asgs-elb-healthcheck-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": asgArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"LOW\"},\n \"Confidence\": 99,\n \"Title\": \"[Autoscaling.2] Autoscaling Groups with load balancer targets should use ELB health checks\",\n \"Description\": f\"Autoscaling group {asgName} has ELB or ELBv2 Targets but does not use an ELB Health Check. If you attached a load balancer or target group to your Auto Scaling group, you can configure the group to mark an instance as unhealthy when Elastic Load Balancing reports it as unhealthy. If connection draining is enabled for your load balancer, Amazon EC2 Auto Scaling waits for in-flight requests to complete or the maximum timeout to expire, whichever comes first, before terminating instances due to a scaling event or health check replacement. 
Review the remediation section for more information on this configuration.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information about enabling ELB health checks refer to the Add Elastic Load Balancing health checks to an Auto Scaling group section of the Amazon EC2 Auto Scaling User Guide\",\n \"Url\": \"https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-elb-healthcheck.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Compute\",\n \"AssetService\": \"AWS Auto Scaling\",\n \"AssetComponent\": \"Autoscaling Group\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsAutoScalingAutoScalingGroup\",\n \"Id\": asgArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsAutoScalingAutoScalingGroup\": {\n \"HealthCheckType\": healthCheckType\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 DE.AE-4\",\n \"NIST CSF V1.1 DE.DP-4\",\n \"NIST SP 800-53 Rev. 4 AU-6\",\n \"NIST SP 800-53 Rev. 4 CA-2\",\n \"NIST SP 800-53 Rev. 4 CA-7\",\n \"NIST SP 800-53 Rev. 4 CP-2\",\n \"NIST SP 800-53 Rev. 4 IR-4\",\n \"NIST SP 800-53 Rev. 4 RA-3\",\n \"NIST SP 800-53 Rev. 4 RA-5\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC7.2\",\n \"AICPA TSC CC7.3\",\n \"ISO 27001:2013 A.16.1.2\",\n \"ISO 27001:2013 A.16.1.3\",\n \"ISO 27001:2013 A.16.1.4\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n # this is a passing check\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{asgArn}/asg-elb-asgs-elb-healthcheck-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": asgArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[Autoscaling.2] Autoscaling Groups with load balancer targets should use ELB health checks\",\n \"Description\": f\"Autoscaling group {asgName} has ELB or ELBv2 Targets and uses an ELB Health Check.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information about enabling ELB health checks refer to the Add Elastic Load Balancing health checks to an Auto Scaling group section of the Amazon EC2 Auto Scaling User Guide\",\n \"Url\": \"https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-elb-healthcheck.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Compute\",\n \"AssetService\": \"AWS Auto Scaling\",\n \"AssetComponent\": \"Autoscaling Group\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsAutoScalingAutoScalingGroup\",\n \"Id\": asgArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsAutoScalingAutoScalingGroup\": {\n \"HealthCheckType\": healthCheckType\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 DE.AE-4\",\n \"NIST CSF V1.1 DE.DP-4\",\n \"NIST SP 800-53 Rev. 
4 AU-6\",\n \"NIST SP 800-53 Rev. 4 CA-2\",\n \"NIST SP 800-53 Rev. 4 CA-7\",\n \"NIST SP 800-53 Rev. 4 CP-2\",\n \"NIST SP 800-53 Rev. 4 IR-4\",\n \"NIST SP 800-53 Rev. 4 RA-3\",\n \"NIST SP 800-53 Rev. 4 RA-5\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC7.2\",\n \"AICPA TSC CC7.3\",\n \"ISO 27001:2013 A.16.1.2\",\n \"ISO 27001:2013 A.16.1.3\",\n \"ISO 27001:2013 A.16.1.4\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def process_load_balancer ( vpc_conn,\n ec2_conn,\n elb_conn,\n cloudwatch_conn,\n r53_conn,\n iam_conn,\n vpc,\n base_name,\n base_topicarn,\n app_name,\n params,\n aws_account_type,\n app_visibility = None,\n public_dns_cname = None,\n public_tcp_ports = [],\n app_tcp_ports = [],\n use_ssl = False,\n ssl_hostname = None\n ) :\n\n if not app_name :\n app_name = params[ 'app-name' ]\n\n if app_visibility == 'PUBLIC' :\n subnet_type = 'PRIVATE' # Public apps have app LB's that sit private. The PROXY LB is public.\n elif app_visibility == 'HBO' :\n subnet_type = 'PUBLIC' # HBO apps have app LB's that site public.\n elif app_visibility == 'PRIVATE' :\n subnet_type = 'PRIVATE'\n else :\n subnet_type = params[ 'subnet-type' ]\n\n if not public_dns_cname :\n public_dns_cname = params.get( 'public-dns-alias' )\n\n create = params.get( 'create', 'NO' )\n if create == 'YES':\n print \"Creating load balancer security group.\"\n lb_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, app_name ))\n if not lb_secgrp :\n lb_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_lb_secgrp_name( base_name, app_name ),\n 'Controls access to the ' + app_name + ' LB' )\n remove_all_rules( ec2_conn, [ lb_secgrp ] , deep=True, base_name=base_name)\n ## reload the security group after removing the rules\n lb_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, app_name ))\n \n health_check_port = params.get( 'health-check-port', 8080 )\n health_check_url = params.get( 'health-check-url' )\n if not health_check_url :\n health_check_url = '/' + app_name + '/ping.html'\n\n ## Figure out if we need to find the SSL cert.\n ssl_cert_arn = None\n if use_ssl :\n cert = get_aws_ssl_certificate( iam_conn, ssl_cert_name )\n if cert :\n ssl_cert_arn = cert.arn\n else :\n print \"ERROR: Use SSL was specified, but could not find certificate matching host: \" + ssl_cert_name\n sys.exit( 5 )\n\n ## Generate the correct listener rules\n listeners = [ ( 80, 8080, 'http' ) ] # Default listener\n if params.get( 'listener-rules' ) :\n listeners = []\n for listener_rule in params[ 'listener-rules' ] :\n if params[ 'protocol' ] == 'https' :\n if not ssl_cert_arn :\n print \"ERRROR: https protocol specified, but use_ssl was NOT specified.\"\n sys.exit( 5 )\n listeners.append( ( params[ 'incoming-port' ],\n params[ 'outgoing-port' ],\n params[ 'protocol' ],\n ssl_cert_arn) )\n else :\n listeners.append( ( params[ 'incoming-port' ],\n params[ 'outgoing-port' ],\n params[ 'protocol' ] ) )\n ##\n ## FIX: There is a bug here where the public ports are supposed to be set on the proxy if\n ## app_visibility is PUBLIC. 
Don't have time to fix/regression test now...\n ##\n elif len( public_tcp_ports ) == len( app_tcp_ports ) and len( public_tcp_ports ) > 0 :\n listeners = []\n for public_port, app_port in zip( public_tcp_ports, app_tcp_ports ) :\n if public_port == 443 :\n if not ssl_cert_arn :\n print \"ERRROR: https protocol specified, but use_ssl was NOT specified.\"\n sys.exit( 5 )\n listeners.append( ( public_port, app_port, 'https', ssl_cert_arn ) )\n else :\n listeners.append( ( public_port, app_port, 'http' ) )\n\n\n print \"Creating load balancer.\"\n elb = create_elb( elb_conn,\n get_elb_name( base_name, app_name ),\n get_vpc_subnets( vpc_conn, vpc, subnet_type ),\n listeners,\n lb_secgrp,\n health_check_port,\n health_check_url,\n subnet_type == 'PUBLIC' )\n \n elb = find_elb(elb_conn, elb.name)\n \n if params.get( 'monitors' ) :\n add_monitors_to_elb( cloudwatch_conn, base_name, app_name, base_topicarn, params[ 'monitors' ] )\n\n if subnet_type == 'PUBLIC' :\n print \"Setting public DNS alias for load balancer.\"\n set_dns_cname( r53_conn, public_dns_cname, elb.dns_name )\n else :\n dns_alias = create_internal_elb_dns_name( base_name, app_name )\n print \"Configuring DNS name for load balancer: \" + dns_alias\n set_dns_cname( r53_conn, dns_alias, elb.dns_name )\n\n if app_visibility == 'HBO' :\n for port in public_tcp_ports :\n lb_secgrp.authorize( ip_protocol = \"tcp\",\n from_port = port,\n to_port = port,\n cidr_ip = hbo_cidr_list ) \n\n elif app_visibility == 'PUBLIC' :\n print \"Creating proxy load balancer.\"\n proxy_type = app_name + '-PX'\n proxy_secgrp = find_secgrp(ec2_conn, get_secgrp_name( base_name, proxy_type ))\n if not proxy_secgrp :\n proxy_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_secgrp_name( base_name, proxy_type ),\n 'Controls access to the ' + proxy_type + ' servers.' )\n \n lb_proxy_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, proxy_type ))\n \n if not lb_proxy_secgrp :\n lb_proxy_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_lb_secgrp_name( base_name, proxy_type ),\n 'Controls access to the ' + proxy_type + ' load balancer.' 
)\n\n remove_all_rules( ec2_conn, [ lb_proxy_secgrp, proxy_secgrp ], deep=True, base_name=base_name) \n ## reload the security group after removing the rules\n lb_proxy_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, proxy_type ))\n proxy_secgrp = find_secgrp(ec2_conn, get_secgrp_name( base_name, proxy_type ))\n\n \n ##\n ## FIX: In reality, we need to set the group rules between lb_proxy and proxy to match\n ## the listener ports that were passed in/configured.\n ##\n grant_ssh_access( ec2_conn, [ proxy_secgrp ], find_group( ec2_conn, base_name, 'NAT' ) )\n \n \n ## proxy server port is always 80\n ## updated by yliu, 2014/6/13\n ##if use_ssl :\n ## proxy_port = 443\n ##else :\n ## proxy_port = 80\n proxy_port = 80\n\n ## backend elb port that the proxy server passes request to \n if use_ssl :\n proxy_to_elb_port = 443\n else :\n proxy_to_elb_port = 80\n\n grant_grp_access( ec2_conn, [ lb_proxy_secgrp ], proxy_secgrp, proxy_port )\n grant_grp_access( ec2_conn, [ proxy_secgrp ], lb_secgrp, proxy_to_elb_port )\n for port in public_tcp_ports :\n lb_proxy_secgrp.authorize( ip_protocol = \"tcp\",\n from_port = port,\n to_port = port,\n cidr_ip = all_ip_cidr ) \n\n proxy_listeners = [ ( 80, 80, 'http' ) ]\n if use_ssl :\n proxy_listeners = [ ( 443, proxy_port, 'https', ssl_cert_arn ) ]\n\n proxy_elb = create_elb( elb_conn,\n get_elb_name( base_name, proxy_type ),\n get_vpc_subnets( vpc_conn, vpc, 'PUBLIC' ),\n proxy_listeners,\n lb_proxy_secgrp,\n proxy_port,\n '/robots.txt',\n True )\n add_monitors_to_elb( cloudwatch_conn, base_name, app_name, base_topicarn, proxy_lb_monitor_rules )\n\n if public_dns_cname :\n print \"Setting public DNS alias for load balancer.\"\n set_dns_cname( r53_conn, public_dns_cname, proxy_elb.dns_name )\n else :\n public_dns_cname = ''\n\n print \"Creating proxy instances.\"\n proxy_ami = get_ami_by_name( ec2_conn, proxy_ami_name )\n subnets = get_vpc_subnets( vpc_conn, vpc, 'PRIVATE' )\n\n ## direct proxy server to access backend elb over given protocol\n ## added by yliu, 2014/6/13\n if use_ssl :\n app_elb_protocol = 'https'\n else :\n app_elb_protocol = 'http'\n \n proxy_userdata = get_proxy_userdata( public_dns_cname, elb.dns_name, app_elb_protocol, app_name )\n proxy_instances = []\n \n proxy_keypair = get_keypair_name( aws_account_type, vpc.region.name, \"APACHE\" )\n \n for subnet in subnets : \n instance = launch_instance_vpc( ec2_conn,\n proxy_ami,\n base_name = base_name,\n instance_type = proxy_type,\n keypair = proxy_keypair,\n machine_type = 'm3.xlarge',\n security_group_id = proxy_secgrp.id ,\n subnet_id = subnet.id,\n user_data = proxy_userdata,\n public_ip = False )\n proxy_instances.append( instance )\n\n print \"Setting alarms on the proxy\"\n add_monitors_to_instance( cloudwatch_conn, base_name, instance.id, 'PROXY', base_topicarn, proxy_monitor_rules )\n \n proxy_instance_ids = [ i.id for i in proxy_instances ]\n\n print \"Waiting for proxy instances to be ready\"\n aws_waits( ec2_conn.get_only_instances, proxy_instance_ids )\n\n print \"Adding the new proxy instances into the load balancer.\"\n \n status = swap_elb_instances( elb_conn = elb_conn,\n elb = proxy_elb,\n new_instance_ids = proxy_instance_ids,\n terminate_old_instances = True,\n ec2_conn = ec2_conn,\n cloudwatch_conn = cloudwatch_conn,\n swap_smoothly = False )\n\n else :\n elb = find_elb( elb_conn, get_elb_name( base_name, app_name ) )\n print \"Processing load-balancer actions.\"\n for action_param in params.get( 'actions', [] ) :\n if action_param[ 'type' ] == 
'RESTART_INSTANCES' :\n restart_elb_instances( ec2_conn, elb_conn, elb, params.get( 'restart-smoothly', 'YES' ) == 'YES' )\n\n lb_secgrp = find_group( ec2_conn, base_name, get_lb_secgrp_type( app_name ) )\n dns_alias = None\n\n return ( elb, lb_secgrp, dns_alias )", "def clb_cross_zone_balancing_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n elb = session.client(\"elb\")\n # ISO Time\n iso8601Time = (datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat())\n for lb in describe_clbs(cache, session):\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(lb,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n clbName = lb[\"LoadBalancerName\"]\n clbArn = f\"arn:{awsPartition}:elasticloadbalancing:{awsRegion}:{awsAccountId}:loadbalancer/{clbName}\"\n dnsName = lb[\"DNSName\"]\n lbSgs = lb[\"SecurityGroups\"]\n lbSubnets = lb[\"Subnets\"]\n lbAzs = lb[\"AvailabilityZones\"]\n lbVpc = lb[\"VPCId\"]\n clbScheme = lb[\"Scheme\"]\n # Get Attrs\n response = elb.describe_load_balancer_attributes(LoadBalancerName=clbName)\n crossZoneCheck = str(\n response[\"LoadBalancerAttributes\"][\"CrossZoneLoadBalancing\"][\"Enabled\"]\n )\n if crossZoneCheck == \"False\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": clbArn + \"/classic-loadbalancer-cross-zone-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": clbArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"LOW\"},\n \"Confidence\": 99,\n \"Title\": \"[ELB.3] Classic load balancers should have cross-zone load balancing configured\",\n \"Description\": \"Classic load balancer \"\n + clbName\n + \" does not have cross-zone load balancing configured. Refer to the remediation instructions to remediate this behavior\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on cross-zone load balancing refer to the Configure Cross-Zone Load Balancing for Your Classic Load Balancer section of the Classic Load Balancers User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-crosszone-lb.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Networking\",\n \"AssetService\": \"AWS Elastic Load Balancer\",\n \"AssetComponent\": \"Classic Load Balancer\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsElbLoadBalancer\",\n \"Id\": clbArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsElbLoadBalancer\": {\n \"DnsName\": dnsName,\n \"Scheme\": clbScheme,\n \"SecurityGroups\": lbSgs,\n \"Subnets\": lbSubnets,\n \"VpcId\": lbVpc,\n \"AvailabilityZones\": lbAzs,\n \"LoadBalancerName\": clbName\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 ID.BE-5\",\n \"NIST CSF V1.1 PR.DS-4\",\n \"NIST CSF V1.1 PR.PT-5\",\n \"NIST SP 800-53 Rev. 4 AU-4\",\n \"NIST SP 800-53 Rev. 4 CP-2\",\n \"NIST SP 800-53 Rev. 4 CP-7\",\n \"NIST SP 800-53 Rev. 4 CP-8\",\n \"NIST SP 800-53 Rev. 4 CP-11\",\n \"NIST SP 800-53 Rev. 
4 CP-13\",\n \"NIST SP 800-53 Rev. 4 PL-8\",\n \"NIST SP 800-53 Rev. 4 SA-14\",\n \"NIST SP 800-53 Rev. 4 SC-5\",\n \"NIST SP 800-53 Rev. 4 SC-6\",\n \"AICPA TSC CC3.1\",\n \"AICPA TSC A1.1\",\n \"AICPA TSC A1.2\",\n \"ISO 27001:2013 A.11.1.4\",\n \"ISO 27001:2013 A.12.3.1\",\n \"ISO 27001:2013 A.17.1.1\",\n \"ISO 27001:2013 A.17.1.2\",\n \"ISO 27001:2013 A.17.2.1\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\",\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": clbArn + \"/classic-loadbalancer-cross-zone-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": clbArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[ELB.3] Classic load balancers should have cross-zone load balancing configured\",\n \"Description\": \"Classic load balancer \"\n + clbName\n + \" has cross-zone load balancing configured.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on cross-zone load balancing refer to the Configure Cross-Zone Load Balancing for Your Classic Load Balancer section of the Classic Load Balancers User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-crosszone-lb.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Networking\",\n \"AssetService\": \"AWS Elastic Load Balancer\",\n \"AssetComponent\": \"Classic Load Balancer\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsElbLoadBalancer\",\n \"Id\": clbArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsElbLoadBalancer\": {\n \"DnsName\": dnsName,\n \"Scheme\": clbScheme,\n \"SecurityGroups\": lbSgs,\n \"Subnets\": lbSubnets,\n \"VpcId\": lbVpc,\n \"AvailabilityZones\": lbAzs,\n \"LoadBalancerName\": clbName\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 ID.BE-5\",\n \"NIST CSF V1.1 PR.DS-4\",\n \"NIST CSF V1.1 PR.PT-5\",\n \"NIST SP 800-53 Rev. 4 AU-4\",\n \"NIST SP 800-53 Rev. 4 CP-2\",\n \"NIST SP 800-53 Rev. 4 CP-7\",\n \"NIST SP 800-53 Rev. 4 CP-8\",\n \"NIST SP 800-53 Rev. 4 CP-11\",\n \"NIST SP 800-53 Rev. 4 CP-13\",\n \"NIST SP 800-53 Rev. 4 PL-8\",\n \"NIST SP 800-53 Rev. 4 SA-14\",\n \"NIST SP 800-53 Rev. 4 SC-5\",\n \"NIST SP 800-53 Rev. 
4 SC-6\",\n \"AICPA TSC CC3.1\",\n \"AICPA TSC A1.1\",\n \"AICPA TSC A1.2\",\n \"ISO 27001:2013 A.11.1.4\",\n \"ISO 27001:2013 A.12.3.1\",\n \"ISO 27001:2013 A.17.1.1\",\n \"ISO 27001:2013 A.17.1.2\",\n \"ISO 27001:2013 A.17.2.1\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\",\n }\n yield finding", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def _cluster_status_action(self):\n yaml_load_err = \"Status of '{}' could not be loaded as yaml:\\n{}\"\n status_raw = zaza.model.run_action_on_leader(\"ovn-central\",\n \"cluster-status\")\n status_data = status_raw.data[\"results\"]\n # Verify expected items in the action result\n self.assertIn(\"ovnnb\", status_data)\n self.assertIn(\"ovnsb\", status_data)\n\n try:\n nb_status = yaml.safe_load(status_data[\"ovnnb\"])\n except yaml.YAMLError:\n self.fail(yaml_load_err.format(\"northbound-cluster\",\n status_data[\"ovnnb\"]))\n try:\n sb_status = yaml.safe_load(status_data[\"ovnsb\"])\n except yaml.YAMLError:\n self.fail(yaml_load_err.format(\"southbound-cluster\",\n status_data[\"ovnsb\"]))\n\n return sb_status, nb_status", "def pre_upgrade_checks(self):\n\n #HostOverview\n Logger.info(\"******************************************************************************************************************************************************\")\n Logger.info(\"\\t\\t\\t\\t\\t\\t\\tHOST OVERVIEW\")\n Logger.info(\"******************************************************************************************************************************************************\")\n print (\"\\n\")\n Logger.info(\"Ambari version\\t\\t:{0}\".format(self.ambari_version))\n\n #Check OS\n os = platform.dist()\n if os[1] != None:\n Logger.info(\"Operating System\\t\\t:{0} {1} - {2}\".format(os[0],os[1],os[2]))\n else:\n Logger.error(\"Unable to fetch OS details.\")\n self.terminate()\n return\n\n self.check_java_version()\n self.check_exactly_one_current_version()\n\n\n #Check if rack awareness is enabled ?\n rack_awareness = \"SELECT DISTINCT rack_info FROM hosts WHERE rack_info!='/default-rack';\"\n self.cursor.execute(rack_awareness)\n result = self.cursor.fetchone()\n if result is None or len(result) != 1:\n Logger.info(\"Rack Awareness ?\\t\\tNo\\n\")\n else:\n Logger.info(\"Rack Awareness ?\\t\\tYes\\n\")\n\n #Security Overview\n self.check_security()\n\n #Check High Availability configuration\n self.check_high_availability()\n\n #Check Metastores\n self.check_metastore()", "def test_get_event_admin_correct_event(self):\n self.seed_static_data()\n params = {'id': 1, 'event_id': 1}\n response = self.app.get('/api/v1/tag', headers=self.user2_headers, data=params)\n self.assertEqual(response.status_code, 403)", "def _enforce(self, req, action):\n try:\n self.policy.enforce(req.context, action, {})\n except exception.Forbidden:\n raise HTTPForbidden()", "def _validate_instance_bundles(instance_bundles, mode):\n for bundle in instance_bundles:\n if mode in ['ssh', 'sftp']:\n if not INSTANCE_ID_RE.match(bundle['instance_id']):\n raise AssertionError('Missing instance_id')", "def action2(self, e, arg1, arg2):\n print \"%s.action2() invoked with args (%s:%s, %s:%s)\" %\\\n (self.__machine_name, arg1, type(arg1), arg2, type(arg2))", 
"def test_otoroshi_controllers_adminapi_events_controller_alert_events(self):\n pass", "def ensure_load_balancer_created(vpc, security_group, subnet1, subnet2, target_group_arn, ssl_certificate_arn, environment):\n name = environment + '-load-balancer'\n\n # If it already exists, create returns the existing data\n response = ELB.create_load_balancer(\n Name=name,\n Subnets=[ subnet1.id, subnet2.id ],\n SecurityGroups=[ security_group.id ],\n IpAddressType='dualstack',\n Tags=[\n { 'Key': 'Name', 'Value': name },\n { 'Key': 'Environment', 'Value': environment }\n ]\n )\n\n load_balancer = response['LoadBalancers'][0]\n arn = load_balancer['LoadBalancerArn']\n\n # There seems to be no harm in creating listeners if they already exist\n ELB.create_listener(\n LoadBalancerArn=arn,\n Protocol='HTTP',\n Port=80,\n DefaultActions=[{ 'Type': 'forward', 'TargetGroupArn': target_group_arn } ]\n )\n\n ELB.create_listener(\n LoadBalancerArn=arn,\n Protocol='HTTPS',\n Port=443,\n SslPolicy='ELBSecurityPolicy-TLS-1-2-2017-01',\n Certificates=[ { 'CertificateArn': ssl_certificate_arn } ],\n DefaultActions=[ { 'Type': 'forward', 'TargetGroupArn': target_group_arn } ]\n )\n\n return load_balancer", "def validate_endpoint(cmd, namespace):\n n = namespace\n\n if not n.endpoint:\n n.endpoint = get_config_value(cmd, 'communication', 'endpoint', None)", "def register_elb_instances(elbclient, elbname, instance_ids):\r\n Instances = list(map(\r\n lambda x: {'InstanceId': x},\r\n instance_ids\r\n ))\r\n try:\r\n elbclient.register_instances_with_load_balancer(\r\n LoadBalancerName=elbname,\r\n Instances=Instances,\r\n DryRun=True\r\n )\r\n except Exception as ex:\r\n print(ex.message)\r\n return False\r\n return True", "def _check_type_compatibility(self, type_name1, type_name2,\n operation):\n if type_name1 != type_name2:\n raise TypeCompatibilityError(type_name1, type_name2, operation)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder(self):\n pass", "def test_must_be_associated(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n assoc.release()\n assert assoc.is_released\n assert not assoc.is_established\n with pytest.raises(RuntimeError):\n assoc.send_n_action(None, None, None, None)\n\n scp.shutdown()", "def test_multiple_aclhooks_2(self):\n self._test_hook_approval_sequence([True, None], True)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def test_basic_add_multiple_endpoint(self):\n args = self.get_args()\n config = self.create_config_file('l3out1')\n self.write_config_file(config, args)\n collector = execute_tool(args, test_mode=True)\n\n time.sleep(2)\n mac1 = '00:11:22:33:33:34'\n ip1 = '3.4.3.5'\n self.add_endpoint(mac1, ip1, 'intersite-testsuite', 'app', 'epg')\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite',\n 'l3out1', 'intersite-testsuite-app-epg'))\n 
self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite',\n 'l3out1', 'intersite-testsuite-app-epg'))\n\n config = self.create_config_file('l3out2')\n self.write_config_file(config, args)\n collector.reload_config()\n time.sleep(2)\n self.assertTrue(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite',\n 'l3out2', 'intersite-testsuite-app-epg'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite',\n 'l3out2', 'intersite-testsuite-app-epg'))", "def action_intersection(s1, s2):\n isect = s1 & s2\n L1 = [ ( (a.oid, a.index_oid), a) for a in s1 ]\n L2 = [ ( (a.oid, a.index_oid), a) for a in s2 ]\n ds1 = dict(L1)\n ds2 = dict(L2)\n for k1, action1 in ds1.items():\n action2 = ds2.get(k1)\n if action2 is not None:\n # replace action in union with correct one or conflict\n isect.add(which_action(action1, action2))\n return isect", "def test_basic_api(self):\n self.create_and_verify_stack(\"single/basic_api\")\n\n first_dep_ids = self.get_stack_deployment_ids()\n self.assertEqual(len(first_dep_ids), 1)\n\n self.set_template_resource_property(\"MyApi\", \"DefinitionUri\", self.get_s3_uri(\"swagger2.json\"))\n self.update_stack()\n\n second_dep_ids = self.get_stack_deployment_ids()\n self.assertEqual(len(second_dep_ids), 1)\n\n self.assertEqual(len(set(first_dep_ids).intersection(second_dep_ids)), 0)", "def test_deprecated_update_bs(self):\n with app.test_client() as client:\n self.login_client(client)\n\n res = client.put(\n '/v1/sim/configs/ae',\n data=json.dumps({}),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Method Not Allowed.')\n self.assertEqual(data['status'], 'fail')\n self.assertEqual(res.status_code, 405)", "def test_addAAtoAnnounceAEwithoutAA(self) -> None:\n\t\tdct = \t{ 'm2m:ae' : {\n\t\t\t\t \t'lbl':\t[ 'aLabel'],\n\t\t\t\t \t'aa': \t[ 'lbl' ]\n\t\t\t\t}}\n\t\tr, rsc = UPDATE(aeURL, ORIGINATOR, dct)\n\t\tself.assertEqual(rsc, RC.updated)\n\t\tself.assertIsNotNone(findXPath(r, 'm2m:ae/lbl'))", "def test_arg_conflict(self):\n oim = OIM()\n rc, _, _, msg = oim.request('--hostname', 'test.' 
+ DOMAIN, '--csr', 'foo')\n self.assertEqual(rc, 2, \"CSR and hostname options do not conflict\\n%s\" % msg)", "def test_subscriber_access_for_two_vsg_services(self):", "def deploy_handler_with_advance_op(self):\n bucket = [\"src_bucket\", \"metadata\",\"advance_op\"]\n self.check_for_buckets(bucket)\n self.__deploy_function(EXPORTED_FUNCTION.ADVANCE_OP)", "def check(self):\r\n for action in self._actions:\r\n action.check()", "def test_get_actions_with_arn_type_and_access_level_case_3(self):\n desired_output = [\n 's3:PutAccountPublicAccessBlock',\n 's3:PutAccessPointPublicAccessBlock'\n ]\n output = get_actions_with_arn_type_and_access_level(\n # \"ram\", \"resource-share\", \"Write\"\n \"s3\", \"*\", \"Permissions management\"\n )\n print(output)\n for item in desired_output:\n self.assertTrue(item in output)\n # self.assertListEqual(desired_output, output)", "def check_bundle_overlaps(\n request: Request, policy: RequestPolicy, logger: Logger\n) -> None:\n if not policy.check_bundle_overlap:\n logger.warning(\n \"KSR-POLICY-SIG-OVERLAP: Disabled by policy (check_bundle_overlap)\"\n )\n return\n\n logger.debug(\"Verifying request {} bundle times and overlap:\".format(request.id))\n for i in range(len(request.bundles)):\n overlap_str = \"-\"\n previous = request.bundles[i - 1]\n this = request.bundles[i]\n if i:\n overlap = previous.expiration - this.inception\n overlap_str = fmt_timedelta(overlap)\n logger.debug(\n \"{num:<2} {id:8} {inception:19} {expiration:20} {overlap}\".format(\n num=i + 1,\n id=this.id[:8],\n inception=fmt_timestamp(this.inception),\n expiration=fmt_timestamp(this.expiration),\n overlap=overlap_str,\n )\n )\n\n # check that bundles overlap, and with how much\n for i in range(1, len(request.bundles)):\n previous = request.bundles[i - 1]\n this = request.bundles[i]\n if this.inception > previous.expiration:\n raise KSR_POLICY_SIG_OVERLAP_Violation(\n f'Bundle \"{this.id}\" does not overlap with previous bundle '\n f'\"{previous.id}\" ({this.inception} > {previous.expiration})'\n )\n overlap = previous.expiration - this.inception\n if overlap < request.zsk_policy.min_validity_overlap:\n raise KSR_POLICY_SIG_OVERLAP_Violation(\n 'Bundle \"{}\" overlap {} with \"{}\" is < claimed minimum {}'.format(\n fmt_bundle(this),\n fmt_timedelta(overlap),\n fmt_bundle(previous),\n fmt_timedelta(request.zsk_policy.min_validity_overlap),\n )\n )\n if overlap > request.zsk_policy.max_validity_overlap:\n raise KSR_POLICY_SIG_OVERLAP_Violation(\n 'Bundle \"{}\" overlap {} with \"{}\" is > claimed maximum {}'.format(\n fmt_bundle(this),\n fmt_timedelta(overlap),\n fmt_bundle(previous),\n fmt_timedelta(request.zsk_policy.max_validity_overlap),\n )\n )\n logger.info(\n f\"KSR-POLICY-SIG-OVERLAP: All bundles overlap in accordance with the stated ZSK operator policy\"\n )", "def _pre_flight_checks(self) -> Squonk2AgentRv:\n\n # If a Squonk2Org record exists its UUID cannot have changed.\n # We cannot change the organisation once deployed. The corresponding Units,\n # Products and Projects are organisation-specific. The Squonk2Org table\n # records the organisation ID and the Account Server URL where the ID\n # is valid. 
None of these values can change once deployed.\n\n assert self.__configuration_checked\n assert self.__configured\n\n if self.__org_record and self.__org_record.uuid != self.__CFG_SQUONK2_ORG_UUID:\n msg: str = f'Configured Squonk2 Organisation ({self.__CFG_SQUONK2_ORG_UUID})'\\\n f' does not match pre-existing record ({self.__org_record.uuid})'\n _LOGGER.error(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n # OK, so the ORG exists and its UUID has not changed.\n # Is it known to the configured AS?\n if not self._get_squonk2_owner_tokens():\n msg = 'Failed to get AS or DM token for organisation owner'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n _LOGGER.debug('Got Squonk2 API Access Tokens')\n\n # Get the ORG from the AS API.\n # If it knows the org the response will be successful,\n # and we'll also have the Org's name.\n as_o_rv = AsApi.get_organisation(self.__org_owner_as_token,\n org_id=self.__CFG_SQUONK2_ORG_UUID)\n if not as_o_rv.success:\n msg = 'Failed to get AS Organisation'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n # The org is known to the AS.\n # Get the AS API version (for reference)\n as_v_rv: AsApiRv = AsApi.get_version()\n if not as_v_rv.success:\n msg = 'Failed to get version from AS'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n as_version: str = as_v_rv.msg['version']\n _LOGGER.debug('Happy with Squonk2 Account Server (as_version=%s)', as_version)\n\n # Everything seems to be OK but we might not have an organisation in this\n # call (it may be the first call of the instance lifetime).\n # So, if there's no Squonk2Org record, create one,\n # recording the ORG ID and the AS and version we used.\n if not self.__org_record:\n assert self.__CFG_SQUONK2_ASAPI_URL\n _LOGGER.info('Creating NEW Squonk2Org record for %s.'\n ' as-url=%s as-org=\"%s\" as-version=%s',\n self.__CFG_SQUONK2_ORG_UUID,\n self.__CFG_SQUONK2_ASAPI_URL,\n as_o_rv.msg['name'],\n as_version)\n self.__org_record = Squonk2Org(uuid=self.__CFG_SQUONK2_ORG_UUID,\n name=as_o_rv.msg['name'],\n as_url=self.__CFG_SQUONK2_ASAPI_URL,\n as_version=as_version)\n self.__org_record.save()\n _LOGGER.info('Created Squonk2Org record for %s',\n self.__CFG_SQUONK2_ORG_UUID)\n else:\n _LOGGER.debug('Squonk2Org for %s \"%s\" already exists - nothing to do',\n self.__org_record.uuid,\n self.__org_record.name)\n\n # Organisation is known to AS, and it hasn't changed.\n _LOGGER.debug('Successful pre-flight checks')\n return SuccessRv", "def __validate_upgrade_type(self):\n if self.upgrade_type not in self.upgrade_function.keys():\n self.fail(\"Unsupported upgrade_type: %s\" % self.upgrade_type)", "def legal_actions(self):\n raise NotImplementedError", "def __validate_instance_id(self, instance_ids):\n try:\n if instance_ids:\n for id in instance_ids:\n self.euca.validate_instance_id(id)\n except InstanceValidationError:\n sys.exit(1)", "def test_endpoints_urljoin(self):\n for endpoint in self.client.user.endpoints.values():\n assert not endpoint.startswith('/')", "def validate(self):\n # double-checks node/device names matches dictionary keys\n for name, node in self.nodes.iteritems():\n if name != node.name:\n raise ConfigurationNameMismatchError(name, node.name)\n self.validateName(node)\n\n # make sure system-manager alias exists\n if \"system-manager\" not in self.aliases:\n raise ConfigurationMissingSystemManagerAliasError()\n\n # make sure the node that the alias points to exists\n for alias, nodeName in 
self.aliases.iteritems():\n if nodeName not in self.nodes:\n raise ConfigurationMissingAliasNodeError(alias, nodeName)\n\n # make sure there is one and only one active node\n activeNodes = [node.name for node in self.nodes.values() if node.role == Roles.ACTIVE]\n if not activeNodes:\n raise ConfigurationMissingActiveNodeError()\n if len(activeNodes) > 1:\n raise ConfigurationTooManyActiveNodesError(activeNodes)", "def has_edge(self, v1, v2):\n\n return v1 in self.get_reachables(v2[0], v2[1])", "def test_basic_remove_one_of_multiple_endpoint(self):\n args = self.get_args()\n config = self.create_config_file()\n self.write_config_file(config, args)\n execute_tool(args, test_mode=True)\n\n time.sleep(2)\n mac1 = '00:11:22:33:33:34'\n ip1 = '3.4.3.5'\n self.add_endpoint(mac1, ip1, 'intersite-testsuite', 'app', 'epg')\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry_with_provided_contract(mac1, ip1, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg', 'contract-1'))\n self.assertTrue(self.verify_remote_site_has_entry_with_provided_contract(mac2, ip2, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg', 'contract-1'))\n\n self.remove_endpoint(mac1, ip1, 'intersite-testsuite', 'app', 'epg')\n self.assertFalse(self.verify_remote_site_has_entry_with_provided_contract(mac1, ip1, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg', 'contract-1'))\n self.assertTrue(self.verify_remote_site_has_entry_with_provided_contract(mac2, ip2, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg', 'contract-1'))", "def validate_availability_zones(self, context, resource_type,\n availability_zones):\n if not availability_zones:\n return\n if len(availability_zones) > 1:\n raise asr1k_exc.OnlyOneAZHintAllowed()\n if resource_type == 'router':\n agent_type = constants.AGENT_TYPE_ASR1K_L3\n else:\n return\n azs = n_agents_db.get_availability_zones_by_agent_type(\n context, agent_type, availability_zones)\n diff = set(availability_zones) - set(azs)\n if diff:\n exc = az_exc.AvailabilityZoneNotFound(availability_zone=diff.pop())\n if resource_type == 'router' and cfg.CONF.asr1k.ignore_invalid_az_hint_for_router:\n LOG.warning(\"Skipping (otherwise fatal) AZ hint validation for router (orig error: %s)\", exc)\n else:\n raise exc", "def test_services_with_multiple_pages_apigateway(self):\n # API Gateway Management V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html\n self.assertTrue(\"apigateway:AddCertificateToDomain\" in self.all_actions)\n self.assertTrue(\"apigateway:RemoveCertificateFromDomain\" in self.all_actions)\n self.assertTrue(\"apigateway:SetWebACL\" in self.all_actions)\n # API Gateway Management V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html\n # API Gateway V2 doesn't have any unique actions in but it does have some unique resource types. 
Let's make sure those resource types are in the IAM Definition.\n # Resource types unique to API Gateway V2:\n resource_types = get_arn_types_for_service(\"apigateway\")\n resource_types = list(resource_types.keys())\n self.assertTrue(\"AccessLogSettings\" in resource_types)\n # Resource types unique to API Gateway V1:\n self.assertTrue(\"RestApi\" in resource_types)", "def test_parse_request_type_2b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=False)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def verify_vn_in_control_nodes(self):\n self.api_s_route_targets = self.api_s_inspect.get_cs_route_targets(\n vn_id=self.uuid)\n\n self.cn_verification_flag = True\n for cn in self.inputs.bgp_ips:\n cn_config_vn_obj = self.cn_inspect[cn].get_cn_config_vn(\n vn_name=self.vn_name, project=self.project_name, domain=self.domain_name)\n if not cn_config_vn_obj:\n self.logger.warn('Control-node %s does not have VN %s info ' %\n (cn, self.vn_name))\n self.cn_verification_flag = self.cn_verification_flag and False\n return False\n self.logger.debug(\"Control-node %s : VN object is : %s\" %\n (cn, cn_config_vn_obj))\n if self.vn_fq_name not in cn_config_vn_obj['node_name']:\n self.logger.debug(\n 'IFMAP View of Control-node does not yet have the VN detail',\n ' of %s' % (self.vn_fq_name))\n self.cn_verification_flag = self.cn_verification_flag and False\n return False\n # TODO UUID verification to be done once the API is available\n cn_object = self.cn_inspect[\n cn].get_cn_routing_instance(ri_name=self.ri_name)\n if not cn_object:\n self.logger.debug(\n 'No Routing Instance found in CN %s with name %s' %\n (cn, self.ri_name))\n self.cn_verification_flag = self.cn_verification_flag and False\n return False\n try:\n rt_names = self.api_s_inspect.get_cs_rt_names(\n self.api_s_route_targets)\n if cn_object['export_target'][0] not in rt_names:\n self.logger.debug(\n \"Route target %s for VN %s is not found in Control-node %s\" %\n (rt_names, self.vn_name, cn))\n self.cn_verification_flag = self.cn_verification_flag and False\n return False\n except Exception as e:\n self.logger.exception(\n \"Got exception from control node verification as %s\" % (e))\n self.cn_verification_flag = self.cn_verification_flag and False\n return False\n # end for\n self.logger.info(\n 'On all control nodes, Config, RI and RT verification for VN %s '\n 'passed' % (self.vn_name))\n self.cn_verification_flag = self.cn_verification_flag and True\n return True", "def check_if_can_evolve(self):\n # This sounds similar to generate actions\n pass", "def test_auto_update_bs_invalid_payloads(self):\n with app.test_client() as client:\n self.login_client(client)\n\n # Empty payload\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps({}),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Invalid payload.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # No alloy store payload\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps({'method': 'Li98'}),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Alloy store required.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # No method payload\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps({'alloy_store': ALLOY_STORE}),\n content_type='application/json'\n )\n data = 
json.loads(res.data.decode())\n self.assertEqual(data['message'], 'Method required.')\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)\n\n # Wrong method payload\n res = client.post(\n '/v1/sim/configs/bs',\n data=json.dumps(\n {\n 'method': 'LiAndKirkaldy',\n 'alloy_store': ALLOY_STORE\n }\n ),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n msg = 'Method must be one of [\"Li98\" | \"Kirkaldy83\"].'\n self.assertEqual(data['message'], msg)\n self.assertEqual(data['status'], 'fail')\n self.assert400(res)", "def test_create_deployment_config_for_all_namespaces(self):\n pass", "def test_ovs_ovn_migration(self):\n # The setUp method of this test class will perform the migration steps.\n # The tests.yaml is programmed to do further validation after the\n # migration.\n\n # Reset the n-gw and n-ovs instance-mtu configuration option so it does\n # not influence how further tests are executed.\n reset_config_keys = ['instance-mtu']\n for app in ('neutron-gateway', 'neutron-openvswitch'):\n try:\n zaza.model.reset_application_config(app, reset_config_keys)\n logging.info('Reset configuration to default on \"{}\" for \"{}\"'\n .format(app, reset_config_keys))\n except KeyError:\n pass\n zaza.model.wait_for_agent_status()\n zaza.model.block_until_all_units_idle()\n zaza.model.wait_for_application_states(\n states=self.target_deploy_status)", "def test_removeLBLfromAnnouncedAE(self) -> None:\n\t\tdct = \t{ 'm2m:ae' : {\n\t\t\t\t\t'lbl': None\n\t\t\t\t}}\n\t\tr, rsc = UPDATE(aeURL, ORIGINATOR, dct)\n\t\tself.assertEqual(rsc, RC.updated)\n\t\tself.assertIsNone(findXPath(r, 'm2m:ae/lbl'))\n\n\t\t# retrieve the announced AE\n\t\tr, rsc = RETRIEVE(f'{REMOTEURL}/~{TestRemote_Annc.remoteAeRI}', ORIGINATOR)\n\t\tself.assertEqual(rsc, RC.OK)\n\t\tself.assertIsNone(findXPath(r, 'm2m:aeA/lbl'))", "def test_validate_available_choice_2(self):\n self.assertRaises(\n InvalidStatusOperationError,\n validate_available_choice,\n BeerStyle,\n str(BeerStyle.LAGER.value),\n )" ]
[ "0.7524008", "0.5502818", "0.53605366", "0.5155062", "0.50617534", "0.5045359", "0.49514994", "0.4864293", "0.48232004", "0.47804296", "0.47697622", "0.4720676", "0.47133136", "0.4713231", "0.47080573", "0.4707765", "0.47045755", "0.46887946", "0.46873748", "0.4683964", "0.4671751", "0.4657946", "0.4651486", "0.46361938", "0.46213722", "0.46195677", "0.46189997", "0.4603296", "0.45958737", "0.45919067", "0.45913562", "0.45823064", "0.45797512", "0.45718417", "0.45703894", "0.4569733", "0.45643848", "0.45613512", "0.45440885", "0.45427367", "0.45388874", "0.45368102", "0.45358187", "0.4535815", "0.45340908", "0.45334452", "0.45303336", "0.45272505", "0.45249632", "0.45246467", "0.45246342", "0.45188", "0.45182008", "0.45153683", "0.45148826", "0.45106664", "0.45105156", "0.45074973", "0.4497637", "0.4490457", "0.44838604", "0.44817072", "0.44788095", "0.44742927", "0.44736236", "0.4468417", "0.44683895", "0.44651064", "0.44619277", "0.44593075", "0.44590977", "0.44584078", "0.44582286", "0.44531772", "0.44520825", "0.44441378", "0.44371414", "0.44346932", "0.44321492", "0.44297233", "0.44296068", "0.44272625", "0.44264373", "0.44264194", "0.44207984", "0.44207436", "0.44127837", "0.44119453", "0.44101238", "0.4408478", "0.44078198", "0.4406142", "0.44057536", "0.44037056", "0.44011372", "0.43953347", "0.4390378", "0.4382778", "0.43789008", "0.43716228" ]
0.5852793
1
Ensure that lex v1 and lex v2 actions are both present in the lex namespace
def test_services_with_multiple_pages_lex(self):
    # Lex V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlex.html
    self.assertTrue("lex:DeleteUtterances" in self.all_actions)
    # Lex V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlexv2.html
    self.assertTrue("lex:ListBotLocales" in self.all_actions)
    results = get_actions_for_service("lex")
    actions = [
        "lex:CreateIntentVersion",
        "lex:CreateSlotTypeVersion",
        "lex:DeleteBotChannelAssociation",
        "lex:DeleteIntentVersion",
        "lex:DeleteSlotTypeVersion",
        "lex:GetBot",
        "lex:GetBotAlias",
        "lex:GetBotAliases",
        "lex:GetBotChannelAssociation",
        "lex:GetBotChannelAssociations",
        "lex:GetBotVersions",
        "lex:GetBots",
        "lex:GetBuiltinIntent",
        "lex:GetBuiltinIntents",
        "lex:GetBuiltinSlotTypes",
        "lex:GetExport",
        "lex:GetImport",
        "lex:GetIntent",
        "lex:GetIntentVersions",
        "lex:GetIntents",
        "lex:GetMigration",
        "lex:GetMigrations",
        "lex:GetSlotType",
        "lex:GetSlotTypeVersions",
        "lex:GetSlotTypes",
        "lex:GetUtterancesView",
        "lex:PostContent",
        "lex:PostText",
        "lex:PutBot",
        "lex:PutBotAlias",
        "lex:PutIntent",
        "lex:PutSlotType",
        "lex:StartMigration",
    ]
    for action in actions:
        self.assertTrue(action in results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_user_grammar_actions():\n grammar = \"\"\"\n S: A B C;\n @nonterm_action\n C: A B;\n A: \"a\";\n @term_action\n B: \"b\";\n \"\"\"\n\n called = [False, False]\n\n def nonterm_action(_, __):\n called[0] = True\n\n def term_action(_, __):\n called[1] = True\n\n my_actions = {\n \"nonterm_action\": nonterm_action,\n \"term_action\": term_action,\n }\n\n g = Grammar.from_string(grammar)\n p = Parser(g, actions=my_actions)\n assert p.parse(\"a b a b\")\n assert all(called)", "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def dispatch(lex):\n\n if lex.intent == \"BasicHelp\":\n return help_user(lex)\n else:\n return not_understood(lex)", "def InitActionCheck(initActionList, init):\n for actions in initActionList:\n action_class = getNameFromIRI(actions.is_a[0].iri)\n # if the action is a SpeedAction class\n if action_class == \"SpeedAction\":\n action_entity_ref = getNameFromIRI(actions.has_entity_ref[0].iri)\n target_speed = actions.has_target_speed[0]\n ontology_transition_dynamics = actions.has_transition_dynamics[0]\n xosc_transition_dynamics = checkTransitionDynamics(ontology_transition_dynamics)\n init.add_init_action(action_entity_ref, xosc.AbsoluteSpeedAction(target_speed, xosc_transition_dynamics))\n continue\n #if the action is TeleportAction\n if action_class == \"TeleportAction\":\n action_entity_ref = getNameFromIRI(actions.has_entity_ref[0].iri)\n # if the action has position as parameter set\n s: int = 0\n offset = 0\n lane_id = 0\n road_id = 0\n if len(actions.has_position) != 0:\n position = actions.has_position[0]\n if len(position.has_s) != 0:\n s = position.has_s[0]\n\n if len(position.has_offset) != 0:\n offset = position.has_offset[0]\n\n if len(position.has_lane_id) != 0:\n lane_id = position.has_lane_id[0]\n\n if len(position.has_road_id) != 0:\n road_id = position.has_road_id[0]\n\n init.add_init_action(action_entity_ref, xosc.TeleportAction(xosc.LanePosition(s, offset, lane_id, road_id)))\n continue\n if action_class == \"EnvironmentAction\": # if the action is an EnvironmentAction\n xosc_environment_action = checkEnvironmentAction(actions)\n init.add_global_action(xosc_environment_action)\n return init", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def check(self):\n illegalNamespaces = list()\n\n progStandard = re.compile(\"^[A-Z]{4}[0-9]{2}_[0-9]{3}$\")\n progShot = re.compile(\"^SH[0-9]{4}_[0-9]{3}$\")\n\n for namespaces in pm.namespaceInfo(listOnlyNamespaces=True, internal=False, recurse=True):\n for namespace in namespaces.split(\":\"):\n if not 
progStandard.match(namespace) and not progShot.match(namespace) not in [\"UI\", \"shared\"]:\n illegalNamespaces.append(namespace)\n\n if not illegalNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = illegalNamespaces\n for illegalNamespace in illegalNamespaces:\n self.addError(\"%s is a illegal namespace\" % illegalNamespace)\n self.errorMessage = \"%s illegal namespace\" % (\n len(illegalNamespaces))", "def verifyActionCenterRts():\n pass", "def __initSpellingActions(self):\n self.spellingActGrp = createActionGroup(self)\n \n self.spellCheckAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Check spelling'),\n UI.PixmapCache.getIcon(\"spellchecking.png\"),\n QCoreApplication.translate(\n 'ViewManager', 'Check &spelling...'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Shift+F7\", \"Spelling|Spell Check\")),\n 0,\n self.spellingActGrp, 'vm_spelling_spellcheck')\n self.spellCheckAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Perform spell check of current editor'))\n self.spellCheckAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Check spelling</b>\"\"\"\n \"\"\"<p>Perform a spell check of the current editor.</p>\"\"\"\n ))\n self.spellCheckAct.triggered.connect(self.__spellCheck)\n self.spellingActions.append(self.spellCheckAct)\n \n self.autoSpellCheckAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Automatic spell checking'),\n UI.PixmapCache.getIcon(\"autospellchecking.png\"),\n QCoreApplication.translate(\n 'ViewManager', '&Automatic spell checking'),\n 0, 0,\n self.spellingActGrp, 'vm_spelling_autospellcheck', True)\n self.autoSpellCheckAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', '(De-)Activate automatic spell checking'))\n self.autoSpellCheckAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Automatic spell checking</b>\"\"\"\n \"\"\"<p>Activate or deactivate the automatic spell checking\"\"\"\n \"\"\" function of all editors.</p>\"\"\"\n ))\n self.autoSpellCheckAct.setChecked(\n Preferences.getEditor(\"AutoSpellCheckingEnabled\"))\n self.autoSpellCheckAct.triggered.connect(\n self.__setAutoSpellChecking)\n self.spellingActions.append(self.autoSpellCheckAct)\n \n self.__enableSpellingActions()", "def test_unsupported_action(self):\n lang, _ = Language.objects.get_or_create(name=\"Test Language\", code=\"text-x-lang\")\n\n status = notify_external_apps(instance=lang, action=\"TEST\")\n self.assertFalse(status.success)\n self.assertGreater(len(status.message), 0)\n\n status = notify_external_apps(instance=lang, action=\"DELETE\")\n self.assertFalse(status.success)\n self.assertGreater(len(status.message), 0)\n\n status = notify_external_apps(instance=lang, action=\"CREATE\")\n self.assertFalse(status.success)\n self.assertGreater(len(status.message), 0)", "def test_lti20_rest_good_dispatch(self):\r\n for ginput, expected in self.GOOD_DISPATCH_INPUTS:\r\n self.assertEquals(self.xmodule.parse_lti_2_0_handler_suffix(ginput), expected)", "def test_parses_ambiguous_grammars(self):\n lexed_positive = [\n Token(\n value=\"Hegh\",\n token_type=AKT.VERB,\n line_number=0,\n ),\n Token(\n value=\"be'\",\n token_type=AKT.BE,\n line_number=0,\n ),\n ]\n self.assertTrue(parse(AmbiguousKlingonGrammar, lexed_positive))\n\n lexed_negative = [\n Token(\n value=\"Hegh\",\n token_type=AKT.VERB,\n line_number=0,\n ),\n Token(\n value=\"be'\",\n token_type=AKT.BE,\n line_number=0,\n ),\n Token(\n value=\"be'\",\n token_type=AKT.BE,\n 
line_number=0,\n ),\n ]\n self.assertTrue(parse(AmbiguousKlingonGrammar, lexed_negative))", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def test_create_resource_access_review_for_all_namespaces(self):\n pass", "def actions():\n pass", "def legal_actions(self):\n raise NotImplementedError", "def _syn_common_checks(self: SynopsisImpl, linter: Linter, cursor: Cursor, docstring: PetscDocStringImpl) -> None:\n items = self.items\n name_loc, symbol_name = items['name']\n assert name_loc is not None # pacify type checkers\n self._check_symbol_matches_synopsis_name(docstring, cursor, name_loc, symbol_name)\n self._check_synopsis_description_separator(docstring, name_loc.start.line)\n self._check_blurb_length(docstring, cursor, items['blurb'])\n return", "def actions() -> None:\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def testverb(self):\r\n from pydsl.Parser.Parser import parse, parser_factory\r\n tokelist = [x.content for x in EncodingLexer('utf8')(p0good)]\r\n self.assertTrue(parse(productionset0, tokelist , \"default\"))\r\n self.assertTrue(parse(productionset0, tokelist , \"lr0\"))\r\n self.assertTrue(parse(productionset0, tokelist , \"ll1\"))\r\n tokelist = [x.content for x in EncodingLexer('utf8')(p0bad)]\r\n self.assertFalse(parse(productionset0, tokelist , \"default\"))\r\n self.assertFalse(parse(productionset0, tokelist , \"lr0\"))\r\n self.assertFalse(parse(productionset0, tokelist , \"ll1\"))", "def _check_tokens(number_token=None, name_token=None, gpe_token=None):\n assert number_token is None or number_token == number_token.lower(), \\\n \"Tokens need to be lowercase: %s\" % number_token\n assert 
name_token is None or name_token == name_token.lower(), \\\n \"Tokens need to be lowercase: %s\" % name_token\n assert gpe_token is None or gpe_token == gpe_token.lower(), \\\n \"Tokens need to be lowercase: %s\" % gpe_token", "def validate_syntax(self):\n resolves_present = False\n uses_present = False\n if not self.wf.get('workflow', None):\n pu.fail('A workflow block must be present\\n')\n else:\n for _, wf_block in dict(self.wf['workflow']).items():\n if wf_block.get('resolves', None):\n resolves_present = True\n if not resolves_present:\n pu.fail('[resolves] attribute must be present\\n')\n if not self.wf.get('action', None):\n pu.fail('Atleast one action block must be present\\n')\n else:\n for _, a_block in self.wf['action'].items():\n if a_block.get('uses', None):\n uses_present = True\n if not uses_present:\n pu.fail('[uses] attribute must be present\\n')", "def test_extra_roles(modpath):\n retcode, out = flake8(\n join(modpath, \"RST304/sphinx-roles.py\"),\n roles=\"need,need_incoming\",\n )\n assert not retcode, out", "def get_legal_actions(self):\n pass", "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def test_get_actions_with_arn_type_and_access_level_case_4(self):\n desired_output = [\n 'secretsmanager:ListSecrets'\n ]\n output = get_actions_with_arn_type_and_access_level(\n \"secretsmanager\", \"*\", \"List\"\n )\n self.assertListEqual(desired_output, output)", "def test_multiple_aclhooks_2(self):\n self._test_hook_approval_sequence([True, None], True)", "def test_issue4104(en_lookup_nlp):\n words = [\"dry\", \"spun\", \"spun-dry\"]\n doc = Doc(en_lookup_nlp.vocab, words=words)\n lemmatizer = en_lookup_nlp.get_pipe(\"lemmatizer\")\n doc = lemmatizer(doc)\n assert [token.lemma_ for token in doc] == [\"dry\", \"spin\", \"spin-dry\"]", "def test_create_local_resource_access_review_for_all_namespaces(self):\n pass", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n 
data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def test_unknown_action(self):\n exit_string = actions.main([\"foo\"])\n self.assertEqual(\"Action foo undefined\", exit_string)", "def test_accepted(self):\n actions = signoff_actions(appversions={\"code\": \"fx1.0\"},\n locales={\"code\": \"de\"})\n actions = list(actions)\n eq_(len(actions), 1)\n so = Signoff.objects.get(action=actions[0][0])\n eq_(so.push.tip.shortrev, \"l10n de 0002\")\n eq_(so.locale.code, \"de\")\n eq_(so.action_set.count(), 2)", "def test_create_namespaced_resource_access_review(self):\n pass", "def test_command_verify():\n wozardry.parse_args([\"verify\", kValid1])\n wozardry.parse_args([\"verify\", kValid2])", "def test_imports_on_global_namespace_without_path(Script):\n completions = Script(\"import operator\").completions()\n assert [c.name for c in completions] == ['operator']\n completions = Script(\"import operator\", path='example.py').completions()\n assert [c.name for c in completions] == ['operator']\n\n # the first one has a path the second doesn't\n completions = Script(\"import keyword\", path='example.py').completions()\n assert [c.name for c in completions] == ['keyword']\n completions = Script(\"import keyword\").completions()\n assert [c.name for c in completions] == ['keyword']", "def _get_legal_actions(self):\n raise NotImplementedError", "def test_incompatible_rules():\n\n grammar = \"\"\"\n A: B | C;\n B: 'enumeration';\n C: value=INT;\n \"\"\"\n with pytest.raises(TextXSyntaxError):\n metamodel_from_str(grammar)", "def test_create_subject_access_review_for_all_namespaces(self):\n pass", "def test_subintent_response_matches_with_action(project: Text):\n config_path = os.path.join(project, DEFAULT_CONFIG_PATH)\n domain_path = \"data/test_domains/simple_retrieval_intent.yml\"\n data_path = \"data/test/simple_retrieval_intent_nlu.yml\"\n importer = TrainingDataImporter.load_from_dict(\n {}, config_path, domain_path, data_path\n )\n\n domain = importer.get_domain()\n # Test retrieval intent response is matched correctly to actions\n # ie. 
utter_chitchat/faq response compatible with action utter_chitchat\n with pytest.warns(None) as record:\n domain.check_missing_responses()\n assert not record", "def check_num_arguments(self):\n if len(self.args) != 2:\n self.cli_parser.error(\"Please provide paths to an \"\n \"interactions file and an annotations file.\")", "def test_man1ext(self):\n self.chck_triple('man1ext')", "def test_get_actions_with_arn_type_and_access_level_case_3(self):\n desired_output = [\n 's3:PutAccountPublicAccessBlock',\n 's3:PutAccessPointPublicAccessBlock'\n ]\n output = get_actions_with_arn_type_and_access_level(\n # \"ram\", \"resource-share\", \"Write\"\n \"s3\", \"*\", \"Permissions management\"\n )\n print(output)\n for item in desired_output:\n self.assertTrue(item in output)\n # self.assertListEqual(desired_output, output)", "def different_actions(old_action: PersistentAction, new_action: PersistentAction) -> bool:\n if Invocation.different_required(old_action.required, new_action.required):\n return True\n\n if old_action.command != new_action.command:\n if old_action.command is None:\n old_action_kind = \"a phony command\"\n else:\n old_action_kind = \"the command: \" + \" \".join(old_action.command)\n\n if new_action.command is None:\n new_action_kind = \"a phony command\"\n else:\n new_action_kind = \"the command: \" + \" \".join(new_action.command)\n\n Logger.why(f\"Must run actions because changed {old_action_kind} \" f\"into {new_action_kind}\")\n return True\n\n return False", "def check_semver():\n body: t.Any = request.json\n check_error({'input': {'old': {}, 'new': {}}}, body)\n response_new = rpc_search({'input': body['input']['new']})\n response_old = rpc_search({'input': body['input']['old']})\n\n modules_new = response_new['yang-catalog:modules']['module']\n modules_old = response_old['yang-catalog:modules']['module']\n\n if len(modules_new) == 0 or len(modules_old) == 0:\n abort(404, description='No hits found either in old or new input')\n\n output_modules_list = []\n for mod_old in modules_old:\n name_new = None\n semver_new = None\n revision_new = None\n status_new = None\n name_old = mod_old['name']\n revision_old = mod_old['revision']\n organization_old = mod_old['organization']\n status_old = mod_old['compilation-status']\n for mod_new in modules_new:\n name_new = mod_new['name']\n revision_new = mod_new['revision']\n organization_new = mod_new['organization']\n status_new = mod_new['compilation-status']\n if name_new == name_old and organization_new == organization_old:\n if revision_old == revision_new:\n break\n semver_new = mod_new.get('derived-semantic-version')\n break\n if semver_new:\n semver_old = mod_old.get('derived-semantic-version')\n if semver_old:\n if semver_new != semver_old:\n output_mod = {}\n if status_old != 'passed' and status_new != 'passed':\n reason = 'Both modules failed compilation'\n elif status_old != 'passed' and status_new == 'passed':\n reason = 'Older module failed compilation'\n elif status_new != 'passed' and status_old == 'passed':\n reason = 'Newer module failed compilation'\n else:\n file_name = (\n f'{ac.w_yangcatalog_api_prefix}/services/file1={name_new}@{revision_new}'\n f'/check-update-from/file2={name_old}@{revision_old}'\n )\n reason = f'pyang --check-update-from output: {file_name}'\n\n diff = (\n f'{ac.w_yangcatalog_api_prefix}/services/diff-tree'\n f'/file1={name_old}@{revision_old}/file2={name_new}@{revision_new}'\n )\n output_mod['yang-module-pyang-tree-diff'] = diff\n\n output_mod['name'] = name_old\n output_mod['revision-old'] 
= revision_old\n output_mod['revision-new'] = revision_new\n output_mod['organization'] = organization_old\n output_mod['old-derived-semantic-version'] = semver_old\n output_mod['new-derived-semantic-version'] = semver_new\n output_mod['derived-semantic-version-results'] = reason\n diff = (\n f'{ac.w_yangcatalog_api_prefix}/services/diff-file'\n f'/file1={name_old}@{revision_old}/file2={name_new}@{revision_new}'\n )\n output_mod['yang-module-diff'] = diff\n output_modules_list.append(output_mod)\n if len(output_modules_list) == 0:\n abort(404, description='No different semantic versions with provided input')\n output = {'output': output_modules_list}\n return output", "def test_unknown_action(self):\n exit_string = actions.main(['foo'])\n self.assertEqual('Action \"foo\" undefined', exit_string)", "def compatible(self, name: str, actions: Actions):\n assert isinstance(name, str)\n assert isinstance(actions, Actions)\n\n if name in self.required_processes and name in self._required_actions:\n return set(self.required_actions(name)).issubset(set(actions.keys()))\n elif name in self.required_processes:\n return True\n elif name in self.forbidden_processes:\n return False\n else:\n return True", "def root_lex_analysis():\n\n return {\"cat\": \"VerbLex\",\n \"vform\": \"bare\",\n \"orthoForm\": [\"*RootForm\"]}", "def check(self):\r\n for action in self._actions:\r\n action.check()", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces(self):\n pass", "def test_bad_action(self):\r\n action = 'robot-not-an-action'\r\n url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {'identifiers': self.enrolled_student.email, 'action': action})\r\n self.assertEqual(response.status_code, 400)", "def test_request_with_two_bundles(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n self.assertTrue(validate_request(request, self.policy))", "def test_intent_support(self):\n dispatcher = self.get_dispatcher()\n for intent in self.get_intents():\n self.assertIsNot(dispatcher(intent), None)", "def validate(self, namespace):\n pass", "def check(self):\n BadNamespaces = list()\n\n for namespace in pm.listNamespaces():\n BadNamespaces.append(namespace)\n\n if not BadNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = namespace\n for namespace in BadNamespaces:\n self.addError(\"namespace %s exist\" % namespace)\n self.errorMessage = \"%s namespace\" % (len(BadNamespaces))", "def validate_access_token(cmd, namespace):\n n = namespace\n\n if not n.access_token:\n n.access_token = get_config_value(cmd, 'communication', 'access_token', None)", "def verifyActionCenterFirewall():\n pass", "def check(self):\n illegalNamespaces = list()\n\n prog = re.compile(\"^[A-Z]{4}[0-9]{2}_[0-9]{3}:$\")\n\n for assetNode in pm.ls(type=\"gAsset\"):\n if assetNode.isReferenced() and not prog.match(assetNode.namespace()):\n illegalNamespaces.append(assetNode)\n\n if not illegalNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = illegalNamespaces\n for illegalNamespace in illegalNamespaces:\n self.addError(\"%s has a illegal namespace\" % illegalNamespace)\n self.errorMessage = \"%s asset(s) have a illegal namespace\" % (\n len(illegalNamespaces))", "def main():\n check_slugs()\n check_identifiers()", "def check(self, linter: Linter, cursor: Cursor, docstring: PetscDocStringImpl) -> None:\n 
super().check(linter, cursor, docstring)\n self._check_prefixes(docstring)\n return", "def test_parglare_builtin_action_override_repetition():\n # B+ will product B_1 rule with `collect` common action\n grammar = \"\"\"\n S: B+;\n B: \"b\";\n \"\"\"\n\n called = [False]\n\n def my_collect(_, __):\n called[0] = True\n return \"pass\"\n\n my_actions = {\n \"collect\": my_collect,\n }\n\n g = Grammar.from_string(grammar)\n p = Parser(g, actions=my_actions)\n assert p.parse(\"b b\") == 'pass'\n assert called[0]", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_library_variable_set_usage_list_action_spaces(self):\n pass", "def check(self, input, ast):\n assert False # Must be redefined", "def test_create_local_subject_access_review_for_all_namespaces(self):\n pass", "def parse_action(self, page, from_sphinx=False):\n subparsers = self.subparsers[self.resource].add_subparsers(\n dest='action',\n metavar='action'\n )\n subparsers.required = True\n\n # parse the action from OPTIONS\n parser = ResourceOptionsParser(self.v2, page, self.resource, subparsers)\n if parser.deprecated:\n description = 'This resource has been deprecated and will be removed in a future release.'\n if not from_sphinx:\n description = colored(description, 'yellow')\n self.subparsers[self.resource].description = description\n\n if from_sphinx:\n # Our Sphinx plugin runs `parse_action` for *every* available\n # resource + action in the API so that it can generate usage\n # strings for automatic doc generation.\n #\n # Because of this behavior, we want to silently ignore the\n # `SystemExit` argparse will raise when you're missing required\n # positional arguments (which some actions have).\n try:\n self.parser.parse_known_args(self.argv)[0]\n except SystemExit:\n pass\n else:\n self.parser.parse_known_args()[0]\n\n # parse any action arguments\n if self.resource != 'settings':\n for method in ('list', 'modify', 'create'):\n if method in parser.parser.choices:\n parser.build_query_arguments(\n method,\n 'GET' if method == 'list' else 'POST'\n )\n if from_sphinx:\n parsed, extra = self.parser.parse_known_args(self.argv)\n else:\n parsed, extra = self.parser.parse_known_args()\n\n if extra and self.verbose:\n # If extraneous arguments were provided, warn the user\n cprint('{}: unrecognized arguments: {}'.format(\n self.parser.prog,\n ' '.join(extra)\n ), 'yellow', file=self.stdout)\n\n # build a dictionary of all of the _valid_ flags specified on the\n # command line so we can pass them on to the underlying awxkit call\n # we ignore special global flags like `--help` and `--conf.xyz`, and\n # the positional resource argument (i.e., \"jobs\")\n # everything else is a flag used as a query argument for the HTTP\n # request we'll make (e.g., --username=\"Joe\", --verbosity=3)\n parsed = parsed.__dict__\n parsed = dict(\n (k, v) for k, v in parsed.items()\n if (\n v is not None and\n k not in ('help', 'resource') and\n not k.startswith('conf.')\n )\n )\n\n # if `id` is one of the arguments, it's a detail view\n if 'id' in parsed:\n page.endpoint += '{}/'.format(str(parsed.pop('id')))\n\n # determine the awxkit method to call\n action = self.original_action = parsed.pop('action')\n page, action = handle_custom_actions(\n self.resource, action, page\n )\n self.method = {\n 'list': 'get',\n 'modify': 'patch',\n }.get(action, action)\n\n if self.method == 'patch' and not parsed:\n # If we're doing an HTTP PATCH with an empty payload,\n # just print the help message (it's a no-op anyways)\n 
parser.parser.choices['modify'].print_help()\n return\n\n if self.help:\n # If --help is specified on a subarg parser, bail out\n # and print its help text\n parser.parser.choices[self.original_action].print_help()\n return\n\n if self.original_action == 'create':\n return page.post(parsed)\n\n return getattr(page, self.method)(**parsed)", "def is_valid_git_action(action):\n\n return action in GIT_ACTIONS", "def getLegalActions(self):\n return ['BOT', 'SLD']", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def test_uses_wraps(self):\n @self.actions(\"ctx_name\", [])\n def myview(request, some_id):\n \"\"\"docstring\"\"\"\n\n self.assertEqual(myview.func_name, \"myview\")\n self.assertEqual(myview.func_doc, \"docstring\")", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for 
action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def rule_axiom_2(self, idx: int, line: Statement) -> None:\n self.at_most_refs(line, 0, 'axiom 2')\n if line.formula != self.AXIOMS[2].formula:\n raise InvalidRule('not axiom 2')", "def validate(self, actions: List[Action]) -> None:\n self.validate_forbidden_actions(actions)\n self.validate_short_sequences(actions)\n self.validate_multiple_reaction_steps(actions)\n self.validate_missing_sln(actions)", "def test_10_unsupported_actions(self):\n\n def __count_pulled_packages(pth):\n self.pkgrepo(\"list -F tsv -H -s {0}\".format(pth))\n return len(self.output.splitlines())\n\n def __check_errout(pfmri):\n s1 = \"invalid action in package {0}\".format(pfmri)\n s2 = \"Malformed action in package '{0}'\".format(pfmri)\n self.assert_(s1 in self.errout or s2 in self.errout,\n \"{0} not in error\".format(pfmri))\n\n def __empty_repo(uri, arg_string):\n if uri.startswith(\"http://\"):\n rurl = self.dcs[4].get_repo_url()\n self.pkgrepo(\"remove -s {0} 
'*'\".format(rurl))\n # Refresh the depot to get it to realize that\n # the catalog has changed.\n self.dcs[4].refresh()\n elif arg_string:\n portable.remove(uri)\n else:\n self.pkgrepo(\"remove -s {0} '*'\".format(uri))\n\n\n def __test_rec(duri, arg_string, pfmris):\n self.debug(\"\\n\\nNow pkgrecv'ing to {0}\".format(duri))\n\n # It's necessary to use the -D option below because\n # otherwise pkgrecv will fail because the manifest\n # doesn't validate.\n\n novalidate = \"-D manifest_validate=Never \"\n # Check that invalid action attributes don't cause\n # tracebacks.\n self.pkgrecv(self.durl1, novalidate +\n \"-d {0} {1} {2}\".format(duri, arg_string,\n \" \".join(pfmris)), exit=pkgdefs.EXIT_OOPS)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri), 0)\n if arg_string:\n portable.remove(duri)\n\n self.pkgrecv(self.rurl1, novalidate +\n \"-d {0} {1} {2}\".format(duri, arg_string,\n \" \".join(pfmris)), exit=pkgdefs.EXIT_OOPS)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri), 0)\n if arg_string:\n portable.remove(duri)\n\n # Check that other packages are retrieved and the exit\n # code reflects partial success.\n self.pkgrecv(self.durl1, novalidate +\n \"-d {0} {1} -m all-timestamps '*'\".format(\n duri, arg_string), exit=pkgdefs.EXIT_PARTIAL)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri),\n len(self.published) - len(pfmris))\n __empty_repo(duri, arg_string)\n\n self.pkgrecv(self.rurl1, novalidate +\n \"-d {0} {1} -m all-timestamps '*'\".format(\n duri, arg_string), exit=pkgdefs.EXIT_PARTIAL)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri),\n len(self.published) - len(pfmris))\n __empty_repo(duri, arg_string)\n\n self.rurl1 = self.dcs[1].get_repo_url()\n repo = self.dcs[1].get_repo()\n rd = repo.get_pub_rstore()\n pfmri = fmri.PkgFmri(self.published[4])\n mp = rd.manifest(pfmri)\n\n with open(mp, \"rb\") as fh:\n original_txt = fh.read()\n txt = original_txt.replace(\"type=require\", \"type=foo\")\n with open(mp, \"wb\") as fh:\n fh.write(txt)\n\n rpth = tempfile.mkdtemp(dir=self.test_root)\n self.pkgrepo(\"create {0}\".format(rpth))\n adir = tempfile.mkdtemp(dir=self.test_root)\n\n # The __empty repo function above assumes that the only http uri\n # used is the one for depot number 4.\n dest_uris = ((rpth, \"\"), (self.durl4, \"\"),\n (os.path.join(adir, \"archive.p5p\"), \"-a\"))\n for duri, arg_string in dest_uris:\n __test_rec(duri, arg_string, [self.published[4]])\n\n # Test that multiple packages failing are handled correctly.\n for i in range(5, 7):\n pfmri = fmri.PkgFmri(self.published[i])\n mp = rd.manifest(pfmri)\n with open(mp, \"rb\") as fh:\n original_txt = fh.read()\n txt = \"foop\\n\" + original_txt\n with open(mp, \"wb\") as fh:\n fh.write(txt)\n\n for duri, arg_string, in dest_uris:\n __test_rec(duri, arg_string, self.published[4:7])", "def test_name00401m2_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/name/name00401m/name00401m2.xsd\",\n instance=\"sunData/ElemDecl/name/name00401m/name00401m2_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def parse_access_predicated(left: ValueType, _: LexicalAccess, right: Identifier):\n return NamedAccess([left, right])", "def test_incorrect_namespace(self):\n self._test( # pylint: 
disable=no-value-for-parameter\n [u\":meth:`path.that.does.not.exist`\"], error_classes.MissingNamespace\n )", "def _check_xml_syntax_error(self):\n self.msg_args = []\n for xml_file in self.filter_files_ext('xml', relpath=True):\n result = self.parse_xml(os.path.join(self.module_path, xml_file))\n if isinstance(result, string_types):\n self.msg_args.append((\n xml_file, result.strip('\\n').replace('\\n', '|')))\n if self.msg_args:\n return False\n return True", "def verify_deprecated_policy(old_policy, new_policy, default_rule, context):\n\n if _ENFORCER:\n current_rule = str(_ENFORCER.rules[old_policy])\n else:\n current_rule = None\n\n if current_rule != default_rule:\n LOG.warning(\"Start using the new action '{0}'. The existing \"\n \"action '{1}' is being deprecated and will be \"\n \"removed in future release.\".format(new_policy,\n old_policy))\n target = {'project_id': context.project_id,\n 'user_id': context.user_id}\n\n return authorize(context=context, action=old_policy, target=target)\n else:\n return False", "def semantic_validate(instance):\n unknown_templates = {}\n for name, requires in instance[\"application\"][\"requires\"].items():\n if name in instance[\"application\"][\"services\"]:\n raise ValidationError(errors=[\n \"/application/requires/{}: the name {} conflicts with service\"\n \" /application/services/{}\".format(name,\n repr(name),\n name),\n ])\n if requires[\"template\"] not in instance[\"local\"][\"templates\"]:\n unknown_templates[\"/application/requires/{}/template\".format(\n name)] = requires[\"template\"]\n for service_name, service in instance[\"application\"][\"services\"].items():\n for name, requires in service[\"requires\"].items():\n if name in instance[\"application\"][\"requires\"]:\n raise ValidationError(errors=[\n \"/application/services/{}/requires/{}: the name {}\"\n \" conflicts with /application/requires/{}\".format(\n service_name, name, repr(name), name)\n ])\n if requires[\"template\"] not in instance[\"local\"][\"templates\"]:\n unknown_templates[\n \"/application/services/{}/requires/{}/template\".\n format(service_name, name)] = requires[\"template\"]\n if unknown_templates:\n raise ValidationError(errors=[\n \"{}: the template {} does not exist \"\n \"in /local/templates\".format(path, repr(name))\n for (path, name) in unknown_templates.items()\n ])", "def test_actions(self, actions):\n try:\n for action in actions:\n self.get_action(action['type'])(**action)\n except Exception as e:\n print('Exception: {}'.format(str(e)))", "def test_instance():\n assert isinstance(lex.lex().build(), lex._lexer)", "def test_distinguish_path_polarity2():\n Monomer('A')\n Monomer('B', ['act'], {'act' :['y', 'n']})\n Monomer('C', ['T185'], {'T185':['u', 'p']})\n Parameter('k', 1)\n Rule('A_inhibit_B', A() + B(act='y') >> A() + B(act='n'), k)\n Rule('B_dephos_C', B(act='y') + C(T185='p') >>\n B(act='y') + C(T185='u'), k)\n Initial(A(), k)\n Initial(B(act='y'), k)\n Initial(C(T185='p'), k)\n Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')\n Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')\n Annotation(C, 'http://identifiers.org/hgnc/HGNC:3')\n Annotation('A_inhibit_B', 'A', 'rule_has_subject')\n Annotation('A_inhibit_B', 'B', 'rule_has_object')\n Annotation('B_dephos_C', 'B', 'rule_has_subject')\n Annotation('B_dephos_C', 'C', 'rule_has_object')\n C.site_annotations = [\n Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),\n Annotation('T185', 'T', 'is_residue'),\n Annotation('T185', '185', 'is_position'),\n ]\n # Create the model 
checker\n stmts = _path_polarity_stmt_list()\n mc = ModelChecker(model, stmts)\n results = mc.check_model()\n assert len(results) == len(stmts)\n assert isinstance(results[0], tuple)\n assert results[0][1].paths == [[('A_inhibit_B', 1), ('B_dephos_C', -1),\n ('C_T185_p_obs', 1)]]\n assert results[1][1].paths == []\n assert results[2][1].paths == [[('A_inhibit_B', 1), ('B_dephos_C', -1),\n ('C_T185_p_obs', 1)]]\n assert results[3][1].paths == [[('B_dephos_C', 1), ('C_T185_p_obs', -1)]]", "def test_replace_namespaced_route_status(self):\n pass", "def check_commands(self):\n pass", "def test_wrong_xml_namespace(self):\n with pytest.raises(IndexError):\n mocked_request = self.get_signed_grade_mock_request(namespace_lti_v1p1=False)\n self.xmodule.parse_grade_xml_body(mocked_request.body)", "def _validate_builtin(_):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_create_namespaced_subject_rules_review(self):\n pass", "def test_advance_ast_avaliable():\n assert _test_advanced_ast_presence()", "def _check_symbol_matches_synopsis_name(self: SynopsisImpl, docstring: PetscDocStringImpl, cursor: Cursor, loc: SourceRange, symbol: str) -> None:\n if symbol != cursor.name:\n if len(difflib.get_close_matches(symbol, [cursor.name], n=1)):\n mess = f\"Docstring name '{symbol}' does not match symbol. Assuming you meant '{cursor.name}'\"\n patch = Patch(loc, cursor.name)\n else:\n mess = f\"Docstring name '{symbol}' does not match symbol name '{cursor.name}'\"\n patch = None\n docstring.add_diagnostic_from_source_range(\n Diagnostic.Kind.ERROR, self.diags.matching_symbol_name, mess, loc, patch=patch\n )\n return", "def validateOperator(self, tokens):\n return tokens", "def violate_rule(state, action):\n if action in state:\n return True\n return False", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action_spaces(self):\n pass", "def check_unstructured(extractions):\n if not extractions:\n return True\n for ext in extractions:\n if not hasattr(ext, 'args'):\n return False\n return True", "def check_structured(extractions):\n if not extractions:\n return True\n for ext in extractions:\n if not hasattr(ext, 'arg1'):\n return False\n return True", "def __enableSpellingActions(self):\n from QScintilla.SpellChecker import SpellChecker\n spellingAvailable = SpellChecker.isAvailable()\n \n self.spellCheckAct.setEnabled(\n len(self.editors) != 0 and spellingAvailable)\n self.autoSpellCheckAct.setEnabled(spellingAvailable)", "def apicheck():\n\n async def predicate(ctx: commands.Context):\n travitia_keys = await ctx.bot.get_shared_api_tokens(\"travitia\")\n key = travitia_keys.get(\"api_key\") is None\n if ctx.invoked_with == \"help\" and key:\n return False\n if key:\n await ctx.send(\"The API key is not registered, the command is unavailable.\")\n return False\n return True\n\n return commands.check(predicate)", "def test_basic_api(self):\n self.create_and_verify_stack(\"single/basic_api\")\n\n first_dep_ids = self.get_stack_deployment_ids()\n self.assertEqual(len(first_dep_ids), 1)\n\n self.set_template_resource_property(\"MyApi\", \"DefinitionUri\", self.get_s3_uri(\"swagger2.json\"))\n self.update_stack()\n\n second_dep_ids = self.get_stack_deployment_ids()\n self.assertEqual(len(second_dep_ids), 1)\n\n self.assertEqual(len(set(first_dep_ids).intersection(second_dep_ids)), 0)", "def unlex(tokens):", "def has_action2(self, feature):\n return feature in 
self._action2", "def test_bad_action(self):\r\n action = 'robot-not-an-action'\r\n url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {'identifiers': self.beta_tester.email, 'action': action})\r\n self.assertEqual(response.status_code, 400)", "def test_issue7306(en_lookup_nlp):\n doc = Doc(en_lookup_nlp.vocab, words=[\"singing\"])\n lemmatizer = en_lookup_nlp.get_pipe(\"lemmatizer\")\n doc = lemmatizer(doc)\n assert doc[0].lemma_ == \"sing\"" ]
[ "0.5523519", "0.54551274", "0.52533966", "0.4947026", "0.49150628", "0.4877916", "0.4869475", "0.4861803", "0.47912464", "0.47661808", "0.47431847", "0.47416365", "0.4730279", "0.47081596", "0.46722156", "0.46685877", "0.46628264", "0.46338037", "0.463026", "0.46180958", "0.45894623", "0.45869774", "0.45716003", "0.45601535", "0.45562217", "0.45410743", "0.45358202", "0.45188215", "0.4517557", "0.45168594", "0.4508975", "0.45066348", "0.45006892", "0.44999847", "0.4485097", "0.44841477", "0.44712272", "0.44666958", "0.4464412", "0.44584933", "0.44559905", "0.44515297", "0.44478762", "0.44466197", "0.4440304", "0.44356743", "0.4432982", "0.44220525", "0.4412983", "0.4412313", "0.44108987", "0.44037646", "0.44028318", "0.4398795", "0.4394409", "0.43921506", "0.43902254", "0.43877977", "0.43867937", "0.43834266", "0.43829745", "0.4377982", "0.43757173", "0.43750015", "0.4366417", "0.43596497", "0.4354805", "0.43499133", "0.43392813", "0.43382317", "0.43377402", "0.43343154", "0.43267134", "0.43265653", "0.43238354", "0.43234783", "0.43153676", "0.4296917", "0.4293076", "0.42875576", "0.4276754", "0.42764997", "0.4276495", "0.4273839", "0.427349", "0.42668882", "0.42654118", "0.42585593", "0.42567053", "0.4252395", "0.42501384", "0.42494816", "0.42442465", "0.4243203", "0.4242321", "0.4241001", "0.4236332", "0.42355272", "0.42353347", "0.42322683" ]
0.6062302
0
Ensure that Kinesis Analytics V1 actions are present in the kinesisanalytics namespace
def test_services_with_multiple_pages_kinesis_analytics(self): # Kinesis Analytics V1 results = get_actions_for_service("kinesisanalytics") actions = [ "kinesisanalytics:GetApplicationState", # Only in v1, not v2 "kinesisanalytics:ListApplications", # In both ] for action in actions: self.assertTrue(action in results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def test_services_with_multiple_pages_ses(self):\n # SES V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonses.html\n self.assertTrue(\"ses:PutIdentityPolicy\" in self.all_actions)\n # SES V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonsimpleemailservicev2.html\n self.assertTrue(\"ses:ListImportJobs\" in self.all_actions)\n\n results = get_actions_for_service(\"ses\")\n actions = [\n \"ses:CloneReceiptRuleSet\",\n \"ses:CreateConfigurationSetTrackingOptions\",\n \"ses:CreateReceiptFilter\",\n \"ses:CreateReceiptRule\",\n \"ses:CreateReceiptRuleSet\",\n \"ses:CreateTemplate\",\n \"ses:DeleteConfigurationSetTrackingOptions\",\n \"ses:DeleteIdentity\",\n \"ses:DeleteIdentityPolicy\",\n \"ses:DeleteReceiptFilter\",\n \"ses:DeleteReceiptRule\",\n \"ses:DeleteReceiptRuleSet\",\n \"ses:DeleteTemplate\",\n \"ses:DeleteVerifiedEmailAddress\",\n \"ses:DescribeActiveReceiptRuleSet\",\n \"ses:DescribeConfigurationSet\",\n \"ses:DescribeReceiptRule\",\n \"ses:DescribeReceiptRuleSet\",\n \"ses:GetAccountSendingEnabled\",\n \"ses:GetIdentityDkimAttributes\",\n \"ses:GetIdentityMailFromDomainAttributes\",\n \"ses:GetIdentityNotificationAttributes\",\n \"ses:GetIdentityPolicies\",\n \"ses:GetIdentityVerificationAttributes\",\n \"ses:GetSendQuota\",\n \"ses:GetSendStatistics\",\n \"ses:GetTemplate\",\n \"ses:ListIdentities\",\n \"ses:ListIdentityPolicies\",\n \"ses:ListReceiptFilters\",\n \"ses:ListReceiptRuleSets\",\n \"ses:ListTemplates\",\n \"ses:ListVerifiedEmailAddresses\",\n \"ses:PutIdentityPolicy\",\n \"ses:ReorderReceiptRuleSet\",\n \"ses:SendBounce\",\n \"ses:SendBulkTemplatedEmail\",\n \"ses:SendRawEmail\",\n \"ses:SendTemplatedEmail\",\n \"ses:SetActiveReceiptRuleSet\",\n \"ses:SetIdentityDkimEnabled\",\n \"ses:SetIdentityFeedbackForwardingEnabled\",\n \"ses:SetIdentityHeadersInNotificationsEnabled\",\n \"ses:SetIdentityMailFromDomain\",\n \"ses:SetIdentityNotificationTopic\",\n \"ses:SetReceiptRulePosition\",\n \"ses:TestRenderTemplate\",\n \"ses:UpdateAccountSendingEnabled\",\n \"ses:UpdateConfigurationSetReputationMetricsEnabled\",\n \"ses:UpdateConfigurationSetSendingEnabled\",\n \"ses:UpdateConfigurationSetTrackingOptions\",\n \"ses:UpdateReceiptRule\",\n \"ses:UpdateTemplate\",\n \"ses:VerifyDomainDkim\",\n \"ses:VerifyDomainIdentity\",\n \"ses:VerifyEmailAddress\",\n \"ses:VerifyEmailIdentity\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n 
self.assertTrue(item in output)", "def verifyActionCenterRts():\n pass", "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n 
\"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action_spaces(self):\n pass", "def aws_es_os_coginto_authentication_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n # ISO Time\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for response in describe_es_os_domains(cache, session):\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(response,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n esDomainName = response[\"DomainStatus\"][\"DomainName\"]\n esVersion = response[\"DomainStatus\"][\"ElasticsearchVersion\"]\n domainId = response[\"DomainStatus\"][\"DomainId\"]\n domainArn = response[\"DomainStatus\"][\"ARN\"]\n try:\n cognitoEnabledCheck = response[\"DomainStatus\"][\"CognitoOptions\"][\"Enabled\"]\n except:\n cognitoEnabledCheck = False\n # this is a failing check\n if cognitoEnabledCheck is False:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-cognito-auth-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": 
awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.2] OpenSearch/AWS ElasticSearch Service domains should use Cognito authentication for Kibana\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" does not use Cognito authentication for Kibana. Refer to the remediation instructions if this configuration is not intended\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"If your domain should use Cognito authentication for Kibana refer to the Amazon Cognito Authentication for Kibana section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion,\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 IA-1\",\n \"NIST SP 800-53 Rev. 4 IA-2\",\n \"NIST SP 800-53 Rev. 4 IA-4\",\n \"NIST SP 800-53 Rev. 4 IA-5\",\n \"NIST SP 800-53 Rev. 4 IA-8\",\n \"NIST SP 800-53 Rev. 4 PE-2\",\n \"NIST SP 800-53 Rev. 
4 PS-3\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.9.2.1\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-cognito-auth-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.2] OpenSearch/AWS ElasticSearch Service domains should use Cognito authentication for Kibana\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" uses Cognito authentication for Kibana.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"If your domain should use Cognito authentication for Kibana refer to the Amazon Cognito Authentication for Kibana section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion,\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 IA-1\",\n \"NIST SP 800-53 Rev. 4 IA-2\",\n \"NIST SP 800-53 Rev. 4 IA-4\",\n \"NIST SP 800-53 Rev. 4 IA-5\",\n \"NIST SP 800-53 Rev. 4 IA-8\",\n \"NIST SP 800-53 Rev. 4 PE-2\",\n \"NIST SP 800-53 Rev. 
4 PS-3\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.9.2.1\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def lambda_handler(event, context):\n set_logging(level=logging.DEBUG)\n\n try:\n payload = json.loads(event[\"Records\"][0][\"Sns\"][\"Message\"])\n account_id = payload['account_id']\n account_name = payload['account_name']\n # get the last region from the list to process\n region = payload['regions'].pop()\n # region = payload['region']\n # if request_id is present in payload, it means this lambda was called from the API\n request_id = payload.get('request_id', None)\n except Exception:\n logging.exception(f\"Failed to parse event\\n{event}\")\n return\n\n try:\n config = Config()\n\n main_account = Account(region=config.aws.region)\n ddb_table = main_account.resource(\"dynamodb\").Table(config.sqspolicy.ddb_table_name)\n\n account = Account(id=account_id,\n name=account_name,\n region=region,\n role_name=config.aws.role_name_identification)\n if account.session is None:\n return\n\n logging.debug(f\"Checking for public SQS policies in {account}\")\n\n # existing open issues for account to check if resolved\n open_issues = IssueOperations.get_account_open_issues(ddb_table, account_id, SQSPolicyIssue)\n # make dictionary for fast search by id\n # and filter by current region\n open_issues = {issue.issue_id: issue for issue in open_issues if issue.issue_details.region == region}\n logging.debug(f\"SQS in DDB:\\n{open_issues.keys()}\")\n\n checker = SQSPolicyChecker(account=account)\n if checker.check():\n for queue in checker.queues:\n logging.debug(f\"Checking {queue.name}\")\n if queue.public:\n issue = SQSPolicyIssue(account_id, queue.url)\n issue.issue_details.tags = queue.tags\n issue.issue_details.name = queue.name\n issue.issue_details.region = queue.account.region\n issue.issue_details.policy = queue.policy\n if config.sqspolicy.in_whitelist(account_id, queue.url):\n issue.status = IssueStatus.Whitelisted\n else:\n issue.status = IssueStatus.Open\n logging.debug(f\"Setting {queue.name} status {issue.status}\")\n IssueOperations.update(ddb_table, issue)\n # remove issue id from issues_list_from_db (if exists)\n # as we already checked it\n open_issues.pop(queue.url, None)\n\n logging.debug(f\"SQS in DDB:\\n{open_issues.keys()}\")\n # all other unresolved issues in DDB are for removed/remediated queues\n for issue in open_issues.values():\n IssueOperations.set_status_resolved(ddb_table, issue)\n if request_id:\n api_table = main_account.resource(\"dynamodb\").Table(config.api.ddb_table_name)\n DDB.track_progress(api_table, request_id)\n except Exception:\n logging.exception(f\"Failed to check SQS policies for '{account_id} ({account_name})'\")\n return\n\n # push SNS messages until the list with regions to check is empty\n if len(payload['regions']) > 0:\n try:\n Sns.publish(payload[\"sns_arn\"], payload)\n except Exception:\n logging.exception(\"Failed to chain insecure services checking\")\n\n logging.debug(f\"Checked SQS policies for '{account_id} ({account_name})'\")", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_must_be_associated(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n 
(\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n assoc.release()\n assert assoc.is_released\n assert not assoc.is_established\n with pytest.raises(RuntimeError):\n assoc.send_n_action(None, None, None, None)\n\n scp.shutdown()", "def exists_intent_action(self, intent_keyword):\n pass", "def test_get_snsname_arn_auth_exception_handling(self, aws_res_mock):\n # local imports of code-under-test ensure moto has mocks\n # registered before any possible calls out to AWS\n from awstools.awstools import get_snsname_arn\n\n # create a mock SNS client that returns what we tell it to\n client = boto3.client('sns')\n stub = Stubber(client)\n stub.add_client_error('create_topic', service_error_code='AuthorizationError')\n stub.activate()\n\n\n # since firesim manager code doesn't take clients as method parameters\n # now we mock boto3.client to return our stubbed client\n with patch.object(boto3._get_default_session(), 'client', return_value=client) as mock_session:\n topic_arn = get_snsname_arn()\n\n stub.assert_no_pending_responses()\n topic_arn.should.be.none\n\n # TODO we could mock rootLogger.critical to capture it's calls and args and validate that we're seeing the correct \"nice\" message\n\n # make sure get_snsname_arn() actually called out to get a sns\n # client, otherwise we aren't testing what we think we are\n mock_session.assert_called_once_with('sns')\n\n aws_res_mock.assert_called_once()", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def test_get_actions_with_arn_type_and_access_level_case_3(self):\n desired_output = [\n 's3:PutAccountPublicAccessBlock',\n 's3:PutAccessPointPublicAccessBlock'\n ]\n output = get_actions_with_arn_type_and_access_level(\n # \"ram\", \"resource-share\", \"Write\"\n \"s3\", \"*\", \"Permissions management\"\n )\n print(output)\n for item in desired_output:\n self.assertTrue(item in output)\n # self.assertListEqual(desired_output, output)", "def _is_s3_notif(event):\n return (\n event.get(\"Records\")\n and isinstance(event.get(\"Records\"), list)\n and \"s3\" in event.get(\"Records\")[0]\n )", "def check_script(vouts):\n for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:\n verb = BlockchainSpider.decode_op_return(vout['hex'])\n action = Spoolverb.from_verb(verb).action\n if action in Spoolverb.supported_actions:\n return verb\n raise Exception(\"Invalid ascribe transaction\")", "def check_snstopicpolicy_crossaccount(self, snsitem):\n #(region, account, arn, aws_object) = audit_object\n #\"Principal\": { \"AWS\": \"*\" }\n # \"AWS\": \"arn:aws:iam::027213240437:root\"\n policy = snsitem.config.get('SNSPolicy', {})\n for statement in policy.get(\"Statement\", []):\n account_numbers = []\n account_number = ''\n princ_aws = statement.get(\"Principal\", {}) \\\n .get(\"AWS\", \"error\")\n if princ_aws == \"*\":\n account_number = statement.get(\"Condition\", {}) \\\n .get(\"StringEquals\", {}) \\\n .get(\"AWS:SourceOwner\", None)\n if not account_number:\n tag = \"SNS Topic open to everyone\"\n notes = \"An SNS policy where { 'Principal': { 'AWS': '*' } } must also have\"\n notes += \" a {'Condition': {'StringEquals': { 'AWS:SourceOwner': '<ACCOUNT_NUMBER>' } } }\"\n 
notes += \" or it is open to the world. In this case, anyone is allowed to perform \"\n notes += \" this action(s): {}\".format(statement.get(\"Action\"))\n self.add_issue(10, tag, snsitem, notes=notes)\n continue\n else:\n try:\n account_numbers.append(str(account_number))\n except ValueError:\n raise InvalidSourceOwner(account_number)\n else:\n if isinstance(princ_aws, list):\n for entry in princ_aws:\n account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', entry).group(1)))\n else:\n try:\n account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', princ_aws).group(1)))\n except:\n import json\n print json.dumps(snsitem.config, indent=4)\n raise InvalidARN(princ_aws)\n\n for account_number in account_numbers:\n account = Account.query.filter(Account.number == account_number).first()\n account_name = None\n if account is not None:\n account_name = account.name\n\n if not account_name:\n tag = \"Unknown Cross Account Access\"\n notes = \"from {} to {}\".format(account_number, snsitem.account)\n self.add_issue(10, tag, snsitem, notes=notes)\n elif account_name != snsitem.account:\n tag = \"Friendly Cross Account Access\"\n notes = \"from {} to {}\".format(account_name, snsitem.account)\n self.add_issue(0, tag, snsitem, notes=notes)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def copy_active_kinesis_destinations(events: dict, context: dict) -> dict:\n if 'SourceTableName' not in events:\n raise KeyError('Requires SourceTableName')\n if 'TargetTableName' not in events:\n raise KeyError('Requires TargetTableName')\n\n ACTIVE_STATUSES = ['ACTIVE', 'ENABLING']\n source_table_name = events['SourceTableName']\n target_table_name = events['TargetTableName']\n kinesis_destinations = _describe_kinesis_destinations(table_name=source_table_name)\n destinations = [d['StreamArn'] for d in kinesis_destinations['KinesisDataStreamDestinations']\n if d['DestinationStatus'] in ACTIVE_STATUSES]\n\n for d in destinations:\n _enable_kinesis_destinations(table_name=target_table_name, kinesis_arn=d)\n\n return destinations", "def test_send_to_kinesis_stream(search_events, boto3_client, monkeypatch):\n monkeypatch.setattr(\"boto3.client\", boto3_client)\n lambdautils.utils.send_to_kinesis_stream(search_events, \"dummy_stream\")\n boto3_client(\"kinesis\").put_records.call_count == 1", "def test_accepted(self):\n actions = signoff_actions(appversions={\"code\": \"fx1.0\"},\n locales={\"code\": \"de\"})\n actions = list(actions)\n eq_(len(actions), 1)\n so = Signoff.objects.get(action=actions[0][0])\n eq_(so.push.tip.shortrev, \"l10n de 0002\")\n eq_(so.locale.code, \"de\")\n eq_(so.action_set.count(), 2)", "def action_intersection(s1, s2):\n isect = s1 & s2\n L1 = [ ( (a.oid, a.index_oid), a) for a in s1 ]\n L2 = [ ( (a.oid, a.index_oid), a) for a in s2 ]\n ds1 = dict(L1)\n ds2 = dict(L2)\n for k1, action1 in ds1.items():\n action2 = ds2.get(k1)\n if action2 is not None:\n # replace action in union with correct one or conflict\n isect.add(which_action(action1, action2))\n return isect", "def test_subscriber_access_for_two_vsg_services(self):", "def test_subscriber_access_if_vsg2_goes_down(self):", "def test_subscriber_access_if_vsg1_goes_down(self):", "def has_action2(self, feature):\n return feature in self._action2", "def available_actions(speaker, action, args, soco_function, use_local_speaker_list):\n print(\"Currently available playback actions: {}\".format(speaker.available_actions))\n return True", "def 
_validate_event(cls, event):\n event_key_diff = cls.required_event_keys().difference(set(event))\n if not event_key_diff:\n return\n\n missing_event_keys = ', '.join('\\'{}\\''.format(key) for key in event_key_diff)\n raise AppConfigError('App event is missing the following required '\n 'keys: {}'.format(missing_event_keys))", "def handler(event, context):\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])", "def test_services_with_multiple_pages_aws_marketplace(self):\n # Overlap: AWS Marketplace, Marketplace Catalog, and AWS Marketplace Entitlement service, AWS Marketplace Image Building Service, AWS Marketplace Metering Service, AWS Marketplace Private Marketplace, and AWS Marketplace Procurement Systems\n # AWS Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplace.html\n self.assertTrue(\"aws-marketplace:AcceptAgreementApprovalRequest\" in self.all_actions)\n # AWS Marketplace Catalog: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacecatalog.html\n self.assertTrue(\"aws-marketplace:CancelChangeSet\" in self.all_actions)\n # AWS Marketplace Entitlement Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceentitlementservice.html\n self.assertTrue(\"aws-marketplace:GetEntitlements\" in self.all_actions)\n # AWS Marketplace Image Building Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceimagebuildingservice.html\n self.assertTrue(\"aws-marketplace:DescribeBuilds\" in self.all_actions)\n # AWS Marketplace Metering Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacemeteringservice.html\n self.assertTrue(\"aws-marketplace:BatchMeterUsage\" in self.all_actions)\n # AWS Marketplace Private Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprivatemarketplace.html\n self.assertTrue(\"aws-marketplace:AssociateProductsWithPrivateMarketplace\" in self.all_actions)\n # AWS Marketplace Procurement Systems: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprocurementsystemsintegration.html\n self.assertTrue(\"aws-marketplace:DescribeProcurementSystemConfiguration\" in self.all_actions)\n\n results = get_actions_for_service(\"aws-marketplace\")\n actions = [\n \"aws-marketplace:AcceptAgreementApprovalRequest\",\n \"aws-marketplace:BatchMeterUsage\",\n \"aws-marketplace:CancelAgreementRequest\",\n \"aws-marketplace:CancelChangeSet\",\n \"aws-marketplace:CompleteTask\",\n \"aws-marketplace:DescribeAgreement\",\n \"aws-marketplace:DescribeBuilds\",\n \"aws-marketplace:DescribeChangeSet\",\n \"aws-marketplace:DescribeEntity\",\n 
\"aws-marketplace:DescribeProcurementSystemConfiguration\",\n \"aws-marketplace:DescribeTask\",\n \"aws-marketplace:GetAgreementApprovalRequest\",\n \"aws-marketplace:GetAgreementRequest\",\n \"aws-marketplace:GetAgreementTerms\",\n \"aws-marketplace:GetEntitlements\",\n \"aws-marketplace:ListAgreementApprovalRequests\",\n \"aws-marketplace:ListAgreementRequests\",\n \"aws-marketplace:ListBuilds\",\n \"aws-marketplace:ListChangeSets\",\n \"aws-marketplace:ListEntities\",\n \"aws-marketplace:ListTasks\",\n \"aws-marketplace:MeterUsage\",\n \"aws-marketplace:PutProcurementSystemConfiguration\",\n \"aws-marketplace:RegisterUsage\",\n \"aws-marketplace:RejectAgreementApprovalRequest\",\n \"aws-marketplace:ResolveCustomer\",\n \"aws-marketplace:SearchAgreements\",\n \"aws-marketplace:StartBuild\",\n \"aws-marketplace:StartChangeSet\",\n \"aws-marketplace:Subscribe\",\n \"aws-marketplace:Unsubscribe\",\n \"aws-marketplace:UpdateAgreementApprovalRequest\",\n \"aws-marketplace:UpdateTask\",\n \"aws-marketplace:ViewSubscriptions\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def create_sns(self):\n return True", "def test_get_actions_with_arn_type_and_access_level_case_4(self):\n desired_output = [\n 'secretsmanager:ListSecrets'\n ]\n output = get_actions_with_arn_type_and_access_level(\n \"secretsmanager\", \"*\", \"List\"\n )\n self.assertListEqual(desired_output, output)", "def get_actions(\n self, observations: Observations, action_space: gym.Space\n ) -> Actions:\n return super().get_actions(observations, action_space)", "def legal_actions(self):\n raise NotImplementedError", "def test_kinesis_producer_other_region(sdc_builder, sdc_executor, aws):\n endpoint = SERVICE_ENDPOINT_FORMAT.format('kinesis', aws.region)\n\n # build producer pipeline\n stream_name = '{}_{}'.format(aws.kinesis_stream_prefix, get_random_string(string.ascii_letters, 10))\n raw_str = 'Hello World!'\n\n # Create Kinesis stream and capture the ShardId\n client = aws.kinesis\n try:\n logger.info('Creating %s Kinesis stream on AWS ...', stream_name)\n client.create_stream(StreamName=stream_name, ShardCount=1)\n aws.wait_for_stream_status(stream_name=stream_name, status='ACTIVE')\n desc_response = client.describe_stream(StreamName=stream_name)\n shard_id = desc_response['StreamDescription']['Shards'][0]['ShardId']\n\n builder = sdc_builder.get_pipeline_builder()\n builder.add_error_stage('Discard')\n\n dev_raw_data_source = builder.add_stage('Dev Raw Data Source').set_attributes(data_format='TEXT',\n raw_data=raw_str)\n kinesis_producer = builder.add_stage('Kinesis Producer')\n kinesis_producer.set_attributes(data_format='TEXT', stream_name=stream_name)\n\n dev_raw_data_source >> kinesis_producer\n producer_dest_pipeline = builder.build().configure_for_environment(aws)\n kinesis_producer.set_attributes(region='OTHER', endpoint=endpoint)\n\n # add pipeline and capture pipeline messages to assert\n sdc_executor.add_pipeline(producer_dest_pipeline)\n sdc_executor.start_pipeline(producer_dest_pipeline).wait_for_pipeline_batch_count(10)\n sdc_executor.stop_pipeline(producer_dest_pipeline)\n\n history = sdc_executor.get_pipeline_history(producer_dest_pipeline)\n msgs_sent_count = history.latest.metrics.counter('pipeline.batchOutputRecords.counter').count\n logger.debug('Number of messages ingested into the pipeline = %s', msgs_sent_count)\n\n # read data from Kinesis to assert it is what got ingested into the pipeline\n shard_iterator = client.get_shard_iterator(StreamName=stream_name,\n ShardId=shard_id, 
ShardIteratorType='TRIM_HORIZON')\n response = client.get_records(ShardIterator=shard_iterator['ShardIterator'])\n msgs_received = [response['Records'][i]['Data'].decode().strip()\n for i in range(msgs_sent_count)]\n\n logger.debug('Number of messages received from Kinesis = %d', (len(msgs_received)))\n\n assert msgs_received == [raw_str] * msgs_sent_count\n finally:\n _ensure_pipeline_is_stopped(sdc_executor, producer_dest_pipeline)\n logger.info('Deleting %s Kinesis stream on AWS ...', stream_name)\n client.delete_stream(StreamName=stream_name)", "def _validate_header(self):\n if 'X-Amz-Sns-Topic-Arn' in self._headers:\n if self._topic_arn != self._headers.get(\n 'X-Amz-Sns-Topic-Arn'):\n self.error = 'Invalid TopicArn.'\n raise ValueError('Invalid TopicArn')\n else:\n self.error = 'Invalid TopicArn'\n raise ValueError('Invalid TopicArn')\n\n return True", "def _setup_ses(self):\n print(\"\\n ** Setting up SES mocking\")\n ses = boto3.client('ses', region_name=\"us-east-1\")\n ses.verify_domain_identity(Domain='donatemates.com')\n #response = ses.verify_email_address(EmailAddress='[email protected]')", "def validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id, aws_secret_access_key):\n if not aws_default_region:\n raise DemistoException('You must specify AWS default region.')\n\n if bool(aws_access_key_id) != bool(aws_secret_access_key):\n raise DemistoException('You must provide Access Key id and Secret key id to configure the instance with '\n 'credentials.')\n if bool(aws_role_arn) != bool(aws_role_session_name):\n raise DemistoException('Role session name is required when using role ARN.')", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self):\n pass", "def find_actions(namespace, action_prefix):\n actions = {}\n for key, value in namespace.iteritems():\n if key.startswith(action_prefix):\n actions[key[len(action_prefix):]] = analyse_action(value)\n return actions", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. 
entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def test_must_be_associated(self):\n\n def handle(event):\n return 0x0000\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(BasicFilmSession)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)]\n )\n\n ae.add_requested_context(BasicFilmSession)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n assoc.release()\n assert assoc.is_released\n assert not assoc.is_established\n with pytest.raises(RuntimeError):\n assoc.send_n_delete(None, None)\n\n scp.shutdown()", "async def before_action(self, action: str, *args, **kwargs) -> bool:\n return True", "def test_acknowledge_hmac_validation_failed(client):\n res = client.get(\n \"/v0/acknowledge?fp=splunk_82998ef6bb3db9dff3dsfdsfsdc\" \"&t=97244b15a21f45e002b2e913866ff7545510f9b08dea5241f\"\n )\n assert res.status == \"500 INTERNAL SERVER ERROR\"", "def test_no_arn(client, transactional_db, mocker):\n mock = mocker.patch('coordinator.api.models.boto3.client')\n assert Event.objects.count() == 0\n\n ev = Event(event_type='error', message='test error event')\n ev.save()\n\n assert Event.objects.count() == 1\n assert mock().publish.call_count == 0", "def _action(self, action, data=None, api=\"signin\"):\n if not data:\n data = {}\n\n data['action'] = action\n # data['redirect_uri'] = self._REDIRECT_URL\n data['csrf'] = self._csrf_token()\n\n print(data)\n\n r = self.session()._post(\n \"https://signin.aws.amazon.com/{0}\".format(api),\n data=data,\n )\n\n if r.status_code != 200:\n print(r.text)\n raise Exception(\"failed action {0}\".format(action))\n\n out = json.loads(r.text)\n if out['state'].lower() != 'success':\n if 'Message' in out['properties']:\n raise Exception(\"failed action {0}: {1}\".format(action, out['properties']['Message']))\n else:\n raise Exception(\"failed action {0}\".format(action))\n\n return out['properties']", "def test_wsgi_script_on_cognito_event_request(self):\n lh = LambdaHandler(\"tests.test_wsgi_script_name_settings\")\n\n event = {\n \"version\": \"1\",\n \"region\": \"eu-west-1\",\n \"userPoolId\": \"region_poolID\",\n \"userName\": \"uuu-id-here\",\n \"callerContext\": {\n \"awsSdkVersion\": \"aws-sdk-js-2.149.0\",\n \"clientId\": \"client-id-here\",\n },\n \"triggerSource\": \"PreSignUp_SignUp\",\n \"request\": {\n \"userAttributes\": {\"email\": \"[email protected]\"},\n \"validationData\": None,\n },\n \"response\": {\n \"autoConfirmUser\": False,\n \"autoVerifyEmail\": False,\n \"autoVerifyPhone\": False,\n },\n }\n\n response = lh.handler(event, None)\n\n self.assertEqual(response[\"response\"][\"autoConfirmUser\"], False)", "def test_get_snsname_arn_sanity(self, aws_res_mock):\n # local imports of code-under-test ensure moto has mocks\n # registered before any possible calls out to AWS\n from awstools.awstools import get_snsname_arn\n\n arn = get_snsname_arn()\n\n client = boto3.client('sns')\n response = client.get_topic_attributes(TopicArn=arn)\n\n # output will normally be captured and 
suppressed but printed\n # iff the test fails. So, leaving in something that dumps the response\n # can be useful. See https://docs.pytest.org/en/4.6.x/capture.html\n pprint(response)\n response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)\n response['Attributes']['TopicArn'].should.equal(arn)\n\n # check that our mock of aws_resource_names was used\n aws_res_mock.assert_called_once()", "def get_actions(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetActions.create(\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def test_get_actions_with_arn_type_and_access_level_case_5(self):\n\n output = get_actions_with_arn_type_and_access_level(\n \"s3\", \"object\", \"List\"\n )\n self.assertTrue(\"s3:ListMultipartUploadParts\" in output)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(self):\n pass", "def test_aws_service_api_validate_subscription_post(self):\n pass", "def check_no_namespace(progress_controller=None):\n if progress_controller is None:\n progress_controller = ProgressControllerBase()\n if len(pm.listNamespaces()):\n progress_controller.complete()\n raise PublishError(\n \"There should be no <b>Namespaces</b> in a <b>Model</b> scene.\"\n )\n progress_controller.complete()", "def test_connect_post_namespaced_status_webhooks(self):\n pass", "def test_setup_succeeds(self):\n assert self.add_statestream(base_topic='pub')", "def test_aws_keys_from_env():\n\n # Init variables\n ds = nio.DataSink()\n aws_access_key_id = \"ABCDACCESS\"\n aws_secret_access_key = \"DEFGSECRET\"\n\n # Set env vars\n os.environ[\"AWS_ACCESS_KEY_ID\"] = aws_access_key_id\n os.environ[\"AWS_SECRET_ACCESS_KEY\"] = aws_secret_access_key\n\n # Call function to return creds\n access_key_test, secret_key_test = ds._return_aws_keys()\n\n # Assert match\n assert aws_access_key_id == access_key_test\n assert aws_secret_access_key == secret_key_test", "def enforce(context, action, target, do_raise=True):\n \"\"\"\n ======================================================================================\n context = <xdrs.context.RequestContext object at 0x6dcf050>\n target = {'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_id': u'91d732b65831491d8bd952b3111e62dd'}\n action = xdrs:get_algorithms\n ======================================================================================\n \"\"\"\n init()\n \n credentials = context.to_dict()\n \"\"\"\n ======================================================================================\n credentials = {'project_name': u'admin', 'user_id': u'91d732b65831491d8bd952b3111e62dd', 'roles': [u'heat_stack_owner', u'_member_', u'admin'], 'timestamp': '2015-03-10T06:48:40.110653', 'auth_token': 
'MIIT9wYJKoZIhvcNAQcCoIIT6DCCE+QCAQExCTAHBgUrDgMCGjCCEk0GCSqGSIb3DQEHAaCCEj4EghI6eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNS0wMy0xMFQwNjo0ODozOS41MzU2NjEiLCAiZXhwaXJlcyI6ICIyMDE1LTAzLTEwVDA3OjQ4OjM5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogImFkbWluIHRlbmFudCIsICJlbmFibGVkIjogdHJ1ZSwgImlkIjogIjQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgIm5hbWUiOiAiYWRtaW4ifX0sICJzZXJ2aWNlQ2F0YWxvZyI6IFt7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjIvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMTZiMTVjYzVmZjUwNGNiODlmNTg2NjRlMjdhNjljNjkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGUiLCAibmFtZSI6ICJub3ZhIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgImlkIjogIjFiMjkzYTgxNjk2YjRiN2Y4OTZlYWQ0NjIyYTFjMmExIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6OTY5Ni8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibmV0d29yayIsICJuYW1lIjogIm5ldXRyb24ifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhNzY3OWNjZTdkZjRhY2ZhMTZiM2NhNTJkZGNmYzgyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlcnYyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NC92MyIsICJpZCI6ICIwYmIxZDFiODhhZmU0MGRhOTNiY2IxNTg0Y2ExN2ZiOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZXYzIiwgIm5hbWUiOiAibm92YXYzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MCIsICJpZCI6ICIxZTMyZTE3MmU3OWM0YzVhYTZiNWM3ZjhkNzVhZjRmYiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiczMiLCAibmFtZSI6ICJzd2lmdF9zMyJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjkyOTIiLCAiaWQiOiAiM2QxYzc5MjY1MWEwNDljNWE2MWUzNWJmZWZjNGM4OGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImltYWdlIiwgIm5hbWUiOiAiZ2xhbmNlIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzciLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NyIsICJpZCI6ICIzOWE0YzA2NDIzYTg0OTNjOTI4ZGExOGY0YTVjY2MxZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzcifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibWV0ZXJpbmciLCAibmFtZSI6ICJjZWlsb21ldGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgImlkIjogIjU1NzBiOGY4MTE0OTRlMWI5NTVkYjZlNTAzZGYyYWZkIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwMC92MS8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY2xvdWRmb3JtYX
Rpb24iLCAibmFtZSI6ICJoZWF0LWNmbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzYvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMGExYzhkYTRmMTU2NDk1YWFkMjEzMGUyYzA2OTE5ODIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZSIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiMDMzZjY3ZTk1MDBjNDljYThmOGIxODkzZTJhN2VkYWYiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0Nsb3VkIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImVjMiIsICJuYW1lIjogIm5vdmFfZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwNC92MS80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJpZCI6ICI0YmViNjQ0MjUzYWU0NzdmOWU5NDk2ZWVkZDEwOTNhNSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC8iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhMTA2MzU0MjYxMDQzMjk5YTVkMjQ3ZTVmMjU5NGQyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogIm9iamVjdC1zdG9yZSIsICJuYW1lIjogInN3aWZ0In0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIiwgImlkIjogIjVjNGVlN2MzMTE4NDQyNGM5NDJhMWM1MjgxODU3MmZiIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImlkZW50aXR5IiwgIm5hbWUiOiAia2V5c3RvbmUifV0sICJ1c2VyIjogeyJ1c2VybmFtZSI6ICJhZG1pbiIsICJyb2xlc19saW5rcyI6IFtdLCAiaWQiOiAiOTFkNzMyYjY1ODMxNDkxZDhiZDk1MmIzMTExZTYyZGQiLCAicm9sZXMiOiBbeyJuYW1lIjogImhlYXRfc3RhY2tfb3duZXIifSwgeyJuYW1lIjogIl9tZW1iZXJfIn0sIHsibmFtZSI6ICJhZG1pbiJ9XSwgIm5hbWUiOiAiYWRtaW4ifSwgIm1ldGFkYXRhIjogeyJpc19hZG1pbiI6IDAsICJyb2xlcyI6IFsiZDlmZGVlODI1NjE3NGJlNWE3MmFjZGZmNDNkM2VkZDMiLCAiOWZlMmZmOWVlNDM4NGIxODk0YTkwODc4ZDNlOTJiYWIiLCAiN2E1ZTg5MmFiYTE5NDI3NWI3ZjQxZWM4Njg2ZDUwOGYiXX19fTGCAYEwggF9AgEBMFwwVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVVuc2V0MQ4wDAYDVQQHDAVVbnNldDEOMAwGA1UECgwFVW5zZXQxGDAWBgNVBAMMD3d3dy5leGFtcGxlLmNvbQIBATAHBgUrDgMCGjANBgkqhkiG9w0BAQEFAASCAQBkwVlwVgYM+mCIXICViGPgW+AZ--Y3NfWjW92GTBqW4keVrPosYxz--b2SVSGqwOHI1xFPqIx1+fzBCcilE5rIuJ3gxAc2VEWl4whMkriqWo6M8YY+GxGJ07h1NZ3Jc9Mrk7RTWPwU9YPilWPSU9sRx4bv+y7XpL8EIEvi+0dvHKgGI+nvqEYVFIf1vYQN5bvSnAgC1rZ9oB0M4Pg1wd47xQcenZL+XOWb8uxUReAvT-lfjXav7DhwUzPgmlY2XpN+9yfhAXAFF0GkokwjncvC5YTILOa41eMUg8ip47+rijNpQ2FuxVpRhQ-xL9it8+vAYkGLqe7eaQylsf0Nu6JJ', 'remote_address': '172.21.7.40', 'quota_class': None, 'is_admin': True, 'tenant': u'4537aca4a4a4462fa4c59ad5b5581f00', 'service_catalog': [{u'endpoints_links': [], u'endpoints': [{u'adminURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'region': u'RegionOne', 
u'publicURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'id': u'0a1c8da4f156495aad2130e2c0691982', u'internalURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00'}], u'type': u'volume', u'name': u'cinder'}], 'request_id': 'req-c0439276-3600-49cb-8de5-680b3f7d735c', 'instance_lock_checked': False, 'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_name': u'admin', 'read_deleted': 'no', 'user': u'91d732b65831491d8bd952b3111e62dd'}\n ======================================================================================\n \"\"\"\n\n # Add the exception arguments if asked to do a raise\n extra = {}\n if do_raise:\n extra.update(exc=exception.PolicyNotAuthorized, action=action)\n\n \"\"\"\n ======================================================================================\n action = xdrs:get_algorithms\n target = <xdrs.objects.instance.Instance object at 0x62b4a50>\n credentials = {'project_name': u'admin', 'user_id': u'91d732b65831491d8bd952b3111e62dd', 'roles': [u'heat_stack_owner', u'_member_', u'admin'], 'timestamp': '2015-03-10T06:48:40.110653', 'auth_token': 'MIIT9wYJKoZIhvcNAQcCoIIT6DCCE+QCAQExCTAHBgUrDgMCGjCCEk0GCSqGSIb3DQEHAaCCEj4EghI6eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNS0wMy0xMFQwNjo0ODozOS41MzU2NjEiLCAiZXhwaXJlcyI6ICIyMDE1LTAzLTEwVDA3OjQ4OjM5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogImFkbWluIHRlbmFudCIsICJlbmFibGVkIjogdHJ1ZSwgImlkIjogIjQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgIm5hbWUiOiAiYWRtaW4ifX0sICJzZXJ2aWNlQ2F0YWxvZyI6IFt7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjIvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMTZiMTVjYzVmZjUwNGNiODlmNTg2NjRlMjdhNjljNjkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGUiLCAibmFtZSI6ICJub3ZhIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgImlkIjogIjFiMjkzYTgxNjk2YjRiN2Y4OTZlYWQ0NjIyYTFjMmExIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6OTY5Ni8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibmV0d29yayIsICJuYW1lIjogIm5ldXRyb24ifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhNzY3OWNjZTdkZjRhY2ZhMTZiM2NhNTJkZGNmYzgyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlcnYyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NC92MyIsICJpZCI6ICIwYmIxZDFiODhhZmU0MGRhOTNiY2IxNTg0Y2ExN2ZiOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZXYzIiwgIm5hbWUiOiAibm92YXYzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MCIsICJpZCI6ICIxZTMyZTE3MmU3OWM0YzVhYTZiNWM3ZjhkNzVhZjRmYiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAifV0sICJlbmRwb2l
udHNfbGlua3MiOiBbXSwgInR5cGUiOiAiczMiLCAibmFtZSI6ICJzd2lmdF9zMyJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjkyOTIiLCAiaWQiOiAiM2QxYzc5MjY1MWEwNDljNWE2MWUzNWJmZWZjNGM4OGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImltYWdlIiwgIm5hbWUiOiAiZ2xhbmNlIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzciLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NyIsICJpZCI6ICIzOWE0YzA2NDIzYTg0OTNjOTI4ZGExOGY0YTVjY2MxZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzcifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibWV0ZXJpbmciLCAibmFtZSI6ICJjZWlsb21ldGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgImlkIjogIjU1NzBiOGY4MTE0OTRlMWI5NTVkYjZlNTAzZGYyYWZkIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwMC92MS8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY2xvdWRmb3JtYXRpb24iLCAibmFtZSI6ICJoZWF0LWNmbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzYvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMGExYzhkYTRmMTU2NDk1YWFkMjEzMGUyYzA2OTE5ODIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZSIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiMDMzZjY3ZTk1MDBjNDljYThmOGIxODkzZTJhN2VkYWYiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0Nsb3VkIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImVjMiIsICJuYW1lIjogIm5vdmFfZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwNC92MS80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJpZCI6ICI0YmViNjQ0MjUzYWU0NzdmOWU5NDk2ZWVkZDEwOTNhNSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC8iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhMTA2MzU0MjYxMDQzMjk5YTVkMjQ3ZTVmMjU5NGQyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogIm9iamVjdC1zdG9yZSIsICJuYW1lIjogInN3aWZ0In0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIiwgImlkIjogIjVjNGVlN2MzMTE4NDQyNGM5NDJhMWM1MjgxODU3MmZiIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImlkZW50aXR5IiwgIm5hbWUiOiAia2V5c3RvbmUifV0sICJ1c2VyIjogeyJ1c2VybmFtZSI6ICJhZG1pbiIsICJyb2xlc19saW5rcyI6IFtdLCAiaWQiOiAiOTFkNzMyYjY1ODMxNDkxZDhiZDk1MmIzMTExZTYyZGQiLCAicm9sZXMiOiBbey
JuYW1lIjogImhlYXRfc3RhY2tfb3duZXIifSwgeyJuYW1lIjogIl9tZW1iZXJfIn0sIHsibmFtZSI6ICJhZG1pbiJ9XSwgIm5hbWUiOiAiYWRtaW4ifSwgIm1ldGFkYXRhIjogeyJpc19hZG1pbiI6IDAsICJyb2xlcyI6IFsiZDlmZGVlODI1NjE3NGJlNWE3MmFjZGZmNDNkM2VkZDMiLCAiOWZlMmZmOWVlNDM4NGIxODk0YTkwODc4ZDNlOTJiYWIiLCAiN2E1ZTg5MmFiYTE5NDI3NWI3ZjQxZWM4Njg2ZDUwOGYiXX19fTGCAYEwggF9AgEBMFwwVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVVuc2V0MQ4wDAYDVQQHDAVVbnNldDEOMAwGA1UECgwFVW5zZXQxGDAWBgNVBAMMD3d3dy5leGFtcGxlLmNvbQIBATAHBgUrDgMCGjANBgkqhkiG9w0BAQEFAASCAQBkwVlwVgYM+mCIXICViGPgW+AZ--Y3NfWjW92GTBqW4keVrPosYxz--b2SVSGqwOHI1xFPqIx1+fzBCcilE5rIuJ3gxAc2VEWl4whMkriqWo6M8YY+GxGJ07h1NZ3Jc9Mrk7RTWPwU9YPilWPSU9sRx4bv+y7XpL8EIEvi+0dvHKgGI+nvqEYVFIf1vYQN5bvSnAgC1rZ9oB0M4Pg1wd47xQcenZL+XOWb8uxUReAvT-lfjXav7DhwUzPgmlY2XpN+9yfhAXAFF0GkokwjncvC5YTILOa41eMUg8ip47+rijNpQ2FuxVpRhQ-xL9it8+vAYkGLqe7eaQylsf0Nu6JJ', 'remote_address': '172.21.7.40', 'quota_class': None, 'is_admin': True, 'tenant': u'4537aca4a4a4462fa4c59ad5b5581f00', 'service_catalog': [{u'endpoints_links': [], u'endpoints': [{u'adminURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'region': u'RegionOne', u'publicURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'id': u'0a1c8da4f156495aad2130e2c0691982', u'internalURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00'}], u'type': u'volume', u'name': u'cinder'}], 'request_id': 'req-c0439276-3600-49cb-8de5-680b3f7d735c', 'instance_lock_checked': False, 'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_name': u'admin', 'read_deleted': 'no', 'user': u'91d732b65831491d8bd952b3111e62dd'}\n extra = {'action': 'xdrs:get_algorithms', 'exc': <class 'xdrs.exception.PolicyNotAuthorized'>}\n ======================================================================================\n \"\"\"\n return policy.check(action, target, credentials, **extra)", "def test__put_actioned_by_into():\n user = User.precreate(202305160043, name = 'East')\n \n for input_value, defaults, expected_output in (\n (None, False, {}),\n (None, True, {'actioned_by_user': None}),\n (user, True, {'actioned_by_user': user.to_data(defaults = True, include_internals = True)}),\n ):\n data = put_actioned_by_into(input_value, {}, defaults)\n vampytest.assert_eq(data, expected_output)", "def snmpqosqos_error_api_ses_add_insession(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_error_api_ses_add_insession\n\t\texcept Exception as e:\n\t\t\traise e", "def test_is_ims_sub_activities(self):\r\n emaSession = ema_functions.emaLogin()\r\n session = {}\r\n session['emaSession'] = emaSession\r\n sub1 = class_ims_ema.sub('+353760000001')\r\n #test1 = sub1.subscriberCreate(session)\r\n test2 = sub1.subscriberGet(session)\r\n #test3 = sub1.subscriberDelete(session)\r\n test4 = sub1.subscriberGet(session)\r\n #self.assertTrue(test1.status_code == 200 and test2.status_code == 200 and test3.status_code == 200 and test4.status_code == 500)\r\n self.assertTrue(test2.status_code == 200 and test4.status_code == 500)", "def test_aws_service_api_keypair_delete(self):\n pass", "def authorize(self, action, author_id=None):\n if action not in CHANGE_TYPES:\n return False\n return True", "def _policy_action_validate(policy_domain, midToken, mc_id, assetId, action, application, returnKey):\n logger = logging.getLogger(LogDefaults.default_log_name)\n\n logger.debug(\"midToken:%s, mc_id:%s, assetId:%s, action:%s, app:%s\",\n midToken, mc_id, assetId, action, application)\n\n partial_event = None\n\n client_uuid = str(mc_id)\n assetId = str(assetId)\n\n policy_session_dict = 
MongoPolicySessionApi.get_policy_session(mc_id)\n\n # TODO IF client does not exist, PI should not exist. There is no need\n # TODO to check for client. Move to PI API file.\n if not policy_session_dict:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause='client does not exist'\n )\n return new_response, partial_event\n\n p_ingest_svc = PlibIngestSvc()\n resp_obj, asset_data = p_ingest_svc.lookup_by_assetId(assetId, midToken)\n json_response = resp_obj.json_body if resp_obj else dict()\n in_response = json_response.get('response', None)\n cause = in_response['cause'] if in_response else None\n\n if not resp_obj.success or resp_obj.http_status == HTTPStatus.NOT_FOUND:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause=cause\n )\n return new_response, partial_event\n\n filter_by = dict(\n action=action,\n application=application)\n found, pi_list = PolicyValidationApi._get_matching_policy_instances_v2(\n policy_domain, midToken, client_uuid, filter_by)\n\n logger.debug(\"get_matching_pis: filter_by:%s, found:%s pi_list:%s\",\n filter_by, found, pi_list)\n\n if not found:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause='no client policy for assetId'\n )\n return new_response, partial_event\n\n authorization_granted_list = list()\n authorization_denied_list = list()\n policy_names = dict()\n for pi in pi_list:\n pi_ok, new_response = PolicyValidationApi._pi_validate(\n pi, client_uuid, policy_session_dict)\n if pi_ok:\n authorization_granted_list.append(new_response)\n policy_names['granted'] = pi['name']\n else:\n authorization_denied_list.append(new_response)\n policy_names['denied'] = pi['name']\n # END: FOR PI\n is_granted = len(authorization_granted_list) > 0\n\n logger.debug(\"granted list=%s\", authorization_granted_list)\n logger.debug(\"denied list=%s\", authorization_denied_list)\n\n policy_name = policy_names['granted'] if is_granted else policy_names.get('denied', \"\")\n\n partial_event = partial(\n DDPolicyEventsWrapper.construct_event, policy=policy_name)\n\n if not is_granted:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n # FIXME: inconsistent with new_response format\n cause_list=authorization_denied_list\n )\n logger.debug(\"authorization denied: %s\", new_response)\n return new_response, partial_event\n\n # Key service lookup\n key_dict = None\n if returnKey:\n p_key_svc = PlibKeySvc()\n get_resp_obj = p_key_svc.lookup_by_assetId(assetId)\n keysvc_success = (\n get_resp_obj.success and\n get_resp_obj.json_body['status'] == HTTPStatus.OK)\n if not keysvc_success:\n new_response = dict(\n access='denied',\n uuid=client_uuid,\n cause=get_resp_obj.json_body\n )\n return new_response, partial_event\n key_dict = get_resp_obj.json_body['response']['key']\n\n # For now return head of authorization granted list if not empty\n new_response = dict(\n access='granted',\n uuid=client_uuid,\n cause=authorization_granted_list[0]['cause']\n )\n if returnKey:\n new_response['key'] = key_dict\n logger.debug(\"authorization granted: %s\", new_response)\n return new_response, partial_event", "def aws(ctx): # pylint: disable=unused-argument\n pass # pylint: disable=unnecessary-pass", "def test_ingress_returns_envelope_unchanged():\n plugin_instance = PluginVipCustomisation()\n assert plugin_instance.ingress('envelope', 'http_headers', 'operation') == ('envelope', 'http_headers')", "def assert_event_okay(event):\n assert event[\"client_id\"] == CLIENT_ID\n assert event[\"redirect_uri\"] == REDIRECT_URI\n assert 
event[\"response_type\"] == \"token\"", "def test_publish(self):\n target_arn = 'testing'\n supercuboid_key = 'acd123'\n message_id = '123456'\n receipt_handle = 'a1b2c3d4'\n message = serializer.encodeIngestMessage(supercuboid_key, message_id, receipt_handle)\n self.sns.publish(self.topic_arn, message)\n message = self.sns.subscribe(self.topic_arn)", "def isActionKey(event_string):\n\n actionKeys = [ \"Return\", \"Escape\", \"Tab\", \"BackSpace\", \"Delete\",\n \"Page_Up\", \"Page_Down\", \"Home\", \"End\" ]\n\n reply = event_string in actionKeys\n debug.println(debug.LEVEL_FINEST,\n \"orca.isActionKey: returning: %s\" % reply)\n return reply", "def test_same_start_events(sample_events, woodshop, caplog):\n caplog.set_level(logging.INFO)\n event1, event2 = sample_events.make_same_start_events()\n woodshop.start_event(event1)\n woodshop.start_event(event2)\n woodshop.log_conflicts(event2.start_time)\n woodshop.end_event(event1)\n woodshop.log_conflicts(event1.end_time)\n woodshop.end_event(event2)\n woodshop.log_conflicts(event2.end_time)\n assert len(caplog.messages) == 1\n message = caplog.messages[0]\n assert \"Schedule conflict: place='Woodshop'\" in message\n expected_conflict_times = \"Conflict(start_time='{}', end_time='{}',\".format(\n event1.start_time, event1.end_time)\n assert expected_conflict_times in message\n assert event1.meetup_id in message\n assert event2.meetup_id in message", "def test_session_is_accessed(self):\n response = self.client.get(\"/auth_processor_attr_access/\")\n self.assertContains(response, \"Session accessed\")", "def get_actions(self, request):\n actions = super().get_actions(request)\n if not settings.PUBLISHER_CODE:\n del actions['create_cwr']\n if 'delete_selected' in actions:\n del actions['delete_selected']\n return actions", "async def before_action(self, action, *args, **kwargs):\n return True", "def test_otoroshi_controllers_adminapi_events_controller_alert_events(self):\n pass", "def k8s_actions(config, underlay):\n return k8smanager.K8SManager(config, underlay)", "def check_keys_in_bundles(\n request: Request, policy: RequestPolicy, logger: Logger\n) -> None:\n if not policy.check_keys_match_ksk_operator_policy:\n logger.warning(\n \"KSR-POLICY-KEYS: Disabled by policy (check_keys_match_ksk_operator_policy)\"\n )\n return\n\n # Check the number of keys per bundle slot. 
The first and the last slot typically has two keys.\n if len(request.bundles) != len(policy.num_keys_per_bundle):\n raise KSR_POLICY_KEYS_Violation(\n f\"Can't check number of keys per bundle for a KSR with \"\n f\"{len(request.bundles)} bundles\"\n )\n for _idx in range(len(request.bundles)):\n _bundle = request.bundles[_idx]\n if len(_bundle.keys) != policy.num_keys_per_bundle[_idx]:\n _num_keys = len(_bundle.keys)\n _expected = policy.num_keys_per_bundle[_idx]\n raise KSR_POLICY_KEYS_Violation(\n f\"Bundle #{_idx + 1}/{_bundle.id} has {_num_keys} keys, not {_expected}\"\n )\n\n # Check the number of different key sets in a request.\n #\n # The standard is to have exactly three keys in the request (early,on-time,late),\n # but on some occasions a different number might be acceptable.\n # In ksr-root-2016-q3-fallback-1.xml, there were only two key sets.\n if policy.num_different_keys_in_all_bundles is not None:\n _keys = {}\n for _bundle in request.bundles:\n for _key in _bundle.keys:\n _keys[_key.key_identifier] = 1\n num_keys = len(_keys)\n\n if num_keys != 3:\n logger.warning(\n \"Request {} does not have three (early,on-time,late) key sets in it ({})\".format(\n request.id, num_keys\n )\n )\n if num_keys != policy.num_different_keys_in_all_bundles:\n raise KSR_POLICY_KEYS_Violation(\n f\"Unacceptable number of key sets in request {request.id}, \"\n f\"({num_keys} keys instead of {policy.num_different_keys_in_all_bundles})\"\n )\n\n logger.info(\n f\"KSR-POLICY-KEYS: Validated number of keys per bundle, and for all bundles\"\n )", "def test_claim_resources_success_evacuate_no_shared(self):\n # the source allocation is also held by the instance_uuid so report\n # client will see it.\n current_allocs = {\n 'allocations': {\n uuids.source_host: {\n 'generation': 42,\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n },\n \"consumer_generation\": 1,\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n self.ks_adap_mock.get.return_value = fake_requests.FakeResponse(\n status_code=200,\n content=jsonutils.dumps(current_allocs))\n put_allocations_resp_mock = fake_requests.FakeResponse(status_code=204)\n self.ks_adap_mock.put.return_value = put_allocations_resp_mock\n consumer_uuid = uuids.consumer_uuid\n # this is an evacuate so we have the same resources request towards the\n # dest host\n alloc_req = {\n 'allocations': {\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # this allocation request comes from the scheduler therefore it\n # does not have consumer_generation in it.\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n\n project_id = uuids.project_id\n user_id = uuids.user_id\n res = self.client.claim_resources(self.context, consumer_uuid,\n alloc_req, project_id, user_id,\n allocation_request_version='1.28')\n\n expected_url = \"/allocations/%s\" % consumer_uuid\n # we expect that both the source and dest allocations are here\n expected_payload = {\n 'allocations': {\n uuids.source_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # report client uses the consumer_generation that it got from\n # placement when asked for the existing allocations\n 'consumer_generation': 1,\n 'project_id': project_id,\n 'user_id': user_id}\n self.ks_adap_mock.put.assert_called_once_with(\n expected_url, microversion='1.28', json=mock.ANY,\n 
global_request_id=self.context.global_id)\n # We have to pull the json body from the mock call_args to validate\n # it separately otherwise hash seed issues get in the way.\n actual_payload = self.ks_adap_mock.put.call_args[1]['json']\n self.assertEqual(expected_payload, actual_payload)\n\n self.assertTrue(res)", "def test_read_namespaced_route_status(self):\n pass", "def lambda_handler(event, context):\n\n \"\"\"\n This statement prevents someone else from configuring a skill that sends \n requests to this function.\n \"\"\"\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])", "def test_no_abstract_syntax_match(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n msg = (\n r\"No presentation context for 'Verification SOP Class' has been \"\n r\"accepted by the peer for the SCU role\"\n )\n with pytest.raises(ValueError, match=msg):\n assoc.send_n_action(None, 1, Verification, None)\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def _ValidRequest(request):\n if not request.json:\n abort(400)\n sessId = request.json['sessionId']\n sessKey = request.json['sessionKey']\n # Check if it is active and correct key\n return database.SessionActive(sessId) and database.CorrectSessionKey(sessId, sessKey)", "def test_create_namespaced_policy(self):\n pass", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n 
self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def checksignals(self):\n if self.space.check_signal_action is not None:\n self.space.check_signal_action.perform(self, None)", "def test_create_policy_for_all_namespaces(self):\n pass", "def test_claim_resources_success_force_evacuate_no_shared(self):\n # the source allocation is also held by the instance_uuid so report\n # client will see it.\n current_allocs = {\n 'allocations': {\n uuids.source_host: {\n 'generation': 42,\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n },\n \"consumer_generation\": 1,\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n\n self.ks_adap_mock.get.return_value = fake_requests.FakeResponse(\n status_code=200,\n content=jsonutils.dumps(current_allocs))\n self.ks_adap_mock.put.return_value = fake_requests.FakeResponse(\n status_code=204)\n consumer_uuid = uuids.consumer_uuid\n # this is an evacuate so we have the same resources request towards the\n # dest host\n alloc_req = {\n 'allocations': {\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # this allocation request comes from the conductor that read the\n # allocation from placement therefore it has consumer_generation in\n # it.\n \"consumer_generation\": 1,\n \"project_id\": uuids.project_id,\n \"user_id\": uuids.user_id\n }\n\n project_id = uuids.project_id\n user_id = uuids.user_id\n res = self.client.claim_resources(self.context, consumer_uuid,\n alloc_req, project_id, user_id,\n allocation_request_version='1.28')\n\n expected_url = \"/allocations/%s\" % consumer_uuid\n # we expect that both the source and dest allocations are here\n expected_payload = {\n 'allocations': {\n uuids.source_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20\n },\n },\n uuids.dest_host: {\n 'resources': {\n 'VCPU': 1,\n 'MEMORY_MB': 1024,\n 'DISK_GB': 20,\n }\n },\n },\n # report client uses the consumer_generation that it got in the\n # allocation request\n 'consumer_generation': 1,\n 'project_id': project_id,\n 'user_id': user_id}\n self.ks_adap_mock.put.assert_called_once_with(\n expected_url, microversion='1.28', json=mock.ANY,\n global_request_id=self.context.global_id)\n # We have to pull the json body from the mock call_args to validate\n # it separately otherwise hash seed issues get in the way.\n actual_payload = self.ks_adap_mock.put.call_args[1]['json']\n self.assertEqual(expected_payload, actual_payload)\n\n self.assertTrue(res)", "def required_event_keys(cls):\n return {'app_type', 'destination_function_name', 'schedule_expression'}", "def test_400_enable_qos(self):\n if self._get_openstack_release() >= self.trusty_mitaka:\n unit = self.n_ovs_sentry\n set_default = {'enable-qos': 'False'}\n set_alternate = {'enable-qos': 'True'}\n self.d.configure('neutron-api', set_alternate)\n self._wait_and_check(sleep=60)\n qos_plugin = 'qos'\n config = u._get_config(\n self.neutron_api_sentry, '/etc/neutron/neutron.conf')\n service_plugins = config.get(\n 'DEFAULT',\n 
'service_plugins').split(',')\n if qos_plugin not in service_plugins:\n message = \"{} not in service_plugins\".format(qos_plugin)\n amulet.raise_status(amulet.FAIL, msg=message)\n\n config = u._get_config(\n unit,\n '/etc/neutron/plugins/ml2/openvswitch_agent.ini')\n extensions = config.get('agent', 'extensions').split(',')\n if qos_plugin not in extensions:\n message = \"qos not in extensions\"\n amulet.raise_status(amulet.FAIL, msg=message)\n\n u.log.debug('Setting QoS back to {}'.format(\n set_default['enable-qos']))\n self.d.configure('neutron-api', set_default)\n self._wait_and_check()\n u.log.debug('OK')", "def __init__(self, session, assoc_type):\n super(AssociateRequest, self).__init__()\n self.session = session\n self.assoc_type = assoc_type\n self.namespace = OPENID2_NS", "def aws_elasticsearch_public_access_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n # ISO Time\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for response in describe_es_os_domains(cache, session):\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(response,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n esDomainName = response[\"DomainStatus\"][\"DomainName\"]\n esVersion = response[\"DomainStatus\"][\"ElasticsearchVersion\"]\n domainId = response[\"DomainStatus\"][\"DomainId\"]\n domainArn = response[\"DomainStatus\"][\"ARN\"]\n # Determine if ES has Cognito Enabled\n try:\n cognitoEnabledCheck = str(response[\"DomainStatus\"][\"CognitoOptions\"][\"Enabled\"])\n except KeyError:\n cognitoEnabledCheck = False\n # Determine if ES is in a VPC\n try:\n vpcId = str(response[\"VPCOptions\"][\"VPCId\"])\n except KeyError:\n vpcId = \"NO_VPC\"\n # Determine if there is a policy and then parse through it. 
If the \"AWS\": \"*\" principal is allowed (anonymous access) without\n # any conditions we can assume there is not anything else to stop them\n try:\n policyDoc = response[\"AccessPolicies\"]\n policyJson = json.loads(policyDoc.encode().decode(\"unicode_escape\"))\n hasPolicy = True\n for sid in policyJson[\"Statement\"]:\n try:\n conditionCheck = str(sid[\"Condition\"])\n hasCondition = True\n except:\n conditionCheck = \"\"\n hasCondition = False\n if str(sid[\"Principal\"]) == '{\"AWS\": \"*\"}' and hasCondition is False:\n policyAllowAnon = True\n else:\n policyAllowAnon = False\n except KeyError or ValueError:\n policyDoc = \"\"\n policyJson = \"NO_POLICY\"\n policyAllowAnon = \"NO_POLICY\"\n hasPolicy = False\n # Full Public Check\n if policyAllowAnon is True and vpcId == \"NO_VPC\" and cognitoEnabledCheck is False:\n fullPublic = True\n else:\n fullPublic = False\n # This is a failing check\n if fullPublic is True:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-public-access-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Data Exposure\"\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"CRITICAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.9] OpenSearch/AWS ElasticSearch Service domains should not be exposed to the public\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" is open to public due to not using a VPC, Cognito, or any additional conditions within the resource policy. Public access will allow malicious actors to attack the confidentiality, integrity or availability of documents indexed in your Domain. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information on protecting Domains with a Resource-based Policy refer to the Identity and Access Management in Amazon Elasticsearch Service section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-3\",\n \"NIST CSF V1.1 PR.AC-4\",\n \"NIST CSF V1.1 PR.DS-5\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 4 AC-4\",\n \"NIST SP 800-53 Rev. 4 AC-5\",\n \"NIST SP 800-53 Rev. 4 AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-14\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-17\",\n \"NIST SP 800-53 Rev. 
4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-20\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 PE-19\",\n \"NIST SP 800-53 Rev. 4 PS-3\",\n \"NIST SP 800-53 Rev. 4 PS-6\",\n \"NIST SP 800-53 Rev. 4 SC-7\",\n \"NIST SP 800-53 Rev. 4 SC-8\",\n \"NIST SP 800-53 Rev. 4 SC-13\",\n \"NIST SP 800-53 Rev. 4 SC-15\",\n \"NIST SP 800-53 Rev. 4 SC-31\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC6.3\",\n \"AICPA TSC CC6.6\",\n \"AICPA TSC CC7.2\",\n \"ISO 27001:2013 A.6.1.2\",\n \"ISO 27001:2013 A.6.2.1\",\n \"ISO 27001:2013 A.6.2.2\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.7.1.2\",\n \"ISO 27001:2013 A.7.3.1\",\n \"ISO 27001:2013 A.8.2.2\",\n \"ISO 27001:2013 A.8.2.3\",\n \"ISO 27001:2013 A.9.1.1\",\n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.2.3\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.10.1.1\",\n \"ISO 27001:2013 A.11.1.4\",\n \"ISO 27001:2013 A.11.1.5\",\n \"ISO 27001:2013 A.11.2.1\",\n \"ISO 27001:2013 A.11.2.6\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.13.1.3\",\n \"ISO 27001:2013 A.13.2.1\",\n \"ISO 27001:2013 A.13.2.3\",\n \"ISO 27001:2013 A.13.2.4\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"{domainArn}/elasticsearch-public-access-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": domainArn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Data Exposure\"\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"CRITICAL\"},\n \"Confidence\": 99,\n \"Title\": \"[OpenSearch.9] OpenSearch/AWS ElasticSearch Service domains should not be exposed to the public\",\n \"Description\": \"OpenSearch/AWS ElasticSearch Service domain \"\n + esDomainName\n + \" is not to the public due to using a VPC, Cognito, or any additional conditions within the resource policy.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For information on protecting Domains with a Resource-based Policy refer to the Identity and Access Management in Amazon Elasticsearch Service section of the Amazon Elasticsearch Service Developer Guide\",\n \"Url\": \"https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Analytics\",\n \"AssetService\": \"Amazon OpenSearch Service\",\n \"AssetComponent\": \"Search Domain\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsOpenSearchServiceDomain\",\n \"Id\": domainArn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsOpenSearchServiceDomain\": {\n \"Id\": domainId,\n \"DomainName\": esDomainName,\n \"EngineVersion\": esVersion\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.AC-3\",\n \"NIST CSF V1.1 PR.AC-4\",\n \"NIST CSF V1.1 PR.DS-5\",\n \"NIST SP 800-53 Rev. 4 AC-1\",\n \"NIST SP 800-53 Rev. 4 AC-2\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 AC-4\",\n \"NIST SP 800-53 Rev. 4 AC-5\",\n \"NIST SP 800-53 Rev. 4 AC-6\",\n \"NIST SP 800-53 Rev. 4 AC-14\",\n \"NIST SP 800-53 Rev. 4 AC-16\",\n \"NIST SP 800-53 Rev. 4 AC-17\",\n \"NIST SP 800-53 Rev. 4 AC-19\",\n \"NIST SP 800-53 Rev. 4 AC-20\",\n \"NIST SP 800-53 Rev. 4 AC-24\",\n \"NIST SP 800-53 Rev. 4 PE-19\",\n \"NIST SP 800-53 Rev. 4 PS-3\",\n \"NIST SP 800-53 Rev. 4 PS-6\",\n \"NIST SP 800-53 Rev. 4 SC-7\",\n \"NIST SP 800-53 Rev. 4 SC-8\",\n \"NIST SP 800-53 Rev. 4 SC-13\",\n \"NIST SP 800-53 Rev. 4 SC-15\",\n \"NIST SP 800-53 Rev. 4 SC-31\",\n \"NIST SP 800-53 Rev. 4 SI-4\",\n \"AICPA TSC CC6.3\",\n \"AICPA TSC CC6.6\",\n \"AICPA TSC CC7.2\",\n \"ISO 27001:2013 A.6.1.2\",\n \"ISO 27001:2013 A.6.2.1\",\n \"ISO 27001:2013 A.6.2.2\",\n \"ISO 27001:2013 A.7.1.1\",\n \"ISO 27001:2013 A.7.1.2\",\n \"ISO 27001:2013 A.7.3.1\",\n \"ISO 27001:2013 A.8.2.2\",\n \"ISO 27001:2013 A.8.2.3\",\n \"ISO 27001:2013 A.9.1.1\",\n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.2.3\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.10.1.1\",\n \"ISO 27001:2013 A.11.1.4\",\n \"ISO 27001:2013 A.11.1.5\",\n \"ISO 27001:2013 A.11.2.1\",\n \"ISO 27001:2013 A.11.2.6\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.13.1.3\",\n \"ISO 27001:2013 A.13.2.1\",\n \"ISO 27001:2013 A.13.2.3\",\n \"ISO 27001:2013 A.13.2.4\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_event_agents_responder_spaces(self):\n pass", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def test_intent_support(self):\n dispatcher = self.get_dispatcher()\n for intent in self.get_intents():\n self.assertIsNot(dispatcher(intent), None)", "def test_action_register_methods(self) -> None:\n with self.assertWarns(RemovedInReviewBoard70Warning,\n self.deprecation_message):\n foo_action = FooAction()\n foo_action.register()\n\n self.assertEqual(actions_registry.get('action_id', 'foo-action'),\n foo_action)\n\n foo_action.unregister()\n\n self.assertIsNone(actions_registry.get('action_id', 'foo-action'))", "def test_request_with_two_bundles(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n self.assertTrue(validate_request(request, self.policy))", "def testBaseActionRecording(self):\n\t x = BaseAction('x')\n\t self.failUnless(x.key== 'x')" ]
[ "0.57531", "0.5480236", "0.5119307", "0.5114083", "0.50098765", "0.49987218", "0.4811319", "0.47456703", "0.4734747", "0.4714206", "0.4702901", "0.4696541", "0.46635452", "0.46622923", "0.4657708", "0.46406722", "0.46395832", "0.45904428", "0.45893326", "0.45853838", "0.4572667", "0.4558702", "0.45453376", "0.45447615", "0.4521195", "0.4483556", "0.44694838", "0.44670665", "0.44603643", "0.4452167", "0.44451717", "0.44450128", "0.44397846", "0.44185594", "0.44146797", "0.4394273", "0.43873253", "0.4386701", "0.43793768", "0.43717283", "0.4364959", "0.43597272", "0.43537125", "0.4346646", "0.434305", "0.43422005", "0.4340143", "0.4329466", "0.4324512", "0.43243492", "0.4323182", "0.43230122", "0.43163487", "0.43156546", "0.4307443", "0.43034446", "0.42914584", "0.42894095", "0.42884248", "0.4287687", "0.42847902", "0.42767662", "0.4275489", "0.42616522", "0.4257452", "0.42550334", "0.42541048", "0.42527032", "0.4250743", "0.4246634", "0.42449567", "0.4227934", "0.42242768", "0.42183805", "0.42123997", "0.42115423", "0.42096862", "0.42054707", "0.42043543", "0.4200837", "0.4194638", "0.4191934", "0.41904068", "0.41896877", "0.41805887", "0.41755307", "0.41754675", "0.41747788", "0.41746464", "0.41719902", "0.41716933", "0.41704315", "0.4170046", "0.4170046", "0.4170046", "0.4170046", "0.41698414", "0.4167175", "0.41649237", "0.41634974" ]
0.55055636
1
Ensure that ses v1 and ses v2 actions are both present in the ses namespace
def test_services_with_multiple_pages_ses(self): # SES V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonses.html self.assertTrue("ses:PutIdentityPolicy" in self.all_actions) # SES V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonsimpleemailservicev2.html self.assertTrue("ses:ListImportJobs" in self.all_actions) results = get_actions_for_service("ses") actions = [ "ses:CloneReceiptRuleSet", "ses:CreateConfigurationSetTrackingOptions", "ses:CreateReceiptFilter", "ses:CreateReceiptRule", "ses:CreateReceiptRuleSet", "ses:CreateTemplate", "ses:DeleteConfigurationSetTrackingOptions", "ses:DeleteIdentity", "ses:DeleteIdentityPolicy", "ses:DeleteReceiptFilter", "ses:DeleteReceiptRule", "ses:DeleteReceiptRuleSet", "ses:DeleteTemplate", "ses:DeleteVerifiedEmailAddress", "ses:DescribeActiveReceiptRuleSet", "ses:DescribeConfigurationSet", "ses:DescribeReceiptRule", "ses:DescribeReceiptRuleSet", "ses:GetAccountSendingEnabled", "ses:GetIdentityDkimAttributes", "ses:GetIdentityMailFromDomainAttributes", "ses:GetIdentityNotificationAttributes", "ses:GetIdentityPolicies", "ses:GetIdentityVerificationAttributes", "ses:GetSendQuota", "ses:GetSendStatistics", "ses:GetTemplate", "ses:ListIdentities", "ses:ListIdentityPolicies", "ses:ListReceiptFilters", "ses:ListReceiptRuleSets", "ses:ListTemplates", "ses:ListVerifiedEmailAddresses", "ses:PutIdentityPolicy", "ses:ReorderReceiptRuleSet", "ses:SendBounce", "ses:SendBulkTemplatedEmail", "ses:SendRawEmail", "ses:SendTemplatedEmail", "ses:SetActiveReceiptRuleSet", "ses:SetIdentityDkimEnabled", "ses:SetIdentityFeedbackForwardingEnabled", "ses:SetIdentityHeadersInNotificationsEnabled", "ses:SetIdentityMailFromDomain", "ses:SetIdentityNotificationTopic", "ses:SetReceiptRulePosition", "ses:TestRenderTemplate", "ses:UpdateAccountSendingEnabled", "ses:UpdateConfigurationSetReputationMetricsEnabled", "ses:UpdateConfigurationSetSendingEnabled", "ses:UpdateConfigurationSetTrackingOptions", "ses:UpdateReceiptRule", "ses:UpdateTemplate", "ses:VerifyDomainDkim", "ses:VerifyDomainIdentity", "ses:VerifyEmailAddress", "ses:VerifyEmailIdentity", ] for action in actions: self.assertTrue(action in results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_gh_226_elasticloadbalancing_v1_and_v2(self):\n results = get_actions_for_service(\"elasticloadbalancing\")\n # print(json.dumps(results, indent=4))\n lb_v1_only_action = \"elasticloadbalancing:CreateTargetGroup\"\n lb_v2_only_action = \"elasticloadbalancing:SetSecurityGroups\"\n self.assertTrue(lb_v1_only_action in results)\n self.assertTrue(lb_v2_only_action in results)", "def test_subscriber_access_for_two_vsg_services(self):", "def test_subscriber_access_if_vsg2_goes_down(self):", "def test_kafka_action_names_overlap_issue(self):\n # Kafka actions used to be in two pages but are now one. This verifies the current state.\n # results = get_actions_for_service(\"kafka\")\n # print(results)\n actions = [\n \"kafka:BatchAssociateScramSecret\",\n \"kafka:BatchDisassociateScramSecret\",\n \"kafka:CreateClusterV2\",\n \"kafka:DeleteConfiguration\",\n \"kafka:DescribeClusterV2\",\n \"kafka:ListClustersV2\",\n \"kafka:ListConfigurationRevisions\",\n \"kafka:ListKafkaVersions\",\n \"kafka:ListScramSecrets\",\n \"kafka:RebootBroker\",\n \"kafka:UpdateBrokerType\",\n \"kafka:UpdateConfiguration\",\n \"kafka:UpdateConnectivity\",\n \"kafka:UpdateSecurity\"\n ]\n\n for action in actions:\n self.assertTrue(action in self.all_actions)", "def verifyActionCenterRts():\n pass", "def test_request_with_two_bundles(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n self.assertTrue(validate_request(request, self.policy))", "def test_subscriber_access_if_vsg1_goes_down(self):", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_services_with_multiple_pages_greengrass(self):\n # Greengrass V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiotgreengrass.html\n self.assertTrue(\"greengrass:CreateResourceDefinition\" in self.all_actions)\n # Greengrass V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiotgreengrassv2.html\n self.assertTrue(\"greengrass:CreateComponentVersion\" in self.all_actions)\n results = get_actions_for_service(\"greengrass\")\n actions = [\n \"greengrass:AssociateRoleToGroup\",\n \"greengrass:CreateConnectorDefinition\",\n \"greengrass:CreateConnectorDefinitionVersion\",\n \"greengrass:CreateCoreDefinition\",\n \"greengrass:CreateCoreDefinitionVersion\",\n \"greengrass:CreateDeviceDefinition\",\n \"greengrass:CreateDeviceDefinitionVersion\",\n \"greengrass:CreateFunctionDefinition\",\n \"greengrass:CreateFunctionDefinitionVersion\",\n \"greengrass:CreateGroup\",\n \"greengrass:CreateGroupCertificateAuthority\",\n \"greengrass:CreateGroupVersion\",\n \"greengrass:CreateLoggerDefinition\",\n \"greengrass:CreateLoggerDefinitionVersion\",\n \"greengrass:CreateResourceDefinition\",\n \"greengrass:CreateResourceDefinitionVersion\",\n \"greengrass:CreateSoftwareUpdateJob\",\n \"greengrass:CreateSubscriptionDefinition\",\n \"greengrass:CreateSubscriptionDefinitionVersion\",\n \"greengrass:DeleteConnectorDefinition\",\n \"greengrass:DeleteCoreDefinition\",\n \"greengrass:DeleteDeviceDefinition\",\n \"greengrass:DeleteFunctionDefinition\",\n \"greengrass:DeleteGroup\",\n \"greengrass:DeleteLoggerDefinition\",\n \"greengrass:DeleteResourceDefinition\",\n \"greengrass:DeleteSubscriptionDefinition\",\n \"greengrass:DisassociateRoleFromGroup\",\n \"greengrass:Discover\",\n \"greengrass:GetAssociatedRole\",\n \"greengrass:GetBulkDeploymentStatus\",\n \"greengrass:GetConnectorDefinition\",\n 
\"greengrass:GetConnectorDefinitionVersion\",\n \"greengrass:GetCoreDefinition\",\n \"greengrass:GetCoreDefinitionVersion\",\n \"greengrass:GetDeploymentStatus\",\n \"greengrass:GetDeviceDefinition\",\n \"greengrass:GetDeviceDefinitionVersion\",\n \"greengrass:GetFunctionDefinition\",\n \"greengrass:GetFunctionDefinitionVersion\",\n \"greengrass:GetGroup\",\n \"greengrass:GetGroupCertificateAuthority\",\n \"greengrass:GetGroupCertificateConfiguration\",\n \"greengrass:GetGroupVersion\",\n \"greengrass:GetLoggerDefinition\",\n \"greengrass:GetLoggerDefinitionVersion\",\n \"greengrass:GetResourceDefinition\",\n \"greengrass:GetResourceDefinitionVersion\",\n \"greengrass:GetSubscriptionDefinition\",\n \"greengrass:GetSubscriptionDefinitionVersion\",\n \"greengrass:GetThingRuntimeConfiguration\",\n \"greengrass:ListBulkDeploymentDetailedReports\",\n \"greengrass:ListBulkDeployments\",\n \"greengrass:ListConnectorDefinitionVersions\",\n \"greengrass:ListConnectorDefinitions\",\n \"greengrass:ListCoreDefinitionVersions\",\n \"greengrass:ListCoreDefinitions\",\n \"greengrass:ListDeviceDefinitionVersions\",\n \"greengrass:ListDeviceDefinitions\",\n \"greengrass:ListFunctionDefinitionVersions\",\n \"greengrass:ListFunctionDefinitions\",\n \"greengrass:ListGroupCertificateAuthorities\",\n \"greengrass:ListGroupVersions\",\n \"greengrass:ListGroups\",\n \"greengrass:ListLoggerDefinitionVersions\",\n \"greengrass:ListLoggerDefinitions\",\n \"greengrass:ListResourceDefinitionVersions\",\n \"greengrass:ListResourceDefinitions\",\n \"greengrass:ListSubscriptionDefinitionVersions\",\n \"greengrass:ListSubscriptionDefinitions\",\n \"greengrass:ResetDeployments\",\n \"greengrass:StartBulkDeployment\",\n \"greengrass:StopBulkDeployment\",\n \"greengrass:UpdateConnectorDefinition\",\n \"greengrass:UpdateCoreDefinition\",\n \"greengrass:UpdateDeviceDefinition\",\n \"greengrass:UpdateFunctionDefinition\",\n \"greengrass:UpdateGroup\",\n \"greengrass:UpdateGroupCertificateConfiguration\",\n \"greengrass:UpdateLoggerDefinition\",\n \"greengrass:UpdateResourceDefinition\",\n \"greengrass:UpdateSubscriptionDefinition\",\n \"greengrass:UpdateThingRuntimeConfiguration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # if action not in results:\n # print(action)", "def check_script(vouts):\n for vout in [v for v in vouts[::-1] if v['hex'].startswith('6a')]:\n verb = BlockchainSpider.decode_op_return(vout['hex'])\n action = Spoolverb.from_verb(verb).action\n if action in Spoolverb.supported_actions:\n return verb\n raise Exception(\"Invalid ascribe transaction\")", "def test_get_snsname_arn_auth_exception_handling(self, aws_res_mock):\n # local imports of code-under-test ensure moto has mocks\n # registered before any possible calls out to AWS\n from awstools.awstools import get_snsname_arn\n\n # create a mock SNS client that returns what we tell it to\n client = boto3.client('sns')\n stub = Stubber(client)\n stub.add_client_error('create_topic', service_error_code='AuthorizationError')\n stub.activate()\n\n\n # since firesim manager code doesn't take clients as method parameters\n # now we mock boto3.client to return our stubbed client\n with patch.object(boto3._get_default_session(), 'client', return_value=client) as mock_session:\n topic_arn = get_snsname_arn()\n\n stub.assert_no_pending_responses()\n topic_arn.should.be.none\n\n # TODO we could mock rootLogger.critical to capture it's calls and args and validate that we're seeing the correct \"nice\" message\n\n # make sure get_snsname_arn() 
actually called out to get a sns\n # client, otherwise we aren't testing what we think we are\n mock_session.assert_called_once_with('sns')\n\n aws_res_mock.assert_called_once()", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n \"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental 
Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def _setup_ses(self):\n print(\"\\n ** Setting up SES mocking\")\n ses = boto3.client('ses', region_name=\"us-east-1\")\n ses.verify_domain_identity(Domain='donatemates.com')\n #response = ses.verify_email_address(EmailAddress='[email protected]')", "def test_aws_service_api_validate_subscription_post(self):\n pass", "def test_parse_request_type_2a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.SESSION_INIT)", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def test_accepted(self):\n actions = signoff_actions(appversions={\"code\": \"fx1.0\"},\n locales={\"code\": \"de\"})\n actions = list(actions)\n eq_(len(actions), 1)\n so = Signoff.objects.get(action=actions[0][0])\n eq_(so.push.tip.shortrev, \"l10n de 0002\")\n eq_(so.locale.code, \"de\")\n eq_(so.action_set.count(), 2)", "def test_services_with_multiple_pages_aws_marketplace(self):\n # Overlap: AWS Marketplace, Marketplace Catalog, and AWS Marketplace Entitlement service, AWS Marketplace Image Building Service, AWS Marketplace 
Metering Service, AWS Marketplace Private Marketplace, and AWS Marketplace Procurement Systems\n # AWS Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplace.html\n self.assertTrue(\"aws-marketplace:AcceptAgreementApprovalRequest\" in self.all_actions)\n # AWS Marketplace Catalog: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacecatalog.html\n self.assertTrue(\"aws-marketplace:CancelChangeSet\" in self.all_actions)\n # AWS Marketplace Entitlement Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceentitlementservice.html\n self.assertTrue(\"aws-marketplace:GetEntitlements\" in self.all_actions)\n # AWS Marketplace Image Building Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceimagebuildingservice.html\n self.assertTrue(\"aws-marketplace:DescribeBuilds\" in self.all_actions)\n # AWS Marketplace Metering Service: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplacemeteringservice.html\n self.assertTrue(\"aws-marketplace:BatchMeterUsage\" in self.all_actions)\n # AWS Marketplace Private Marketplace: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprivatemarketplace.html\n self.assertTrue(\"aws-marketplace:AssociateProductsWithPrivateMarketplace\" in self.all_actions)\n # AWS Marketplace Procurement Systems: https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsmarketplaceprocurementsystemsintegration.html\n self.assertTrue(\"aws-marketplace:DescribeProcurementSystemConfiguration\" in self.all_actions)\n\n results = get_actions_for_service(\"aws-marketplace\")\n actions = [\n \"aws-marketplace:AcceptAgreementApprovalRequest\",\n \"aws-marketplace:BatchMeterUsage\",\n \"aws-marketplace:CancelAgreementRequest\",\n \"aws-marketplace:CancelChangeSet\",\n \"aws-marketplace:CompleteTask\",\n \"aws-marketplace:DescribeAgreement\",\n \"aws-marketplace:DescribeBuilds\",\n \"aws-marketplace:DescribeChangeSet\",\n \"aws-marketplace:DescribeEntity\",\n \"aws-marketplace:DescribeProcurementSystemConfiguration\",\n \"aws-marketplace:DescribeTask\",\n \"aws-marketplace:GetAgreementApprovalRequest\",\n \"aws-marketplace:GetAgreementRequest\",\n \"aws-marketplace:GetAgreementTerms\",\n \"aws-marketplace:GetEntitlements\",\n \"aws-marketplace:ListAgreementApprovalRequests\",\n \"aws-marketplace:ListAgreementRequests\",\n \"aws-marketplace:ListBuilds\",\n \"aws-marketplace:ListChangeSets\",\n \"aws-marketplace:ListEntities\",\n \"aws-marketplace:ListTasks\",\n \"aws-marketplace:MeterUsage\",\n \"aws-marketplace:PutProcurementSystemConfiguration\",\n \"aws-marketplace:RegisterUsage\",\n \"aws-marketplace:RejectAgreementApprovalRequest\",\n \"aws-marketplace:ResolveCustomer\",\n \"aws-marketplace:SearchAgreements\",\n \"aws-marketplace:StartBuild\",\n \"aws-marketplace:StartChangeSet\",\n \"aws-marketplace:Subscribe\",\n \"aws-marketplace:Unsubscribe\",\n \"aws-marketplace:UpdateAgreementApprovalRequest\",\n \"aws-marketplace:UpdateTask\",\n \"aws-marketplace:ViewSubscriptions\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_create_pod_security_policy_self_subject_review_for_all_namespaces(self):\n pass", "def test_replace_namespaced_route_status(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action_spaces(self):\n pass", "def _ensure_unit(self, 
target_access_string: int) -> Squonk2AgentRv:\n if not self.__org_record:\n msg: str = 'The Squonk2Org record does not match' \\\n ' the configured SQUONK2_ORG_UUID.' \\\n ' You cannot change the SQUONK2_ORG_UUID once it has been used'\n return Squonk2AgentRv(success=False, msg=msg)\n\n # Now we check and create a Squonk2Unit...\n unit_name_truncated, unit_name_full = self._build_unit_name(target_access_string)\n sq2_unit: Optional[Squonk2Unit] = Squonk2Unit.objects.filter(name=unit_name_full).first()\n if not sq2_unit:\n _LOGGER.info('No existing Squonk2Unit for \"%s\"', target_access_string)\n # Get the list of Units from Squonk.\n sq2a_rv: Squonk2AgentRv = self._get_or_create_unit(unit_name_truncated, unit_name_full)\n if not sq2a_rv.success:\n _LOGGER.error('Failed to create Unit \"%s\" (%s)', target_access_string, sq2a_rv.msg)\n return Squonk2AgentRv(success=False, msg=sq2a_rv.msg)\n\n unit_uuid: str = sq2a_rv.msg\n sq2_unit = Squonk2Unit(uuid=unit_uuid,\n name=unit_name_full,\n organisation_id=self.__org_record.id)\n sq2_unit.save()\n _LOGGER.info('Created Squonk2Unit %s \"%s\" (for \"%s\")',\n unit_uuid,\n unit_name_full,\n target_access_string)\n else:\n _LOGGER.debug('Squonk2Unit %s \"%s\" already exists (for \"%s\") - nothing to do',\n sq2_unit.uuid,\n unit_name_full,\n target_access_string)\n\n return Squonk2AgentRv(success=True, msg=sq2_unit)", "def has_action2(self, feature):\n return feature in self._action2", "def test_must_be_associated(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n assoc.release()\n assert assoc.is_released\n assert not assoc.is_established\n with pytest.raises(RuntimeError):\n assoc.send_n_action(None, None, None, None)\n\n scp.shutdown()", "def test_cmd_cs_subscription_bad_action(self):\n bad_action = 'blahblah'\n\n result = self.runner.invoke(cli, ['subscription', bad_action])\n assert f\"invalid choice: {bad_action}\" in result.output\n assert result.exception", "def legal_actions(self):\n raise NotImplementedError", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. 
It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def test_action_register_methods(self) -> None:\n with self.assertWarns(RemovedInReviewBoard70Warning,\n self.deprecation_message):\n foo_action = FooAction()\n foo_action.register()\n\n self.assertEqual(actions_registry.get('action_id', 'foo-action'),\n foo_action)\n\n foo_action.unregister()\n\n self.assertIsNone(actions_registry.get('action_id', 'foo-action'))", "def test_create_resource_access_review_for_all_namespaces(self):\n pass", "def test_login_2sa(self):\n dsm_7 = SynologyDSMMock(\n VALID_HOST,\n VALID_PORT,\n VALID_USER_2SA,\n VALID_PASSWORD,\n VALID_HTTPS,\n VALID_VERIFY_SSL,\n )\n dsm_7.dsm_version = 7\n with pytest.raises(SynologyDSMLogin2SARequiredException) as error:\n dsm_7.login()\n error_value = error.value.args[0]\n assert error_value[\"api\"] == \"SYNO.API.Auth\"\n assert error_value[\"code\"] == 403\n assert error_value[\"reason\"] == \"One time password not specified\"\n assert (\n error_value[\"details\"]\n == \"Two-step authentication required for account: valid_user_2sa\"\n )\n\n assert dsm_7.login(VALID_OTP)\n\n assert dsm_7._session_id == SESSION_ID\n assert dsm_7._syno_token == SYNO_TOKEN\n assert dsm_7._device_token == DEVICE_TOKEN\n assert dsm_7.device_token == DEVICE_TOKEN", "def _enforce(self, req, action):\n try:\n self.policy.enforce(req.context, action, {})\n except exception.Forbidden:\n raise HTTPForbidden()", "def test_services_with_multiple_pages_lex(self):\n # Lex V1: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlex.html\n self.assertTrue(\"lex:DeleteUtterances\" in self.all_actions)\n # Lex V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonlexv2.html\n self.assertTrue(\"lex:ListBotLocales\" in self.all_actions)\n results = get_actions_for_service(\"lex\")\n actions = [\n \"lex:CreateIntentVersion\",\n \"lex:CreateSlotTypeVersion\",\n \"lex:DeleteBotChannelAssociation\",\n \"lex:DeleteIntentVersion\",\n \"lex:DeleteSlotTypeVersion\",\n \"lex:GetBot\",\n \"lex:GetBotAlias\",\n \"lex:GetBotAliases\",\n \"lex:GetBotChannelAssociation\",\n \"lex:GetBotChannelAssociations\",\n \"lex:GetBotVersions\",\n \"lex:GetBots\",\n 
\"lex:GetBuiltinIntent\",\n \"lex:GetBuiltinIntents\",\n \"lex:GetBuiltinSlotTypes\",\n \"lex:GetExport\",\n \"lex:GetImport\",\n \"lex:GetIntent\",\n \"lex:GetIntentVersions\",\n \"lex:GetIntents\",\n \"lex:GetMigration\",\n \"lex:GetMigrations\",\n \"lex:GetSlotType\",\n \"lex:GetSlotTypeVersions\",\n \"lex:GetSlotTypes\",\n \"lex:GetUtterancesView\",\n \"lex:PostContent\",\n \"lex:PostText\",\n \"lex:PutBot\",\n \"lex:PutBotAlias\",\n \"lex:PutIntent\",\n \"lex:PutSlotType\",\n \"lex:StartMigration\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_create_policy_for_all_namespaces(self):\n pass", "def can_send(self, s_params: SendParams) -> Squonk2AgentRv:\n assert s_params\n assert isinstance(s_params, SendParams)\n\n if _TEST_MODE:\n msg: str = 'Squonk2Agent is in TEST mode'\n _LOGGER.warning(msg)\n\n # Every public API **MUST** call ping().\n # This ensures Squonk2 is available and gets suitable API tokens...\n if not self.ping():\n msg = 'Squonk2 ping failed.'\\\n ' Are we configured properly and is Squonk2 alive?'\n _LOGGER.error(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n return self._verify_access(c_params=s_params.common)", "def test_index_response_descriptor_subscriptions_subscription_subscription_resource_spaces(self):\n pass", "def verifyActionCenterFirewall():\n pass", "def test_is_ims_sub_activities(self):\r\n emaSession = ema_functions.emaLogin()\r\n session = {}\r\n session['emaSession'] = emaSession\r\n sub1 = class_ims_ema.sub('+353760000001')\r\n #test1 = sub1.subscriberCreate(session)\r\n test2 = sub1.subscriberGet(session)\r\n #test3 = sub1.subscriberDelete(session)\r\n test4 = sub1.subscriberGet(session)\r\n #self.assertTrue(test1.status_code == 200 and test2.status_code == 200 and test3.status_code == 200 and test4.status_code == 500)\r\n self.assertTrue(test2.status_code == 200 and test4.status_code == 500)", "def _check_namespace_access(self, namespace, user):\n if not namespace.owners.filter(id=user.id).count():\n raise exceptions.PermissionDenied(\n 'The namespace listed on your filename must match one of '\n 'the namespaces you have access to.'\n )", "def test_parse_request_type_2c(self):\n self.test_auth_data['username'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_connect_post_namespaced_status_webhooks(self):\n pass", "def test_zsk_policy_no_bundle_overlap(self):\n signature_algorithm = self._make_signature_algorithm()\n request_policy = f\"\"\"\n <RequestPolicy>\n <ZSK>\n <PublishSafety>P10D</PublishSafety>\n <RetireSafety>P10D</RetireSafety>\n <MaxSignatureValidity>P21D</MaxSignatureValidity>\n <MinSignatureValidity>P21D</MinSignatureValidity>\n <MaxValidityOverlap>P12D</MaxValidityOverlap>\n <MinValidityOverlap>P9D</MinValidityOverlap>\n {signature_algorithm}\n </ZSK>\n </RequestPolicy>\n \"\"\"\n\n bundle1, bundle2, = self._get_two_bundles()\n xml = self._make_request(\n request_policy=request_policy, bundle1=bundle1, bundle2=bundle2\n )\n request = request_from_xml(xml)\n policy = replace(\n self.policy,\n check_bundle_intervals=False, # want to test against ZSK policy, not KSK policy\n check_cycle_length=False, # want to test against ZSK policy, not KSK policy\n )\n with self.assertRaises(KSR_POLICY_SIG_OVERLAP_Violation) as exc:\n validate_request(request, policy)\n self.assertEqual(\n 'Bundle \"test-2\" does not overlap with previous 
bundle \"test-1\" (2019-02-01 00:00:00+00:00 > '\n \"2019-01-22 00:00:00+00:00)\",\n str(exc.exception),\n )", "def test_distinct_sessions_auth_token(self):\n\n sess1 = None\n sess2 = None\n\n with self.app_sess1 as c:\n ret = c.get('/', headers={'X-Auth-Token': 'pretend_token'})\n sess1 = ret.data\n\n with self.app_sess2 as c:\n ret = c.get('/', headers={'X-Auth-Token': 'another_pretend_token'})\n sess2 = ret.data\n\n self.assertNotEqual(sess1, sess2)", "def test_ocsp():\n ocsp_pyasn1.OCSP_VALIDATION_CACHE = {} # reset the memory cache\n ocsp = ocsp_pyasn1.SnowflakeOCSP()\n for url in URLS:\n connection = _openssl_connect(url)\n assert ocsp.validate(url, connection), \\\n 'Failed to validate: {0}'.format(url)", "def test_no_abstract_syntax_match(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n msg = (\n r\"No presentation context for 'Verification SOP Class' has been \"\n r\"accepted by the peer for the SCU role\"\n )\n with pytest.raises(ValueError, match=msg):\n assoc.send_n_action(None, 1, Verification, None)\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def test_create_namespaced_resource_access_review(self):\n pass", "def test_create_namespaced_policy(self):\n pass", "def __init__(self, session, assoc_type):\n super(AssociateRequest, self).__init__()\n self.session = session\n self.assoc_type = assoc_type\n self.namespace = OPENID2_NS", "def test_replace_namespaced_policy(self):\n pass", "def test_create_subject_access_review_for_all_namespaces(self):\n pass", "def test_invalid_namespace(self):\r\n self.attempt_login(403, ns=\"http%3A%2F%2Fspecs.openid.net%2Fauth%2F2.0\")", "def test_create_role_for_all_namespaces(self):\n pass", "def enforce(context, action, target, do_raise=True):\n \"\"\"\n ======================================================================================\n context = <xdrs.context.RequestContext object at 0x6dcf050>\n target = {'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_id': u'91d732b65831491d8bd952b3111e62dd'}\n action = xdrs:get_algorithms\n ======================================================================================\n \"\"\"\n init()\n \n credentials = context.to_dict()\n \"\"\"\n ======================================================================================\n credentials = {'project_name': u'admin', 'user_id': u'91d732b65831491d8bd952b3111e62dd', 'roles': [u'heat_stack_owner', u'_member_', u'admin'], 'timestamp': '2015-03-10T06:48:40.110653', 'auth_token': 
'MIIT9wYJKoZIhvcNAQcCoIIT6DCCE+QCAQExCTAHBgUrDgMCGjCCEk0GCSqGSIb3DQEHAaCCEj4EghI6eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNS0wMy0xMFQwNjo0ODozOS41MzU2NjEiLCAiZXhwaXJlcyI6ICIyMDE1LTAzLTEwVDA3OjQ4OjM5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogImFkbWluIHRlbmFudCIsICJlbmFibGVkIjogdHJ1ZSwgImlkIjogIjQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgIm5hbWUiOiAiYWRtaW4ifX0sICJzZXJ2aWNlQ2F0YWxvZyI6IFt7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjIvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMTZiMTVjYzVmZjUwNGNiODlmNTg2NjRlMjdhNjljNjkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGUiLCAibmFtZSI6ICJub3ZhIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgImlkIjogIjFiMjkzYTgxNjk2YjRiN2Y4OTZlYWQ0NjIyYTFjMmExIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6OTY5Ni8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibmV0d29yayIsICJuYW1lIjogIm5ldXRyb24ifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhNzY3OWNjZTdkZjRhY2ZhMTZiM2NhNTJkZGNmYzgyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlcnYyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NC92MyIsICJpZCI6ICIwYmIxZDFiODhhZmU0MGRhOTNiY2IxNTg0Y2ExN2ZiOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZXYzIiwgIm5hbWUiOiAibm92YXYzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MCIsICJpZCI6ICIxZTMyZTE3MmU3OWM0YzVhYTZiNWM3ZjhkNzVhZjRmYiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiczMiLCAibmFtZSI6ICJzd2lmdF9zMyJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjkyOTIiLCAiaWQiOiAiM2QxYzc5MjY1MWEwNDljNWE2MWUzNWJmZWZjNGM4OGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImltYWdlIiwgIm5hbWUiOiAiZ2xhbmNlIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzciLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NyIsICJpZCI6ICIzOWE0YzA2NDIzYTg0OTNjOTI4ZGExOGY0YTVjY2MxZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzcifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibWV0ZXJpbmciLCAibmFtZSI6ICJjZWlsb21ldGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgImlkIjogIjU1NzBiOGY4MTE0OTRlMWI5NTVkYjZlNTAzZGYyYWZkIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwMC92MS8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY2xvdWRmb3JtYX
Rpb24iLCAibmFtZSI6ICJoZWF0LWNmbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzYvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMGExYzhkYTRmMTU2NDk1YWFkMjEzMGUyYzA2OTE5ODIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZSIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiMDMzZjY3ZTk1MDBjNDljYThmOGIxODkzZTJhN2VkYWYiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0Nsb3VkIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImVjMiIsICJuYW1lIjogIm5vdmFfZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwNC92MS80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJpZCI6ICI0YmViNjQ0MjUzYWU0NzdmOWU5NDk2ZWVkZDEwOTNhNSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC8iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhMTA2MzU0MjYxMDQzMjk5YTVkMjQ3ZTVmMjU5NGQyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogIm9iamVjdC1zdG9yZSIsICJuYW1lIjogInN3aWZ0In0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIiwgImlkIjogIjVjNGVlN2MzMTE4NDQyNGM5NDJhMWM1MjgxODU3MmZiIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImlkZW50aXR5IiwgIm5hbWUiOiAia2V5c3RvbmUifV0sICJ1c2VyIjogeyJ1c2VybmFtZSI6ICJhZG1pbiIsICJyb2xlc19saW5rcyI6IFtdLCAiaWQiOiAiOTFkNzMyYjY1ODMxNDkxZDhiZDk1MmIzMTExZTYyZGQiLCAicm9sZXMiOiBbeyJuYW1lIjogImhlYXRfc3RhY2tfb3duZXIifSwgeyJuYW1lIjogIl9tZW1iZXJfIn0sIHsibmFtZSI6ICJhZG1pbiJ9XSwgIm5hbWUiOiAiYWRtaW4ifSwgIm1ldGFkYXRhIjogeyJpc19hZG1pbiI6IDAsICJyb2xlcyI6IFsiZDlmZGVlODI1NjE3NGJlNWE3MmFjZGZmNDNkM2VkZDMiLCAiOWZlMmZmOWVlNDM4NGIxODk0YTkwODc4ZDNlOTJiYWIiLCAiN2E1ZTg5MmFiYTE5NDI3NWI3ZjQxZWM4Njg2ZDUwOGYiXX19fTGCAYEwggF9AgEBMFwwVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVVuc2V0MQ4wDAYDVQQHDAVVbnNldDEOMAwGA1UECgwFVW5zZXQxGDAWBgNVBAMMD3d3dy5leGFtcGxlLmNvbQIBATAHBgUrDgMCGjANBgkqhkiG9w0BAQEFAASCAQBkwVlwVgYM+mCIXICViGPgW+AZ--Y3NfWjW92GTBqW4keVrPosYxz--b2SVSGqwOHI1xFPqIx1+fzBCcilE5rIuJ3gxAc2VEWl4whMkriqWo6M8YY+GxGJ07h1NZ3Jc9Mrk7RTWPwU9YPilWPSU9sRx4bv+y7XpL8EIEvi+0dvHKgGI+nvqEYVFIf1vYQN5bvSnAgC1rZ9oB0M4Pg1wd47xQcenZL+XOWb8uxUReAvT-lfjXav7DhwUzPgmlY2XpN+9yfhAXAFF0GkokwjncvC5YTILOa41eMUg8ip47+rijNpQ2FuxVpRhQ-xL9it8+vAYkGLqe7eaQylsf0Nu6JJ', 'remote_address': '172.21.7.40', 'quota_class': None, 'is_admin': True, 'tenant': u'4537aca4a4a4462fa4c59ad5b5581f00', 'service_catalog': [{u'endpoints_links': [], u'endpoints': [{u'adminURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'region': u'RegionOne', 
u'publicURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'id': u'0a1c8da4f156495aad2130e2c0691982', u'internalURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00'}], u'type': u'volume', u'name': u'cinder'}], 'request_id': 'req-c0439276-3600-49cb-8de5-680b3f7d735c', 'instance_lock_checked': False, 'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_name': u'admin', 'read_deleted': 'no', 'user': u'91d732b65831491d8bd952b3111e62dd'}\n ======================================================================================\n \"\"\"\n\n # Add the exception arguments if asked to do a raise\n extra = {}\n if do_raise:\n extra.update(exc=exception.PolicyNotAuthorized, action=action)\n\n \"\"\"\n ======================================================================================\n action = xdrs:get_algorithms\n target = <xdrs.objects.instance.Instance object at 0x62b4a50>\n credentials = {'project_name': u'admin', 'user_id': u'91d732b65831491d8bd952b3111e62dd', 'roles': [u'heat_stack_owner', u'_member_', u'admin'], 'timestamp': '2015-03-10T06:48:40.110653', 'auth_token': 'MIIT9wYJKoZIhvcNAQcCoIIT6DCCE+QCAQExCTAHBgUrDgMCGjCCEk0GCSqGSIb3DQEHAaCCEj4EghI6eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNS0wMy0xMFQwNjo0ODozOS41MzU2NjEiLCAiZXhwaXJlcyI6ICIyMDE1LTAzLTEwVDA3OjQ4OjM5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogImFkbWluIHRlbmFudCIsICJlbmFibGVkIjogdHJ1ZSwgImlkIjogIjQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgIm5hbWUiOiAiYWRtaW4ifX0sICJzZXJ2aWNlQ2F0YWxvZyI6IFt7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjIvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMTZiMTVjYzVmZjUwNGNiODlmNTg2NjRlMjdhNjljNjkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc0L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGUiLCAibmFtZSI6ICJub3ZhIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjk2OTYvIiwgImlkIjogIjFiMjkzYTgxNjk2YjRiN2Y4OTZlYWQ0NjIyYTFjMmExIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6OTY5Ni8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibmV0d29yayIsICJuYW1lIjogIm5ldXRyb24ifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YyLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhNzY3OWNjZTdkZjRhY2ZhMTZiM2NhNTJkZGNmYzgyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3Ni92Mi80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlcnYyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NC92MyIsICJpZCI6ICIwYmIxZDFiODhhZmU0MGRhOTNiY2IxNTg0Y2ExN2ZiOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzQvdjMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZXYzIiwgIm5hbWUiOiAibm92YXYzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MCIsICJpZCI6ICIxZTMyZTE3MmU3OWM0YzVhYTZiNWM3ZjhkNzVhZjRmYiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwODAifV0sICJlbmRwb2l
udHNfbGlua3MiOiBbXSwgInR5cGUiOiAiczMiLCAibmFtZSI6ICJzd2lmdF9zMyJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjkyOTIiLCAiaWQiOiAiM2QxYzc5MjY1MWEwNDljNWE2MWUzNWJmZWZjNGM4OGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo5MjkyIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImltYWdlIiwgIm5hbWUiOiAiZ2xhbmNlIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzciLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODc3NyIsICJpZCI6ICIzOWE0YzA2NDIzYTg0OTNjOTI4ZGExOGY0YTVjY2MxZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzcifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAibWV0ZXJpbmciLCAibmFtZSI6ICJjZWlsb21ldGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDAvdjEvIiwgImlkIjogIjU1NzBiOGY4MTE0OTRlMWI5NTVkYjZlNTAzZGYyYWZkIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwMC92MS8ifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY2xvdWRmb3JtYXRpb24iLCAibmFtZSI6ICJoZWF0LWNmbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzYvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAiaWQiOiAiMGExYzhkYTRmMTU2NDk1YWFkMjEzMGUyYzA2OTE5ODIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4Nzc2L3YxLzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZSIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiMDMzZjY3ZTk1MDBjNDljYThmOGIxODkzZTJhN2VkYWYiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMjEuNy40MDo4NzczL3NlcnZpY2VzL0Nsb3VkIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImVjMiIsICJuYW1lIjogIm5vdmFfZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODAwNC92MS80NTM3YWNhNGE0YTQ0NjJmYTRjNTlhZDViNTU4MWYwMCIsICJpZCI6ICI0YmViNjQ0MjUzYWU0NzdmOWU5NDk2ZWVkZDEwOTNhNSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjgwMDQvdjEvNDUzN2FjYTRhNGE0NDYyZmE0YzU5YWQ1YjU1ODFmMDAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC8iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIiwgImlkIjogIjNhMTA2MzU0MjYxMDQzMjk5YTVkMjQ3ZTVmMjU5NGQyIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6ODA4MC92MS9BVVRIXzQ1MzdhY2E0YTRhNDQ2MmZhNGM1OWFkNWI1NTgxZjAwIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogIm9iamVjdC1zdG9yZSIsICJuYW1lIjogInN3aWZ0In0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4yMS43LjQwOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIiwgImlkIjogIjVjNGVlN2MzMTE4NDQyNGM5NDJhMWM1MjgxODU3MmZiIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjIxLjcuNDA6NTAwMC92Mi4wIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImlkZW50aXR5IiwgIm5hbWUiOiAia2V5c3RvbmUifV0sICJ1c2VyIjogeyJ1c2VybmFtZSI6ICJhZG1pbiIsICJyb2xlc19saW5rcyI6IFtdLCAiaWQiOiAiOTFkNzMyYjY1ODMxNDkxZDhiZDk1MmIzMTExZTYyZGQiLCAicm9sZXMiOiBbey
JuYW1lIjogImhlYXRfc3RhY2tfb3duZXIifSwgeyJuYW1lIjogIl9tZW1iZXJfIn0sIHsibmFtZSI6ICJhZG1pbiJ9XSwgIm5hbWUiOiAiYWRtaW4ifSwgIm1ldGFkYXRhIjogeyJpc19hZG1pbiI6IDAsICJyb2xlcyI6IFsiZDlmZGVlODI1NjE3NGJlNWE3MmFjZGZmNDNkM2VkZDMiLCAiOWZlMmZmOWVlNDM4NGIxODk0YTkwODc4ZDNlOTJiYWIiLCAiN2E1ZTg5MmFiYTE5NDI3NWI3ZjQxZWM4Njg2ZDUwOGYiXX19fTGCAYEwggF9AgEBMFwwVzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVVuc2V0MQ4wDAYDVQQHDAVVbnNldDEOMAwGA1UECgwFVW5zZXQxGDAWBgNVBAMMD3d3dy5leGFtcGxlLmNvbQIBATAHBgUrDgMCGjANBgkqhkiG9w0BAQEFAASCAQBkwVlwVgYM+mCIXICViGPgW+AZ--Y3NfWjW92GTBqW4keVrPosYxz--b2SVSGqwOHI1xFPqIx1+fzBCcilE5rIuJ3gxAc2VEWl4whMkriqWo6M8YY+GxGJ07h1NZ3Jc9Mrk7RTWPwU9YPilWPSU9sRx4bv+y7XpL8EIEvi+0dvHKgGI+nvqEYVFIf1vYQN5bvSnAgC1rZ9oB0M4Pg1wd47xQcenZL+XOWb8uxUReAvT-lfjXav7DhwUzPgmlY2XpN+9yfhAXAFF0GkokwjncvC5YTILOa41eMUg8ip47+rijNpQ2FuxVpRhQ-xL9it8+vAYkGLqe7eaQylsf0Nu6JJ', 'remote_address': '172.21.7.40', 'quota_class': None, 'is_admin': True, 'tenant': u'4537aca4a4a4462fa4c59ad5b5581f00', 'service_catalog': [{u'endpoints_links': [], u'endpoints': [{u'adminURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'region': u'RegionOne', u'publicURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00', u'id': u'0a1c8da4f156495aad2130e2c0691982', u'internalURL': u'http://172.21.7.40:8776/v1/4537aca4a4a4462fa4c59ad5b5581f00'}], u'type': u'volume', u'name': u'cinder'}], 'request_id': 'req-c0439276-3600-49cb-8de5-680b3f7d735c', 'instance_lock_checked': False, 'project_id': u'4537aca4a4a4462fa4c59ad5b5581f00', 'user_name': u'admin', 'read_deleted': 'no', 'user': u'91d732b65831491d8bd952b3111e62dd'}\n extra = {'action': 'xdrs:get_algorithms', 'exc': <class 'xdrs.exception.PolicyNotAuthorized'>}\n ======================================================================================\n \"\"\"\n return policy.check(action, target, credentials, **extra)", "def test_patch_namespaced_route_status(self):\n pass", "def test_bad_action(self):\r\n action = 'robot-not-an-action'\r\n url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {'identifiers': self.enrolled_student.email, 'action': action})\r\n self.assertEqual(response.status_code, 400)", "def test_create_pod_security_policy_subject_review_for_all_namespaces(self):\n pass", "def test_create_pod_security_policy_review_for_all_namespaces(self):\n pass", "def test_modify_response_descriptor_subscriptions_subscription_subscription_resource_spaces(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(self):\n pass", "def authorize(self, action, author_id=None):\n if action not in CHANGE_TYPES:\n return False\n return True", "def test_read_namespaced_route_status(self):\n pass", "def test_138_service_catalog(self):\n u.log.debug('Checking keystone service catalog...')\n self.set_api_version(2)\n endpoint_check = {\n 'adminURL': u.valid_url,\n 'id': u.not_null,\n 'region': 'RegionOne',\n 'publicURL': u.valid_url,\n 'internalURL': u.valid_url\n }\n expected = {\n 'volume': [endpoint_check],\n 'identity': [endpoint_check]\n }\n actual = self.keystone_v2.service_catalog.get_endpoints()\n\n ret = u.validate_svc_catalog_endpoint_data(expected, actual)\n if ret:\n amulet.raise_status(amulet.FAIL, msg=ret)", "def test_create_namespaced_pod_security_policy_self_subject_review(self):\n pass", "async def test_validate_session(api_client: TestClient, coresys: CoreSys):\n with 
patch(\"aiohttp.web_request.BaseRequest.__getitem__\", return_value=None):\n resp = await api_client.post(\n \"/ingress/validate_session\",\n json={\"session\": \"non-existing\"},\n )\n assert resp.status == 401\n\n with patch(\n \"aiohttp.web_request.BaseRequest.__getitem__\",\n return_value=coresys.homeassistant,\n ):\n resp = await api_client.post(\"/ingress/session\")\n result = await resp.json()\n\n assert \"session\" in result[\"data\"]\n session = result[\"data\"][\"session\"]\n assert session in coresys.ingress.sessions\n\n valid_time = coresys.ingress.sessions[session]\n\n resp = await api_client.post(\n \"/ingress/validate_session\",\n json={\"session\": session},\n )\n assert resp.status == 200\n assert await resp.json() == {\"result\": \"ok\", \"data\": {}}\n\n assert coresys.ingress.sessions[session] > valid_time", "def test_parse_request_type_2e(self):\n self.test_auth_data.pop('username')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_create_namespaced_pod_security_policy_review(self):\n pass", "def test_replace_namespaced_role(self):\n pass", "def _pre_flight_checks(self) -> Squonk2AgentRv:\n\n # If a Squonk2Org record exists its UUID cannot have changed.\n # We cannot change the organisation once deployed. The corresponding Units,\n # Products and Projects are organisation-specific. The Squonk2Org table\n # records the organisation ID and the Account Server URL where the ID\n # is valid. None of these values can change once deployed.\n\n assert self.__configuration_checked\n assert self.__configured\n\n if self.__org_record and self.__org_record.uuid != self.__CFG_SQUONK2_ORG_UUID:\n msg: str = f'Configured Squonk2 Organisation ({self.__CFG_SQUONK2_ORG_UUID})'\\\n f' does not match pre-existing record ({self.__org_record.uuid})'\n _LOGGER.error(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n # OK, so the ORG exists and its UUID has not changed.\n # Is it known to the configured AS?\n if not self._get_squonk2_owner_tokens():\n msg = 'Failed to get AS or DM token for organisation owner'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n _LOGGER.debug('Got Squonk2 API Access Tokens')\n\n # Get the ORG from the AS API.\n # If it knows the org the response will be successful,\n # and we'll also have the Org's name.\n as_o_rv = AsApi.get_organisation(self.__org_owner_as_token,\n org_id=self.__CFG_SQUONK2_ORG_UUID)\n if not as_o_rv.success:\n msg = 'Failed to get AS Organisation'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n # The org is known to the AS.\n # Get the AS API version (for reference)\n as_v_rv: AsApiRv = AsApi.get_version()\n if not as_v_rv.success:\n msg = 'Failed to get version from AS'\n _LOGGER.warning(msg)\n return Squonk2AgentRv(success=False, msg=msg)\n\n as_version: str = as_v_rv.msg['version']\n _LOGGER.debug('Happy with Squonk2 Account Server (as_version=%s)', as_version)\n\n # Everything seems to be OK but we might not have an organisation in this\n # call (it may be the first call of the instance lifetime).\n # So, if there's no Squonk2Org record, create one,\n # recording the ORG ID and the AS and version we used.\n if not self.__org_record:\n assert self.__CFG_SQUONK2_ASAPI_URL\n _LOGGER.info('Creating NEW Squonk2Org record for %s.'\n ' as-url=%s as-org=\"%s\" as-version=%s',\n self.__CFG_SQUONK2_ORG_UUID,\n 
self.__CFG_SQUONK2_ASAPI_URL,\n as_o_rv.msg['name'],\n as_version)\n self.__org_record = Squonk2Org(uuid=self.__CFG_SQUONK2_ORG_UUID,\n name=as_o_rv.msg['name'],\n as_url=self.__CFG_SQUONK2_ASAPI_URL,\n as_version=as_version)\n self.__org_record.save()\n _LOGGER.info('Created Squonk2Org record for %s',\n self.__CFG_SQUONK2_ORG_UUID)\n else:\n _LOGGER.debug('Squonk2Org for %s \"%s\" already exists - nothing to do',\n self.__org_record.uuid,\n self.__org_record.name)\n\n # Organisation is known to AS, and it hasn't changed.\n _LOGGER.debug('Successful pre-flight checks')\n return SuccessRv", "def test_session_auth_token(self):\n\n sess1 = None\n sess2 = None\n test_header = {'X-Auth-Token': 'pretend_token'}\n\n with self.app_sess1 as c:\n ret = c.get('/', headers=test_header)\n sess1 = ret.data\n\n with self.app_sess2 as c:\n ret = c.get('/', headers=test_header)\n sess2 = ret.data\n\n self.assertEqual(sess1, sess2)", "def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)", "def action_intersection(s1, s2):\n isect = s1 & s2\n L1 = [ ( (a.oid, a.index_oid), a) for a in s1 ]\n L2 = [ ( (a.oid, a.index_oid), a) for a in s2 ]\n ds1 = dict(L1)\n ds2 = dict(L2)\n for k1, action1 in ds1.items():\n action2 = ds2.get(k1)\n if action2 is not None:\n # replace action in union with correct one or conflict\n isect.add(which_action(action1, action2))\n return isect", "def check_snstopicpolicy_crossaccount(self, snsitem):\n #(region, account, arn, aws_object) = audit_object\n #\"Principal\": { \"AWS\": \"*\" }\n # \"AWS\": \"arn:aws:iam::027213240437:root\"\n policy = snsitem.config.get('SNSPolicy', {})\n for statement in policy.get(\"Statement\", []):\n account_numbers = []\n account_number = ''\n princ_aws = statement.get(\"Principal\", {}) \\\n .get(\"AWS\", \"error\")\n if princ_aws == \"*\":\n account_number = statement.get(\"Condition\", {}) \\\n .get(\"StringEquals\", {}) \\\n .get(\"AWS:SourceOwner\", None)\n if not account_number:\n tag = \"SNS Topic open to everyone\"\n notes = \"An SNS policy where { 'Principal': { 'AWS': '*' } } must also have\"\n notes += \" a {'Condition': {'StringEquals': { 'AWS:SourceOwner': '<ACCOUNT_NUMBER>' } } }\"\n notes += \" or it is open to the world. 
In this case, anyone is allowed to perform \"\n notes += \" this action(s): {}\".format(statement.get(\"Action\"))\n self.add_issue(10, tag, snsitem, notes=notes)\n continue\n else:\n try:\n account_numbers.append(str(account_number))\n except ValueError:\n raise InvalidSourceOwner(account_number)\n else:\n if isinstance(princ_aws, list):\n for entry in princ_aws:\n account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', entry).group(1)))\n else:\n try:\n account_numbers.append(str(re.search('arn:aws:iam::([0-9-]+):', princ_aws).group(1)))\n except:\n import json\n print json.dumps(snsitem.config, indent=4)\n raise InvalidARN(princ_aws)\n\n for account_number in account_numbers:\n account = Account.query.filter(Account.number == account_number).first()\n account_name = None\n if account is not None:\n account_name = account.name\n\n if not account_name:\n tag = \"Unknown Cross Account Access\"\n notes = \"from {} to {}\".format(account_number, snsitem.account)\n self.add_issue(10, tag, snsitem, notes=notes)\n elif account_name != snsitem.account:\n tag = \"Friendly Cross Account Access\"\n notes = \"from {} to {}\".format(account_name, snsitem.account)\n self.add_issue(0, tag, snsitem, notes=notes)", "def test_upgradeOid(self):\n self.assertTrue(\n _hasExplicitOid(self.store, 'item_axiom_storeid_dummy_v2'))\n self.assertFalse(\n _hasExplicitOid(self.store, 'item_axiom_storeid_dummy2_v1'))", "def test_parse_request_type_2f(self):\n self.test_auth_data.pop('user_secret')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_services_with_multiple_pages_kinesis_analytics(self):\n # Kinesis Analytics V1\n results = get_actions_for_service(\"kinesisanalytics\")\n actions = [\n \"kinesisanalytics:GetApplicationState\", # Only in v1, not v2\n \"kinesisanalytics:ListApplications\", # In both\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_create_route_for_all_namespaces(self):\n pass", "def test_validate_put_existing(client):\n response = client.put(\n '/user/1',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n 'email': '[email protected]',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def test_patch_namespaced_policy(self):\n pass", "def snmpqosqos_error_api_ses_add_insession(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_error_api_ses_add_insession\n\t\texcept Exception as e:\n\t\t\traise e", "def test_create_namespaced_pod_security_policy_subject_review(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self):\n pass", "def test_existing_session_auth_token(self):\n\n test_header = {'X-Auth-Token': 'pretend_token'}\n\n with self.app_sess1 as c:\n ret1 = c.get('/', headers=test_header)\n ret2 = c.get('/', headers=test_header)\n self.assertEqual(ret1.data, ret2.data)", "def test_read_namespaced_policy(self):\n pass", "def test_session_is_accessed(self):\n response = self.client.get(\"/auth_processor_attr_access/\")\n self.assertContains(response, \"Session accessed\")", "def validate(self, namespace):\n pass", "def test_multiple_aclhooks_2(self):\n self._test_hook_approval_sequence([True, None], True)", "def test_services_with_multiple_pages_apigateway(self):\n # API Gateway Management V1: 
https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html\n self.assertTrue(\"apigateway:AddCertificateToDomain\" in self.all_actions)\n self.assertTrue(\"apigateway:RemoveCertificateFromDomain\" in self.all_actions)\n self.assertTrue(\"apigateway:SetWebACL\" in self.all_actions)\n # API Gateway Management V2: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonapigatewaymanagement.html\n # API Gateway V2 doesn't have any unique actions in but it does have some unique resource types. Let's make sure those resource types are in the IAM Definition.\n # Resource types unique to API Gateway V2:\n resource_types = get_arn_types_for_service(\"apigateway\")\n resource_types = list(resource_types.keys())\n self.assertTrue(\"AccessLogSettings\" in resource_types)\n # Resource types unique to API Gateway V1:\n self.assertTrue(\"RestApi\" in resource_types)", "def test_lti20_rest_good_dispatch(self):\r\n for ginput, expected in self.GOOD_DISPATCH_INPUTS:\r\n self.assertEquals(self.xmodule.parse_lti_2_0_handler_suffix(ginput), expected)", "def test_patch_namespaced_role(self):\n pass", "def SoapAction(self) -> str:", "def rule_40_can_create_sg(session):\n\n def try_create(session, side):\n res, conn_vpc = session[\"config\"][side][\"res\"], session[\"conn\"][side](\"vpc\")\n subnet = conn_vpc.get_all_subnets([res[\"subnet_id\"]])[0]\n\n try:\n conn_vpc.create_security_group(\n \"foo\", \"bar\", vpc_id = subnet.vpc_id, dry_run = True)\n except EC2ResponseError as e:\n if 412 != e.status:\n raise e\n\n try_create(session, \"server\")\n try_create(session, \"client\")\n\n return True", "def _assert_has_spc(subcase, fem):\n if 'SPC' not in subcase:\n has_ps = False\n for unused_nid, node in fem.nodes.items():\n if node.ps:\n has_ps = True\n break\n assert subcase.has_parameter('SPC', 'STATSUB') or has_ps, subcase", "def test_arg_conflict(self):\n oim = OIM()\n rc, _, _, msg = oim.request('--hostname', 'test.' + DOMAIN, '--csr', 'foo')\n self.assertEqual(rc, 2, \"CSR and hostname options do not conflict\\n%s\" % msg)", "def test_parse_request_type_2b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=False)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def snmpqosqos_error_api_ses_notready(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_error_api_ses_notready\n\t\texcept Exception as e:\n\t\t\traise e", "def test_wrong_number_of_keys_in_a_bundle(self):\n xml = self._make_request()\n request = request_from_xml(xml)\n policy = replace(self.policy, num_keys_per_bundle=[2, 1])\n with self.assertRaises(KSR_POLICY_KEYS_Violation) as exc:\n validate_request(request, policy)\n self.assertEqual(\"Bundle #1/test-1 has 1 keys, not 2\", str(exc.exception))", "def test_create_namespaced_role(self):\n pass", "def test_dashboards_v2_request_access(self):\n pass", "def sid_name_error(sid_name):\n if sid_name in cilKeyReference.sid_soi_reference:\n pass\n else:\n raise Exception(\"No reference for \\\"{0}\\\" in cilKeyReference.py\".format(sid_name))" ]
[ "0.5713234", "0.550883", "0.5373594", "0.53643465", "0.5290291", "0.5130985", "0.5121447", "0.48156258", "0.479505", "0.479243", "0.47852328", "0.47827813", "0.47478974", "0.4735454", "0.4725077", "0.47070414", "0.46997523", "0.4678125", "0.46708792", "0.4640849", "0.46401328", "0.46335325", "0.461904", "0.46109438", "0.46084777", "0.46025914", "0.45958033", "0.4581516", "0.4574944", "0.4572893", "0.4571244", "0.4569181", "0.45522052", "0.45485064", "0.45396549", "0.45382023", "0.45360342", "0.45348367", "0.45242077", "0.4524122", "0.4520972", "0.45157218", "0.4515175", "0.4515053", "0.45144778", "0.45142114", "0.45109636", "0.45073193", "0.45032963", "0.4500341", "0.44984403", "0.4498234", "0.44967014", "0.44915447", "0.4487537", "0.448513", "0.44759402", "0.44734642", "0.44733155", "0.4470322", "0.4467106", "0.44609404", "0.44608784", "0.4455521", "0.4455273", "0.4452911", "0.44512707", "0.44490543", "0.44454867", "0.44399315", "0.4435161", "0.44349304", "0.44286993", "0.44283676", "0.44280064", "0.44279578", "0.4427285", "0.4424463", "0.44168118", "0.44160232", "0.4415727", "0.4413482", "0.44121698", "0.4410811", "0.44085854", "0.44081604", "0.44036204", "0.4403395", "0.4402633", "0.44004494", "0.43925285", "0.43892306", "0.43807268", "0.43776274", "0.43773997", "0.43732035", "0.43730608", "0.43711087", "0.43659937", "0.43600887" ]
0.62560695
0
Other missing actions from GH 393
def test_other_iam_data_fixes_in_GH_393(self):
        # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html
        results = get_actions_for_service("cassandra")
        self.assertTrue("cassandra:Restore" in results)
        # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html
        results = get_actions_for_service("comprehendmedical")
        # print(results)
        actions = [
            "comprehendmedical:DescribeEntitiesDetectionV2Job",
            "comprehendmedical:DescribeICD10CMInferenceJob",
            "comprehendmedical:DescribePHIDetectionJob",
            "comprehendmedical:DescribeRxNormInferenceJob",
            # "comprehendmedical:DescribeSNOMEDCTInferenceJob",  # Not in SAR
            "comprehendmedical:DetectEntitiesV2",
            "comprehendmedical:InferICD10CM",
            "comprehendmedical:InferRxNorm",
            # "comprehendmedical:InferSNOMEDCT",  # Not in SAR
            "comprehendmedical:ListEntitiesDetectionV2Jobs",
            "comprehendmedical:ListICD10CMInferenceJobs",
            "comprehendmedical:ListPHIDetectionJobs",
            "comprehendmedical:ListRxNormInferenceJobs",
            # "comprehendmedical:ListSNOMEDCTInferenceJobs",  # Not in SAR
            "comprehendmedical:StartEntitiesDetectionV2Job",
            "comprehendmedical:StartICD10CMInferenceJob",
            "comprehendmedical:StartPHIDetectionJob",
            "comprehendmedical:StartRxNormInferenceJob",
            "comprehendmedical:StopEntitiesDetectionV2Job",
            "comprehendmedical:StopICD10CMInferenceJob",
        ]
        for action in actions:
            # if action not in results:
            #     print(action)
            self.assertTrue(action in results)
        # Compute Optimizer
        results = get_actions_for_service("compute-optimizer")
        actions = [
            "compute-optimizer:DeleteRecommendationPreferences",
            "compute-optimizer:ExportEBSVolumeRecommendations",
            "compute-optimizer:ExportLambdaFunctionRecommendations",
            "compute-optimizer:GetEffectiveRecommendationPreferences",
            "compute-optimizer:GetEnrollmentStatusesForOrganization",
            "compute-optimizer:GetLambdaFunctionRecommendations",
            "compute-optimizer:GetRecommendationPreferences",
            "compute-optimizer:PutRecommendationPreferences",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # DataSync
        results = get_actions_for_service("datasync")
        actions = [
            "datasync:UpdateLocationNfs",
            "datasync:UpdateLocationObjectStorage",
            "datasync:UpdateLocationSmb",
            "datasync:UpdateTaskExecution"
        ]
        for action in actions:
            self.assertTrue(action in results)
        # Account Management
        results = get_actions_for_service("account")
        actions = [
            "account:DeleteAlternateContact",
            "account:GetAlternateContact",
            "account:PutAlternateContact",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # AWS IAM Access Analyzer
        results = get_actions_for_service("access-analyzer")
        actions = [
            "access-analyzer:CancelPolicyGeneration",
            "access-analyzer:CreateAccessPreview",
            "access-analyzer:GetAccessPreview",
            "access-analyzer:GetGeneratedPolicy",
            "access-analyzer:ListAccessPreviewFindings",
            "access-analyzer:ListAccessPreviews",
            "access-analyzer:ListPolicyGenerations",
            "access-analyzer:StartPolicyGeneration",
            "access-analyzer:ValidatePolicy",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # Elemental Activations
        results = get_actions_for_service("elemental-activations")
        actions = [
            "elemental-activations:CompleteAccountRegistration",
            "elemental-activations:StartAccountRegistration"
        ]
        for action in actions:
            self.assertTrue(action in results)
        # OpenSearch
        results = get_actions_for_service("es")
        actions = [
            "es:DescribeDomainChangeProgress",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # Location
        results = get_actions_for_service("geo")
        actions = [
            "geo:CalculateRouteMatrix",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # Amazon Managed Grafana
        results = get_actions_for_service("grafana")
        actions = [
            "grafana:DescribeWorkspaceAuthentication",
            "grafana:UpdateWorkspaceAuthentication",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # EC2 Image Builder
        results = get_actions_for_service("imagebuilder")
        actions = [
            "imagebuilder:ImportVmImage",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # Timestream
        results = get_actions_for_service("timestream")
        actions = [
            "timestream:CreateScheduledQuery",
            "timestream:DeleteScheduledQuery",
            "timestream:DescribeScheduledQuery",
            "timestream:ExecuteScheduledQuery",
            "timestream:ListScheduledQueries",
            "timestream:UpdateScheduledQuery",
        ]
        for action in actions:
            self.assertTrue(action in results)
        # AWS Transfer Family
        results = get_actions_for_service("transfer")
        actions = [
            "transfer:CreateAccess",
            "transfer:CreateWorkflow",
            "transfer:DeleteAccess",
            "transfer:DeleteWorkflow",
            "transfer:DescribeAccess",
            "transfer:DescribeExecution",
            "transfer:DescribeWorkflow",
            "transfer:ListAccesses",
            "transfer:ListExecutions",
            "transfer:ListWorkflows",
            "transfer:SendWorkflowStepState",
            "transfer:UpdateAccess",
        ]
        for action in actions:
            self.assertTrue(action in results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def actions() -> None:\n pass", "def actions():\n pass", "def test_10_unsupported_actions(self):\n\n def __count_pulled_packages(pth):\n self.pkgrepo(\"list -F tsv -H -s {0}\".format(pth))\n return len(self.output.splitlines())\n\n def __check_errout(pfmri):\n s1 = \"invalid action in package {0}\".format(pfmri)\n s2 = \"Malformed action in package '{0}'\".format(pfmri)\n self.assert_(s1 in self.errout or s2 in self.errout,\n \"{0} not in error\".format(pfmri))\n\n def __empty_repo(uri, arg_string):\n if uri.startswith(\"http://\"):\n rurl = self.dcs[4].get_repo_url()\n self.pkgrepo(\"remove -s {0} '*'\".format(rurl))\n # Refresh the depot to get it to realize that\n # the catalog has changed.\n self.dcs[4].refresh()\n elif arg_string:\n portable.remove(uri)\n else:\n self.pkgrepo(\"remove -s {0} '*'\".format(uri))\n\n\n def __test_rec(duri, arg_string, pfmris):\n self.debug(\"\\n\\nNow pkgrecv'ing to {0}\".format(duri))\n\n # It's necessary to use the -D option below because\n # otherwise pkgrecv will fail because the manifest\n # doesn't validate.\n\n novalidate = \"-D manifest_validate=Never \"\n # Check that invalid action attributes don't cause\n # tracebacks.\n self.pkgrecv(self.durl1, novalidate +\n \"-d {0} {1} {2}\".format(duri, arg_string,\n \" \".join(pfmris)), exit=pkgdefs.EXIT_OOPS)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri), 0)\n if arg_string:\n portable.remove(duri)\n\n self.pkgrecv(self.rurl1, novalidate +\n \"-d {0} {1} {2}\".format(duri, arg_string,\n \" \".join(pfmris)), exit=pkgdefs.EXIT_OOPS)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri), 0)\n if arg_string:\n portable.remove(duri)\n\n # Check that other packages are retrieved and the exit\n # code reflects partial success.\n self.pkgrecv(self.durl1, novalidate +\n \"-d {0} {1} -m all-timestamps '*'\".format(\n duri, arg_string), exit=pkgdefs.EXIT_PARTIAL)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri),\n len(self.published) - len(pfmris))\n __empty_repo(duri, arg_string)\n\n self.pkgrecv(self.rurl1, novalidate +\n \"-d {0} {1} -m all-timestamps '*'\".format(\n duri, arg_string), exit=pkgdefs.EXIT_PARTIAL)\n for pfmri in pfmris:\n __check_errout(pfmri)\n self.assertEqual(__count_pulled_packages(duri),\n len(self.published) - len(pfmris))\n __empty_repo(duri, arg_string)\n\n self.rurl1 = self.dcs[1].get_repo_url()\n repo = self.dcs[1].get_repo()\n rd = repo.get_pub_rstore()\n pfmri = fmri.PkgFmri(self.published[4])\n mp = rd.manifest(pfmri)\n\n with open(mp, \"rb\") as fh:\n original_txt = fh.read()\n txt = original_txt.replace(\"type=require\", \"type=foo\")\n with open(mp, \"wb\") as fh:\n fh.write(txt)\n\n rpth = tempfile.mkdtemp(dir=self.test_root)\n self.pkgrepo(\"create {0}\".format(rpth))\n adir = tempfile.mkdtemp(dir=self.test_root)\n\n # The __empty repo function above assumes that the only http uri\n # used is the one for depot number 4.\n dest_uris = ((rpth, \"\"), (self.durl4, \"\"),\n (os.path.join(adir, \"archive.p5p\"), \"-a\"))\n for duri, arg_string in dest_uris:\n __test_rec(duri, arg_string, [self.published[4]])\n\n # Test that multiple packages failing are handled correctly.\n for i in range(5, 7):\n pfmri = fmri.PkgFmri(self.published[i])\n mp = rd.manifest(pfmri)\n with open(mp, \"rb\") as fh:\n original_txt = fh.read()\n txt = \"foop\\n\" + original_txt\n with open(mp, \"wb\") as fh:\n fh.write(txt)\n\n for duri, arg_string, in dest_uris:\n 
__test_rec(duri, arg_string, self.published[4:7])", "def _get_legal_actions(self):\n raise NotImplementedError", "def test_issue_post_issue_reaction(self):\n pass", "def legal_actions(self):\n raise NotImplementedError", "def _action(self):\n pass", "def get_legal_actions(self):\n pass", "def get_actions(self):\r\n return -4,4", "def _do_action(self):\n pass", "def _do_action(self):\n pass", "def actions(self):\n raise NotImplementedError", "def test_issue_search_issues(self):\n pass", "def __call__(self,action=None):\n raise NYI", "def actions(self, state):\n\t\traise NotImplementedError", "def action(self):\n pass", "def action(self):\n pass", "def test_unknown_action(self):\n exit_string = actions.main([\"foo\"])\n self.assertEqual(\"Action foo undefined\", exit_string)", "def test_unknown_action(self):\n exit_string = actions.main(['foo'])\n self.assertEqual('Action \"foo\" undefined', exit_string)", "def test_issue_get_issue(self):\n pass", "def _generate_actions(self) -> list:\n pass", "def action_spec(self):\r\n pass", "def test_issue_delete_issue_reaction(self):\n pass", "def test_issue_edit_comment_deprecated(self):\n pass", "def test_unsupported_action(self):\r\n self.xmodule.verify_oauth_body_sign = Mock()\r\n request = Request(self.environ)\r\n request.body = self.get_request_body({'action': 'wrongAction'})\r\n response = self.xmodule.grade_handler(request, '')\r\n real_response = self.get_response_values(response)\r\n expected_response = {\r\n 'action': None,\r\n 'code_major': 'unsupported',\r\n 'description': 'Target does not support the requested operation.',\r\n 'messageIdentifier': self.DEFAULTS['messageIdentifier'],\r\n }\r\n self.assertEqual(response.status_code, 200)\r\n self.assertDictEqual(expected_response, real_response)", "def test_4_4_1_1(self):\n pass", "def test_unsupported_action(self):\n self.xmodule.verify_oauth_body_sign = Mock()\n request = Request(self.environ)\n request.body = self.get_request_body({'action': 'wrongAction'})\n response = self.xmodule.grade_handler(request, '')\n real_response = self.get_response_values(response)\n expected_response = {\n 'action': None,\n 'code_major': 'unsupported',\n 'description': 'Target does not support the requested operation.',\n 'messageIdentifier': self.defaults['messageIdentifier'],\n }\n assert response.status_code == 200\n self.assertDictEqual(expected_response, real_response)", "def test_bad_action(self):\r\n action = 'robot-not-an-action'\r\n url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {'identifiers': self.beta_tester.email, 'action': action})\r\n self.assertEqual(response.status_code, 400)", "def test_issue_list_issues(self):\n pass", "def take_action(self, action):\n\t\traise NotImplementedError", "def test_pull_error(self):\n raise NotImplementedError", "def test_issue_get_issue_reactions(self):\n pass", "def test_op_no_ticket(self):\n assert OP_NO_TICKET == 0x4000", "def test_issue_edit_issue(self):\n pass", "def act(self) -> None:\n pass", "def test_create_unexpected_problem(self):\n pass", "def _after_serve_actions(self):\n pass", "def test_no_action(self):\n self.request.log(\"Hello World\")\n self.request.end()\n entry = self.get_entry()\n assert entry['action'] is None", "def act(self):\n pass", "def test_single_issue():\n pass", "def getAction(self, gameState):\r\n \"*** YOUR CODE HERE ***\"\r\n util.raiseNotDefined()", "def getAction(self, gameState):\r\n \"*** YOUR CODE HERE ***\"\r\n 
util.raiseNotDefined()", "def test_issue_delete_comment_deprecated(self):\n pass", "def test_issue_create_issue(self):\n pass", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def action(self, state):\n return NotImplementedError()", "def action_run(self):\n pass", "def test_user_actions_post(self):\n pass", "def exercise_b2_106():\r\n pass", "def support(self):", "def default_action(self):\n pass", "def test_issue_post_comment_reaction(self):\n pass", "def test_unrecognized_actions_rejected(self):\n # Unexpected whitespace.\n with self.assertRaises(BisectLog.ParserBug):\n _ = BisectLog(\" git bisect skip c123\")\n with self.assertRaises(BisectLog.ParserBug):\n _ = BisectLog(\"git bisect skip c123\")\n with self.assertRaises(BisectLog.ParserBug):\n _ = BisectLog(\"git bisect skip c123\")\n with self.assertRaises(BisectLog.ParserBug):\n _ = BisectLog(\" # git bisect skip c123\")\n # Unrecognized action with commit.\n with self.assertRaises(BisectLog.ParserBug):\n _ = BisectLog(\"git bisect foo c123\")\n # Unrecognized action without commit.\n with self.assertRaises(BisectLog.ParserBug):\n _ = BisectLog(\"git bisect bar\")", "def sanity_check(self):\n pass", "def experiment3():\n raise FAKE_ERROR", "def problem_298():\n pass", "def actions(self, states, agent_indices):\n return NotImplementedError()", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def getActions(self, state): \n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()", "def act(self):\n raise NotImplementedError", "def test_invalid_action(self):\n subject = self.subject(1, name='test_marker')\n with self.assertRaises(NotImplementedError) as context:\n subject.action\n with self.assertRaises(NotImplementedError) as context:\n subject.get_action_url()", "def take_action(self, *args, **kwargs):\r\n pass", "def actions(self, state):\n raise NotImplementedError # Override this!", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. 
It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def onJT808Operation(self):\n pass", "def actions(self, state, player):\r\n raise NotImplementedError", "def test_error_when_student_code_is_incorrectly_packaged(\n self, default_hooks\n ):\n result = default_hooks.act_on_cloned_repo(NO_DIR_STRUCTURE_REPO)\n\n assert result.status == Status.ERROR", "def __call__(self, state, action):\n pass", "def test_actions(self, actions):\n try:\n for action in actions:\n self.get_action(action['type'])(**action)\n except Exception as e:\n print('Exception: {}'.format(str(e)))", "def get_available_actions(self, state):\n pass", "def getAction(self, state):\n raiseNotDefined()", "async def issues(self, ctx):\n await ctx.message.delete()\n await ctx.send(\"Issue tracker: https://github.com/TheSuperGamer20578/Sudan-bot/issues\")", "def test_update9(self):\n pass", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def test_get_github_repos_info_negative(self):\n self.assertEqual(app.get_github_repos_info(\"undefined_user12345\")[\"status\"], 500)", "def test_giturl_missing(self):\r\n response = self.client.get(self.test_url)\r\n self.assertEqual(200, response.status_code)\r\n self.assertIn(\r\n ('giturl must be defined in your '\r\n 'course settings before you can export to git.'),\r\n response.content\r\n )\r\n\r\n response = self.client.get('{}?action=push'.format(self.test_url))\r\n self.assertEqual(200, response.status_code)\r\n self.assertIn(\r\n ('giturl must be defined in your '\r\n 'course settings before you can export to git.'),\r\n response.content\r\n )", "def exploit_act(self, states_ns):\n raise NotImplementedError", "def test_issue_delete_comment_reaction(self):\n pass", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def exercise_b2_69():\r\n pass", "def test_get_messaging_actions_for_order(self):\n pass", "def main():\n parser = argparse.ArgumentParser()\n group = parser.add_mutually_exclusive_group()\n group.add_argument(\"-v\",\n \"--verbose\",\n action=\"store_true\",\n help=\"Show verbose information\")\n group.add_argument(\"-q\",\n 
\"--quiet\",\n action=\"store_true\",\n help=\"Display less information\")\n parser.add_argument(\n 'category',\n help='Use the task you want to create like issue, pr, repo ',\n choices=[\"issue\", \"pr\", \"repo\"])\n parser.add_argument(\n 'action',\n help='Use the action to perform in the category.',\n choices=[\"create\", \"list\", \"edit\", \"delete\", \"close\", \"status\"])\n parser.add_argument(\"-t\",\n \"--title\",\n help=\"Title of issue or PR or name of repository\")\n parser.add_argument(\"-d\",\n \"--description\",\n help=\"Description of issue or PR or repo.\")\n parser.add_argument(\"-c\", \"--config\", help=\"Configuration file to use.\")\n parser.add_argument(\"-T\",\n \"--token\",\n help=\"Personal access token for github.\")\n parser.add_argument(\"-u\", \"--username\", help=\"Username of the user\")\n parser.add_argument(\"-a\",\n \"--assignee\",\n help=\"Filter by assignee or set assignee\")\n parser.add_argument(\"-b\",\n \"--base\",\n help=\"Filter by base branch the pull request are being merged to (ONLY FOR PR AND REPO)\")\n parser.add_argument(\"-A\", \"--author\", help=\"Filter by or set author\")\n parser.add_argument(\"-l\",\n \"--label\",\n help=\"Filter or set label separated by comma\")\n parser.add_argument(\"-L\", \"--limit\", help=\"Maximum number to fetch\")\n parser.add_argument(\"-s\", \"--state\", help=\"Filter by state\")\n parser.add_argument(\n \"-S\",\n \"--since\",\n help=\"List issues that have been updated at or after the given date.\"\n \" (You can also use value like 2 weeks ago)\")\n parser.add_argument(\"-r\",\n \"--repo\",\n help=\"Repository to perform action on.\")\n args = parser.parse_args()\n category_specific_action = handle_category_action(args)\n category_specific_action(args)\n return 0", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action(self):\n pass", "def _get_legal_actions(self):\n return self.game.get_legal_actions()", "def exercise_b2_53():\r\n pass", "def _initWindowActions(self):\n raise RuntimeError('Not implemented')", "def result(self, state, action):\n\t\traise NotImplementedError", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0(self):\n pass", "def choose_action(self):\r\n pass" ]
[ "0.67267066", "0.65667135", "0.64318216", "0.6355506", "0.6210274", "0.6148939", "0.6109755", "0.60450846", "0.60199803", "0.5994511", "0.5994511", "0.59711343", "0.5918675", "0.591639", "0.59116274", "0.5877087", "0.5877087", "0.5775038", "0.57616264", "0.5738314", "0.57139903", "0.57088864", "0.56815875", "0.5653944", "0.56318957", "0.5629789", "0.56046903", "0.55972016", "0.5594454", "0.55774516", "0.55697477", "0.55566764", "0.554672", "0.554549", "0.55395544", "0.5535242", "0.5526815", "0.552469", "0.55215484", "0.55207807", "0.55118096", "0.55118096", "0.5498833", "0.5491511", "0.5489603", "0.5489603", "0.5489603", "0.5489603", "0.5489603", "0.5489603", "0.5489603", "0.5484871", "0.5482135", "0.5474557", "0.546743", "0.54630536", "0.5461566", "0.54387856", "0.5429849", "0.5428585", "0.54260105", "0.5418471", "0.54118407", "0.54043204", "0.5388166", "0.53815496", "0.53815496", "0.5374884", "0.5333078", "0.5331224", "0.53311473", "0.53253204", "0.53106034", "0.5290812", "0.5288991", "0.5286226", "0.5282364", "0.5270548", "0.5269168", "0.5261946", "0.5261818", "0.5258662", "0.5258662", "0.5253744", "0.5251786", "0.52491295", "0.5239666", "0.5237989", "0.5237989", "0.5237989", "0.52283305", "0.5225852", "0.5224842", "0.52200145", "0.52197856", "0.5218314", "0.5210364", "0.52067775", "0.5205524", "0.52018476" ]
0.60793984
7
Ensure that kafka actions are not overwritten in the IAM definition
def test_kafka_action_names_overlap_issue(self):
        # Kafka actions used to be in two pages but are now one. This verifies the current state.
        # results = get_actions_for_service("kafka")
        # print(results)
        actions = [
            "kafka:BatchAssociateScramSecret",
            "kafka:BatchDisassociateScramSecret",
            "kafka:CreateClusterV2",
            "kafka:DeleteConfiguration",
            "kafka:DescribeClusterV2",
            "kafka:ListClustersV2",
            "kafka:ListConfigurationRevisions",
            "kafka:ListKafkaVersions",
            "kafka:ListScramSecrets",
            "kafka:RebootBroker",
            "kafka:UpdateBrokerType",
            "kafka:UpdateConfiguration",
            "kafka:UpdateConnectivity",
            "kafka:UpdateSecurity"
        ]
        for action in actions:
            self.assertTrue(action in self.all_actions)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def legal_actions(self):\n raise NotImplementedError", "def get_actions(self, request):\n actions = super().get_actions(request)\n if not settings.PUBLISHER_CODE:\n del actions['create_cwr']\n if 'delete_selected' in actions:\n del actions['delete_selected']\n return actions", "def test_other_iam_data_fixes_in_GH_393(self):\n # Cassandra: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonkeyspacesforapachecassandra.html\n results = get_actions_for_service(\"cassandra\")\n self.assertTrue(\"cassandra:Restore\" in results)\n # Comprehend Medical: https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazoncomprehendmedical.html\n results = get_actions_for_service(\"comprehendmedical\")\n # print(results)\n actions = [\n \"comprehendmedical:DescribeEntitiesDetectionV2Job\",\n \"comprehendmedical:DescribeICD10CMInferenceJob\",\n \"comprehendmedical:DescribePHIDetectionJob\",\n \"comprehendmedical:DescribeRxNormInferenceJob\",\n # \"comprehendmedical:DescribeSNOMEDCTInferenceJob\", # Not in SAR\n \"comprehendmedical:DetectEntitiesV2\",\n \"comprehendmedical:InferICD10CM\",\n \"comprehendmedical:InferRxNorm\",\n # \"comprehendmedical:InferSNOMEDCT\", # Not in SAR\n \"comprehendmedical:ListEntitiesDetectionV2Jobs\",\n \"comprehendmedical:ListICD10CMInferenceJobs\",\n \"comprehendmedical:ListPHIDetectionJobs\",\n \"comprehendmedical:ListRxNormInferenceJobs\",\n # \"comprehendmedical:ListSNOMEDCTInferenceJobs\", # Not in SAR\n \"comprehendmedical:StartEntitiesDetectionV2Job\",\n \"comprehendmedical:StartICD10CMInferenceJob\",\n \"comprehendmedical:StartPHIDetectionJob\",\n \"comprehendmedical:StartRxNormInferenceJob\",\n \"comprehendmedical:StopEntitiesDetectionV2Job\",\n \"comprehendmedical:StopICD10CMInferenceJob\",\n ]\n for action in actions:\n # if action not in results:\n # print(action)\n self.assertTrue(action in results)\n # Compute Optimizer\n results = get_actions_for_service(\"compute-optimizer\")\n actions = [\n \"compute-optimizer:DeleteRecommendationPreferences\",\n \"compute-optimizer:ExportEBSVolumeRecommendations\",\n \"compute-optimizer:ExportLambdaFunctionRecommendations\",\n \"compute-optimizer:GetEffectiveRecommendationPreferences\",\n \"compute-optimizer:GetEnrollmentStatusesForOrganization\",\n \"compute-optimizer:GetLambdaFunctionRecommendations\",\n \"compute-optimizer:GetRecommendationPreferences\",\n \"compute-optimizer:PutRecommendationPreferences\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # DataSync\n results = get_actions_for_service(\"datasync\")\n actions = [\n \"datasync:UpdateLocationNfs\",\n \"datasync:UpdateLocationObjectStorage\",\n \"datasync:UpdateLocationSmb\",\n \"datasync:UpdateTaskExecution\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Account Management\n results = get_actions_for_service(\"account\")\n actions = [\n \"account:DeleteAlternateContact\",\n \"account:GetAlternateContact\",\n \"account:PutAlternateContact\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS IAM Access Analyzer\n results = get_actions_for_service(\"access-analyzer\")\n actions = [\n \"access-analyzer:CancelPolicyGeneration\",\n \"access-analyzer:CreateAccessPreview\",\n \"access-analyzer:GetAccessPreview\",\n \"access-analyzer:GetGeneratedPolicy\",\n \"access-analyzer:ListAccessPreviewFindings\",\n \"access-analyzer:ListAccessPreviews\",\n \"access-analyzer:ListPolicyGenerations\",\n \"access-analyzer:StartPolicyGeneration\",\n 
\"access-analyzer:ValidatePolicy\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Elemental Activations\n results = get_actions_for_service(\"elemental-activations\")\n actions = [\n \"elemental-activations:CompleteAccountRegistration\",\n \"elemental-activations:StartAccountRegistration\"\n ]\n for action in actions:\n self.assertTrue(action in results)\n # OpenSearch\n results = get_actions_for_service(\"es\")\n actions = [\n \"es:DescribeDomainChangeProgress\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Location\n results = get_actions_for_service(\"geo\")\n actions = [\n \"geo:CalculateRouteMatrix\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # Amazon Managed Grafana\n results = get_actions_for_service(\"grafana\")\n actions = [\n \"grafana:DescribeWorkspaceAuthentication\",\n \"grafana:UpdateWorkspaceAuthentication\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # EC2 Image Builder\n results = get_actions_for_service(\"imagebuilder\")\n actions = [\n \"imagebuilder:ImportVmImage\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n # Timestream\n results = get_actions_for_service(\"timestream\")\n actions = [\n \"timestream:CreateScheduledQuery\",\n \"timestream:DeleteScheduledQuery\",\n \"timestream:DescribeScheduledQuery\",\n \"timestream:ExecuteScheduledQuery\",\n \"timestream:ListScheduledQueries\",\n \"timestream:UpdateScheduledQuery\",\n ]\n for action in actions:\n self.assertTrue(action in results)\n\n # AWS Transfer Family\n results = get_actions_for_service(\"transfer\")\n actions = [\n \"transfer:CreateAccess\",\n \"transfer:CreateWorkflow\",\n \"transfer:DeleteAccess\",\n \"transfer:DeleteWorkflow\",\n \"transfer:DescribeAccess\",\n \"transfer:DescribeExecution\",\n \"transfer:DescribeWorkflow\",\n \"transfer:ListAccesses\",\n \"transfer:ListExecutions\",\n \"transfer:ListWorkflows\",\n \"transfer:SendWorkflowStepState\",\n \"transfer:UpdateAccess\",\n ]\n for action in actions:\n self.assertTrue(action in results)", "def test_get_actions_with_arn_type_and_access_level_case_2(self):\n desired_output = [\n 'ssm:DeleteParameter',\n 'ssm:DeleteParameters',\n 'ssm:LabelParameterVersion',\n 'ssm:PutParameter'\n]\n output = get_actions_with_arn_type_and_access_level(\n \"ssm\", \"parameter\", \"Write\"\n )\n for item in desired_output:\n self.assertTrue(item in output)", "def authorize(self, action, author_id=None):\n if action not in CHANGE_TYPES:\n return False\n return True", "def test_get_actions_with_arn_type_and_access_level_case_5(self):\n\n output = get_actions_with_arn_type_and_access_level(\n \"s3\", \"object\", \"List\"\n )\n self.assertTrue(\"s3:ListMultipartUploadParts\" in output)", "def test_get_actions_with_arn_type_and_access_level_case_3(self):\n desired_output = [\n 's3:PutAccountPublicAccessBlock',\n 's3:PutAccessPointPublicAccessBlock'\n ]\n output = get_actions_with_arn_type_and_access_level(\n # \"ram\", \"resource-share\", \"Write\"\n \"s3\", \"*\", \"Permissions management\"\n )\n print(output)\n for item in desired_output:\n self.assertTrue(item in output)\n # self.assertListEqual(desired_output, output)", "async def before_action(self, action, *args, **kwargs):\n return True", "async def before_action(self, action: str, *args, **kwargs) -> bool:\n return True", "def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n 
LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action_spaces(self):\n pass", "def _enforce(self, req, action):\n try:\n self.policy.enforce(req.context, action, {})\n except exception.Forbidden:\n raise HTTPForbidden()", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action_spaces(self):\n pass", "def clean_iam_access_keys(self, batch=False):\n main_account = Account(region=config.aws.region)\n ddb_table = main_account.resource(\"dynamodb\").Table(self.config.iamUserKeysRotation.ddb_table_name)\n\n retention_period = self.config.iamUserKeysRotation.remediation_retention_period\n\n jira = JiraReporting(self.config)\n slack = SlackNotification(self.config)\n\n for account_id, account_name in self.config.iamUserKeysRotation.remediation_accounts.items():\n logging.debug(\"* Account Name:\" + account_name + \" :::Account ID:::\" + account_id)\n issues = IssueOperations.get_account_open_issues(ddb_table, account_id, IAMKeyRotationIssue)\n for issue in issues:\n key_id = issue.issue_id\n username = issue.issue_details.username\n\n user_in_whitelist = self.config.iamUserKeysRotation.in_whitelist(account_id, username)\n key_in_whitelist = self.config.iamUserKeysRotation.in_whitelist(account_id, key_id)\n\n if user_in_whitelist or key_in_whitelist:\n logging.debug(f\"Skipping '{key_id} / {username}' (in whitelist)\")\n\n # Adding label with \"whitelisted\" to jira ticket.\n jira.add_label(\n ticket_id=issue.jira_details.ticket,\n label=IssueStatus.Whitelisted.value\n )\n continue\n\n if issue.timestamps.reported is None:\n logging.debug(f\"Skipping '{key_id} / {username}' (was not reported)\")\n continue\n\n if issue.timestamps.remediated is not None:\n logging.debug(f\"Skipping '{key_id} / {username}' (has been already remediated)\")\n continue\n\n updated_date = issue.timestamp_as_datetime\n no_of_days_issue_created = (self.config.now - updated_date).days\n\n if no_of_days_issue_created >= retention_period:\n try:\n if not batch and \\\n not confirm(f\"Do you want to remediate stale access key '{key_id} / {username}'\", False):\n continue\n\n account = Account(id=account_id,\n name=account_name,\n role_name=self.config.aws.role_name_reporting)\n if account.session is None:\n continue\n\n logging.debug(f\"Remediating stale access key '{key_id} / {username}'\")\n remediation_succeed = True\n try:\n IAMOperations.disable_access_key(account.client(\"iam\"), username, key_id)\n comment = (f\"Stale access key '{key_id} / {username}' issue \"\n f\"in '{account_name} / {account_id}' account \"\n f\"was remediated by hammer\")\n except Exception:\n remediation_succeed = False\n logging.exception(\"Failed to disable '{key_id} / {username}' stale access key\")\n comment = (f\"Failed to remediate stale access key '{key_id} / {username}' issue \"\n f\"in '{account_name} / {account_id}' account \"\n f\"due to some limitations. 
Please, check manually\")\n\n jira.remediate_issue(\n ticket_id=issue.jira_details.ticket,\n comment=comment,\n reassign=remediation_succeed,\n )\n slack.report_issue(\n msg=f\"{comment}\"\n f\"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}\",\n account_id=account_id,\n )\n IssueOperations.set_status_remediated(ddb_table, issue)\n except Exception:\n logging.exception(f\"Error occurred while disabling '{key_id} / {username}' \"\n f\"in '{account_name} / {account_id}'\")\n else:\n logging.debug(f\"Skipping '{key_id} / {username}' \"\n f\"({retention_period - no_of_days_issue_created} days before remediation)\")", "def test_get_actions_with_arn_type_and_access_level_case_4(self):\n desired_output = [\n 'secretsmanager:ListSecrets'\n ]\n output = get_actions_with_arn_type_and_access_level(\n \"secretsmanager\", \"*\", \"List\"\n )\n self.assertListEqual(desired_output, output)", "def actions() -> None:\n pass", "def actions():\n pass", "def _generate_actions(self) -> list:\n pass", "def _get_legal_actions(self):\n raise NotImplementedError", "def get_legal_actions(self):\n pass", "def robot_is_willing_default(requester, action, ctxt) :\n if action.get_actor() == \"compliant robot\" :\n raise ActionHandled()", "def pre_access_control_list_create(self, resource_dict):\n pass", "def prepare_actions(self, training_job_name):\n if self.actions is None:\n # user cannot manually specify action_json in rule_parameters for actions.\n self.rule_parameters.pop(\"action_json\", None)\n return\n\n self.actions.update_training_job_prefix_if_not_specified(training_job_name)\n action_params = {\"action_json\": self.actions.serialize()}\n self.rule_parameters.update(action_params)", "def test_allowlist_not_overwritten(self):\n handler = MyHandler()\n handler.name = \"RebuildImagesOnImageAdvisoryChange\"\n allowed = handler.allow_build(\n ArtifactType.IMAGE, advisory_state=\"SHIPPED_LIVE\")\n self.assertTrue(allowed)\n\n handler.name = \"foo\"\n allowed = handler.allow_build(\n ArtifactType.IMAGE, advisory_state=\"SHIPPED_LIVE\")\n self.assertFalse(allowed)", "def check_action_sanity(self):\n for action in crest.get_all_actions(self.model):\n assert action._name is not None, f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is 'None'\"\n assert action._name != \"\", f\"There is an Action in {action._parent._name} ({action._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(action.transition, crest.Transition), f\"Action {action._name}'s state is not a crest.Transition. It is: {action.transition} ({action.transition.__class__})\"\n assert action.state in crest.get_transitions(action._parent), f\"Action's transition {action.transition._name} ({action.transition}) is not in the transitions of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.target, crest.Port), f\"Action {action._name}'s target is not a crest.Port\"\n assert action.target in api.get_targets(action._parent), f\"Action's target {action.target._name} ({action.target}) is not in the targets of entity {action._parent._name} ({action._parent})\"\n\n assert isinstance(action.function, (crestml.LearnedFunction, types.FunctionType)), f\"Action {action._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert 'self' in inspect.signature(action.function).parameters, f\"Action {action._name}'s function has no self parameter. 
entity: {action._parent._name} ({action._parent.__class__.__name__})\"\n assert len(inspect.signature(action.function).parameters) == 1, f\"An action should have only one one argument 'self'\"\n\n for port in SH.get_read_ports_from_update(action.function, action):\n assert port in api.get_sources(action._parent), f\"Action {action._name} seems to be reading a port {port._name} ({port}) which is not in the sources of its entity {action._parent._name} ({action._parent})\"", "def actions(self):\n raise NotImplementedError", "def add_insufficient_data_action(self, action_arn=None):\r\n if not action_arn:\r\n return\r\n self.actions_enabled = 'true'\r\n self.insufficient_data_actions.append(action_arn)", "def update_execution_policies(\n target_role: iam.Role, project_name: str, project_id: str\n):\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n # \"sts:AssumeRole\"\n \"iam:PassRole\"\n ],\n resources=[\n f\"arn:aws:iam::{cdk.Aws.ACCOUNT_ID}:role/cdk*\",\n ],\n )\n )\n\n policy = target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"sts:AssumeRole\",\n \"iam:PassRole\",\n ],\n resources=[\n target_role.role_arn,\n ],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"cloudformation:DescribeStackEvents\",\n \"cloudformation:GetTemplate\",\n \"cloudformation:CreateChangeSet\",\n \"cloudformation:DescribeChangeSet\",\n \"cloudformation:ExecuteChangeSet\",\n \"cloudformation:DeleteChangeSet\",\n \"cloudformation:DescribeStacks\",\n \"cloudformation:DeleteStack\",\n ],\n resources=[\n f\"arn:aws:cloudformation:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:stack/{project_name}*/*\",\n ],\n )\n )\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"cloudformation:DescribeStackEvents\",\n \"cloudformation:GetTemplate\",\n \"cloudformation:DescribeStacks\",\n ],\n resources=[\n f\"arn:aws:cloudformation:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:stack/CDKToolkit/*\",\n ],\n )\n )\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"ssm:GetParameter\",\n ],\n resources=[\n f\"arn:aws:ssm:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:parameter/cdk-bootstrap/*\",\n f\"arn:aws:ssm:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:parameter/sagemaker-{project_name}*\",\n ],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\"*\"],\n resources=[\"*\"],\n conditions={\n \"ForAnyValue:StringEquals\": {\n \"aws:CalledVia\": [\"cloudformation.amazonaws.com\"]\n }\n },\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"logs:CreateLogGroup\",\n \"logs:CreateLogStream\",\n \"logs:PutLogEvents\",\n ],\n resources=[\n f\"arn:aws:logs:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:log-group:/aws/codebuild/sagemaker-{project_id}*\",\n f\"arn:aws:logs:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:/aws/codebuild/sagemaker-{project_id}*:*\",\n ],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"codebuild:CreateReportGroup\",\n \"codebuild:CreateReport\",\n \"codebuild:UpdateReport\",\n \"codebuild:BatchPutTestCases\",\n \"codebuild:BatchPutCodeCoverages\",\n ],\n resources=[\n f\"arn:aws:codebuild:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:report-group/sagemaker-{project_id}*\",\n ],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"codepipeline:PutApprovalResult\",\n ],\n resources=[\n f\"arn:aws:codepipeline:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:sagemaker-{project_id}*\",\n ],\n )\n )\n\n 
target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"codebuild:BatchGetBuilds\",\n \"codebuild:StartBuild\",\n \"codebuild:StopBuild\",\n ],\n resources=[\n f\"arn:aws:codebuild:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:project/sagemaker-{project_id}*\",\n ],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"glue:SearchTables\",\n \"glue:BatchCreatePartition\",\n \"athena:StartQueryExecution\",\n \"glue:CreateTable\",\n \"glue:GetTables\",\n \"glue:GetTableVersions\",\n \"glue:GetPartitions\",\n \"glue:BatchDeletePartition\",\n \"glue:UpdateTable\",\n \"glue:DeleteTableVersion\",\n \"glue:BatchGetPartition\",\n \"glue:DeleteTable\",\n \"cloudformation:DescribeStacks\",\n \"glue:GetTable\",\n \"glue:GetDatabase\",\n \"glue:GetPartition\",\n \"glue:GetTableVersion\",\n \"glue:CreateDatabase\",\n \"glue:BatchDeleteTableVersion\",\n \"athena:GetQueryExecution\",\n \"glue:BatchDeleteTable\",\n \"glue:CreatePartition\",\n \"glue:DeletePartition\",\n \"glue:UpdatePartition\",\n ],\n resources=[\n \"arn:aws:glue:*:*:catalog\",\n \"arn:aws:glue:*:*:database/default\",\n \"arn:aws:glue:*:*:database/global_temp\",\n \"arn:aws:glue:*:*:database/sagemaker*\",\n \"arn:aws:glue:*:*:table/sagemaker*\",\n \"arn:aws:glue:*:*:tableVersion/sagemaker*\",\n f\"arn:aws:athena:*:{cdk.Aws.ACCOUNT_ID}:workgroup/*\",\n ],\n )\n )\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\"glue:StartJobRun\"],\n resources=[\n f\"arn:aws:glue:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:job/sagemaker-*\"\n ],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\"glue:GetJobRun\", \"glue:GetJobRuns\", \"glue:GetJobs\"],\n resources=[f\"*\"],\n )\n )\n\n target_role.add_to_principal_policy(\n iam.PolicyStatement(\n actions=[\n \"dynamodb:BatchGetItem\",\n \"dynamodb:GetRecords\",\n \"dynamodb:GetShardIterator\",\n \"dynamodb:Query\",\n \"dynamodb:GetItem\",\n \"dynamodb:Scan\",\n \"dynamodb:ConditionCheckItem\",\n \"dynamodb:DescribeTable\",\n ],\n resources=[\n f\"arn:aws:dynamodb:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:table/sagemaker-{project_id}*\"\n ],\n )\n )\n\n return policy", "async def audit_actions(self, ctx: Context) -> None:\n\n if ctx.invoked_subcommand is None:\n await ctx.send_help('auditaction')", "def valid_actions(self) -> List[str]:\n return list(self.action_map().keys())", "def test_custom_get_actions(self):\n # Action not specified\n self.assertFalse(admin.GlobalAdminHandler.add_custom_get_action(\n \"\", None))\n self.assertFalse(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # Handler not specified\n self.assertFalse(admin.GlobalAdminHandler.add_custom_get_action(\n self.CUSTOM_ACTION_NAME, None))\n self.assertFalse(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # All required fields specified\n self.assertTrue(admin.GlobalAdminHandler.add_custom_get_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler))\n self.assertTrue(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # Duplicate entry not allowed\n self.assertFalse(admin.GlobalAdminHandler.add_custom_get_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler2))\n self.assertTrue(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n self.assertEqual(\n self.custom_handler,\n admin.GlobalAdminHandler\n ._custom_get_actions[self.CUSTOM_ACTION_NAME].handler)\n\n # Force overwrite existing entry\n 
self.assertTrue(admin.GlobalAdminHandler.add_custom_get_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler2,\n overwrite=True))\n self.assertTrue(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n self.assertEqual(\n self.custom_handler2,\n admin.GlobalAdminHandler\n ._custom_get_actions[self.CUSTOM_ACTION_NAME].handler)\n\n # Remove the action\n admin.GlobalAdminHandler.remove_custom_get_action(\n self.CUSTOM_ACTION_NAME)\n self.assertFalse(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # Should not overwrite Dashboard action\n self.assertTrue(dashboard.DashboardHandler.add_custom_get_action(\n self.CUSTOM_ACTION_NAME, handler=self.custom_handler))\n self.assertTrue(dashboard.DashboardHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n self.assertFalse(admin.GlobalAdminHandler.add_custom_get_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler))\n self.assertFalse(admin.GlobalAdminHandler._custom_get_actions.has_key(\n self.CUSTOM_ACTION_NAME))", "def apply_rl_actions(self, rl_actions):\n pass", "def test_patch_cluster_policy(self):\n pass", "def unapprove(self):\n self._check_if_open()\n return super(BitbucketCloudBase, self).delete(\"approve\")", "def action_defined(sender, instance, created, raw, using, **kwargs):\n if created:\n raw_hook_event.send(\n sender=None,\n event_name=\"action_defined\",\n instance=instance,\n payload=ActionSerializer(instance).data,\n user=instance.team,\n )", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def actions_required(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"actions_required\")", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):\n pass", "def apply_unconditionally(self, message: Message):\n for action, args in self._actions:\n if action not in {move, mark}:\n raise RuntimeError('refusing to execute untested action')\n if action is mark:\n args = (message._origin_server, *args)\n action(message, *args)", "def action_spec(self):\r\n pass", "def actions_required(self) -> Optional[str]:\n return pulumi.get(self, \"actions_required\")", "def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting 
actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)", "def test_permission_remove_action_not_granted(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('permission remove anonymous TICKET_CREATE')\n self.assertEqual(2, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_delete_action(self):\n pass", "def test_permission_remove_multiple_actions_ok(self):\n test_name = sys._getframe().f_code.co_name\n self._execute('permission remove anonymous WIKI_CREATE WIKI_MODIFY')\n rv, output = self._execute('permission list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):\n pass", "def test_actions(self, actions):\n try:\n for action in actions:\n self.get_action(action['type'])(**action)\n except Exception as e:\n print('Exception: {}'.format(str(e)))", "def test_all_actions_setup(self, mocked_find):\n\n setup_identity_cache()\n\n mocked_find.side_effect = KeyError(\"Error forced for testing\")\n\n url = \"/v1/actions/CreateProjectAndUser\"\n data = {\"project_name\": \"test_project\", \"email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)\n\n new_task = Task.objects.all()[0]\n\n class_conf = new_task.config\n expected_action_names = CreateProjectAndUser.default_actions[:]\n expected_action_names += class_conf.additional_actions\n\n actions = new_task.actions\n observed_action_names = [a.action_name for a in actions]\n self.assertEqual(observed_action_names, expected_action_names)", "def initActions(self):\n\n\t\t#\n\t\t# this is super confusing to me, this is also defined in bToolBar\n\t\t# todo: where do I define it?????\n\t\t# if we define in both places,we get\n\t\t# WARNING: QAction::event: Ambiguous shortcut overload: 1\n\t\t'''\n\t\tmyName = '1'\n\t\tkeyboardAction1 = QtWidgets.QAction(myName, self)\n\t\tkeyboardAction1.setShortcut('1')# or 'Ctrl+r' or '&r' for alt+r\n\t\tkeyboardAction1.setToolTip('Set 1 [1]')\n\t\tkeyboardAction1.triggered.connect(lambda state, myName=myName: self.myAction_callback(myName))\n\t\tself.addAction(keyboardAction1)\n\t\t'''\n\n\t\t#\n\t\tmyName = 'Set Bad'\n\t\ttoggleBadAction = QtWidgets.QAction(myName, self)\n\t\ttoggleBadAction.setShortcut('b')# or 'Ctrl+r' or '&r' for alt+r\n\t\ttoggleBadAction.setToolTip('Set Bad [b]')\n\t\ttoggleBadAction.triggered.connect(lambda state, myName=myName: 
self.myAction_callback(myName))\n\t\tself.addAction(toggleBadAction)\n\n\t\t#\n\t\tmyName = 'Set Good'\n\t\ttoggleBadAction = QtWidgets.QAction(myName, self)\n\t\ttoggleBadAction.setShortcut('g')# or 'Ctrl+r' or '&r' for alt+r\n\t\ttoggleBadAction.setToolTip('Set Good [g]')\n\t\ttoggleBadAction.triggered.connect(lambda state, myName=myName: self.myAction_callback(myName))\n\t\tself.addAction(toggleBadAction)\n\n\t\t#\n\t\tmyName = 'Increase Tracing Sliding-Z'\n\t\ttoggleBadAction = QtWidgets.QAction(myName, self)\n\t\t#toggleBadAction.setShortcut('Shift+QtCore.Qt.Key_Plus')# or 'Ctrl+r' or '&r' for alt+r\n\t\t#myKeySequence = QtGui.QKeySequence(QtCore.Qt.Key_Shift + QtCore.Qt.Key_G)\n\t\tmyKeySequence = '.'\n\t\ttoggleBadAction.setShortcut(myKeySequence)# or 'Ctrl+r' or '&r' for alt+r\n\t\ttoggleBadAction.setToolTip('Increase Tracing Sliding-Z [.]')\n\t\ttoggleBadAction.triggered.connect(lambda state, myName=myName: self.myAction_callback(myName))\n\t\tself.addAction(toggleBadAction)\n\n\t\tmyName = 'Decrease Tracing Sliding-Z'\n\t\ttoggleBadAction = QtWidgets.QAction(myName, self)\n\t\t#toggleBadAction.setShortcut('Shift+QtCore.Qt.Key_Plus')# or 'Ctrl+r' or '&r' for alt+r\n\t\t#myKeySequence = QtGui.QKeySequence(QtCore.Qt.Key_Shift + QtCore.Qt.Key_G)\n\t\tmyKeySequence = ','\n\t\ttoggleBadAction.setShortcut(myKeySequence)# or 'Ctrl+r' or '&r' for alt+r\n\t\ttoggleBadAction.setToolTip('Decrease Tracing Sliding-Z [,]')\n\t\ttoggleBadAction.triggered.connect(lambda state, myName=myName: self.myAction_callback(myName))\n\t\tself.addAction(toggleBadAction)\n\n\t\t#\n\t\tmyName = 'Increase Sliding-Z'\n\t\ttoggleBadAction = QtWidgets.QAction(myName, self)\n\t\tmyKeySequence = QtGui.QKeySequence(QtCore.Qt.Key_Greater)\n\t\ttoggleBadAction.setShortcut(myKeySequence)# or 'Ctrl+r' or '&r' for alt+r\n\t\ttoggleBadAction.setToolTip('Increase Tracing Sliding-Z [>]')\n\t\ttoggleBadAction.triggered.connect(lambda state, myName=myName: self.myAction_callback(myName))\n\t\tself.addAction(toggleBadAction)\n\n\t\tmyName = 'Decrease Sliding-Z'\n\t\ttoggleBadAction = QtWidgets.QAction(myName, self)\n\t\t#toggleBadAction.setShortcut('Shift+QtCore.Qt.Key_Plus')# or 'Ctrl+r' or '&r' for alt+r\n\t\tmyKeySequence = QtGui.QKeySequence(QtCore.Qt.Key_Less)\n\t\ttoggleBadAction.setShortcut(myKeySequence)# or 'Ctrl+r' or '&r' for alt+r\n\t\ttoggleBadAction.setToolTip('Decrease Sliding-Z [<]')\n\t\ttoggleBadAction.triggered.connect(lambda state, myName=myName: self.myAction_callback(myName))\n\t\tself.addAction(toggleBadAction)\n\n\t\t'''\n\t\tfor action in self.actions():\n\t\t\tprint(' bStackWidget action:', action, action.text(), action.shortcut().toString())\n\t\t'''\n\t\t#print('bScatterPlotWidget.actions():', self.actions())", "def test_user_actions_post(self):\n pass", "def write_authorize_examinercommon(cls, user, obj):\n if obj.delivered_by != None:\n raise PermissionDenied()", "def authorize(self, action, author_id=None):\n return False", "def check_enable_actions(self):\n\n if self._profile_name is None:\n self.enableProfileActions.emit(\n False, \"Remove Profile\", False)\n elif self._profile_name.lower() == 'default':\n self.enableProfileActions.emit(\n False, \"Cannot Remove Default Profile\", False)\n else:\n self.enableProfileActions.emit(True, \"Remove Profile\", True)", "def filter_action(action):\n return True\n if not action.get('app_id'):\n return None\n if not action.get('user_id'):\n return None\n return action", "def actions(self, actions):\n\n self._actions = actions", "def actions(self, 
actions):\n\n self._actions = actions", "def test_old_access(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(60, 80, 10, 9)\n assert key.audit_state == 'stagnant_expire'", "def allow_map_to_audit(self):\n return self.audit_id is None and self.audit is None", "def test_create_cluster_policy(self):\n pass", "def get_actions(self, request):\n actions = super(RateLimitedIPAdmin, self).get_actions(request)\n del actions['delete_selected']\n return actions", "def action(self, action):\n allowed_values = [\"APPLY\", \"PRECHECK\"]\n if action not in allowed_values:\n raise ValueError(\n \"Invalid value for `action`, must be one of {0}\"\n .format(allowed_values)\n )\n self._action = action", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self):\n pass", "def test_was_produced_by_action(self):\n\n test_content = {\n AbstractAction.ACTION: WordExtraction.__name__,\n AbstractAction.RESULT: ['One', 'Two']\n }\n\n assert WordExtraction.produced(test_content)\n\n test_content[AbstractAction.ACTION] = ''\n\n assert not WordExtraction.produced(test_content)", "def test__put_actioned_by_into():\n user = User.precreate(202305160043, name = 'East')\n \n for input_value, defaults, expected_output in (\n (None, False, {}),\n (None, True, {'actioned_by_user': None}),\n (user, True, {'actioned_by_user': user.to_data(defaults = True, include_internals = True)}),\n ):\n data = put_actioned_by_into(input_value, {}, defaults)\n vampytest.assert_eq(data, expected_output)", "def action_required(self, action_required):\n\n self._action_required = action_required", "def test_kyc_put_request_legal(self):\n pass", "def post_access_control_list_create(self, resource_dict):\n pass", "def test_aws_service_api_keypair_delete(self):\n pass", "def test_undefined_action_is_logged(self):\n create_file(self.authz_file, textwrap.dedent(\"\"\"\\\n [groups]\n administrators = éat\n [wiki:WikiStart]\n änon = UNKNOWN_VIEW, TEST_CREATE, !TEST_MODIFY\n [milestone:milestone1]\n * = UNKNOWN_MODIFY, !TEST_VIEW\n \"\"\"))\n authz_policy = AuthzPolicy(self.env)\n authz_policy.parse_authz()\n\n self.assertEqual(2, len(self.env.log_messages))\n self.assertIn(('WARNING',\n 'The action UNKNOWN_VIEW in the [wiki:WikiStart] '\n 'section of trac-authz-policy is not a valid action.'),\n self.env.log_messages)\n self.assertIn(('WARNING',\n 'The action UNKNOWN_MODIFY in the [milestone:milestone1] '\n 'section of trac-authz-policy is not a valid action.'),\n self.env.log_messages)", "def test_rotate_access(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(60, 80, 20, 10)\n assert key.audit_state == 'stagnant'", "def test_incorrect_action(self, put, get, auth, circuits_app, fn_cloud_foundry_action, fn_cloud_foundry_applications):\n auth.return_value = AuthenticationMock()\n put.return_value = give_response(201, GUIDS_MOCK[\"resources\"][0])\n get.return_value = give_response(200, GUIDS_MOCK)\n\n function_params = {\n \"fn_cloud_foundry_action\": fn_cloud_foundry_action,\n \"fn_cloud_foundry_applications\": fn_cloud_foundry_applications\n }\n results = call_fn_cloud_foundry_manage_applications_function(circuits_app, function_params)\n assert 
results[\"success\"] == False", "def test_replace_cluster_policy(self):\n pass", "def test_multiple_aclhooks_2(self):\n self._test_hook_approval_sequence([True, None], True)", "def test_patch_bucket(self):\n pass", "def test_normal(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n k = Key('username', 'keyid', 'Active', created, last_used)\n k.audit(60, 80, 20, 19)\n assert k.creation_age == 15\n assert k.audit_state == 'good'", "def _action(self):\n pass", "def test_create_namespaced_pod_security_policy_review(self):\n pass", "def testBaseActionRecording(self):\n\t x = BaseAction('x')\n\t self.failUnless(x.key== 'x')", "def test_aws_service_api_keypair_import_post(self):\n pass", "def test_custom_action_response_descriptor_octopus_server_web_api_actions_machine_policy_template_action(self):\n pass", "def publish_action(self, action):\n raise NotImplementedError", "def take_action(self, *args, **kwargs):\r\n pass", "def test_review_story_restrict_to_only_admin(self):\n self.client.post('/api/stories', headers={'token': user_token}, data=json.dumps(story1))\n res = self.client.put('/api/stories/1/review', headers={'token': user_token}, data=json.dumps({\n 'status': 'Approved'\n }))\n result = json.loads(res.data.decode())\n self.assertEqual(result['message'], 'Permission denied')\n self.assertEqual(res.status_code, 403)", "def reset_actions(self, period=None, acode=None, override_sticky=False):\n periods = [period] if period else self.periods\n acodes = [acode] if acode else list(self.actions.keys())\n for p in periods:\n if p not in self.applied_actions: self.applied_actions[p] = {}\n for a in acodes:\n if a in self.actions and self.actions[a].is_sticky and not override_sticky: continue\n self.applied_actions[p][a] = {}", "def test_tiers_update_permission_post(self):\n pass", "def test_patch_namespaced_policy(self):\n pass", "def test_aws_service_api_keypair_generate_post(self):\n pass", "def test_get_messaging_actions_for_order(self):\n pass", "def test_changelist_actions(self):\n user = User.objects.create(username='test')\n url = reverse('admin:prescription_prescription_changelist')\n request = self._mocked_authenticated_request(url, user)\n admin = PrescriptionAdmin(Prescription, site)\n\n self.assertFalse(user.has_perm('prescription.can_delete'))\n self.assertFalse(user.has_perm('prescription.can_delete_approval'))\n\n actions = admin.get_actions(request)\n\n self.assertTrue('delete_selected' not in actions)\n self.assertTrue('delete_approval_endorsement' not in actions)\n\n content_type = ContentType.objects.get(app_label='prescription',\n model='prescription')\n delete = Permission.objects.get(codename='delete_prescription',\n content_type=content_type)\n approval = Permission.objects.get(codename='can_delete_approval',\n content_type=content_type)\n permissions = [\n (delete, 'delete_selected'),\n (approval, 'delete_approval_endorsement')]\n\n for permission, action in permissions:\n # ensure that for each permission and action name, the user is\n # able to perform that action from the action dropdown.\n user.user_permissions.add(permission)\n user = User.objects.get(username='test')\n request = self._mocked_authenticated_request(url, user)\n actions = admin.get_actions(request)\n self.assertTrue(action in actions)", "def appprotect_setup(request, kube_apis, test_namespace) -> None:\n\n print(\"------------------------- Deploy logconf -----------------------------\")\n 
src_log_yaml = f\"{TEST_DATA}/ap-waf/logconf.yaml\"\n global log_name\n log_name = create_ap_logconf_from_yaml(kube_apis.custom_objects, src_log_yaml, test_namespace)\n\n print(\"------------------------- Create UserSig CRD resource-----------------------------\")\n usersig_name = create_ap_usersig_from_yaml(kube_apis.custom_objects, uds_crd_resource, test_namespace)\n\n print(f\"------------------------- Deploy dataguard-alarm appolicy ---------------------------\")\n src_pol_yaml = f\"{TEST_DATA}/ap-waf/{ap_policy_uds}.yaml\"\n global ap_pol_name\n ap_pol_name = create_ap_policy_from_yaml(kube_apis.custom_objects, src_pol_yaml, test_namespace)\n\n def fin():\n if request.config.getoption(\"--skip-fixture-teardown\") == \"no\":\n print(\"Clean up:\")\n delete_ap_policy(kube_apis.custom_objects, ap_pol_name, test_namespace)\n delete_ap_usersig(kube_apis.custom_objects, usersig_name, test_namespace)\n delete_ap_logconf(kube_apis.custom_objects, log_name, test_namespace)\n\n request.addfinalizer(fin)", "def test_contradictory_multiple_actions(self):\n self.render_config_template(\n modules=[{\n \"name\": \"system\",\n \"metricsets\": [\"process\"],\n \"period\": \"1s\"\n }],\n processors=[{\n \"include_fields\": {\n \"fields\": [\"system.process.memory.size\", \"proc.memory.rss.pct\"],\n },\n }, {\n \"drop_fields\": {\n \"fields\": [\"system.process.memory.size\", \"proc.memory.rss.pct\"],\n },\n }]\n )\n metricbeat = self.start_beat()\n self.wait_until(\n lambda: self.output_count(lambda x: x >= 1),\n max_timeout=15)\n metricbeat.kill_and_wait()\n\n output = self.read_output(\n required_fields=[\"@timestamp\"],\n )[0]\n\n for key in [\n \"system.process.memory.size\",\n \"system.process.memory.rss\",\n \"system.process.cpu.start_time\",\n \"system.process.cpu.total.pct\",\n \"system.process.name\",\n \"system.process.pid\",\n \"system.process.memory.rss.pct\"\n ]:\n assert key not in output", "def get_available_actions(self, state):\n pass", "def test_custom_post_actions(self):\n # Action not specified\n self.assertFalse(admin.GlobalAdminHandler.add_custom_post_action(\n \"\", None))\n self.assertFalse(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # Handler not specified\n self.assertFalse(admin.GlobalAdminHandler.add_custom_post_action(\n self.CUSTOM_ACTION_NAME, None))\n self.assertFalse(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # All required fields specified\n self.assertTrue(admin.GlobalAdminHandler.add_custom_post_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler))\n self.assertTrue(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # Duplicate entry not allowed\n self.assertFalse(admin.GlobalAdminHandler.add_custom_post_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler2))\n self.assertTrue(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n self.assertEqual(\n self.custom_handler,\n admin.GlobalAdminHandler\n ._custom_post_actions[self.CUSTOM_ACTION_NAME])\n\n # Force overwrite existing entry\n self.assertTrue(admin.GlobalAdminHandler.add_custom_post_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler2,\n overwrite=True))\n self.assertTrue(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n self.assertEqual(\n self.custom_handler2,\n admin.GlobalAdminHandler\n ._custom_post_actions[self.CUSTOM_ACTION_NAME])\n\n # Remove the action\n admin.GlobalAdminHandler.remove_custom_post_action(\n 
self.CUSTOM_ACTION_NAME)\n self.assertFalse(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n\n # Should not overwrite Dashboard action\n self.assertTrue(dashboard.DashboardHandler.add_custom_post_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler))\n self.assertTrue(dashboard.DashboardHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))\n self.assertFalse(admin.GlobalAdminHandler.add_custom_post_action(\n self.CUSTOM_ACTION_NAME, self.custom_handler))\n self.assertFalse(admin.GlobalAdminHandler._custom_post_actions.has_key(\n self.CUSTOM_ACTION_NAME))", "def testRecord(self):\n\t\taction = SetAttributeAction('mock', 'y', ('key', ), 5)\n\t\tself.failUnless(action == action.record())", "def test_create_describe_delete_acls(kafka_admin_client):\n\n # Check that we don't have any ACLs in the cluster\n acls, error = kafka_admin_client.describe_acls(\n ACLFilter(\n principal=None,\n host=\"*\",\n operation=ACLOperation.ANY,\n permission_type=ACLPermissionType.ANY,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n )\n\n assert error is NoError\n assert len(acls) == 0\n\n # Try to add an ACL\n acl = ACL(\n principal=\"User:test\",\n host=\"*\",\n operation=ACLOperation.READ,\n permission_type=ACLPermissionType.ALLOW,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n result = kafka_admin_client.create_acls([acl])\n\n assert len(result[\"failed\"]) == 0\n assert len(result[\"succeeded\"]) == 1\n\n # Check that we can list the ACL we created\n acl_filter = ACLFilter(\n principal=None,\n host=\"*\",\n operation=ACLOperation.ANY,\n permission_type=ACLPermissionType.ANY,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n acls, error = kafka_admin_client.describe_acls(acl_filter)\n\n assert error is NoError\n assert len(acls) == 1\n\n # Remove the ACL\n delete_results = kafka_admin_client.delete_acls(\n [\n ACLFilter(\n principal=\"User:test\",\n host=\"*\",\n operation=ACLOperation.READ,\n permission_type=ACLPermissionType.ALLOW,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n ]\n )\n\n assert len(delete_results) == 1\n assert len(delete_results[0][1]) == 1 # Check number of affected ACLs\n\n # Make sure the ACL does not exist in the cluster anymore\n acls, error = kafka_admin_client.describe_acls(\n ACLFilter(\n principal=\"*\",\n host=\"*\",\n operation=ACLOperation.ANY,\n permission_type=ACLPermissionType.ANY,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n )\n\n assert error is NoError\n assert len(acls) == 0", "def test_create_describe_delete_acls(kafka_admin_client):\n\n # Check that we don't have any ACLs in the cluster\n acls, error = kafka_admin_client.describe_acls(\n ACLFilter(\n principal=None,\n host=\"*\",\n operation=ACLOperation.ANY,\n permission_type=ACLPermissionType.ANY,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n )\n\n assert error is NoError\n assert len(acls) == 0\n\n # Try to add an ACL\n acl = ACL(\n principal=\"User:test\",\n host=\"*\",\n operation=ACLOperation.READ,\n permission_type=ACLPermissionType.ALLOW,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n result = kafka_admin_client.create_acls([acl])\n\n assert len(result[\"failed\"]) == 0\n assert len(result[\"succeeded\"]) == 1\n\n # Check that we can list the ACL we created\n acl_filter = ACLFilter(\n principal=None,\n host=\"*\",\n operation=ACLOperation.ANY,\n permission_type=ACLPermissionType.ANY,\n 
resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n acls, error = kafka_admin_client.describe_acls(acl_filter)\n\n assert error is NoError\n assert len(acls) == 1\n\n # Remove the ACL\n delete_results = kafka_admin_client.delete_acls(\n [\n ACLFilter(\n principal=\"User:test\",\n host=\"*\",\n operation=ACLOperation.READ,\n permission_type=ACLPermissionType.ALLOW,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n ]\n )\n\n assert len(delete_results) == 1\n assert len(delete_results[0][1]) == 1 # Check number of affected ACLs\n\n # Make sure the ACL does not exist in the cluster anymore\n acls, error = kafka_admin_client.describe_acls(\n ACLFilter(\n principal=\"*\",\n host=\"*\",\n operation=ACLOperation.ANY,\n permission_type=ACLPermissionType.ANY,\n resource_pattern=ResourcePattern(ResourceType.TOPIC, \"topic\")\n )\n )\n\n assert error is NoError\n assert len(acls) == 0", "def __init__(__self__, *,\n actions: pulumi.Input[Sequence[pulumi.Input[str]]],\n principal: pulumi.Input[str]):\n pulumi.set(__self__, \"actions\", actions)\n pulumi.set(__self__, \"principal\", principal)", "def __init__(__self__, *,\n actions: pulumi.Input[Sequence[pulumi.Input[str]]],\n principal: pulumi.Input[str]):\n pulumi.set(__self__, \"actions\", actions)\n pulumi.set(__self__, \"principal\", principal)" ]
[ "0.57733667", "0.5674129", "0.5623118", "0.55654687", "0.54859173", "0.5416407", "0.53971905", "0.5386766", "0.5313959", "0.5262459", "0.52369434", "0.5227309", "0.5218352", "0.5207143", "0.51959413", "0.51855755", "0.5174657", "0.51633334", "0.51326454", "0.51265115", "0.50608176", "0.5060752", "0.50577503", "0.50553393", "0.5053246", "0.50392556", "0.5028449", "0.50030196", "0.49959403", "0.4994682", "0.49924886", "0.49857396", "0.49803573", "0.4974278", "0.49675694", "0.49636367", "0.49636367", "0.49636367", "0.49636367", "0.49569353", "0.4953613", "0.49519452", "0.4938534", "0.49358013", "0.4926583", "0.4923825", "0.49204007", "0.49192467", "0.49173626", "0.49163598", "0.4916057", "0.49097055", "0.4907931", "0.49021584", "0.49006417", "0.4899123", "0.4888844", "0.4888844", "0.4879063", "0.48763502", "0.48740137", "0.48737594", "0.4858635", "0.48512542", "0.48453707", "0.48452237", "0.48448896", "0.4843723", "0.48434064", "0.48263216", "0.4825318", "0.48109385", "0.4807639", "0.48025134", "0.47940895", "0.47940865", "0.47924143", "0.47895685", "0.47871754", "0.47864524", "0.475245", "0.47516215", "0.47464195", "0.47442928", "0.47437543", "0.47377816", "0.47332838", "0.4728524", "0.47284147", "0.47240585", "0.4722387", "0.47184226", "0.47137228", "0.4712344", "0.47103766", "0.47083044", "0.47078308", "0.47078308", "0.4706574", "0.4706574" ]
0.67827463
0
1. Maintain a decreasing stack of indices by scanning nums from left to right. 2. Then scan nums from right to left and, for each index, update maxWidth with the widest ramp it forms against the stack.
def maxWidthRamp(self, nums: list[int]) -> int:
    maxWidth = 0
    descStack = []

    # Generate decreasing stack.
    for i, num in enumerate(nums):
        if not descStack or nums[descStack[-1]] > num:
            descStack.append(i)

    # Check elements from right to left.
    for j in reversed(range(len(nums))):
        while descStack and nums[descStack[-1]] <= nums[j]:
            maxWidth = max(maxWidth, j - descStack.pop())

    return maxWidth
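A quick usage sketch (editor's illustration, not part of the dataset row): the method above never reads `self`, so assuming the definition is in scope at module level it can be exercised directly as a plain function. The two inputs are the standard LeetCode 962 examples; the expected outputs are hand-checked against the two-pass stack logic.

# Editor's illustration only; assumes the maxWidthRamp definition above is in scope.
print(maxWidthRamp(None, [6, 0, 8, 2, 1, 5]))               # -> 4  (ramp from i=1 to j=5)
print(maxWidthRamp(None, [9, 8, 1, 0, 1, 9, 4, 0, 4, 1]))   # -> 7  (ramp from i=2 to j=9)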
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def peg_width_per_levels(base_width):\n limiter = 2\n decrementer = -2\n decrementing_width = int(base_width)\n peg_count_per_level = []\n while decrementing_width >= limiter:\n peg_count_per_level.append(int(decrementing_width))\n decrementing_width += decrementer\n return peg_count_per_level", "def fn(x):\n ans = rsm = ii = 0 \n for i in range(len(nums)): \n rsm += nums[i]\n while rsm > x: # sliding window \n rsm -= nums[ii]\n ii += 1\n ans += i - ii + 1\n return ans", "def maximumGap(self, nums: List[int]) -> int:\r\n n = len(nums)\r\n if n < 2: return 0 \r\n l, r = min(nums), max(nums)\r\n if r - l == 0: return 0 \r\n gap_instance = max(1, (r - l) // n)\r\n gapcnts = math.ceil((r - l + 1) / gap_instance)\r\n buckets = [[-1, -1] for _ in range(gapcnts)] \r\n calpos = lambda num: (num - l) // gap_instance\r\n\r\n for num in nums:\r\n pos = calpos(num)\r\n if num < buckets[pos][0] or buckets[pos][0] == -1:\r\n buckets[pos][0] = num \r\n if num > buckets[pos][1] or buckets[pos][1] == -1:\r\n buckets[pos][1] = num \r\n\r\n ans, pre = 0, l\r\n for small, large in buckets:\r\n if small == -1:\r\n continue \r\n else:\r\n ans = max(small - pre, ans)\r\n pre = large\r\n return ans", "def findPeakElement2(self, nums: List[int]) -> int:\n nums.insert(0, -float('inf'))\n nums.append(-float('inf'))\n l, r = 0, len(nums)\n\n while l < r:\n mid = l + (r - l) // 2\n if nums[mid] > nums[mid-1] and nums[mid] > nums[mid+1]:\n return mid - 1\n \n elif nums[mid] <= nums[mid-1] and nums[mid] <= nums[mid+1]:\n r = mid \n elif nums[mid-1] <= nums[mid] <= nums[mid+1]:\n l = mid\n elif nums[mid-1] >= nums[mid] >= nums[mid+1]:\n r = mid\n return l", "def get_next_width(current_width,width_array):\n active_width = float(current_width)/MaxWidth\n\n active_width_constant = width_array.index(get_width_constant(active_width,width_array))\n\n width_multiplier = width_array[(active_width_constant+1)%len(width_array)]\n\n return int((MaxWidth-(WinBorder*2))*width_multiplier)", "def findMaximumSubarraySlidingWindow(self, k, nums):\n window_start, window_sum, window_max= 0, 0, 0\n for i in range(len(nums)):\n window_sum += nums[i] #add the next element\n # slide the window, we don't need to slide if we have not hit the required window size of K\n if i >= k-1:\n window_max = max(window_sum, window_max) # calculate the maximum sum\n window_sum -= nums[window_start] #substract the element going out\n window_start += 1 #slide the window ahead\n return window_max", "def findMaxLength(self, nums):\n dict1 = dict()\n count = 0\n maxlen = 0\n for i in range(len(nums)):\n if nums[i] == 1:\n count = count + 1\n else:\n count = count - 1\n\n if count == 0:\n maxlen = max(maxlen, i + 1)\n if count not in dict1:\n dict1[count] = i\n else:\n maxlen = max(maxlen, i - (dict1.get(count)))\n return maxlen", "def min_width(blocks):\r\n assert(len(blocks) > 0)\r\n return sum(blocks) + len(blocks) - 1", "def find_rising_flank(arr, method='Size'):\n arr = arr.copy()\n #arr[arr<arr.max()*0.01] = 0\n prev_val = -np.inf\n start_index = None\n len_ctr = 0\n pairs = []\n for index, val in enumerate(arr):\n if val > prev_val:\n if start_index is None:\n start_index = index - 1\n start_val = val\n len_ctr += 1\n else:\n if start_index is not None:\n if method == 'Length':\n pairs.append((len_ctr, start_index, index))\n elif method == 'Size':\n pairs.append((prev_val-start_val, start_index, index))\n start_index = None\n start_val = None\n len_ctr = 0\n prev_val = val\n #import pdb\n #pdb.set_trace()\n end_longest_streak = sorted(pairs)[-1][-1]\n 
return end_longest_streak", "def calculate_min_max_tiles(self):", "def splitArray(self, nums: List[int], m: int) -> int:\n l = max(nums)\n r = sum(nums)\n ans = r\n\n while l <= r:\n mid = (l + r) // 2\n range_sum = 0\n range_sum_count = 1\n for i in range(len(nums)):\n if (range_sum + nums[i] > mid):\n range_sum = nums[i]\n range_sum_count += 1\n else:\n range_sum += nums[i]\n if range_sum_count <= m:\n ans = min(ans, mid)\n r = mid - 1\n else:\n l = mid + 1\n return ans", "def build_max_heap(A):\r\n i = int((len(A)-2)//2)\r\n while i >= 0:\r\n max_heapify(A, i)\r\n i -= 1\r\n return A", "def robSingle_2(self, nums, start, end):\n # print((start, end))\n # print(nums[start: end + 1])\n curMax = 0\n preMax = 0\n for num in nums[start:end + 1]:\n preMax, curMax = curMax, max(curMax, preMax + num)\n # print(curMax)\n # print(\"####################################\")\n return curMax", "def calc_slice_size(scale_w):\n total_res = SLICE_THRE\n in_w = int((total_res - 1 - scale_w) // (1 + scale_w))\n out_w = int((in_w + 1) * scale_w) + 2\n return in_w, out_w", "def fix(xs):\n\n if xs >= 0:\n res = np.floor(xs)\n else:\n res = np.ceil(xs)\n return res", "def fix(xs):\n\n # res = [np.floor(e) if e >= 0 else np.ceil(e) for e in xs]\n if xs >= 0:\n res = np.floor(xs)\n else:\n res = np.ceil(xs)\n return res", "def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:\n max_heap = []\n item_dict = {}\n result = []\n\n for i in range(len(nums)):\n item = [-nums[i], False]\n heapq.heappush(max_heap, item)\n\n if nums[i] not in item_dict:\n item_dict[nums[i]] = [item]\n else:\n item_dict[nums[i]].append(item)\n\n if i - k >= 0:\n # \"remove\" element from heap\n item_to_remove = nums[i - k]\n\n in_heap_item = item_dict[item_to_remove].pop()\n in_heap_item[1] = True # mark as removed\n\n while max_heap[0][1] is True:\n heapq.heappop(max_heap)\n\n result.append(-max_heap[0][0])\n if i == k - 1:\n # handle the first window\n result.append(-max_heap[0][0])\n\n return result", "def four():\r\n \r\n i = 999\r\n j = i\r\n largest = 0\r\n \r\n while i > 0:\r\n while j > 0:\r\n number = str(i * j)\r\n forward = str(number)\r\n reverse = \"\"\r\n for char in number:\r\n reverse = char + reverse\r\n if forward == reverse:\r\n if largest < i * j:\r\n largest = i * j\r\n break\r\n else:\r\n j = j - 1\r\n i = i - 1\r\n j = i\r\n return largest", "def maxSumOfThreeSubarrays(self, nums: List[int], k: int) -> List[int]:\n\n n = len(nums)\n if n < 3 * k or k == 0:\n return 0\n\n prefix_sum = [0]\n for num in nums:\n prefix_sum.append(prefix_sum[-1] + num)\n\n left = [0] * n\n left_i = [0] * n\n right = [0] * (n + 1) # add one to right (for case of k == 1)\n right_i = [0] * (n + 1)\n\n for i in range(k - 1, n):\n window = prefix_sum[i + 1] - prefix_sum[i + 1 - k]\n if window > left[i - 1]: # > cause we prefex left start\n left[i] = window\n left_i[i] = i - (k - 1)\n else:\n left[i] = left[i - 1]\n left_i[i] = left_i[i - 1]\n\n for i in reversed(range(n - k + 1)):\n window = prefix_sum[i + k] - prefix_sum[i]\n if window >= right[i + 1]: # >= cause we prefex left start\n right[i] = window\n right_i[i] = i\n else:\n right[i] = right[i + 1]\n right_i[i] = right_i[i + 1]\n\n max_sum = 0\n a, b, c = 0, 0, 0\n for i in range(k, n - 2 * k + 1):\n curr_sum = prefix_sum[i + k] - prefix_sum[i] + left[i - 1] + right[i + k]\n if curr_sum > max_sum:\n max_sum = curr_sum\n a, b, c = left_i[i - 1], i, right_i[i + k]\n\n return [a, b, c]", "def GetScaleBlocks(width):\n\n rord=numpy.log10(abs(width)/2.0)\n nrord=rord % 1\n\n if 
nrord < numpy.log10(2):\n spc=0.2*pow(10,numpy.floor(rord))\n smallspc=spc\n bigspc=5*spc\n newspc=[0,smallspc,smallspc*2,smallspc*3,smallspc*4,smallspc*5]\n elif nrord < numpy.log10(5):\n spc=0.5*pow(10,numpy.floor(rord))\n smallspc=spc\n bigspc=5*spc\n newspc=[0,smallspc,smallspc*2,smallspc*3,smallspc*4]\n else:\n spc=pow(10,numpy.floor(rord))\n smallspc=spc\n bigspc=spc*5\n newspc=[0,smallspc,smallspc*2,smallspc*3,smallspc*4,smallspc*5]\n\n if len(newspc) == 5:\n #labels=['0',None,\"%g\" % smallspc*2,None,\"%g\" % (smallspc*4)]\n labels=['0',None,None,None,\"%g\" % (smallspc*4)]\n else:\n labels=['0',None,None,None,None,\"%g\" % (smallspc*5)]\n\n temp_max=newspc[len(newspc)-1]\n start=temp_max\n for temp in numpy.arange(start,width-bigspc/2,bigspc):\n temp_max=temp_max+bigspc\n newspc.append(temp_max)\n labels.append(\"%g\" % temp_max)\n\n #start=temp_max\n #for temp in Numeric.arange(start,width-smallspc/2,smallspc):\n # labels.append(None)\n # temp_max=temp_max+smallspc \n # newspc.append(temp_max) \n\n return (numpy.array(newspc,numpy.float32),labels)", "def headandtail(zigma, numls):\n numls.sort()\n nummin, nummax = 0, 0\n for i in xrange(1, zigma+1):\n nummin += numls[i-1]\n nummax += numls[-i]\n print nummin + nummax", "def find_max_with_count(A):\n\n def frmax(lo, hi):\n \"\"\"Use recursion to find maximum value in A[lo:hi+1] incl. count\"\"\"\n if lo == hi: return (0, A[lo])\n\n mid = (lo+hi)//2\n ctleft,left = frmax(lo, mid)\n ctright,right = frmax(mid+1, hi)\n return (1+ctleft+ctright, max(left, right))\n\n return frmax(0, len(A)-1)", "def build_max_heap(a):\r\n for i in range(math.floor((len(a) - 1)/2), -1, -1):\r\n max_heapify(a, i)", "def get_long_len(nums):\n return len(str(max(nums + [sum(nums)])))", "def flattenFrames(stack):\n \n maxHeight=0\n frameList=[]\n \n \n print('\\n')\n for i, frame in enumerate(stack):\n #medFrame = ndimage.filters.median_filter(frame,size=(1,60)) #Takes 3.5 minutes\n medFrame = ndimage.filters.uniform_filter1d(frame, 60) #Takes 1.0 minutes and has same output as med filter\n shifts = shiftDetector(medFrame)\n newFrame = adjustFrame(frame, shifts)\n frameList.append(newFrame)\n if newFrame.shape[0] > maxHeight:\n maxHeight = newFrame.shape[0]\n \n #Show percentage of loop completed.\n print('\\rFinding and correcting shifts {:.2f}% done'.format(100.0*((i+1)/len(stack))),end='', flush=True)\n \n flattenedStack = padFrames(frameList, maxHeight)\n\n return flattenedStack", "def count_max_acc(alon, curr_max, count, pos):\n if pos == len(alon):\n return count\n curr_num = alon[pos]\n if curr_num > curr_max:\n curr_max = curr_num\n count = 0\n if curr_num == curr_max:\n count += 1\n return count_max_acc(alon, curr_max, count, pos+1)", "def calc_dim(s):\n s = s.detach().numpy()\n dim = 0\n # calculate how much 90% would be\n s_square = [i ** 2 for i in s]\n sum_square = sum(s_square)\n goal = .9 * sum_square\n # find 90%\n count = 0\n while count < goal:\n count += s_square[dim]\n dim += 1\n return dim # return this many dimensions", "def sub_division(width: float, minimum_division: float, stretch_factor: float) -> list:\n\n sum_x = 0\n next_ = minimum_division\n new_grid = []\n max_dx = 20/100\n x = width/2\n\n while sum_x < x:\n remaining = x - sum_x\n\n if next_ > max_dx:\n n = np.ceil(remaining/max_dx)\n\n if n == 0:\n new_grid.append(remaining)\n\n next_ = remaining/n\n\n for _ in range(0, int(n)):\n new_grid.append(next_)\n sum_x += next_\n\n remaining = x - sum_x\n\n if next_ < remaining:\n new_grid.append(next_)\n sum_x += next_\n else:\n 
remaining += new_grid[-1]\n new_grid[-1] = remaining/2\n new_grid.append(remaining/2)\n sum_x = x\n\n next_ = next_ * stretch_factor\n\n x1 = new_grid[::-1]\n x2 = new_grid+x1\n\n return x2", "def robSingle(self, nums, start, end):\n # print((start, end))\n # print(nums[start: end])\n curMax = 0\n preMax = 0\n for num in nums[start:end]:\n preMax, curMax = curMax, max(curMax, preMax + num)\n # print(curMax)\n # print(\"####################################\")\n return curMax", "def bu(lengths: List[int], L: int) -> int:\n N = len(lengths)\n dp = [0] + [-1]*L\n for l in lengths:\n for j in range(l, L+1):\n dp[j] = max(dp[j], dp[j-l]+1 if dp[j-l] != -1 else -1)\n return dp[-1]", "def max_scoring_num_rolls(dice=six_sided, num_samples=1000):\n # BEGIN PROBLEM 8\n\n \"\"\"maxi, number_of_dice, ret = 0, 10, 0\n while number_of_dice > 0:\n avg = make_averaged(roll_dice)(number_of_dice, dice)\n maxi = max(maxi, avg)\n if avg >= maxi:\n ret = number_of_dice\n number_of_dice -= 1\n return ret\"\"\"\n\n\n\n counterA = 1\n num_rolls=1\n max_value = 0\n best_num_rolls = 0\n while counterA <= 10:\n num_rolls = counterA\n average_function = make_averaged(roll_dice)(counterA, dice)\n if average_function > max_value:\n max_value = average_function\n best_num_rolls = counterA\n counterA +=1\n return best_num_rolls\n\n \"\"\"counterA = 1\n maxvalue = 0\n maxvaluenumber = 0\n while(counterA<=10):\n num_rolls = counterA\n average_for_roll = make_averaged(roll_dice(num_rolls, dice), num_samples)\n counterB = average_for_roll(roll_dice(counterA, dice))\n if(counterB>maxvalue):\n maxvalue = counterB\n maxvaluenumber = counterA\n counterA +=1\n return maxvaluenumber\"\"\"\n # END PROBLEM 8", "def get_max_sum3(a):\n s = ms = a[0]\n n = len(a)\n mstart = 0\n mend = 0\n start = 0\n end = 0\n for i in range(1, n):\n if s + a[i] < a[i]:\n s = a[i]\n start = i\n end = i\n else:\n s = s + a[i]\n end = i\n if ms < s:\n ms = s\n mstart = start\n mend = end\n return mstart, mend", "def solution(n: int) -> int:\n sizearr = n + 1\n\n # create zero-filled multi_arr\n multi_arr = [[0 for x in range(sizearr)] for n in range(sizearr)]\n\n # base value is always skipped after being padded\n multi_arr[0][0] = 1\n for last in range(1, sizearr):\n for next in range(0, sizearr):\n multi_arr[last][next] = multi_arr[last - 1][next]\n if next >= last:\n multi_arr[last][next] += multi_arr[last - 1][next - last]\n\n return multi_arr[n][n] - 1", "def max_total_length(murals):\n if not murals:\n return 0\n\n no_overlap = []\n for mural in murals:\n if mural[1] <= murals[0][0] or mural[0] >= murals[0][1]:\n no_overlap.append(mural)\n\n value = murals[0][1] - murals[0][0]\n del murals[0]\n return max(value + max_total_length(no_overlap), max_total_length(murals))", "def longestIncreasingSubsequence(nums):\n if not nums:\n return 0\n \n dp = [None] * len(nums)\n dp[0] = 1\n maxans = 1\n \n for i in range(1, len(dp)):\n maxval = 0\n for j in range(0, i):\n if nums[i] > nums[j]:\n maxval = max(maxval, dp[j])\n \n dp[i] = maxval + 1\n maxans = max(maxans, dp[i])\n \n return maxans", "def get_max_draw_down(ts_vals):\r\n MDD = 0\r\n DD = 0\r\n peak = -99999\r\n for value in ts_vals:\r\n if (value > peak):\r\n peak = value\r\n else:\r\n DD = (peak - value) / peak\r\n if (DD > MDD):\r\n MDD = DD\r\n return MDD", "def tower_of_hanoi_stack(n, beg, aux, end):", "def widthOfBinaryTree(self, root: Optional[TreeNode]) -> int:\n q = deque([(root, 1)])\n max_width = 1\n\n while len(q) > 0:\n temp_q = deque()\n local_max_width = float('-inf')\n local_min_width 
= float('+inf')\n\n for (node, position) in q:\n local_max_width = max(local_max_width, position)\n local_min_width = min(local_min_width, position)\n if node.left:\n temp_q.append((node.left, position * 2 - 1))\n if node.right:\n temp_q.append((node.right, position * 2))\n max_width = max(max_width, local_max_width - local_min_width + 1)\n q.clear()\n q = temp_q\n\n return max_width", "def build_max_heap(A):\n A.insert(0, len(A))\n for i in range(len(A)//2, 0, -1):\n max_heapify(A, i)", "def maxResult(self, nums: List[int], k: int) -> int:\n # Solution 1 - 964 ms\n # Solution 2 - 864 ms\n n = len(nums)\n if n == 0:\n return 0\n if n == 1:\n return nums[0]\n\n dp = [0] * n\n dp[0] = nums[0]\n max_sum = dp[0]\n max_sum_pointer = 0\n for i in range(1, n):\n if max_sum_pointer >= i - k:\n if max_sum < dp[i - 1] and i > 0:\n max_sum = dp[i - 1]\n max_sum_pointer = i - 1\n else:\n if i - k > 0:\n max_sum = dp[i - k]\n max_sum_pointer = i - k\n for p in range(i - k, i):\n if max_sum <= dp[p]:\n max_sum = dp[p]\n max_sum_pointer = p\n\n dp[i] = max_sum + nums[i]\n\n dp[-1] = max_sum + nums[-1]\n return dp[-1]", "def splitArray(self, nums: List[int], m: int) -> int:\n low, high, res = max(nums), sum(nums), -1\n while low <= high:\n pivot=(low+high)//2\n if self.isValid(nums,m,pivot):\n res, high = pivot, pivot - 1\n else:\n low = pivot + 1\n return res", "def find_greater_numbers(nums):\n times = 0\n for loop in range(len(nums) - 1):\n for follow in range(loop + 1, len(nums)):\n if (nums[loop] < nums[follow]):\n times+= 1\n return times", "def minNumbersOfJumbs(array):\n\n # case array is empty or has 1 element\n if len(array) < 2:\n return 0\n jump = array[0]\n max_reach = array[0]\n # case for number is enough to reach end\n if jump >= len(array) - 1:\n return 1\n step = 1\n idx = 1", "def find_min(nums):\n left, right = 0, len(nums) - 1\n while left + 1 < right:\n mid = (left + right) // 2\n if nums[mid] < nums[right]:\n right = mid\n else:\n left = mid\n if nums[left] < nums[right]:\n return nums[left]\n else:\n return nums[right]", "def find_max_gap(self, free_space_ranges):\n start = end = 200\n curr_start = 200\n #print(free_space_ranges)\n for i in range(201, 880):\n if free_space_ranges[i] != 0:\n if free_space_ranges[i-1] == 0:\n curr_start = i\n else:\n if (i-curr_start) > end-start:\n start = curr_start\n end = i\n return start, end", "def find_max_min(number):\n if max(number) == min(number):\n return [len(number)]\n return [min(number), max(number)]", "def max_level(board):\n acc_board = accum_board(board)\n for row in acc_board:\n row.append(0)\n acc_board.append([0]*len(acc_board[0]))\n m, n = len(board), len(board[0])\n max_level_sum = float('-inf')\n top_left = None\n for i in range(m):\n for j in range(n):\n for k in range(min(m-i, n-j)):\n level = (acc_board[i+k][j+k] +\n acc_board[i-1][j-1] -\n acc_board[i-1][j+k] -\n acc_board[i+k][j-1])\n if level > max_level_sum:\n max_level_sum = level\n top_left = (j+1, i+1, k+1)\n return top_left", "def get_sliding_window_max(numbers: Sequence[int], k: int) -> Iterable[int]:\n\n assert 1 <= k <= len(numbers)\n\n # Deque to store useful for calculation number indices.\n dq: Deque[int] = deque()\n\n for i in range(k):\n while dq and numbers[dq[-1]] < numbers[i]:\n dq.pop()\n\n dq.append(i)\n\n yield numbers[dq[0]]\n\n for i in range(k, len(numbers)):\n while dq[0] <= i - k:\n dq.popleft()\n\n while dq and numbers[dq[-1]] < numbers[i]:\n dq.pop()\n\n dq.append(i)\n\n yield numbers[dq[0]]", "def num_array(lower_limit = 0, upper_limit = 5, 
increment = 1):\n numbers = []\n while lower_limit < upper_limit:\n numbers.append(lower_limit)\n lower_limit += increment\n return numbers", "def bit_smarter(limit):\n c_lengths = {}\n\n for s in range(1, limit+1):\n c_lengths[s] = s_collatz_length(s, c_lengths)\n\n return max(c_lengths, key=lambda x: c_lengths[x])", "def maxSlidingWindow_v2(self, nums: List[int], k: int) -> List[int]:\n m_queue = deque()\n result = []\n\n for i in range(len(nums)):\n while m_queue and m_queue[-1] < nums[i]:\n m_queue.pop()\n m_queue.append(nums[i])\n\n if i - k >= 0:\n item_to_remove = nums[i - k]\n\n if item_to_remove == m_queue[0]:\n m_queue.popleft()\n\n if i >= k - 1:\n result.append(m_queue[0])\n\n return result", "def smooth5(size: int) -> int:\n if size < 6:\n return size\n if not size % 2:\n return size\n\n new = np.inf\n power5 = 1\n while power5 < size:\n power35 = power5\n while power35 < size:\n power2 = 2 ** ((-int(-size // power35) - 1).bit_length())\n n = power2 * power35\n if n == size:\n return new\n elif n < new:\n new = n\n power35 *= 3\n if power35 == size:\n return new\n if power35 < new:\n new = power35\n power5 *= 5\n if power5 == size:\n return new\n if power5 < new:\n new = power5\n return new", "def left_bs(arr, st, end, n):\n if arr[st]>=n:\n return st-1\n if arr[end]<n:\n return end+1 \n mid = st + (end-st)//2\n while st <= end:\n if arr[mid] < n and arr[mid+1] >= n:\n return mid + 1\n if arr[mid] < n:\n st = mid\n else:\n end = mid-1\n mid = st + (end-st)//2", "def maxVal(item_list, rem_space):\n if item_list == [] or rem_space == 0: # no items or space\n result = (0, ())\n else:\n next_item = item_list[0]\n if next_item.getCost() > rem_space:\n result = maxVal(item_list[1:], rem_space)\n else:\n with_val, with_list = maxVal(item_list[1:],\n rem_space-next_item.getCost())\n with_val += next_item.getValue()\n\n without_val, without_list = maxVal(item_list[1:],\n rem_space)\n if with_val > without_val:\n result = (with_val, with_list + (next_item, ))\n else:\n result = (without_val, without_list)\n return result", "def _bucket_boundaries(max_length, min_length=8, length_bucket_step=1.1):\n assert length_bucket_step > 1.0\n x = min_length\n boundaries = []\n while x < max_length:\n boundaries.append(x)\n x = max(x + 1, int(x * length_bucket_step))\n return boundaries", "def count(A,target):\n\n def rcount(lo, hi, target):\n \"\"\"Use recursion to find maximum value in A[lo:hi+1].\"\"\"\n if lo == hi:\n return 1 if A[lo] == target else 0\n\n mid = (lo+hi)//2\n left = rcount(lo, mid, target)\n right = rcount(mid+1, hi, target)\n return left + right\n\n return rcount(0, len(A)-1, target)", "def findSpaceLength(Histogram, High):\n summ=0\n length=0\n number=0\n for kol in Histogram:\n if kol==0:\n length+=1\n elif kol>0 and length>0:\n if length<High:\n summ+=length\n length=0\n number+=1\n else:length=0\n if number<>0: return max(summ/number, (1/5)*High) ## in a case if there is no space in line\n else: return (1/5)*High", "def maxSubArray(self, nums: List[int]) -> int:\n # O(n) solution\n # 我们定义函数 S(i) ,它的功能是计算以 0(包括 0)开始加到 i(包括 i)的值。\n # 那么 S(j) - S(i - 1) 就等于 从 i 开始(包括 i)加到 j(包括 j)的值\n # 我们进一步分析,实际上我们只需要遍历一次计算出所有的 S(i), 其中 i = 0,1,2....,n-1。\n # 然后我们再减去之前的 S(k),其中 k = 0,1,i - 1,中的最小值即可。 因此我们需要 用一个变量来维护这个最小值,还需要一个变量维护最大值。\n max_sum = nums[0]\n min_sum_from_start = curr_sum = 0\n for i in range(len(nums)):\n curr_sum = curr_sum + nums[i]\n if curr_sum - min_sum_from_start > max_sum:\n max_sum = curr_sum-min_sum_from_start\n if curr_sum < min_sum_from_start:\n min_sum_from_start = 
curr_sum\n return max_sum", "def lengthOfLIS(self, nums):\n n = len(nums)\n if n <= 1:\n return n\n\n max_len = 0\n\n dp = [0] * n\n for i, num in enumerate(nums):\n if i == 0:\n dp[0] = 1\n max_len = 1\n else:\n prev_max = 0\n for j in xrange(i):\n if nums[j] < num:\n prev_max = max(prev_max, dp[j])\n dp[i] = prev_max + 1\n max_len = max(max_len, dp[i])\n\n return max_len", "def get_max_width(binary_mask):\n start_px = 0\n end_px = 0\n\n for i, row in enumerate(binary_mask):\n max = np.argmax(row)\n if max > 0:\n start_px = i\n break\n\n for i, row in enumerate(binary_mask[::-1]):\n max = np.argmax(row)\n if max > 0:\n end_px = i\n break\n\n return binary_mask.shape[0] - start_px - end_px", "def calc_long_runs(num_weeks, initial_mileage, days_first_week, days_last_week,\r\n longest_run):\r\n long_runs = []\r\n if days_last_week >= 6:\r\n last_run = round(longest_run / 2)\r\n else:\r\n last_run = 0\r\n first_long_run = min(round(initial_mileage/3), longest_run)\r\n if num_weeks == 2:\r\n if days_first_week <= 5:\r\n long_runs.append(0)\r\n else:\r\n long_runs.append(last_run)\r\n elif num_weeks == 3:\r\n if days_first_week <= 5:\r\n long_runs.append(0)\r\n else:\r\n long_runs.append(longest_run)\r\n long_runs.append(last_run)\r\n elif num_weeks == 4:\r\n if days_first_week <= 5:\r\n long_runs.append(0)\r\n else:\r\n long_runs.append(first_long_run)\r\n long_runs += [longest_run, last_run]\r\n elif num_weeks > 4:\r\n if days_first_week <= 5:\r\n long_runs.append(0)\r\n num_weeks -= 1\r\n long_runs += [round(first_long_run + i / (num_weeks - 3) * (longest_run\r\n - first_long_run)) for i in range(num_weeks - 2)]\r\n long_runs.append(last_run)\r\n return long_runs + [0]", "def _brute_force_unbounded_knapsack_aux(weight_limit, weight_list, value_list, item_list):\n max_value = 0\n max_list = []\n for i in range(len(weight_list)):\n available_space = weight_limit - weight_list[i]\n if available_space >= 0:\n current_value, current_list = _brute_force_unbounded_knapsack_aux(\n available_space, weight_list, value_list, item_list)\n current_value += value_list[i]\n current_list.append(i)\n if (current_value > max_value):\n max_value = current_value\n max_list = current_list\n return max_value, max_list", "def solve_n_bins(x):\n from scipy.stats import iqr\n\n x = np.asarray(x)\n hat = 2 * iqr(x) / (len(x) ** (1 / 3))\n\n if hat == 0:\n return int(np.sqrt(len(x)))\n else:\n return int(np.ceil((x.max() - x.min()) / hat))", "def solve_n_bins(x):\n from scipy.stats import iqr\n\n x = np.asarray(x)\n hat = 2 * iqr(x) / (len(x) ** (1 / 3))\n\n if hat == 0:\n return int(np.sqrt(len(x)))\n else:\n return int(np.ceil((x.max() - x.min()) / hat))", "def build_max_heap(heap):\n\tfor j in range(heap.len//2, -1, -1):\n\t\tmax_heapify(heap, j)", "def dyadic_length_int(x):\n n = x.shape[0]\n return math.ceil(math.log2(n))", "def fn(i):\n if i == len(nums): return ans.append(stack.copy())\n if not stack or stack[-1] != nums[i]: fn(i+1)\n stack.append(nums[i])\n fn(i+1)\n stack.pop()", "def twoDize(array, width):\n count = 0\n output = []\n temp = []\n while len(array) > 0:\n temp.append(array.pop())\n if len(temp) == width:\n output.append(temp)\n temp = []\n return output", "def max_pp(level):\n base_pp = 6\n level_pp = 2 * level\n return base_pp + (level_pp - 2)", "def largest_island(grid: list[list[int]]) -> int:\n rows = len(grid)\n cols = len(grid[0])\n\n visited = [[False for _ in range(cols)] for _ in range(rows)]\n max_island_size = 0\n for i in range(rows):\n for j in range(cols):\n if grid[i][j] == 1 and 
not visited[i][j]:\n island_size = flood_island(grid, i, j, visited)\n max_island_size = max(max_island_size, island_size)\n\n return max_island_size", "def top_down_rod_cutting_helper(prices: List[int], length: int, dp: List[int]):\n if length == 0:\n return 0\n if dp[length] == 0:\n max_value = 0\n for i in range(1, length + 1):\n max_value = max(max_value, prices[i-1] + top_down_rod_cutting_helper(prices, length - i, dp))\n dp[length] = max_value\n return dp[length]", "def _divide_widths(self, width: int) -> list[int] | None:\n children = self._all_children\n\n if not children:\n return []\n\n # Calculate widths.\n dimensions = [c.preferred_width(width) for c in children]\n preferred_dimensions = [d.preferred for d in dimensions]\n\n # Sum dimensions\n sum_dimensions = sum_layout_dimensions(dimensions)\n\n # If there is not enough space for both.\n # Don't do anything.\n if sum_dimensions.min > width:\n return None\n\n # Find optimal sizes. (Start with minimal size, increase until we cover\n # the whole width.)\n sizes = [d.min for d in dimensions]\n\n child_generator = take_using_weights(\n items=list(range(len(dimensions))), weights=[d.weight for d in dimensions]\n )\n\n i = next(child_generator)\n\n # Increase until we meet at least the 'preferred' size.\n preferred_stop = min(width, sum_dimensions.preferred)\n\n while sum(sizes) < preferred_stop:\n if sizes[i] < preferred_dimensions[i]:\n sizes[i] += 1\n i = next(child_generator)\n\n # Increase until we use all the available space.\n max_dimensions = [d.max for d in dimensions]\n max_stop = min(width, sum_dimensions.max)\n\n while sum(sizes) < max_stop:\n if sizes[i] < max_dimensions[i]:\n sizes[i] += 1\n i = next(child_generator)\n\n return sizes", "def running_median(nums: List[int]) -> None:\n # Initialize variables\n max_heap = [] # Everything is multiplied by -1 to get a max heap\n min_heap = [nums[0]]\n median = nums[0]\n print(median)\n\n # Median printing loop\n for num in nums[1:]:\n # Insert into correct heap\n if num > median:\n heapq.heappush(min_heap, num)\n else:\n heapq.heappush(max_heap, -1*num)\n\n # Keep balance invariant\n if len(min_heap) - len(max_heap) > 1:\n move = heapq.heappop(min_heap)\n heapq.heappush(max_heap, -1*move)\n elif len(max_heap) - len(min_heap) > 1:\n move = -1*heapq.heappop(max_heap)\n heapq.heappush(min_heap, move)\n\n # Print and overwrite median\n if len(min_heap) > len(max_heap):\n median = min_heap[0]\n elif len(max_heap) > len(min_heap):\n median = -1*max_heap[0]\n else:\n median = (min_heap[0] + -1*max_heap[0])/2\n print(median)", "def nextPermutation(self, nums: List[int]) -> None:\n n = len(nums)\n if n <= 1:\n return\n \n right = n - 1\n left = n - 2\n \n while(left >= 0 and nums[left + 1] <= nums[left]):\n left = left - 1\n # jumps out of loop only when nums[l] < nums[l+1] or when l = 0\n \n if (left < 0):\n left = 0\n while(right > left):\n temp = nums[right]\n nums[right] = nums[left]\n nums[left] = temp\n right -= 1\n left += 1\n return\n \n if (left == (right-1)):\n temp = nums[right]\n nums[right] = nums[left]\n nums[left] = temp\n return\n \n sub = right\n \n # find the number just greater than nums[left] and label it sub \n while(nums[sub] <= nums[left]):\n sub -= 1\n \n temp = nums[sub]\n nums[sub] = nums[left]\n nums[left] = temp\n \n left += 1\n while(right > left):\n temp = nums[right]\n nums[right] = nums[left]\n nums[left] = temp\n right -= 1\n left += 1", "def calculate(self, A: List[int]) -> int:\n ple = PreviousLessElement()\n left = ple.calculate(A)\n\n \"\"\" 
calculate NLE of each element in the input \"\"\"\n nle = NextLessElement()\n right = nle.calculate(A)\n\n \"\"\" for an element whose either PLE or NLE does not exist,\n normalize it to :\n PLE -> index + 1\n NLE -> len(input) - index\n \"\"\"\n for i in range(len(A)):\n left[i] = i + 1 if left[i] == -1 else left[i]\n right[i] = len(A) - i if right[i] == -1 else right[i]\n\n \"\"\" number of sub-arrays with A[i] as the minimum will be those which\n starts with any value between A[i] and PLE (since any value in between will be greater than A[i])\n number of such values = left[i]\n\n ends with any value between A[i] and NLE (since any value in between will be greater than A[i])\n number of such values = right[i]\n\n total number of sub-arrays = left[i] * right[i]\n\n since each of these sub-arrays will have A[i] as the minimum, its contribution to the overall sum\n will be A[i] * number of sub-arrays -> A[i] * (left[i] * right[i])\n \"\"\"\n mod = (10 ** 9) + 7\n return sum(a * l * r for a, l, r in zip(A, left, right)) % mod", "def maxTurbulenceSize(self, arr: List[int]) -> int:\n if len(arr) == 1:\n return 1\n ret = 1\n tmp_ret = 0\n last_flag = None\n for i in range(1, len(arr)):\n if arr[i] == arr[i - 1]:\n current_flag = None\n else:\n current_flag = arr[i] > arr[i - 1]\n\n if current_flag is None:\n ret = max(ret, tmp_ret)\n tmp_ret = 1\n elif last_flag is None or last_flag == current_flag:\n ret = max(ret, tmp_ret)\n tmp_ret = 2\n else:\n tmp_ret += 1\n\n last_flag = current_flag\n return max(ret, tmp_ret)", "def smallerNumbersThanCurrent(nums: List[int]) -> List[int]:\n i, count = 0, 0\n arr = []\n for j in range(len(nums)):\n if nums[i] > nums[j]:\n count += 1\n arr.append(count)\n return arr", "def get_max_passes(example_height: int) -> int:\n return (example_height - 5) // 4", "def count_gold(pyramid):\n count = [0]\n for line in pyramid:\n count.append(count[-1] + line[-1])\n for i in range(len(line)-1, 0, -1):\n count[i] = max(count[i-1], count[i]) + line[i-1]\n\n # replace this for solution\n return max(count)", "def count_sliding_window(top, step=10, window_size=(20,20)):\n\tc = 0\n\tfor x in range(0, top.shape[0], step):\n\t\tif x + window_size[0] > top.shape[0]:\n\t\t\tx = top.shape[0] - window_size[0]\n\t\tfor y in range(0, top.shape[1], step):\n\t\t\tif y + window_size[1] > top.shape[1]:\n\t\t\t\ty = top.shape[1] - window_size[1]\n\t\t\tc += 1\n\treturn c", "def get_max_digs(self):\n\n # Get maximum number of digits\n # 20200607: np.ceil --> np.floor + 1\n # Consider edge case n = 9 vs n = 10\n # ceil(log_10(9)) == 1, ceil(log_10(10)) == 1\n # floor(log_10(9)) + 1 == 1, floor(log_10(10)) + 1 == 2\n max_digs = int(np.floor(np.log10(np.max(self.heap))) + 1)\n\n return max_digs", "def fn(i):\n if i < 0: return 0\n return max(fn(i-1), fn(i-2) + nums[i])", "def explore(self, nums, left, right, target):\n diff = sys.maxsize\n\n while left < right:\n cur_sum = nums[left] + nums[right]\n if cur_sum == target:\n return 0\n \n if abs(target - cur_sum) < abs(diff):\n diff = target - cur_sum\n if cur_sum < target:\n left += 1\n else:\n right -= 1\n return diff", "def recursive(a: tuple, i: int, j: int):\n if i == j: # if right bound == left bound then sequence is one element\n return 1, (a[i], )\n\n _pre_count, _pre_seq = recursive(a, i-1, j) # check if a[i] is continuation of previous max sequence\n if a[i] >= _pre_seq[-1]:\n return _pre_count + 1, _pre_seq + (a[i], )\n else:\n max_count = 1\n max_seq = (a[i],)\n for k in range(j, i): # if it's false - check all sequences between i 
and j\n tmp_count, tmp_seq = recursive(a, i-1, k) # from k to i-1\n if tmp_count+1 > max_count and a[i] >= tmp_seq[-1]: # find maximum\n max_count = tmp_count + 1\n max_seq = tmp_seq + (a[i], )\n\n for k in range(i):\n tmp_count, tmp_seq = recursive(a, k, 0) # and between 0 and i\n if tmp_count+1 > max_count and a[i] >= tmp_seq[-1]: # from 0 to k\n max_count = tmp_count + 1\n max_seq = tmp_seq + (a[i], )\n\n return (max_count, max_seq) if max_count > _pre_count else (_pre_count, _pre_seq)", "def upper_bound(power=5):\n total_digits = 1\n while True:\n max_sum = 9**power * total_digits\n if len(str(max_sum)) < total_digits:\n return max_sum\n total_digits += 1", "def find_windowsize(data):\n time = [i[0] for i in data]\n voltage = [i[1] for i in data]\n\n if len(time) != len(voltage):\n total_index_data = len(voltage)\n else:\n total_index_data = min(len(time), len(voltage))\n\n windowsize = round(total_index_data / 6)\n\n return windowsize", "def maxSubArray(self, nums) -> int:\n maxsub = -10000000\n i = 0\n solutions = self.create_matrix(nums)\n if len(nums)==1:\n return nums[0]\n while i <= len(nums) -1:\n j=i\n while j <= len(nums)-1:\n sum_ij = solutions[i][j]\n if sum_ij > maxsub:\n maxsub = sum_ij\n j+=1\n i +=1\n return maxsub", "def get_max(self, root):\n if not root:\n return 0, 0, 0\n \n inc, dec = 1, 1\n\n li, ld, lt = self.get_max(root.left)\n ri, rd, rt = self.get_max(root.right)\n\n if root.left:\n if li and root.left.val - root.val == 1:\n inc = li + 1\n\n if ld and root.left.val - root.val == -1:\n dec = ld + 1\n\n if root.right:\n if ri and root.right.val - root.val == 1:\n inc = max(inc, ri + 1)\n\n if rd and root.right.val - root.val == -1:\n dec = max(dec, rd + 1)\n\n return inc, dec, max(inc + dec - 1, lt, rt)", "def slice_sample_bounded_max(N, burn, logdist, xx, widths, step_out, max_attempts, bounds):\n xx = copy.deepcopy(xx)\n D = len(xx)\n samples = []\n if (not isinstance(widths, list)) or len(widths) == 1:\n widths = np.ones(D) * widths\n\n log_Px = logdist(xx)\n\n for ii in range(N + burn):\n log_uprime = np.log(random.random()) + log_Px\n for dd in random.sample(range(D), D):\n x_l = copy.deepcopy(xx)\n x_r = copy.deepcopy(xx)\n xprime = copy.deepcopy(xx)\n\n # Create a horizontal interval (x_l, x_r) enclosing xx\n rr = random.random()\n x_l[dd] = max(xx[dd] - rr*widths[dd], bounds[dd][0])\n x_r[dd] = min(xx[dd] + (1-rr)*widths[dd], bounds[dd][1])\n\n if step_out:\n while logdist(x_l) > log_uprime and x_l[dd] > bounds[dd][0]:\n\n x_l[dd] = max(x_l[dd] - widths[dd], bounds[dd][0])\n while logdist(x_r) > log_uprime and x_r[dd] < bounds[dd][1]:\n x_r[dd] = min(x_r[dd] + widths[dd], bounds[dd][1])\n\n # Propose xprimes and shrink interval until good one found\n zz = 0\n num_attempts = 0\n while True:\n zz += 1\n # print(x_l)\n xprime[dd] = random.random()*(x_r[dd] - x_l[dd]) + x_l[dd]\n \n log_Px = logdist(xx)\n if log_Px > log_uprime:\n xx[dd] = xprime[dd]\n break\n else:\n # Shrink in\n num_attempts += 1\n if num_attempts >= max_attempts:\n # print('Failed to find something')\n break\n elif xprime[dd] > xx[dd]:\n x_r[dd] = xprime[dd]\n elif xprime[dd] < xx[dd]:\n x_l[dd] = xprime[dd]\n else:\n raise Exception('Slice sampling failed to find an acceptable point')\n # Record samples\n if ii >= burn:\n samples.append(copy.deepcopy(xx))\n return samples", "def local_extrema_seuil(sweep, seuil1, seuil2, span) :\n\n #temporary elements\n temp_min = 0\n temp_min_arg = -1\n temp_max = 0\n temp_max_arg = -1\n\n #This holds the result\n up_i = 0\n down_i = 0\n up = 
array([])\n arg_up = array([])\n down = array([])\n arg_down = array([])\n #init the writing bolean\n min_write = True\n max_write = True\n sweep_size = size(sweep)\n\n for i in range(sweep_size) :\n value = sweep[i]\n #check if we are below the threshold, if yes, next point\n if abs(value) < seuil1 :\n max_write = True\n min_write = True\n if temp_max_arg != -1 :\n #Reshape the array\n s_up = array(shape(up))\n s_up[0] = s_up[0] + 1\n s_up = tuple(s_up)\n up = resize(up,s_up)\n arg_up = resize(arg_up,s_up)\n #Assign values\n up[up_i] = temp_max\n arg_up[up_i] = temp_max_arg\n up_i = up_i + 1\n temp_max = 0\n temp_max_arg = -1\n\n if temp_min_arg != -1 :\n #Reshape the array\n s_down = array(shape(down))\n s_down[0] = s_down[0] + 1\n s_down = tuple(s_down)\n down = resize(down,s_down)\n arg_down = resize(arg_down,s_down)\n #Assign values\n down[down_i] = temp_min\n arg_down[down_i] = temp_min_arg\n down_i = down_i + 1\n temp_min = 0\n temp_min_arg = -1\n\n continue\n\n\n #if we are in beetween the two threshold\n if abs(value) > seuil1 and abs(value) < seuil2 :\n if value < temp_min and min_write :\n temp_min = value\n temp_min_arg = i\n if value > temp_max and max_write:\n temp_max = value\n temp_max_arg = i\n\n #if we are above the threshold\n if abs(value) > seuil2 :\n #Make sure than min and max cannot be accessed before going back below seuil1\n if value < - seuil2 :\n min_write = False\n if(temp_min_arg + span > i) :\n temp_min = 0\n temp_min_arg = -1\n if value > seuil2 :\n max_write = False\n if(temp_max_arg + span > i) :\n temp_max = 0\n temp_max_arg = -1\n\n return [down, arg_down, up, arg_up]", "def problem_a(n, a):\r\n largest = max(a)\r\n i = 0\r\n old_streak_begin = 0\r\n old_streak_end = 0\r\n while i < n:\r\n if a[i] == largest:\r\n streak_begin = i\r\n while i < n and a[i] == largest:\r\n i += 1\r\n streak_end = i - 1\r\n i -= 1 # adjust index back\r\n if (streak_end - streak_begin) < (old_streak_end - old_streak_begin):\r\n streak_begin = old_streak_begin\r\n streak_end = old_streak_end\r\n else:\r\n old_streak_begin = streak_begin\r\n old_streak_end = streak_end\r\n \r\n i += 1\r\n \r\n print(streak_end - streak_begin + 1)", "def len_square(bound):\n\treturn (8 - 2 * bound)", "def find_max(A):\n\n def rmax(lo, hi):\n \"\"\"Use recursion to find maximum value in A[lo:hi+1].\"\"\"\n if lo == hi: return A[lo]\n\n mid = (lo+hi) // 2\n L = rmax(lo, mid)\n R = rmax(mid+1, hi)\n return max(L, R)\n\n return rmax(0, len(A)-1)", "def max_scoring_num_rolls(dice=six_sided, num_samples=1000):\n # BEGIN PROBLEM 9\n \"*** YOUR CODE HERE ***\"\n k, max_value, max_num = 1, 0, 0\n roll = make_averaged(roll_dice, num_samples)\n while k <= 10:\n current_value = roll(k, dice)\n #print('k: ' + str(k) + ' current_value: ' + str(current_value))\n if current_value > max_value:\n max_value, max_num = current_value, k\n k += 1\n return max_num\n # END PROBLEM 9", "def maxArea_fast(height):\n i, j = 0, len(height) - 1\n max_area = 0\n while j > i:\n a = min(height[i], height[j]) * (j - i)\n if a > max_area:\n max_area = a\n # only move the pointer with smaller value\n # min(h[i], h[j]) might increase or decrease or stay the same (but we can be sure to have the max if decrease)\n # if move the pointer with larger value, which is incorrect\n # min(h[i], h[j]) might decrease or stay the same (no chance of increase)\n # even there are lager values in h, but the area is still determined by the unmoved smaller pointer,\n # therefore the effective height won't change, but the width has decreased due to 
moving\n\n # Compare to brute force, checking all the pairs:\n # Suppose h[i] < h[j], then i += 1\n # skipped the pairs: (i, j-1), (i, j-2) ..., (i, i+1); all of which have areas smaller then (i, j)\n # therefore can be safely skipped\n if height[i] >= height[j]:\n j -= 1\n else:\n i += 1\n return max_area", "def straight_size(dice_list):\n # generates list of count dice values 1-6 in order. Any count of 0 breaks the straight\n roll_counts = [str(dice_list.count(value)) for value in range(1, 7)]\n return len(max(''.join(roll_counts).split('0'), key=len))", "def find_largest_adjacent_difference(nums):\n pass", "def cal_length (datalist):\n sortedlist = sorted(datalist, key = lambda x : len(x[1]))\n maxle = len(sortedlist[-1][1])\n minle = len(sortedlist[0][1])\n average = sum([len(x[2]) for x in sortedlist])/len(sortedlist)\n return minle, maxle, average", "def td(lengths: List[int], L: int) -> int:\n N = len(lengths)\n dp = [[None for _ in range(L+1)] for _ in range(N)]\n def fn(i: int, l: int, c: int) -> int:\n if l == 0: return c\n if i >= N or l < 0: return 0\n if dp[i][l] == None: dp[i][l] = max(fn(i, l-lengths[i], c+1), fn(i+1, l, c))\n return dp[i][l]\n return fn(0, L, 0)", "def get_sequence_lengths( widths ): \n seq_len = (widths - 2) / 8\n return seq_len" ]
[ "0.6181188", "0.61154115", "0.61118263", "0.58728755", "0.5702233", "0.5701698", "0.5680999", "0.55414397", "0.553379", "0.5523938", "0.55195713", "0.5513929", "0.5513566", "0.550096", "0.54771763", "0.5460938", "0.5459851", "0.5444232", "0.5441669", "0.54264915", "0.54041374", "0.5401712", "0.5398094", "0.536509", "0.5361847", "0.5356479", "0.5354691", "0.5353158", "0.5332718", "0.5330513", "0.53263724", "0.53145206", "0.52936345", "0.5289444", "0.52839285", "0.52755576", "0.5273689", "0.5271237", "0.52640486", "0.5255876", "0.525289", "0.5246684", "0.52393025", "0.52375484", "0.5234538", "0.5232648", "0.52219546", "0.52173364", "0.521649", "0.5208721", "0.5203963", "0.5195334", "0.5191408", "0.5188365", "0.51880455", "0.51764184", "0.51677525", "0.5161699", "0.5140909", "0.51319563", "0.5117309", "0.5110031", "0.5101938", "0.5101938", "0.5101601", "0.5101346", "0.50997514", "0.50976384", "0.5089849", "0.50868475", "0.5086364", "0.50851524", "0.50810635", "0.50800556", "0.5079493", "0.5072383", "0.50660014", "0.5062653", "0.5060753", "0.5059856", "0.5058581", "0.50580174", "0.50570196", "0.50569624", "0.5052965", "0.5049784", "0.50463426", "0.5040699", "0.5039099", "0.5038307", "0.503387", "0.50209326", "0.5019745", "0.50186026", "0.5013401", "0.5011616", "0.50101256", "0.5009733", "0.50071627", "0.500689" ]
0.8285866
0
A little function to make graphing less of a pain. Creates a plot with titles and axis labels. Adds a new line to a blank figure and labels it.
def plothusly(ax, x, y, *, xtitle='', ytitle='', datalabel='', title='',
              linestyle='-', marker=''):
    ax.set_xlabel(xtitle)
    ax.set_ylabel(ytitle)
    ax.set_title(title)
    out = ax.plot(x, y, zorder=1, label=datalabel,
                  linestyle=linestyle, marker=marker)
    return out
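A minimal usage sketch (editor's illustration, not part of the dataset row), assuming the `plothusly` definition above is in scope; the sine/cosine data, labels, and styling here are arbitrary choices for demonstration.

# Editor's illustration only; assumes plothusly (above), numpy and matplotlib are available.
import numpy as np
import matplotlib.pyplot as plt

t = np.linspace(0, 2 * np.pi, 200)
fig, ax = plt.subplots()
plothusly(ax, t, np.sin(t), xtitle='t', ytitle='amplitude',
          datalabel='sin(t)', title='Demo plot')
plothusly(ax, t, np.cos(t), datalabel='cos(t)', linestyle='--')
ax.legend()
plt.show()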
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def newGraph(self, xlab, ylab):\r\n if (not self.doMPL):\r\n newGraph = Gnuplot(debug=0)\r\n\t #newGraph('set data style linespoints')\r\n\t newGraph.set_label('xlabel', xlab)\r\n\t newGraph.set_label('ylabel', ylab)\r\n return newGraph\r\n else:\r\n self.mplFigCount = self.mplFigCount + 1\r\n if (self.graphLabelsX.__len__() <= self.mplFigCount):\r\n gg = self.graphLabelsX.__len__()\r\n while (gg <= self.mplFigCount):\r\n self.graphLabelsX.append('')\r\n gg = gg+1\r\n if (self.graphLabelsY.__len__() <= self.mplFigCount):\r\n gg = self.graphLabelsY.__len__()\r\n while (gg <= self.mplFigCount):\r\n self.graphLabelsY.append('')\r\n gg = gg+1\r\n self.graphLabelsX[self.mplFigCount] = xlab\r\n self.graphLabelsY[self.mplFigCount] = ylab\r\n figure(self.mplFigCount, (6,4))\r\n xlabel(self.graphLabelsX[self.mplFigCount])\r\n ylabel(self.graphLabelsY[self.mplFigCount])\r\n return self.mplFigCount", "def line_plot(self, x, y, labels, ax=None):\n if ax is None:\n fig, ax = plt.subplots()\n else:\n fig = None\n ax.plot(x, y, '--o', label=labels[0])\n ax.set_xlabel(labels[1])\n ax.set_ylabel(labels[2])\n ax.set_title(labels[3])\n return fig, ax", "def plot_line_graph(X, Y, xlabel, ylabel, title, fname):\r\n plt.figure(figsize=(20, 21))\r\n plt.plot(X, Y)\r\n plt.title(title)\r\n plt.xlabel(xlabel)\r\n plt.ylabel(ylabel)\r\n plt.xticks(X)\r\n plt.savefig(fname, bbox_inches='tight')\r\n plt.close()", "def plot1D(x, y, title=\"Title\", xlab=\"x-axis\", ylab=\"y-axis\"):\n plt.plot(x, y, linewidth=2)\n plt.title(title)\n plt.xlabel(xlab)\n plt.ylabel(ylab)", "def add_figure1(self,x,y,index=1,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(x,y)", "def single_plot(x_data, y_data, title, x_label, y_label):\n plt.figure(1, (18, 8)) # something, plot size\n plt.subplot(111)\n plt.plot(x_data, y_data)\n plt.title(title)\n plt.xlabel(x_label, fontsize=12)\n plt.ylabel(y_label, fontsize=12)\n # plt.legend(['Train', 'Test'], loc='upper left')\n plt.show()", "def add_graph(self, x, y, label):\n pyplot.plot(x[:len(y)], y[:len(x)], label=label)", "def make_1d_graph(self, xvals, yvals, xlabel, xunits,\n ylabel, yunits, nicexlabel=True,\n niceylabel=True ,xlims='edges', ylims=None,\n linestyle='-', color='darkblue', alpha=0.9,\n xlabelsize='18', ylabelsize='18', marker=None,\n plotlabel=None, subplotnum=None, linewidth=1):\n import matplotlib.pyplot as plt\n plt.rcParams['text.usetex'] = True\n plt.plot(\n xvals,\n yvals,\n linestyle=linestyle,\n color=color,\n alpha=alpha,\n marker=marker,\n label=plotlabel,\n linewidth=linewidth\n )\n if xlims is not None:\n if xlims == 'edges':\n plt.xlim(xvals[0], xvals[-1])\n else:\n plt.xlim(xlims)\n if xlabel is not None:\n if nicexlabel:\n xlabel = self.make_label(xlabel, xunits)\n plt.xlabel(xlabel, fontsize=xlabelsize)\n if ylims is not None:\n if ylims[0] == ylims[1]:\n plt.ylim(ylims[0]-0.1, ylims[0]+0.1)\n else:\n plt.ylim(ylims)\n if ylabel is not None:\n if subplotnum is not None:\n if (subplotnum-1)%4 == 0:\n if niceylabel:\n ylabel = self.make_label(ylabel, yunits)\n plt.ylabel(ylabel, fontsize=ylabelsize)\n else:\n if niceylabel:\n ylabel = self.make_label(ylabel, yunits)\n plt.ylabel(ylabel, fontsize=ylabelsize)", "def makeplot(x, ys, labels, xlabel, ylabel, plainlines = False, figure = None,\\\r\n filename = None, sigmas = None, logy = False, logx = False):\r\n \r\n #initialise a pyplot figure if needed\r\n if figure is 
None:\r\n f = plt.figure()\r\n #add axis\r\n a = f.add_subplot(111)\r\n else:\r\n a = f.axes[0]\r\n \r\n #styles for plotted data\r\n styles = ['rx-','yx-','gx-','mx-','rx-']\r\n formats = ['rx','yx','gx','mx','rx']\r\n \r\n #plain line styles\r\n if plainlines:\r\n styles = ['k-','r-','g-','y-','m-']\r\n \r\n #plot . . .\r\n for i in range(len(ys)):\r\n a.plot(x, ys[i], styles[i], label = labels[i])\r\n if sigmas is not None:\r\n for i in range(len(ys)):\r\n a.errorbar(x, ys[i],yerr = sigmas[i], fmt = formats[i], elinewidth = 1,\\\r\n ecolor = 'black', capsize = 2) \r\n if logx:\r\n a.set_xscale('log')\r\n if logy:\r\n a.set_yscale('log')\r\n \r\n #set labels\r\n a.set_xlabel(xlabel)\r\n a.set_ylabel(ylabel)\r\n \r\n #add legend\r\n a.legend(loc = 'best')\r\n \r\n #save\r\n if filename is not None:\r\n f.savefig(filename+\".svg\")\r\n \r\n return f", "def make_line_plot(data, x_label=\"Data\", y_label=\"Data Point\"):\n\n y = data\n x = range(len(y))\n\n plt.xlabel(x_label)\n plt.ylabel(y_label)\n plt.plot(x, y)\n plt.show()", "def multi_line_plot(x_data, y_data, title, x_label, y_label):\n plt.figure(1, (18, 8)) # something, plot size\n plt.subplot(111)\n legend = []\n for i in range(len(x_data)):\n plt.plot(x_data[i], y_data[i])\n legend.append((i+1))\n plt.title(title)\n plt.xlabel(x_label, fontsize=12)\n plt.ylabel(y_label, fontsize=12)\n plt.legend(legend, loc='upper left')\n plt.show()", "def line_graph():\n fig = plt.figure()\n ax = plt.axes()\n x = [1, 2, 3]\n y = [5, 6, 7]\n plt.plot(x, y)\n plt.show()", "def line_graph():\r\n #create the data in an array\r\n xval = np.arange(0,6,(np.pi*(1./10)))\r\n yval = np.cos(xval)\r\n data = np.array([xval,yval])\r\n data = data.transpose()\r\n y = np.arange(-1,1.5,0.5)\r\n #convert the data to a pd DataFrame\r\n df = pd.DataFrame(data,columns=[\"x\",\"y\"])\r\n #tell the DataFrame to plot the data\r\n ax = df.plot(x=\"x\",y=\"y\",label=\"0\",ylim=(-1,1),yticks=y,title=\"Cosine Approximated at Intervals of 1/(10pi)\")\r\n ax.set(xlabel=\"\",ylabel=\"\")\r\n\t#get the figure from the axes and save it\r\n fig = ax.get_figure()\r\n fig.savefig(\"my_line_graph.png\")", "def newplot(*args, **kwargs):\n\n if 'linewidth' and 'lw' not in kwargs.keys():\n kwargs['linewidth'] = 2\n\n plt.figure(figsize=FIGURE_SIZE, dpi=FIGURE_DPI)\n plt.plot(*args, **kwargs)\n\n plt.setp(plt.gca().spines.values(), linewidth=2)\n plt.xticks(fontsize=25, fontname='Times New Roman')\n plt.yticks(fontsize=25, fontname='Times New Roman')\n plt.ticklabel_format(useOffset=False)\n plt.ticklabel_format(style='sci', scilimits=(-3, 3))", "def simple_line():\n\n # Make two datasets\n dataset_a = DataSet(sine)\n dataset_b = DataSet(cosine)\n\n # Make plot and add data\n plot = Plot()\n plot.set_text()\n plot.add_dataset(dataset_a)\n plot.add_dataset(dataset_b)\n\n # Plot graph and display\n plot.plot()\n plot.save(name='./figures/2d_simple_line',fmt='png')\n plot.display()", "def generate_line_plot(\n self,\n y_array: np.array,\n x_axis_title: str,\n y_axis_title: str,\n graph_title: str,\n y_axis_unit: str = None,\n x_array: np.array = None,\n ) -> go.Figure:\n\n if x_array is None:\n x_array = np.arange(len(y_array))\n\n layout = go.Layout(\n title=go.layout.Title(text=str(graph_title).title()),\n xaxis=go.layout.XAxis(title=x_axis_title),\n yaxis=go.layout.YAxis(title=y_axis_title),\n )\n\n fig = go.Figure(layout=layout)\n fig.add_trace(\n go.Scatter(\n x=x_array, y=y_array, mode=\"lines\", name=str(graph_title).title()\n ),\n )\n\n if y_axis_unit is not None:\n 
fig.update_layout(yaxis_tickformat=y_axis_unit)\n\n return fig", "def graph(x, y, xlabel = \"\", ylabel = \"\", legend = \"\", color = \"\"):\n plt.plot(x, y, color, label = legend)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.legend(loc = 'best')\n plt.grid()", "def plotone(x,y,xlabel,ylabel,filename):\n fig=plt.figure()\n ax = fig.add_subplot(111)\n ax.plot(x,y,linewidth=2.0)\n plt.ylabel(ylabel)\n plt.xlabel(xlabel)\n fig.savefig(filename)", "def make_plot(counts):\n # YOUR CODE HERE\n posX=[]\n posY=[]\n negX=[]\n negY=[]\n\t\n count=1\n for i in counts:\n\tif len(i)!=0:\t\n\t\tposX.append(count)\n\t posY.append(i[0][1])\n\t\tnegX.append(count)\n\t negY.append(i[1][1])\n\t count=count+1\n\t\n line1, =plt.plot(posX,posY,marker=\"o\",label=\"Positive\",color=\"g\")\n line2, =plt.plot(negX,negY,marker=\"o\",label=\"Negative\",color=\"r\")\n plt.xlabel('Time Step')\n plt.ylabel('Word Count')\n plt.title('Basic Twitter Sentiment Analytics')\n plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)\n plt.legend(handler_map={line1: HandlerLine2D(numpoints=2)})\n plt.show()", "def makeGraph(xval, yval, title = \"GRAPH\", xlabel=\"X AXIS\", ylabel=\"Y AXIS\", axisRng=None, style='bo', clear=False):\n plt.plot(xval, yval, style)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.title(title)\n if axisRng is not None:\n plt.axis(axisRng)\n plt.show()\n if clear:\n plt.close()", "def make_1d_plot(data, fig_name, Label, Title, x_label, y_label, customDPI):\n plt.figure(1, figsize=(10.0, 6.0))\n plt.subplots_adjust(left=None, bottom=0, right=None, top=0.86, wspace=None, hspace=None)\n plt.plot(data, label=Label)\n plt.title(Title, fontsize=10, fontweight='bold', style='italic', y=1.025)\n plt.legend(loc = 'upper right', fontsize=10)\n plt.ylabel(x_label, fontsize=10, fontweight='bold')\n plt.xlabel(y_label, fontsize=10, fontweight='bold')\n plt.yticks(fontsize=8, fontweight='bold')\n plt.xticks(fontsize=8, fontweight='bold')\n pylab.savefig(fig_name, bbox_inches='tight', dpi=customDPI)\n plt.close('all')", "def linePlot(self):\n clf()\n plot(self.x,self.averages)\n xlabel('X Label (units)')\n ylabel('Y Label (units)')\n savefig('line.png')", "def _create_line_plot(experiment_param, nus, norms, ax, subtitle):\n for name in sorted(norms):\n errors = [experiment_param[nu][name] for nu in nus]\n ax.plot(nus, errors, label=name)\n\n ax.legend()\n ax.set_xticks(nus[1::2])\n ax.set_xticklabels(nus[1::2])\n ax.set_ylabel('Average error (%)', fontsize=15)\n ax.set_ylim([0,5])\n ax.set_title('Estimating {}\\n'.format(subtitle), fontsize=15)", "def make_plot(x,y):", "def create_figure(self) -> None:\n plt.ion()\n self.fig = plt.figure(1)\n self.axis = self.fig.add_subplot(111, xlim=(0, 1), ylim=(0, 1))\n self.axis.grid(True)\n plt.xticks(np.linspace(0, 1, self._param[\"n_v\"] + 1))\n plt.yticks(np.linspace(0, 1, self._param[\"n_v\"] + 1))\n a_plt, = self.axis.plot([], [], 'bx', markersize=5)\n l_plt, = self.axis.plot([], [], 'r.', markersize=15)\n self.plots = [a_plt, l_plt]", "def draw_line_plot(fig, x, y, labels):\r\n\r\n #Convert times to a displayable format\r\n (x_times, hour_mode) = times_to_axis(x)\r\n\r\n\r\n #Draw grid lines\r\n fig.grid(zorder=0)\r\n\r\n #Draw plot\r\n fig.plot(x_times, y, \"-\", label=None, zorder=2)\r\n \r\n \r\n #If necessary, enable processing of \"datetime\" objects on the x-axis\r\n if not hour_mode:\r\n fig.xaxis_date()\r\n\r\n\r\n #Label and style plot\r\n set_axis_labels(fig, *labels)\r\n style_x_labels(fig)", "def liveplot(x, y, xlim, ylim, title):\n plt.plot(x,y,'b.')\n 
plt.xlim(xlim)\n plt.ylim(ylim)\n plt.xlabel('North-South Axis')\n plt.ylabel('East-West Axis')\n plt.title(title)\n plt.show()", "def drawit(fignum=1,xlabel=\" \",ylabel=\" \",xvar=None,\n yvar=None,title=\" \",ylimit=None,\n xlimit=None):\n fig=plt.figure(fignum)\n fig.clf()\n ax1=fig.add_subplot(111)\n line=ax1.plot(xvar,yvar)\n ax1.set_xlim(xlimit)\n ax1.set_ylim(ylimit)\n ax1.set_title(title)\n ax1.set_xlabel(xlabel)\n ax1.set_ylabel(ylabel)\n fig.tight_layout()\n fig.canvas.draw()\n return fig,ax1,line[0]", "def showPlot1(title, x_label, y_label):\n num_robot_range = range(1, 11)\n times1 = []\n times2 = []\n for num_robots in num_robot_range:\n print \"Plotting\", num_robots, \"robots...\"\n times1.append(runSimulation(num_robots, 1.0, 20, 20, 0.8, 20, StandardRobot))\n times2.append(runSimulation(num_robots, 1.0, 20, 20, 0.8, 20, RandomWalkRobot))\n pylab.plot(num_robot_range, times1)\n pylab.plot(num_robot_range, times2)\n pylab.title(title)\n pylab.legend(('StandardRobot', 'RandomWalkRobot'))\n pylab.xlabel(x_label)\n pylab.ylabel(y_label)\n pylab.show()", "def show_line(dict, xlabel=\"x\", ylabel=\"y\", title=\"title\"):\n plt.clf()\n plt.cla()\n plt.plot(list(dict.keys()), list(dict.values()), alpha=0.4, color = 'g')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.title(title)\n plt.show()", "def make_plot(counts):\n cn1 = []\n cn2 = []\n time = []\n\n for x in counts:\n y1 = x[0]\n cn1.append(y1[1])\n y2 = x[1]\n cn2.append(y2[1])\n\n for i in range(len(counts)):\n time.append(i)\n\n posLine = plt.plot(time, cn1,'bo-', label='Positive')\n negLine = plt.plot(time, cn2,'go-', label='Negative')\n plt.axis([0, len(counts), 0, max(max(cn1), max(cn2))+50])\n plt.xlabel('Time step')\n plt.ylabel('Word count')\n plt.legend(loc = 'upper left')\n plt.show()\n plt.savefig(\"plot.png\", format=\"png\")", "def general_plotter(\n plots,\n title:str=None,\n xlabel:str=None,\n xlim:tuple=None,\n xinvert:bool=False,\n xscale = None,\n ylabel:str=None,\n ylim:tuple=None,\n yinvert:bool=False,\n yscale:str = None,\n grid:bool=False,\n legend=False,\n fname:str=None,\n dpi:int=200,\n figsize:tuple=None, #todo documentation\n tightlayout:bool=False,\n show:bool=True,\n usetex:bool=False,\n framelines:str=None,\n axvline:list=[],\n axhline:list=[]\n ):\n from matplotlib import pyplot as plt\n\n if figsize!=None:\n fig = plt.figure(figsize=figsize)\n else:\n fig=plt.figure()\n plt.rc('text', usetex=usetex)\n for plot in plots:\n if len(plot) == 4:\n xs,ys,label,linestyle = plot\n if \"style\" in linestyle:\n style = linestyle[\"style\"]\n del linestyle[\"style\"]\n plt.plot(xs, ys, style, label=label, **linestyle)\n else:\n plt.plot(xs, ys, label=label, **linestyle)\n elif len(plot) == 3:\n xs,ys,label = plot\n plt.plot(xs,ys,label=label)\n else:\n print(\"You passed too many values for a plot. 
There can be either 3 or 4.\")\n return 0\n for vline in axvline:\n if len(vline) == 2:\n plt.axvline(x=vline[0],**vline[1])\n else:\n plt.axvline(x=vline[0])\n for hline in axhline:\n plt.axhline(y=hline[0],**hline[1])\n if title!=None:\n plt.title(title)\n if xlabel !=None:\n plt.xlabel(xlabel)\n if xlim!=None:\n plt.xlim(xlim)\n if xinvert:\n plt.gca().invert_xaxis()\n if xscale != None:\n plt.xscale(xscale)\n if ylabel!=None:\n plt.ylabel(ylabel)\n if ylim != None:\n plt.ylim(ylim)\n if yinvert:\n plt.gca().invert_yaxis()\n if yscale != None:\n plt.yscale(yscale)\n if grid:\n plt.grid()\n #setup legend\n if type(legend)==int:\n plt.legend(loc=legend)\n else:\n if legend:\n plt.legend(loc=0)\n # draw framelines\n if framelines != None:\n ax = plt.gca()\n if \"r\" not in framelines:\n ax.spines[\"right\"].set_visible(False)\n if \"l\" not in framelines:\n ax.spines[\"left\"].set_visible(False)\n if \"t\" not in framelines:\n ax.spines[\"top\"].set_visible(False)\n if \"b\" not in framelines:\n ax.spines[\"bottom\"].set_visible(False)\n\n if tightlayout == True:\n fig.tight_layout()\n #save the figure with fname\n if fname!=None:\n plt.savefig(fname)\n else:\n if not show:\n print(\"Why do you want to create a graph that you don't save or show.\\nThis is utterly useless\")\n if show:\n plt.show()\n plt.close()\n return 1", "def generate_2D_plot(x, y, labels_dict, file_title, plot_title):\n fig = plt.figure()\n plt.plot(x, y)\n\n if labels_dict:\n plt.xlabel(labels_dict[\"x\"])\n plt.ylabel(labels_dict[\"y\"])\n if plot_title:\n plt.title(plot_title)\n\n plt.savefig(file_title)", "def plot_lines(data, title, xlabel, ylabel, labels=None, filename=None):\n ### Check that the data is a list\n if not isinstance(data, list):\n msg = \"data must be a list, not {0}\".format(type(data).__name__)\n raise TypeError(msg)\n\n ### Create a new figure\n fig = pylab.figure()\n\n ### Plot the data\n if labels:\n mylabels = labels[:]\n for i in range(len(data)-len(labels)):\n mylabels.append(\"\")\n for d, l in zip(data, mylabels):\n _plot_dict_line(d, l)\n # Add legend\n pylab.legend(loc='best')\n gca = pylab.gca()\n legend = gca.get_legend()\n pylab.setp(legend.get_texts(), fontsize='medium')\n else:\n for d in data:\n _plot_dict_line(d)\n\n ### Set the lower y limit to 0 or the lowest number in the values\n mins = [min(l.values()) for l in data]\n ymin = min(0, min(mins))\n pylab.ylim(ymin=ymin)\n\n ### Label the plot\n pylab.title(title)\n pylab.xlabel(xlabel)\n pylab.ylabel(ylabel)\n\n ### Draw grid lines\n pylab.grid(True)\n\n ### Show the plot\n fig.show()\n\n ### Save to file\n if filename:\n pylab.savefig(filename)", "def plot_one_axes(self, fig_num: int, title: str, y_label: str, labeled: np.ndarray, filled: np.ndarray,\n smoothed: np.ndarray, legend_entries: Dict[str, str]) -> matplotlib.figure.Figure:\n fig = plt.figure(fig_num)\n ax = fig.subplots(1, 1)\n labeled_lines = kine_graph_init(ax, labeled, y_label, self.frame_nums, [{'ls': '', 'marker': 'o', 'ms': 2,\n 'fillstyle': 'none', 'mew': 0.5}] * 3)\n ax.set_prop_cycle(None)\n filled_lines = kine_graph_add(ax, filled, self.frame_nums, [{'ls': '-', 'lw': 0.75}] * 3)\n ax.set_prop_cycle(None)\n smoothed_lines = kine_graph_add(ax, smoothed, self.frame_nums, [{'ls': '-'}] * 3)\n plt.tight_layout()\n fig.suptitle(title, x=0.7)\n fig.legend((labeled_lines[0], smoothed_lines[2], filled_lines[1]),\n (legend_entries['labeled'], legend_entries['smoothed'], legend_entries['filled']),\n ncol=2, handlelength=0.75, handletextpad=0.25, columnspacing=0.5, 
loc='upper left')\n make_interactive()\n return fig", "def plot_one_axes(self, fig_num: int, title: str, y_label: str, raw: np.ndarray, smoothed: np.ndarray,\n legend_entries: Sequence[str]) -> matplotlib.figure.Figure:\n fig = plt.figure(fig_num)\n ax = fig.subplots(1, 1)\n raw_lines = kine_graph_init(ax, raw, y_label, self.frame_nums, [{'ls': ':', 'lw': 2}] * 3)\n ax.set_prop_cycle(None)\n smoothed_lines = kine_graph_add(ax, smoothed, self.frame_nums, [{'ls': '-'}] * 3)\n plt.tight_layout()\n fig.suptitle(title, x=0.7)\n legend_text = ('Raw (' + legend_entries[0] + ')', 'Smoothed (' + legend_entries[1] + ')',\n 'Smoothed (' + legend_entries[2] + ')')\n fig.legend((raw_lines[0], smoothed_lines[1], smoothed_lines[2]), legend_text, ncol=3, handlelength=0.75,\n handletextpad=0.25, columnspacing=0.5, loc='lower left')\n make_interactive()\n return fig", "def plot(self, title, series, x, y, setup=None, xlabel='Epochs', ylabel=None):\n hr_min = datetime.datetime.now().strftime(\"%I:%M\")\n timestamp = datetime.datetime.now().strftime(\"%A, %B %d, %Y %I:%M%p\")\n self.vis.text(\n f'<b>LAST UPDATED</b><br>{time}', env=self.env, win='last_updated')\n\n # if setup.expname != 'NoName':\n # title += f\" ({setup.expname})\"\n # if setup.has_suggestion:\n # title += f\" ({setup.sugg_id})\"\n #title += f\" (Phase {setup.phaser.idx}) \"\n\n # if setup.config.sigopt:\n # display_title = f\"{display_title}:{setup.sugg_id}\"\n # if setup.config.mode is not None:\n # display_title += f\" ({setup.config.mode})\"\n\n display_title = f\"{title} ({hr_min})\"\n\n if title in self.plots: # update existing plot\n self.vis.line(\n X=np.array([x]),\n Y=np.array([y]),\n env=self.env,\n win=self.plots[title],\n name=series,\n update='append'\n )\n else: # new plot\n self.plots[title] = self.vis.line(\n X=np.array([x, x]),\n Y=np.array([y, y]),\n env=self.env,\n opts={\n 'legend': [series],\n 'title': display_title,\n 'xlabel': xlabel,\n 'ylabel': ylabel,\n })\n #mlb.gray(\"[plotted to visdom]\")", "def create_plot():\n\n fig, ax = plt.subplots()\n return fig, ax", "def make_plot(name, training_times, observed, all_times, mean, upper_limit, lower_limit):\n pyplot.figure()\n pyplot.plot(training_times, observed, \"b\", label=\"training series\")\n pyplot.plot(all_times, mean, \"r\", label=\"forecast\")\n pyplot.plot(all_times, upper_limit, \"g\", label=\"forecast upper bound\")\n pyplot.plot(all_times, lower_limit, \"g\", label=\"forecast lower bound\")\n pyplot.fill_between(all_times, lower_limit, upper_limit, color=\"grey\",\n alpha=\"0.2\")\n pyplot.axvline(training_times[-1], color=\"k\", linestyle=\"--\")\n pyplot.xlabel(\"time\")\n pyplot.ylabel(\"observations\")\n pyplot.legend(loc=0)\n pyplot.title(name)", "def set_up(self):\n self.h, = self.ax.plot(self.x, lw=2)\n self.ax.set_ylim(0,100)\n self.ax.set_xlim(0,100)\n self.ax.title.set_text(self.config[\"title\"])\n self.ax.set_xlabel(self.config[\"x_label\"])\n self.ax.set_ylabel(self.config[\"y_label\"])", "def plot_data(df, title=\"normalized Stock prices\", ylabel=\"Price\", xlabel=\"Date\" ):\n plt.clf()\n ax = df.plot(title=title, fontsize=12)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n plt.savefig('files/output/'+title+'.png')", "def create_plot_without_error(x, y, x_label, y_label):\n plt.plot(x, y, '.', color=\"#ff0000\", ms=1)\n plt.xlabel(x_label)\n plt.ylabel(y_label)", "def labelling():\n title = input('Please enter the title of output figure: ')\n plt.title(title)\n plt.xlabel(\"Time (s)\")\n plt.ylabel(\"Voltage (V)\")\n plt.legend()\n 
plt.savefig(\"Scope_%s\"%str(round(time.time()))) # Time stamp on file names\n plt.show()", "def add_figure(self,sig,index,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(sig)", "def ploter(self):\n if len(self.dataset[self.first_title]) != 2:\n print('plot is only avilable for two features')\n return\n x_axis = []\n y_axis = []\n for title in self.dataset:\n x_axis.append(self.dataset[title][0])\n y_axis.append(self.dataset[title][1])\n plt.plot(x_axis, y_axis, 'o')\n plt.show()", "def lineplot(filename, title, xlab, ylab, curves):\n lines, names = zip(*curves)\n for line in lines:\n plt.plot(range(len(line)), line)\n plt.xlabel(xlab)\n plt.ylabel(ylab)\n plt.legend(list(names))\n plt.title(title)\n plt.savefig(filename)\n plt.close()", "def createBlankPlot(self):\n\n fig = plt.figure(figsize=(8,6),dpi=80)\n fig.set_facecolor('#ededed')\n \n # Format plot\n ax = plt.subplot(111)\n \n fig.canvas.draw()\n \n return fig, ax", "def plot_xy(x, y, ax=None, xlabel='Energy [keV]', **kwargs):\n\n if not ax:\n new_plot = True\n plt.figure()\n ax = plt.axes()\n else:\n new_plot = False\n\n plt.semilogy(x, y, axes=ax, drawstyle='steps-mid', **kwargs)\n\n if new_plot:\n plt.xlabel(xlabel)\n plt.ylabel('Counts')\n\n if 'label' in kwargs:\n plt.legend()\n plt.show()\n\n return ax", "def line_figure(self, title):\n\n plt.figure()\n counter = 0\n\n # plot the x and y for each house and battery\n for battery in self.grid.batteries:\n x = []\n y = []\n for house in battery.connections:\n x.append(house.x)\n y.append(house.y)\n plt.plot([house.x, battery.x], [house.y, battery.y],\n color=colors[counter], linewidth=.25)\n plt.scatter(x, y, marker='p', color=colors[counter])\n plt.plot(battery.x, battery.y, marker='x', color=colors[counter],\n markersize=10)\n counter += 1\n plt.title(f\"{title}. 
Cost: {self.cost()}\")", "def showPlot1(title, x_label, y_label):\n num_robot_range = range(1, 11)#10 robots.\n times1 = []\n times2 = []\n for num_robots in num_robot_range:\n print \"Plotting\", num_robots, \"robots...\"\n #runSimulation(num_robots, speed, width, height, min_coverage, num_trials, robot_type)\n times1.append(runSimulation(num_robots, 1.0, 20, 20, 0.8, 20, StandardRobot))\n #calculates time to clean 80% of room, 20 trials, 20x20 grid room, speed 1, 10 robots of type StandardRobot\n times2.append(runSimulation(num_robots, 1.0, 20, 20, 0.8, 20, RandomWalkRobot))\n #calculates time to clean 80% of room, 20 trials, 20x20 grid room, speed 1, 10 robots of type RandomWalkRobot\n pylab.plot(num_robot_range, times1)\n #plots results for StandardRobot, x-axes= number of robots, y-axis = avg time taken.\n pylab.plot(num_robot_range, times2)\n #plots results for RandomWalkRobot, x-axes= number of robots, y-axis = avg time taken.\n pylab.title(title)\n pylab.legend(('StandardRobot', 'RandomWalkRobot'))\n pylab.xlabel(x_label)\n pylab.ylabel(y_label)\n pylab.show()", "def plot_data(df, title=\"normalized Stock prices\", ylabel=\"Price\", xlabel=\"Date\"):\n plt.clf()\n ax = df.plot(title=title, fontsize=12)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n plt.savefig('files/output/' + title + '.png')", "def plot_graph(x,y,plot_title = None,plot_props=None,x_props=None,y_props=None,figsize=None):\n import matplotlib.pyplot as plt\n if figsize is not None and ((figsize and type(figsize)) is list or (figsize and type(figsize)) is tuple):\n if len(figsize)==2:\n x_size,y_size = figsize\n fig,ax = plt.subplots(figsize=(x_size,y_size))\n else:\n fig,ax = plt.subplots()\n else:\n fig,ax = plt.subplots()\n if plot_props is not None:\n ax.plot(x,y,plot_props)\n else:\n ax.plot(x,y)\n if plot_title is not None and ((plot_title and type(plot_title)) is dict):\n if 'title' in plot_title.keys():\n if 'fontsize' in plot_title.keys():\n ax.set_title(plot_title['title'],fontsize=plot_title['fontsize'])\n else:\n ax.set_title(plot_title['title'])\n if x_props is not None and ((x_props and type(x_props)) is dict):\n if 'title' in x_props.keys():\n if 'fontsize_title' in x_props.keys():\n ax.set_xlabel(x_props['title'],fontsize=x_props['fontsize_title'])\n else:\n ax.set_xlabel(x_props['title'])\n if 'fontsize_ticks' in x_props.keys():\n for tick in ax.xaxis.get_major_ticks():\n tick.label.set_fontsize(x_props['fontsize_ticks']) \n # specify integer or one of preset strings, e.g.\n #tick.label.set_fontsize('x-small') \n if 'ticks_orientation' in x_props.keys():\n tick.label.set_rotation(x_props['ticks_orientation'])\n if y_props is not None and ((y_props and type(y_props)) is dict):\n if 'title' in y_props.keys():\n if 'fontsize_title' in y_props.keys():\n ax.set_ylabel(y_props['title'],fontsize=y_props['fontsize_title'])\n else:\n ax.set_ylabel(y_props['title'])\n if 'fontsize_ticks' in y_props.keys():\n for tick in ax.yaxis.get_major_ticks():\n tick.label.set_fontsize(y_props['fontsize_ticks']) \n # specify integer or one of preset strings, e.g.\n #tick.label.set_fontsize('x-small') \n if 'ticks_orientation' in y_props.keys():\n tick.label.set_rotation(y_props['ticks_orientation'])\n return fig,ax", "def _plot(self, df, head, title, lines, verbose: bool = False):\n fig = go.Figure(layout=set_layout())\n\n if isinstance(lines, str):\n lines = [lines]\n elif not isinstance(lines, list):\n raise ValueError(\"Only string or list is valid type for lines.\")\n\n for n in lines:\n 
fig.add_trace(self._plot_line(df, head=head, y=n, line_name=n.upper()))\n\n if verbose:\n fig.add_trace(self._plot_stock_data(self._df, head))\n\n fig.update_layout(\n title_text=f\"{title} Chart ({self.stock_code})\",\n xaxis_rangeslider_visible=False,\n )\n fig.show()", "def make_plot(x, y):\n\n\tplt.figure()\n\tstyles = ['b-', 'g-', 'r-', 'm-', 'y-']\n\tfor i in range(5):\n\t\tplt.plot(x, y[i], styles[i])", "def plot():\n pass", "def plot(data,fig,figure_number = 1,xlabel='',ylabel='',title=''):\n fig.add_subplot(1,2,figure_number)\n plt.title(title)\n plt.xlabel(xlabel)\n # only plot the y label if ipython is True, else the plot looks to cramped\n if is_ipython:\n plt.ylabel(ylabel)\n plt.plot(np.array(data),'orange')\n plt.ylim((0,1))\n if not is_ipython:\n plt.pause(0.000001) # pause a bit so that plots are updated", "def make_plot(self):\n self.ax[0].set_ylabel(r'$C_{{\\ell}}^{{\\kappa\\kappa}}$')\n self.ax[1].set_ylabel('$\\mathrm{rel. dev. [\\%]$}')\n self.ax[1].set_xlabel(r'$\\ell$')", "def init_plot(self):\n self.dpi = 100\n self.fig = Figure((5.0, 5.0), dpi = self.dpi)\n\n self.main_plot = self.fig.add_subplot(111)\n self.main_plot.set_axis_bgcolor('black')\n self.main_plot.set_title('Dynamic venous flow view', size = 12)\n\n pylab.setp(self.main_plot.get_xticklabels(), fontsize = 8)\n pylab.setp(self.main_plot.get_yticklabels(), fontsize = 8)\n\n # Plot the data as a green line\n self.plot_data = self.main_plot.plot(\n self.daq.data0,\n linewidth = 1,\n color = (0, 1, 0),\n )[0]\n self.main_plot.grid(True, color='gray')", "def plot_graph(self) -> None:", "def plotGraph(self, y1, y2, title=\"Y1 and Y2\", xLabel=\"X\", yLabel=\"Y\", yOneLegend=\"Y1\", yTwoLegend=\"Y2\", name=None):\n # Clear the canvas\n plt.clf()\n\n # Plot data\n plt.plot(y1, color=\"black\", label=yOneLegend)\n plt.plot(y2, color=\"magenta\", label=yTwoLegend)\n\n plt.xlabel(xLabel)\n plt.ylabel(yLabel)\n plt.title(title)\n plt.legend(loc=\"upper left\")\n\n if name:\n plt.savefig(\"plots/\"+name, bbox_inches=\"tight\")\n return\n plt.savefig(\"plots/xyplot.png\", bbox_inches=\"tight\")", "def create_line_graph(plot_df, title=\"\", x_title=\"\", y_title=\"\"):\n fig_data = [\n go.Scatter(\n x=plot_df.iloc[:, 0].astype(str),\n y=plot_df.iloc[:, 1],\n text=plot_df.iloc[:, 1],\n mode=\"lines\",\n line={\"width\": 7, \"color\": color_palette[0]},\n hoverinfo=\"x+y\",\n )\n ]\n\n fig_layout = build_scatter_layout(\n title,\n plot_df.iloc[:, 1].min() * 1.05,\n plot_df.iloc[:, 1].max() * 1.05,\n x_title,\n y_title,\n )\n\n return dict(data=fig_data, layout=fig_layout)", "def __plot(name, x, y):\n import matplotlib.pyplot as plt\n\n plt.plot(x, y)\n plt.xlabel('elements')\n plt.ylabel('time (seconds)')\n plt.savefig(\"{}\".format(name))", "def showPlot2(title, x_label, y_label):\n aspect_ratios = []\n times1 = []\n times2 = []\n for width in [10, 20, 25, 50]:\n height = 300/width\n print \"Plotting cleaning time for a room of width:\", width, \"by height:\", height\n aspect_ratios.append(float(width) / height)\n times1.append(runSimulation(2, 1.0, width, height, 0.8, 200, StandardRobot))\n times2.append(runSimulation(2, 1.0, width, height, 0.8, 200, RandomWalkRobot))\n pylab.plot(aspect_ratios, times1)\n pylab.plot(aspect_ratios, times2)\n pylab.title(title)\n pylab.legend(('StandardRobot', 'RandomWalkRobot'))\n pylab.xlabel(x_label)\n pylab.ylabel(y_label)\n pylab.show()", "def _plot(self, step, rewards, losses):\n plt.figure(figsize=(20, 5))\n plt.subplot(131)\n plt.title('Total Episode Reward')\n 
plt.plot(rewards)\n plt.subplot(132)\n plt.title('MSE Loss')\n plt.plot(losses)\n plt.show()", "def OneImmedSpecterPlot(Xax, Ydat, Label, xmin, xmax, ymin, ymax, XLab, YLab,\n SupTitle, Title, FileName, currentDate, currentTime, Software_version):\n\n plt.figure()\n rc('font', size=8, weight='normal')\n plt.plot(Xax, Ydat, color='b', linestyle='-', linewidth='1.00', label=Label)\n plt.axis([xmin, xmax, ymin, ymax])\n plt.xlabel(XLab)\n plt.ylabel(YLab)\n plt.suptitle(SupTitle, fontsize=9, fontweight='bold')\n plt.title(Title, fontsize=7, x=0.46, y=1.005)\n plt.grid(visible=True, which='both', color='0.00', linestyle='--')\n plt.legend(loc='upper right', fontsize=8)\n plt.text(0.7, 0.03, 'Processed ' + currentDate + ' at ' + currentTime,\n fontsize=5, transform=plt.gcf().transFigure)\n plt.text(0.03, 0.03, 'Software version: ' + Software_version + ' [email protected], IRA NASU',\n fontsize=5, transform=plt.gcf().transFigure)\n pylab.savefig(FileName, bbox_inches='tight', dpi=160)\n plt.close('all')\n return", "def plotting(x,y,name,variable,unit,label_name=\"Simulation\",title=None,mins=False):\n ax = plt.figure(str(name))\n # ax.legend(\"best\")\n\n if mins:\n x/=60 #change time to mins from secs\n plt.xlabel(\"t [min]\")\n else:\n plt.xlabel(\"t [s]\")\n\n if title!= None:\n plt.title(str(title))\n\n plt.plot(x-x[0],y,label=label_name)\n\n\n lab = str(str(variable)+\" \"+\"[\"+unit+\"]\")\n plt.legend(loc='best')\n plt.ylabel(lab)\n plt.grid(True)\n # plt.savefig(title)\n plt.show()\n\n return ax", "def show_plot() :\n logger.info(\"Show plot\")\n pylab.axis('equal')\n pylab.xlabel(\"Longitud\")\n pylab.ylabel(\"Latitud\")\n pylab.grid(True)\n pylab.title(\"Product tiles and product source\")\n pylab.show()", "def _handle_create_line(self, axes, style_args):\n stream_data = self.server.stream_data\n # sample data for initial create\n x_data = numpy.arange(0, 2, 1)\n y_data = numpy.array([0]*2)\n\n line, = axes.plot(x_data, y_data, '-', **style_args)\n # NOTE: client may set 'label'\n line_name = style_args['label']\n if line_name in stream_data:\n # preserve old line data with a new name\n stream_data[line_name+\"_old_\"+timestamp()] = stream_data[line_name]\n # always start with no data for the new line\n stream_data[line_name] = {'y': [], 'line': line, 'last_len': 0}\n if FLAGS.timestamp:\n stream_data[line_name]['x'] = []\n return line_name", "def twoline(file_name, title, line1, line1_label, line2, line2_label, x_labels):\n line_chart = pygal.Line(include_x_axis=True)\n line_chart.title = title\n line_chart.x_labels = x_labels\n line_chart.add(line1_label, line1)\n line_chart.add(line2_label, line2)\n line_chart.render_to_file(file_name)\n return True", "def plot_lines(self):\n self.plot(3)", "def lineplot(self, name: str, headers: [str], data: [[int]], img_title: str):\n total_lines = len(data)\n x = np.arange(0, len(data[0]))\n\n for line in range(total_lines):\n ax = sns.lineplot(x=x, y=data[line], color=C.IRT_COLORS[line], label=headers[line])\n\n ax.legend(loc='best')\n ax.set(xlabel='Steps', ylabel='IRT', title=img_title)\n self.save_plot(name)\n plt.show()", "def create_graph(x, y, title, y_name):\n plot_div = {\n 'data': [\n {'x': x,\n 'y': y,\n 'type': 'bar'},\n ],\n 'layout': {\n 'title': title,\n 'xaxis': {\n 'title': 'nazwa'\n },\n 'yaxis': {\n 'title': y_name\n },\n }\n }\n return plot_div", "def visualize(*x, color=\"red\", linewidth=3, linestyle=\"-\"):\n x = x\n\n # If user doesn't submit submit x, it will create a default DataSet.\n if x == ():\n x = np.linspace(0, 
70, 5000)\n else:\n x = x[0]\n\n plt.plot(x, create(x), linewidth=linewidth, c=color, linestyle=linestyle)", "def test_make_plot_custom(self):\n print(sys._getframe().f_code.co_name)\n try:\n x = np.arange(0,6)*300000\n y = np.arange(0,6)\n pp.make_plot(x,y,plot_type='c',plot_title='test',ylabel='test',xlabel='test',xticks=[0,2,4,6],yticks=[0,2,4,6])\n except Exception as e:\n raise\n plt.close('all')", "def OneValueWithTimePlot(timeline, Ydat1, Label1, xmin, xmax, ymin, ymax, x_auto, y_auto,\n XLabel, YLabel, SupTitle, Title, FileName,\n currentDate, currentTime, Software_version):\n\n rc('font', size=6, weight='bold')\n fig = plt.figure(figsize=(9, 5))\n ax1 = fig.add_subplot(111)\n ax1.plot(Ydat1, linestyle='-', linewidth='1.00', label=Label1)\n ax1.legend(loc='upper right', fontsize=6)\n ax1.grid(visible=True, which='both', color='silver', linestyle='-')\n if x_auto == 0: ax1.set_xlim([xmin, xmax])\n if y_auto == 0: ax1.set_ylim([ymin, ymax])\n ax1.set_ylabel(YLabel, fontsize=6, fontweight='bold')\n ax1.set_title(Title, fontsize=6)\n ax1.set_xlabel(XLabel, fontsize=6, fontweight='bold')\n text = ax1.get_xticks().tolist()\n for i in range(len(text)-1):\n k = int(text[i])\n text[i] = timeline[k] # text[i] = timeline[i] ???\n ax1.set_xticklabels(text, fontsize=6, fontweight='bold')\n fig.subplots_adjust(top=0.92)\n fig.suptitle(SupTitle, fontsize=8, fontweight='bold')\n fig.text(0.79, 0.03, 'Processed ' + currentDate + ' at ' + currentTime,\n fontsize=4, transform=plt.gcf().transFigure)\n fig.text(0.11, 0.03, 'Software version: ' + Software_version + ', [email protected], IRA NASU',\n fontsize=4, transform=plt.gcf().transFigure)\n pylab.savefig(FileName, bbox_inches='tight', dpi=160)\n plt.close('all')\n return", "def generate_history_plot(data, labels_dict, file_title, plot_title):\n fig = plt.figure()\n ax = sns.histplot(data)\n\n if labels_dict:\n ax.set_xlabel(labels_dict[\"x\"])\n if plot_title:\n ax.set_title(plot_title)\n\n plt.savefig(file_title)", "def plottwo(x,y1,y2,y1label,y2label,xlabel,ylabel,filename):\n fig=plt.figure()\n ax = fig.add_subplot(111)\n ax.plot(x,y1,'ro-',linewidth=2.0,label=y1label)\n ax.plot(x,y2,'gs--',linewidth=2.0, label=y2label)\n plt.ylabel(ylabel)\n plt.xlabel(xlabel)\n plt.legend()\n fig.savefig(filename)", "def linechart(self, x = \"Predictor\", y = \"Response\", color = None, template = \"ggplot2\",\n has_title = True, title = None):\n \n \n x_clean, df_clean = clean_varname(self._df, var = x)\n y_clean, df_clean = clean_varname(df_clean, var = y)\n\n if color:\n color_clean, df_clean = clean_varname(df_clean, var = color)\n else:\n color_clean = color\n\n if has_title:\n if not title:\n title = f\"Time Series of {y_clean}\"\n\n fig = px.line(df_clean, x=x_clean, y=y_clean, color = color_clean, template = template, title = title)\n\n return fig", "def make_plot(self, conts, xlabel=None, ylabel=None, legend=None):\n\n # preparations\n assert not ((xlabel is None) and (ylabel is None)), \"Set xlabel and ylabel\"\n\n if legend is None:\n for i, element in enumerate(conts):\n plt.plot(*element.T, linewidth=1.5)\n else:\n for i, element in enumerate(conts):\n plt.plot(*element.T, label=legend[i], linewidth=1.5)\n plt.legend(fontsize=self.medium_fontsize)\n\n plt.grid(True)\n plt.ticklabel_format(style='sci', scilimits=self.scilimits)\n plt.tick_params(labelsize=self.medium_fontsize)\n plt.xlabel(xlabel, fontsize=self.big_fontsize)\n plt.ylabel(ylabel, fontsize=self.big_fontsize)\n plt.show()", "def plot(self, *args, **kwargs):\n\n n = len(args)\n\n 
self.fig, ax = plt.subplots(n,1)\n if 'title' in kwargs:\n self.fig.canvas.set_window_title(kwargs['title'])\n self.fig.suptitle(kwargs['title'], fontsize=11, fontweight='bold')\n if n == 1:\n ax.plot(self.vecs['time'], self.vecs[args[0]])\n ax.set_title('Time vs. ' + args[0].title())\n\n ax.set_ylabel(args[0].title())\n ax.set_xlim([self.vecs['time'][0], self.vecs['time'][-1]])\n\n else:\n for i in range(n):\n ax[i].plot(self.vecs['time'], self.vecs[args[i]])\n ax[i].set_title('Time vs. ' + args[i].title())\n ax[i].set_ylabel(args[i].title())\n ax[i].set_xlim([self.vecs['time'][0], self.vecs['time'][-1]])\n if i != (n-1):\n plt.setp(ax[i].get_xticklabels(), visible=False)\n else:\n ax[i].set_xlabel('Time')\n\n plt.tight_layout(h_pad=0.2)\n plt.subplots_adjust(top=0.85)\n plt.show()", "def buildPlot(self):\r\n style.use('fivethirtyeight')\r\n self.fig = plt.figure()\r\n self.ax1 = self.fig.add_subplot(1,1,1)\r\n self.ax1.clear()\r\n self.ax1.plot(self.inputValInt,self.inputValInt1)", "def plotData(data, xLabel, yLabel, plotTitle, save=False, saveName=None):\n fig, ax = plt.subplots()\n ax.plot(data)\n ax.set(xlabel=xLabel, ylabel=yLabel,\n title=plotTitle)\n ax.grid()\n if save:\n if saveName is not None:\n fig.savefig(saveName)\n else:\n fig.savefig(\"figure\")\n plt.show()", "def diplayGraph(root, df, side, title, color):\n\n figure = plt.Figure(figsize=(5, 4), dpi=100)\n figure.patch.set_facecolor(\"black\")\n ax = figure.add_subplot(111)\n line = FigureCanvasTkAgg(figure, root)\n line.get_tk_widget().pack(side= side, fill=tk.BOTH)\n df.plot(kind=\"line\", legend=True, ax=ax, color=color, marker=\"o\", fontsize=10)\n ax.set_facecolor(\"black\")\n ax.xaxis.set_major_locator(mdates.AutoDateLocator())\n ax.grid(b=True, which=\"major\", color=\"#666666\", linestyle=\"-\")\n ax.spines[\"top\"].set_color(\"white\")\n ax.spines[\"bottom\"].set_color(\"white\")\n ax.spines[\"left\"].set_color(\"white\")\n ax.spines[\"right\"].set_color(\"white\")\n ax.tick_params(axis=\"x\", colors=\"white\")\n ax.tick_params(axis=\"y\", colors=\"white\")\n ax.set_title(title, color=\"white\")\n\n return figure, ax, line", "def drawfigure(data, label, headers, drawer, **kargs):\n if kargs['discrete']:\n xvec = range(len(headers))\n else:\n xvec = [float(x) for x in headers]\n drawer.draw(data, label, xvec)\n plt.xlabel(kargs['xlabel'])\n plt.ylabel(kargs['ylabel'])\n plt.title(kargs['title'])\n if kargs['xticks']:\n plt.xticks(xvec, headers)\n if kargs['legend'] and len(label)>0:\n plt.legend()\n plt.show()", "def plot_graph(x, y, xlabel='', ylabel='', title='', xScaleLog=False, yScaleLog=False, color='blue'):\n if xScaleLog and yScaleLog :\n pl.loglog( x, y)\n elif xScaleLog :\n pl.semilogx( x, y)\n \n elif yScaleLog :\n pl.semilogy( x, y) \n else:\n pl.plot(x, y, color=color)\n \n pl.xlabel(xlabel)\n pl.ylabel(ylabel)\n pl.title(title)", "def replot(self,ax):\n self.XP_Plotter.replot(ax)\n # theoretical lines\n self.lines_theory[0].set_xdata(self.xx)\n self.lines_theory[1].set_xdata(self.xx)\n self.lines_theory[2].set_xdata(self.xx_itpl)\n for line in self.lines_theory: \n ax.draw_artist(line)", "def plot_basic(time, data, lgnd=None):\n pylab.figure()\n pylab.plot(time, data)\n pylab.xlabel('time, s')\n pylab.ylabel('data')\n pylab.title('Basic Plotter')\n if lgnd != None:\n pylab.legend(lgnd)\n pylab.grid(True)\n pylab.show()", "def plot_nominal(x_labels, y_labels, fig_size=(10, 10), tick_size=10, annots=[], show_direction=True):\n fig, ax = plt.subplots(nrows=1, ncols=1, figsize=fig_size)\n 
ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n\n if show_direction:\n plt.plot(range(len(x_labels)+1), range(len(y_labels)+1), 'r')\n plt.arrow(0.5, 0.5, 0.1, 0.1, width=0.05, color='r')\n\n ax.set_xticks(list(range(1, len(x_labels)+1)))\n ax.set_yticks(list(range(1, len(y_labels)+1)))\n ax.set_xticklabels(x_labels, fontsize=tick_size)\n ax.set_yticklabels(y_labels, fontsize=tick_size)\n\n for (x,y), annot_string in annots:\n ax.annotate(annot_string, (x,y), fontsize=tick_size)\n\n plt.show()", "def _plot(self, rewards, losses, epsilons):\n plt.figure(figsize=(20,5))\n plt.subplot(131)\n plt.title('Episodic Reward')\n plt.plot(rewards)\n plt.subplot(132)\n plt.title('TD Loss')\n plt.plot(losses)\n plt.subplot(133)\n plt.title('Epsilon')\n plt.plot(epsilons)\n plt.tight_layout()\n plt.show()", "def train_test_plot():\n plt.title('Crude Oil Time-Series')\n plt.plot(train,label=\"Training Set\")\n plt.plot(test,label=\"Test Set\")\n plt.xlabel('Number of Days')\n plt.ylabel('Price in USD')\n plt.legend(loc = 'upper left')\n plt.savefig('../plots/Time-Series.jpg')", "def graphplot(self):\n if self.binned:\n self.line.set_ydata(self.fft_bins_y)\n else:\n self.line.set_ydata(self.spec_y)\n self.line2.set_ydata(self.wave_y)\n self.ax1.draw_artist(self.ax1.patch)\n self.ax2.draw_artist(self.ax2.patch)\n self.ax1.draw_artist(self.line)\n self.ax2.draw_artist(self.line2)\n self.fig.canvas.update()\n self.fig.canvas.flush_events()", "def figsetup(title, xlab, ylab, fname, show=False):\n plt.xlabel(xlab)\n plt.ylabel(ylab)\n plt.title(fname)\n plt.tight_layout()\n plt.title(title)\n plt.legend()\n plt.savefig(\"../figs/\" + fname + \".png\", dpi=250)\n if show is False:\n plt.close()\n else:\n plt.show()\n return", "def figsetup(title, xlab, ylab, fname, show=False):\n plt.xlabel(xlab)\n plt.ylabel(ylab)\n plt.title(fname)\n plt.tight_layout()\n plt.title(title)\n plt.legend()\n plt.savefig(\"../figs/\" + fname + \".png\", dpi=250)\n if show is False:\n plt.close()\n else:\n plt.show()\n return", "def plot(df, saveName=None, extrem=None,\r\n axeslabel_fontsize=10., title_fontsize=20., axesvalues_fontsize=10., annotation_fontsize=10., legend_fontsize=8.):\r\n\r\n print \"plotting timeseries data...\"\r\n fig = plt.figure(tight_layout=True)\r\n \r\n ax = fig.add_subplot(111)\r\n df.plot(colormap=\"jet_r\", ax=ax, marker='x', title=\"Farge: Measured water level in observation wells and river Weser\")\r\n\r\n\r\n if extrem:\r\n print \"plotting low-high tide scatter data...\"\r\n # if we have extrem.... we want to plot them with same color\r\n handles, labels = ax.get_legend_handles_labels()\r\n colors = list()\r\n for h in handles:\r\n colors.append(h.get_color())\r\n if len(colors) != len(extrem):\r\n raise IndexError(\"Number of hydrographs do not correspond to number of passed extrem. Cannot get proper colors. 
Do hardcode quickly\")\r\n i = 0\r\n for a, c in zip(extrem, colors):\r\n i += 1\r\n print \"\\t>>> {0}/{1}\".format(i, len(extrem))\r\n for item, marker in zip(a, ['o', 's']): # a = list( hightide, lowtide)\r\n item.plot(x='datetime', y='y', ax=ax, marker=marker, lw=2., style='.', markeredgecolor='black', markeredgewidth=0.4, color=c, legend=False)\r\n\r\n #ax.set_xlim([datetime.date(2015, 1, 26), datetime.date(2015, 1, 30)])\r\n handles, labels = ax.get_legend_handles_labels()\r\n ax.legend(handles[0:7], labels[0:7], fontsize=legend_fontsize)\r\n ax.grid(True, which='major')\r\n ax.set_title(\"Measured water level in observation wells and river Weser\", fontsize=title_fontsize)\r\n ax.set_ylabel(\"m AMSL\", fontsize=axeslabel_fontsize)\r\n ax.set_xlabel(\"\", fontsize=axeslabel_fontsize)\r\n ax.tick_params(axis='both', which=\"both\", labelsize=axesvalues_fontsize)\r\n\r\n\r\n #figManager = plt.get_current_fig_manager()\r\n #figManager.window.showMaximized()\r\n\r\n if saveName:\r\n fig.savefig(saveName, dpi=300, tight_layout=True, format='pdf')\r\n print 'saving figure... :', saveName\r\n plt.show()", "def line(dates, series):\n # matplotlib.pyplot.gca().set_color_cycle(colour_cycle)\n\n def plot_and_return_title(plot_title, plot_data):\n matplotlib.pyplot.plot(dates, plot_data)\n return plot_title\n\n legend = [plot_and_return_title(title, data) for title, data in series]\n matplotlib.pyplot.legend(legend, loc='upper left')\n\n matplotlib.pyplot.show()", "def plot(self, x: np.array, y: np.array, fmt: str=None, label: str = None, **kwargs):\n\n if x.shape == y.shape and label not in self.lines:\n if label == None:\n label = \"Line \" + str(self.line_counter)\n self.line_counter += 1\n dataset = DataSet(x, y, label)\n if fmt:\n dataset.setObjectID(self.ax.plot(dataset.x, dataset.y, fmt, label=label, **kwargs))\n else: \n dataset.setObjectID(self.ax.plot(dataset.x, dataset.y, label=label, **kwargs))\n self.lines[dataset.name] = dataset\n self.legend()\n self.canvas.draw()\n else:\n if x.shape != y.shape:\n raise ValueError(\"x and y array shapes do not match.\")\n if(label in self.lines):\n raise ValueError(\"line with specified name already exists (unique constraint failed).\")\n raise ValueError(\"Error in required arguments for plotting.\")", "def set_labels(x, y=''):\n plt.xlabel(x)\n plt.ylabel(y)", "def advanced_line():\n\n # Make dataset specifying arguments\n dataset_a = DataSet(sine,line_style='-',line_width=1.5,marker_style='o',marker_size='4')\n\n # Make dataset changing options using setters\n dataset_b = DataSet(cosine)\n dataset_b.set_line(style='--',width=1.5)\n dataset_b.set_colour(colour='royalblue')\n\n # Make plot object and adjust properties using setters\n plot = Plot()\n plot.set_text(latex=True,label=12)\n plot.add_dataset(dataset_a)\n plot.add_dataset(dataset_b)\n plot.set_axes(xlim=(0,8),ylim=(-1.1,1.1),xlabel=r'$x$',ylabel=r'$f\\left(x\\right)$',xticks=(1.0,0.2),yticks=(0.2,0.05))\n\n # Plot graph and display\n plot.plot()\n plot.save(name='./figures/2d_advanced_line',fmt='png')\n plot.display()", "def plot(\n self,\n xscale=\"symlog\",\n xscale_linthresh=20,\n zoom=\"auto\",\n hlines=(),\n ):\n import matplotlib.pyplot as plt\n from matplotlib.colors import hsv_to_rgb\n\n ys = np.array(self.losses)\n xs = np.arange(ys.size)\n\n fig, ax = plt.subplots()\n ax.plot(xs, ys, \".-\")\n if xscale == \"symlog\":\n ax.set_xscale(xscale, linthresh=xscale_linthresh)\n else:\n ax.set_xscale(xscale)\n ax.set_xlabel(\"Iteration\")\n ax.set_ylabel(\"Loss\")\n\n if hlines:\n 
hlines = dict(hlines)\n for i, (label, value) in enumerate(hlines.items()):\n color = hsv_to_rgb([(0.1 * i) % 1.0, 0.9, 0.9])\n ax.axhline(value, color=color, ls=\"--\", label=label)\n ax.text(1, value, label, color=color, va=\"bottom\", ha=\"left\")\n\n if zoom is not None:\n if zoom == \"auto\":\n zoom = min(50, ys.size // 2)\n\n iax = ax.inset_axes([0.5, 0.5, 0.5, 0.5])\n iax.plot(xs[-zoom:], ys[-zoom:], \".-\")\n\n return fig, ax", "def preparePlot(xticks, yticks, figsize=(10.5, 6), hideLabels=False, gridColor='#999999',\n gridWidth=1.0):\n plt.close()\n fig, ax = plt.subplots(figsize=figsize, facecolor='white', edgecolor='white')\n ax.axes.tick_params(labelcolor='#999999', labelsize='10')\n for axis, ticks in [(ax.get_xaxis(), xticks), (ax.get_yaxis(), yticks)]:\n axis.set_ticks_position('none')\n axis.set_ticks(ticks)\n axis.label.set_color('#999999')\n if hideLabels: axis.set_ticklabels([])\n plt.grid(color=gridColor, linewidth=gridWidth, linestyle='-')\n map(lambda position: ax.spines[position].set_visible(False), ['bottom', 'top', 'left', 'right'])\n return fig, ax" ]
[ "0.73383814", "0.726306", "0.71619123", "0.7137723", "0.70683104", "0.70184547", "0.69500715", "0.6938889", "0.6920055", "0.69195294", "0.6891798", "0.67042345", "0.6702502", "0.66961604", "0.6673671", "0.66625386", "0.66425776", "0.66200614", "0.66146296", "0.6595933", "0.65903986", "0.6550335", "0.6546197", "0.6543843", "0.65383565", "0.65047395", "0.6497972", "0.6489937", "0.6486026", "0.64789826", "0.6447189", "0.64446473", "0.64297545", "0.63676107", "0.6349529", "0.6348031", "0.6343135", "0.63264966", "0.6325265", "0.62998354", "0.6297189", "0.62935704", "0.6288683", "0.62712383", "0.62414914", "0.62411", "0.61999506", "0.617303", "0.6165309", "0.61511034", "0.61495805", "0.6138825", "0.6138663", "0.6137419", "0.6135362", "0.61305", "0.61199677", "0.60964555", "0.60829455", "0.6065123", "0.6064099", "0.60636824", "0.60537994", "0.6047913", "0.6040258", "0.6034628", "0.6025122", "0.6021094", "0.60192496", "0.6010187", "0.600844", "0.60054386", "0.59977174", "0.5984991", "0.5981827", "0.59659994", "0.596221", "0.5950716", "0.59444386", "0.59427375", "0.592442", "0.59196615", "0.59179384", "0.5912632", "0.5910415", "0.59101254", "0.5901217", "0.589617", "0.58935934", "0.5889162", "0.58884645", "0.58839744", "0.58839744", "0.5883858", "0.58675843", "0.5854099", "0.5843034", "0.584242", "0.58393997", "0.5837759" ]
0.6106391
57
A little function to make graphing less of a pain
Adds a new line to a blank figure and labels it
def plothus(ax, x, y, *, datalabel='', linestyle = '-', marker = ''):
    out = ax.plot(x, y, zorder=1, label=datalabel, linestyle = linestyle, marker = marker)
    return out
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_graph(self, x, y, label):\n pyplot.plot(x[:len(y)], y[:len(x)], label=label)", "def line_graph():\r\n #create the data in an array\r\n xval = np.arange(0,6,(np.pi*(1./10)))\r\n yval = np.cos(xval)\r\n data = np.array([xval,yval])\r\n data = data.transpose()\r\n y = np.arange(-1,1.5,0.5)\r\n #convert the data to a pd DataFrame\r\n df = pd.DataFrame(data,columns=[\"x\",\"y\"])\r\n #tell the DataFrame to plot the data\r\n ax = df.plot(x=\"x\",y=\"y\",label=\"0\",ylim=(-1,1),yticks=y,title=\"Cosine Approximated at Intervals of 1/(10pi)\")\r\n ax.set(xlabel=\"\",ylabel=\"\")\r\n\t#get the figure from the axes and save it\r\n fig = ax.get_figure()\r\n fig.savefig(\"my_line_graph.png\")", "def add_figure1(self,x,y,index=1,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(x,y)", "def simple_line():\n\n # Make two datasets\n dataset_a = DataSet(sine)\n dataset_b = DataSet(cosine)\n\n # Make plot and add data\n plot = Plot()\n plot.set_text()\n plot.add_dataset(dataset_a)\n plot.add_dataset(dataset_b)\n\n # Plot graph and display\n plot.plot()\n plot.save(name='./figures/2d_simple_line',fmt='png')\n plot.display()", "def make_1d_graph(self, xvals, yvals, xlabel, xunits,\n ylabel, yunits, nicexlabel=True,\n niceylabel=True ,xlims='edges', ylims=None,\n linestyle='-', color='darkblue', alpha=0.9,\n xlabelsize='18', ylabelsize='18', marker=None,\n plotlabel=None, subplotnum=None, linewidth=1):\n import matplotlib.pyplot as plt\n plt.rcParams['text.usetex'] = True\n plt.plot(\n xvals,\n yvals,\n linestyle=linestyle,\n color=color,\n alpha=alpha,\n marker=marker,\n label=plotlabel,\n linewidth=linewidth\n )\n if xlims is not None:\n if xlims == 'edges':\n plt.xlim(xvals[0], xvals[-1])\n else:\n plt.xlim(xlims)\n if xlabel is not None:\n if nicexlabel:\n xlabel = self.make_label(xlabel, xunits)\n plt.xlabel(xlabel, fontsize=xlabelsize)\n if ylims is not None:\n if ylims[0] == ylims[1]:\n plt.ylim(ylims[0]-0.1, ylims[0]+0.1)\n else:\n plt.ylim(ylims)\n if ylabel is not None:\n if subplotnum is not None:\n if (subplotnum-1)%4 == 0:\n if niceylabel:\n ylabel = self.make_label(ylabel, yunits)\n plt.ylabel(ylabel, fontsize=ylabelsize)\n else:\n if niceylabel:\n ylabel = self.make_label(ylabel, yunits)\n plt.ylabel(ylabel, fontsize=ylabelsize)", "def plot_line_graph(X, Y, xlabel, ylabel, title, fname):\r\n plt.figure(figsize=(20, 21))\r\n plt.plot(X, Y)\r\n plt.title(title)\r\n plt.xlabel(xlabel)\r\n plt.ylabel(ylabel)\r\n plt.xticks(X)\r\n plt.savefig(fname, bbox_inches='tight')\r\n plt.close()", "def line_graph():\n fig = plt.figure()\n ax = plt.axes()\n x = [1, 2, 3]\n y = [5, 6, 7]\n plt.plot(x, y)\n plt.show()", "def line_figure(self, title):\n\n plt.figure()\n counter = 0\n\n # plot the x and y for each house and battery\n for battery in self.grid.batteries:\n x = []\n y = []\n for house in battery.connections:\n x.append(house.x)\n y.append(house.y)\n plt.plot([house.x, battery.x], [house.y, battery.y],\n color=colors[counter], linewidth=.25)\n plt.scatter(x, y, marker='p', color=colors[counter])\n plt.plot(battery.x, battery.y, marker='x', color=colors[counter],\n markersize=10)\n counter += 1\n plt.title(f\"{title}. 
Cost: {self.cost()}\")", "def line_plot(self, x, y, labels, ax=None):\n if ax is None:\n fig, ax = plt.subplots()\n else:\n fig = None\n ax.plot(x, y, '--o', label=labels[0])\n ax.set_xlabel(labels[1])\n ax.set_ylabel(labels[2])\n ax.set_title(labels[3])\n return fig, ax", "def newGraph(self, xlab, ylab):\r\n if (not self.doMPL):\r\n newGraph = Gnuplot(debug=0)\r\n\t #newGraph('set data style linespoints')\r\n\t newGraph.set_label('xlabel', xlab)\r\n\t newGraph.set_label('ylabel', ylab)\r\n return newGraph\r\n else:\r\n self.mplFigCount = self.mplFigCount + 1\r\n if (self.graphLabelsX.__len__() <= self.mplFigCount):\r\n gg = self.graphLabelsX.__len__()\r\n while (gg <= self.mplFigCount):\r\n self.graphLabelsX.append('')\r\n gg = gg+1\r\n if (self.graphLabelsY.__len__() <= self.mplFigCount):\r\n gg = self.graphLabelsY.__len__()\r\n while (gg <= self.mplFigCount):\r\n self.graphLabelsY.append('')\r\n gg = gg+1\r\n self.graphLabelsX[self.mplFigCount] = xlab\r\n self.graphLabelsY[self.mplFigCount] = ylab\r\n figure(self.mplFigCount, (6,4))\r\n xlabel(self.graphLabelsX[self.mplFigCount])\r\n ylabel(self.graphLabelsY[self.mplFigCount])\r\n return self.mplFigCount", "def linePlot(self):\n clf()\n plot(self.x,self.averages)\n xlabel('X Label (units)')\n ylabel('Y Label (units)')\n savefig('line.png')", "def drawit(fignum=1,xlabel=\" \",ylabel=\" \",xvar=None,\n yvar=None,title=\" \",ylimit=None,\n xlimit=None):\n fig=plt.figure(fignum)\n fig.clf()\n ax1=fig.add_subplot(111)\n line=ax1.plot(xvar,yvar)\n ax1.set_xlim(xlimit)\n ax1.set_ylim(ylimit)\n ax1.set_title(title)\n ax1.set_xlabel(xlabel)\n ax1.set_ylabel(ylabel)\n fig.tight_layout()\n fig.canvas.draw()\n return fig,ax1,line[0]", "def make_plot(counts):\n # YOUR CODE HERE\n posX=[]\n posY=[]\n negX=[]\n negY=[]\n\t\n count=1\n for i in counts:\n\tif len(i)!=0:\t\n\t\tposX.append(count)\n\t posY.append(i[0][1])\n\t\tnegX.append(count)\n\t negY.append(i[1][1])\n\t count=count+1\n\t\n line1, =plt.plot(posX,posY,marker=\"o\",label=\"Positive\",color=\"g\")\n line2, =plt.plot(negX,negY,marker=\"o\",label=\"Negative\",color=\"r\")\n plt.xlabel('Time Step')\n plt.ylabel('Word Count')\n plt.title('Basic Twitter Sentiment Analytics')\n plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)\n plt.legend(handler_map={line1: HandlerLine2D(numpoints=2)})\n plt.show()", "def newplot(*args, **kwargs):\n\n if 'linewidth' and 'lw' not in kwargs.keys():\n kwargs['linewidth'] = 2\n\n plt.figure(figsize=FIGURE_SIZE, dpi=FIGURE_DPI)\n plt.plot(*args, **kwargs)\n\n plt.setp(plt.gca().spines.values(), linewidth=2)\n plt.xticks(fontsize=25, fontname='Times New Roman')\n plt.yticks(fontsize=25, fontname='Times New Roman')\n plt.ticklabel_format(useOffset=False)\n plt.ticklabel_format(style='sci', scilimits=(-3, 3))", "def plot_noerror(self, ax):\n tmp_lefts = deepcopy(self.lefts)\n tmp_lefts = np.append(tmp_lefts, self.lefts[-1] + self.widths[-1])\n tmp_values = deepcopy(self.values)\n tmp_values = np.append(tmp_values, self.values[-1])\n return ax.plot(tmp_lefts, tmp_values, color=self.color, drawstyle='steps-post', label=self.label, **self.options)", "def plot_graph(self) -> None:", "def plotLine(self):\n minc = 0\n maxc = 500\n num = 500\n levels = np.linspace(minc,maxc,num+1)\n title = textwrap.dedent(\"\"\"\\\n Orography difference between LGM and Modern ICE-5G data\n using {0} meter contour interval\"\"\").format((maxc-minc)/num)\n plt.figure()\n plt.contour(self.difference_in_ice_5g_orography,levels=levels)\n plt.title(title)\n pts.set_ticks_to_zero()\n #if 
self.save:\n #plt.savefig('something')\n print(\"Line contour plot created\")", "def plot():\n pass", "def make_line_plot(data, x_label=\"Data\", y_label=\"Data Point\"):\n\n y = data\n x = range(len(y))\n\n plt.xlabel(x_label)\n plt.ylabel(y_label)\n plt.plot(x, y)\n plt.show()", "def create_plot_without_error(x, y, x_label, y_label):\n plt.plot(x, y, '.', color=\"#ff0000\", ms=1)\n plt.xlabel(x_label)\n plt.ylabel(y_label)", "def plotone(x,y,xlabel,ylabel,filename):\n fig=plt.figure()\n ax = fig.add_subplot(111)\n ax.plot(x,y,linewidth=2.0)\n plt.ylabel(ylabel)\n plt.xlabel(xlabel)\n fig.savefig(filename)", "def plot_lines(self):\n self.plot(3)", "def multi_line_plot(x_data, y_data, title, x_label, y_label):\n plt.figure(1, (18, 8)) # something, plot size\n plt.subplot(111)\n legend = []\n for i in range(len(x_data)):\n plt.plot(x_data[i], y_data[i])\n legend.append((i+1))\n plt.title(title)\n plt.xlabel(x_label, fontsize=12)\n plt.ylabel(y_label, fontsize=12)\n plt.legend(legend, loc='upper left')\n plt.show()", "def twoline(file_name, title, line1, line1_label, line2, line2_label, x_labels):\n line_chart = pygal.Line(include_x_axis=True)\n line_chart.title = title\n line_chart.x_labels = x_labels\n line_chart.add(line1_label, line1)\n line_chart.add(line2_label, line2)\n line_chart.render_to_file(file_name)\n return True", "def draw_line_plot(fig, x, y, labels):\r\n\r\n #Convert times to a displayable format\r\n (x_times, hour_mode) = times_to_axis(x)\r\n\r\n\r\n #Draw grid lines\r\n fig.grid(zorder=0)\r\n\r\n #Draw plot\r\n fig.plot(x_times, y, \"-\", label=None, zorder=2)\r\n \r\n \r\n #If necessary, enable processing of \"datetime\" objects on the x-axis\r\n if not hour_mode:\r\n fig.xaxis_date()\r\n\r\n\r\n #Label and style plot\r\n set_axis_labels(fig, *labels)\r\n style_x_labels(fig)", "def make_plot(x,y):", "def _add_series(series, label, marker):\n plt.plot(series, label=label, marker=marker, linestyle=':', linewidth=0.5,\n markersize=4)", "def paint_graph(x_label, y_label, df, th_user, label_user, th_calc, label_calc):\n import seaborn as sns\n import matplotlib.pyplot as plt\n\n df = df.copy()\n df['target'] = df['target'].map({True: 'target', False: 'non-target'})\n title = '{} vs {}'.format(x_label, y_label)\n\n plt.figure(figsize=(1920 / 96, 1103 / 96), dpi=96)\n\n sns.stripplot(x=x_label, y=y_label, hue='target', data=df,\n palette={'target': 'blue', 'non-target': 'red'}, alpha=0.5, jitter=False, dodge=True)\n plt.axvline(x=th_user, label=label_user, c='g')\n plt.axvline(x=th_calc, label=label_calc, c='k')\n plt.title(title)\n plt.legend()\n\n plt.savefig('{}.png'.format(title))\n\n if plt.get_backend() == 'TkAgg':\n mng = plt.get_current_fig_manager()\n mng.window.state('zoomed')\n plt.show()", "def plot_one_axes(self, fig_num: int, title: str, y_label: str, labeled: np.ndarray, filled: np.ndarray,\n smoothed: np.ndarray, legend_entries: Dict[str, str]) -> matplotlib.figure.Figure:\n fig = plt.figure(fig_num)\n ax = fig.subplots(1, 1)\n labeled_lines = kine_graph_init(ax, labeled, y_label, self.frame_nums, [{'ls': '', 'marker': 'o', 'ms': 2,\n 'fillstyle': 'none', 'mew': 0.5}] * 3)\n ax.set_prop_cycle(None)\n filled_lines = kine_graph_add(ax, filled, self.frame_nums, [{'ls': '-', 'lw': 0.75}] * 3)\n ax.set_prop_cycle(None)\n smoothed_lines = kine_graph_add(ax, smoothed, self.frame_nums, [{'ls': '-'}] * 3)\n plt.tight_layout()\n fig.suptitle(title, x=0.7)\n fig.legend((labeled_lines[0], smoothed_lines[2], filled_lines[1]),\n (legend_entries['labeled'], 
legend_entries['smoothed'], legend_entries['filled']),\n ncol=2, handlelength=0.75, handletextpad=0.25, columnspacing=0.5, loc='upper left')\n make_interactive()\n return fig", "def add_to_plot(self, line_name, points):\n points = [x * 100 for x in points]\n plt.plot(points, label=line_name)", "def add_figure(self,sig,index,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(sig)", "def createBlankPlot(self):\n\n fig = plt.figure(figsize=(8,6),dpi=80)\n fig.set_facecolor('#ededed')\n \n # Format plot\n ax = plt.subplot(111)\n \n fig.canvas.draw()\n \n return fig, ax", "def graphplot(self):\n if self.binned:\n self.line.set_ydata(self.fft_bins_y)\n else:\n self.line.set_ydata(self.spec_y)\n self.line2.set_ydata(self.wave_y)\n self.ax1.draw_artist(self.ax1.patch)\n self.ax2.draw_artist(self.ax2.patch)\n self.ax1.draw_artist(self.line)\n self.ax2.draw_artist(self.line2)\n self.fig.canvas.update()\n self.fig.canvas.flush_events()", "def _create_line_plot(experiment_param, nus, norms, ax, subtitle):\n for name in sorted(norms):\n errors = [experiment_param[nu][name] for nu in nus]\n ax.plot(nus, errors, label=name)\n\n ax.legend()\n ax.set_xticks(nus[1::2])\n ax.set_xticklabels(nus[1::2])\n ax.set_ylabel('Average error (%)', fontsize=15)\n ax.set_ylim([0,5])\n ax.set_title('Estimating {}\\n'.format(subtitle), fontsize=15)", "def OneValueWithTimePlot(timeline, Ydat1, Label1, xmin, xmax, ymin, ymax, x_auto, y_auto,\n XLabel, YLabel, SupTitle, Title, FileName,\n currentDate, currentTime, Software_version):\n\n rc('font', size=6, weight='bold')\n fig = plt.figure(figsize=(9, 5))\n ax1 = fig.add_subplot(111)\n ax1.plot(Ydat1, linestyle='-', linewidth='1.00', label=Label1)\n ax1.legend(loc='upper right', fontsize=6)\n ax1.grid(visible=True, which='both', color='silver', linestyle='-')\n if x_auto == 0: ax1.set_xlim([xmin, xmax])\n if y_auto == 0: ax1.set_ylim([ymin, ymax])\n ax1.set_ylabel(YLabel, fontsize=6, fontweight='bold')\n ax1.set_title(Title, fontsize=6)\n ax1.set_xlabel(XLabel, fontsize=6, fontweight='bold')\n text = ax1.get_xticks().tolist()\n for i in range(len(text)-1):\n k = int(text[i])\n text[i] = timeline[k] # text[i] = timeline[i] ???\n ax1.set_xticklabels(text, fontsize=6, fontweight='bold')\n fig.subplots_adjust(top=0.92)\n fig.suptitle(SupTitle, fontsize=8, fontweight='bold')\n fig.text(0.79, 0.03, 'Processed ' + currentDate + ' at ' + currentTime,\n fontsize=4, transform=plt.gcf().transFigure)\n fig.text(0.11, 0.03, 'Software version: ' + Software_version + ', [email protected], IRA NASU',\n fontsize=4, transform=plt.gcf().transFigure)\n pylab.savefig(FileName, bbox_inches='tight', dpi=160)\n plt.close('all')\n return", "def plot_line(self,x_0,y_0,x_1,y_1,col=\"black\",line_width=1,line_type=\"solid\"):\n self._fig.add_shape(\n go.layout.Shape(\n type=\"line\",\n x0=x_0,\n y0=y_0,\n x1=x_1,\n y1=y_1,\n line=dict(\n color=col,\n width=line_width,\n dash=line_type\n )\n )\n )", "def labelling():\n title = input('Please enter the title of output figure: ')\n plt.title(title)\n plt.xlabel(\"Time (s)\")\n plt.ylabel(\"Voltage (V)\")\n plt.legend()\n plt.savefig(\"Scope_%s\"%str(round(time.time()))) # Time stamp on file names\n plt.show()", "def show_line(dict, xlabel=\"x\", ylabel=\"y\", title=\"title\"):\n plt.clf()\n plt.cla()\n plt.plot(list(dict.keys()), list(dict.values()), alpha=0.4, color = 'g')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n 
plt.title(title)\n plt.show()", "def timeSpaceDiagramMethod(self):\n fig, ax1 = plt.subplots()\n\n ax1.set_xlabel('Time (s)', fontsize=24, fontweight='bold')\n ax1.set_ylabel('Distance (m)', fontsize=24, fontweight='bold')\n max_x_limit = self.xAxisRange-100\n plt.xlim([0, max_x_limit])\n plt.ylim([0, max(self.distance_Green)+400])\n plt.xticks(np.arange(0, self.xAxisRange-75, 50), fontsize=24)\n ax1.tick_params(axis='y', labelsize=18)\n for axis in ['top', 'bottom', 'left', 'right']:\n ax1.spines[axis].set_linewidth(4)\n # ax1.set_yticks(ticks=np.arange(0, 100, 20),fontsize = 24)\n #newYlabel = ['-400','0','395','810','1225']\n # plt.gca().set_yticklabels(newYlabel)\n # plt.yticks([])\n req_phase_length = len(self.greenRectangleStartPoint)\n for i in range(0, req_phase_length):\n x = self.greenRectangleStartPoint[i]\n y = self.distance_Green[i]\n ax1.add_patch(Rectangle(\n (x, y), self.greenTime[i], 30, angle=0.0, color='green', linewidth=2,))\n\n req_phase_length = len(self.clearanceRectangleStartPoint)\n for i in range(0, req_phase_length):\n x = self.clearanceRectangleStartPoint[i]\n y = self.distance_Clearance[i]\n ax1.add_patch(Rectangle(\n (x, y), self.clearanceTime[i], 30, angle=0.0, color='red', linewidth=2))\n\n\n if len(self.evTrajectoryTimePoint) > 0:\n ax1.scatter(self.evTrajectoryTimePoint, self.evTrajectoryDistancePoint, c=\"black\", linewidths=4,\n marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.transitTrajectoryTimePoint) > 0:\n ax1.scatter(self.transitTrajectoryTimePoint, self.transitTrajectoryDistancePoint, c=\"black\",\n linewidths=4, marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.truckTrajectoryTimePoint) > 0:\n ax1.scatter(self.truckTrajectoryTimePoint, self.truckTrajectoryDistancePoint, c=\"black\",\n linewidths=4, marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.carTrajectoryTimePoint) > 0:\n ax1.scatter(self.carTrajectoryTimePoint, self.carTrajectoryDistancePoint, c=\"black\", linewidths=4,\n marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.connectedVehicleTrajectoryTimePoint) > 0:\n ax1.scatter(self.connectedVehicleTrajectoryTimePoint, self.connectedVehicleTrajectoryDistancePoint, c=\"black\", linewidths=4,\n marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2) \n\n ax1.legend(loc='upper right', prop={\"size\": 16})\n ax1.set_title(\"Time-Space Diagram\", fontsize=20, fontweight='bold')\n fig.tight_layout() # otherwise the right y-label is slightly clipped\n plt.grid(color='black', linestyle='-', linewidth=0.5)\n plt.show()", "def diplayGraph(root, df, side, title, color):\n\n figure = plt.Figure(figsize=(5, 4), dpi=100)\n figure.patch.set_facecolor(\"black\")\n ax = figure.add_subplot(111)\n line = FigureCanvasTkAgg(figure, root)\n line.get_tk_widget().pack(side= side, fill=tk.BOTH)\n df.plot(kind=\"line\", legend=True, ax=ax, color=color, marker=\"o\", fontsize=10)\n ax.set_facecolor(\"black\")\n ax.xaxis.set_major_locator(mdates.AutoDateLocator())\n ax.grid(b=True, which=\"major\", color=\"#666666\", linestyle=\"-\")\n ax.spines[\"top\"].set_color(\"white\")\n ax.spines[\"bottom\"].set_color(\"white\")\n ax.spines[\"left\"].set_color(\"white\")\n ax.spines[\"right\"].set_color(\"white\")\n ax.tick_params(axis=\"x\", colors=\"white\")\n ax.tick_params(axis=\"y\", colors=\"white\")\n ax.set_title(title, 
color=\"white\")\n\n return figure, ax, line", "def plt_spec_lines():\n\n for i in range(0, Molecule.species_count):\n mid_line = (Molecule.right_endpt[i] + Molecule.left_endpt[i]) / 2\n shift1 = Molecule.energy[i] - PlotParameter.energy_vshift\n shift2 = Molecule.energy[i] + PlotParameter.name_vshift\n\n en = '{0:5.2f}'.format(Molecule.energy[i])\n\n plt.plot([Molecule.left_endpt[i], Molecule.right_endpt[i]], [Molecule.energy[i], Molecule.energy[i]],\n color=PlotParameter.species_line_color, lw=PlotParameter.species_line_width, linestyle='-')\n plt.text(mid_line, shift1, en, weight='bold', horizontalalignment='center',\n fontsize=PlotParameter.energy_font_size, color='black')\n plt.text(mid_line, shift2, Molecule.name[i], weight='bold', horizontalalignment='center',\n fontsize=PlotParameter.name_font_size, color='black')", "def draw_figure(X, Y, point_size=0.6):\r\n ax=plt.gca()\r\n ax.set_xticks([]); ax.set_yticks([])\r\n plt.plot(X,Y,'r.', markersize=point_size)\r\n plt.show()", "def draw_line(tick_length, tick_label=''):\n line = \"_\" * tick_length\n if tick_label:\n line += ' ' + tick_label\n print(line)", "def addLegendLine(line,n):\n dislin.leglin(' ',line,n)", "def graph(x, y, xlabel = \"\", ylabel = \"\", legend = \"\", color = \"\"):\n plt.plot(x, y, color, label = legend)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.legend(loc = 'best')\n plt.grid()", "def _handle_create_line(self, axes, style_args):\n stream_data = self.server.stream_data\n # sample data for initial create\n x_data = numpy.arange(0, 2, 1)\n y_data = numpy.array([0]*2)\n\n line, = axes.plot(x_data, y_data, '-', **style_args)\n # NOTE: client may set 'label'\n line_name = style_args['label']\n if line_name in stream_data:\n # preserve old line data with a new name\n stream_data[line_name+\"_old_\"+timestamp()] = stream_data[line_name]\n # always start with no data for the new line\n stream_data[line_name] = {'y': [], 'line': line, 'last_len': 0}\n if FLAGS.timestamp:\n stream_data[line_name]['x'] = []\n return line_name", "def create_rink():\n \n fig, ax = plt.subplots(figsize=(12, 9), dpi=600)\n # Нейтральная зона\n # Центральная линия\n line = plt.Line2D((0, 0), (-42.5, 42.5), lw=5, color='red', linestyle='-')\n plt.gca().add_line(line)\n\n line = plt.Line2D((0, 0), (-42.5, 42.5), lw=2, color='white', linestyle='--')\n plt.gca().add_line(line)\n\n # синяя линия\n line = plt.Line2D((25, 25), (-42.5, 42.5), lw=5, color='blue', linestyle='-')\n plt.gca().add_line(line)\n\n # Центральный круг\n ax.add_patch(Arc((0, 0), 30, 30, theta1=-90, theta2=90, lw=2, edgecolor='blue'))\n ax.add_patch(Circle((0, 0), 1.5, lw=2.5, edgecolor='blue', facecolor='blue'))\n\n # точки\n ax.add_patch(Circle((20, 22), 1, lw=5, edgecolor='red', facecolor='red'))\n ax.add_patch(Circle((20, -22), 1, lw=5, edgecolor='red', facecolor='red'))\n\n # Верхний круг вбрасывания\n line = plt.Line2D((75, 71, 71), (23, 23, 26), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((63, 67, 67), (23, 23, 26), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((63, 67, 67), (21, 21, 18), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((75, 71, 71), (21, 21, 18), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n\n line = plt.Line2D((71, 71), (7, 5), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((67, 67), (7, 5), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((67, 67), (37, 39), 
lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((71, 71), (37, 39), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n\n ax.add_patch(Circle((69, 22), 1, lw=5, edgecolor='red', facecolor='red'))\n ax.add_patch(Arc((69, 22), 30, 30, theta1=0, theta2=360, lw=2, edgecolor='red'))\n \n # Нижний круг вбрасывания\n line = plt.Line2D((75, 71, 71), (-23, -23, -26), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((63, 67, 67), (-23, -23, -26), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((63, 67, 67), (-21, -21, -18), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((75, 71, 71), (-21, -21, -18), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n\n line = plt.Line2D((71, 71), (-7, -5), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((67, 67), (-7, -5), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((67, 67), (-37, -39), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((71, 71), (-37, -39), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n\n ax.add_patch(Circle((69, -22), 1, lw=5, edgecolor='red', facecolor='red'))\n ax.add_patch(Arc((69, -22), 30, 30, theta1=0, theta2=360, lw=2, edgecolor='red'))\n\n\n #Зона ворот\n line = plt.Line2D((89, 89), (-40.7, 40.7), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n ax.add_patch(Arc((89, 0), 16, 16, theta1=90, theta2=270, lw=2, edgecolor='red', facecolor='blue'))\n ax.add_patch(Rectangle((85.5,-4), 3.5, 8, lw=2 ,edgecolor='red', facecolor='blue', alpha=0.7))\n\n ax.add_patch(Arc((90, 1), 4, 4, theta1=-30, theta2=90, lw=2, edgecolor='red', facecolor='blue'))\n ax.add_patch(Arc((90, -1), 4, 4, theta1=270, theta2=30, lw=2, edgecolor='red', facecolor='blue'))\n line = plt.Line2D((89, 90), (3, 3), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n line = plt.Line2D((89, 90), (-3, -3), lw=2, color='red', linestyle='-')\n plt.gca().add_line(line)\n\n\n # Борта\n line = plt.Line2D((0, 80), (-42.6, -42.6), lw=5, color='black')\n plt.gca().add_line(line)\n\n line = plt.Line2D((0, 80), (42.6, 42.6), lw=5, color='black')\n plt.gca().add_line(line)\n\n line = plt.Line2D((100, 100), (-22.6, 22.6), lw=5, color='black')\n plt.gca().add_line(line)\n\n ax.add_patch(Arc((80, 22.6), 40, 40,\n theta1=0, theta2=90, edgecolor='black', lw=5))\n ax.add_patch(Arc((80, -22.6), 40, 40,\n theta1=270, theta2=360, edgecolor='black', lw=5))\n\n plt.xlim(0, 120)\n\n #plt.axis('auto')\n #plt.show()\n return ax, fig", "def plot_line(ax, d, t, nufnu, name, label, col, legend=False, zorder=1):\n lum = nufnu * 1e-23 * 1e-3 * 4 * np.pi * d**2\n fs = 11\n nsize = 10 # normal size for points\n if name=='AT2018cow':\n marker='*'\n fcol = col\n s=70\n else:\n if label=='SN':\n marker='o'\n s=nsize\n fcol = col # fill color\n label = 'SN'\n elif label=='GRB':\n marker='o'\n fcol = 'white' # unfilled\n s=nsize\n label = 'GRB'\n elif label=='Rel. SN':\n marker='s'\n fcol = col \n s=nsize\n label = 'Rel. 
SN'\n elif label=='TDE':\n marker='s'\n fcol = 'white' #unfilled\n s=nsize\n label='TDE'\n ax.scatter(\n t, lum, facecolor=fcol, edgecolor=col, \n marker=marker, s=s, zorder=zorder)\n if legend:\n ax.plot(t, lum, c=col, ls='-', label=label, zorder=zorder)\n else:\n ax.plot(t, lum, c=col, ls='-', label=None, zorder=zorder)\n return lum", "def make_plot(counts):\n cn1 = []\n cn2 = []\n time = []\n\n for x in counts:\n y1 = x[0]\n cn1.append(y1[1])\n y2 = x[1]\n cn2.append(y2[1])\n\n for i in range(len(counts)):\n time.append(i)\n\n posLine = plt.plot(time, cn1,'bo-', label='Positive')\n negLine = plt.plot(time, cn2,'go-', label='Negative')\n plt.axis([0, len(counts), 0, max(max(cn1), max(cn2))+50])\n plt.xlabel('Time step')\n plt.ylabel('Word count')\n plt.legend(loc = 'upper left')\n plt.show()\n plt.savefig(\"plot.png\", format=\"png\")", "def line(dates, series):\n # matplotlib.pyplot.gca().set_color_cycle(colour_cycle)\n\n def plot_and_return_title(plot_title, plot_data):\n matplotlib.pyplot.plot(dates, plot_data)\n return plot_title\n\n legend = [plot_and_return_title(title, data) for title, data in series]\n matplotlib.pyplot.legend(legend, loc='upper left')\n\n matplotlib.pyplot.show()", "def visualize(*x, color=\"red\", linewidth=3, linestyle=\"-\"):\n x = x\n\n # If user doesn't submit submit x, it will create a default DataSet.\n if x == ():\n x = np.linspace(0, 70, 5000)\n else:\n x = x[0]\n\n plt.plot(x, create(x), linewidth=linewidth, c=color, linestyle=linestyle)", "def function2():\r\n with open('data.txt', 'r') as file:\r\n read_data = file.read()\r\n data = read_data.split()\r\n line_chart = pygal.Line()\r\n line_chart.title = data[26]\r\n line_chart.x_labels = map(str, range(2554, 2558))\r\n line_chart.add(data[27], [int(data[28]), int(data[29]), int(data[30]), int(data[31])])\r\n line_chart.add(data[32], [int(data[33]), int(data[34]), int(data[35]), int(data[36])])\r\n line_chart.add(data[37], [int(data[38]), int(data[38]), int(data[40]), int(data[41])])\r\n line_chart.add(data[42], [int(data[43]), int(data[44]), int(data[45]), int(data[46])])\r\n line_chart.add(data[47], [int(data[48]), int(data[49]), int(data[50]), int(data[51])])\r\n line_chart.render_to_file('02.svg')", "def replot(self,ax):\n for i,line in enumerate(self.lines):\n line.set_ydata(self.data[i].f)\n line.set_xdata(self.data[i].x)\n for line in self.lines: \n ax.draw_artist(line)", "def test():\n data1 = resources_vs_time(0.0, 50)\n data2 = resources_vs_time(1.0, 10)\n data3 = resources_vs_time(2.0, 10)\n data4 = resources_vs_time(0.5, 10)\n print data1\n simpleplot.plot_lines(\"Growth\", 600, 600, \"time\", \"total resources\", [data1])", "def plotQuantity(self, step, quantity, name):\r\n if (not self.doMPL):\r\n self.graphs[name]('set data style linespoints')\r\n self.graphs[name].set_label('xlabel', 'Step')\r\n self.graphs[name].set_label('ylabel', name) \r\n if (step == 0):\r\n\t self.graphs[name].set_range('xrange', (0, 1))\r\n else:\r\n\t self.graphs[name].set_range('xrange', (0, step))\r\n self.xyData[name].append([step, quantity])\r\n miny = self.xyData[name][0][1]\r\n maxy = self.xyData[name][0][1]\r\n for i in range(0, self.xyData[name].__len__()):\r\n if (self.xyData[name][i][1] < miny):\r\n miny = self.xyData[name][i][1]\r\n elif (self.xyData[name][i][1] > maxy):\r\n maxy = self.xyData[name][i][1]\r\n if (self.xyData[name].__len__() == 1):\r\n\t self.graphs[name].set_range('yrange', (quantity-0.5, quantity+0.5))\r\n else:\r\n self.graphs[name].set_range('yrange', (miny-0.001, maxy+0.001))\r\n 
self.graphs[name].plot(self.xyData[name])\r\n if ((self.pause != 0) and (step % self.pause == 0)):\r\n \t print \"PRESS <RETURN> TO CONTINUE\"\r\n raw_input()\r\n else:\r\n if (step == 0):\r\n self.figures[name] = self.mplFigCount\r\n self.mplFigCount = self.mplFigCount + 1\r\n figure(self.figures[name], (6,4))\r\n ion()\r\n xlabel('Step')\r\n ylabel(name)\r\n self.xData[name].append(step)\r\n self.yData[name].append(quantity)\r\n plot(self.xData[name], self.yData[name])\r\n #show()\r\n if ((self.pause != 0) and (step % self.pause == 0)):\r\n \t print \"PRESS <RETURN> TO CONTINUE\"\r\n \t text = sys.stdin.read()\r\n sys.stdin = open(\"/dev/tty\")\r\n raw_input()", "def add_line(self, text: str):\r\n\r\n # create a new label\r\n new_line = QLabel()\r\n new_line.setText(text)\r\n new_line.setFont(self.master_font)\r\n\r\n new_line.setContentsMargins(0, 5, 0, 5)\r\n new_line.setFixedHeight(28)\r\n new_line.adjustSize()\r\n\r\n # add the label to the layout and set alignment\r\n self.line_list.addWidget(new_line)\r\n self.line_list.setAlignment(Qt.AlignTop)\r\n\r\n # add the label to the list\r\n self.list_labels.append(new_line)", "def investment_line(self):\n inv, marks = self._get_marks()\n fig = plt.figure(figsize=(4, 2), dpi=200)\n fig.patch.set_facecolor('#ececec')\n ax = fig.add_subplot(111)\n investmentValues = inv['Invested']\n #investmentValues = pd.Series([0], index=[investmentValues.index[0]-timedelta(1)]).append(investmentValues)\n ax.plot(investmentValues, lw=1.2, color=\"blue\", label='Invested', marker=\"o\", markersize=3, markerfacecolor=\"grey\")\n ax.set_xlabel('Time')\n ax.set_ylabel('Investments (€)')\n ax.set_title('Investment Amount (€) - Daily')\n ax.xaxis.set_major_locator(dates.MonthLocator())\n ax.xaxis.set_major_formatter(dates.DateFormatter('%b-%Y'))\n for x, y, mark in zip(marks.index, marks['Invested'], marks['Marks']):\n a = ax.get_ylim()\n if x == marks.index[0]:\n ax.annotate(str(mark) + \" €\", xy=(x + timedelta(abs((self.data.index[0] - self.data.index[-1]).days) / 80), y + (a[1]-a[0])/35), fontsize=5)\n else:\n ax.annotate(str(mark) + \" €\", xy=(x + timedelta(abs((self.data.index[0] - self.data.index[-1]).days) / 50), y - (a[1]-a[0])/35), fontsize=5)\n ax.grid(True)\n fig.autofmt_xdate()\n ax.legend()\n return fig, ax", "def generatePlot(data):\n addendum = \"\"\n destination = \"D:\\\\Research\\\\scripts\\\\Results\\\\FullSet1\\\\$FilteredPlots\\\\take 4\\\\\"\n if len(data.detections.smallIncrease) != 0:\n addendum = \"small increases\\\\\"\n if len(data.detections.smallDecrease) != 0:\n addendum = \"small decreases\\\\\"\n if len(data.detections.largeIncrease) != 0:\n addendum = \"large increases\\\\\"\n if len(data.detections.largeDecrease) != 0:\n addendum = \"large decreases\\\\\"\n if addendum == \"\":\n addendum = \"no decreases\\\\\"\n \n plt.figure(1)\n plt.subplot(211)\n #print np.min(data.magdata), np.max(data.magdata)\n axes = plt.gca()\n axes.set_title(\"Year: '{year}, Day: {day}\".format(year=data.calendarDay[:2], day=data.calendarDay[3:] ))\n axes.set_ylim([np.min(data.magdata)-1.2,np.max(data.magdata)+0.25])\n axes.set_ylabel(r'$\\mathbf{B}$ (nT)' )\n\n #plot formatting\n formats = dates.DateFormatter('%H:%M:%S')\n axes.xaxis.set_major_locator(dates.MinuteLocator())\n axes.xaxis.set_major_formatter(formats)\n \n br, = pp.plot(dates.date2num(data.timestamps),[row[0] for row in data.magdata],label='$B_r$')\n bt, = pp.plot(dates.date2num(data.timestamps),[row[1] for row in data.magdata],label='$B_t$')\n bn, = 
pp.plot(dates.date2num(data.timestamps),[row[2] for row in data.magdata],label='$B_n$')\n b0, = pp.plot(dates.date2num(data.timestamps),[row[3] for row in data.magdata],label='$B_0$')\n print len(data.detections.rotationBoundary)\n if len(data.detections.rotationBoundary) == 1:\n rotation, = pp.plot([dates.date2num(data.detections.rotationBoundary), dates.date2num(data.detections.rotationBoundary)], [np.min(data.magdata)-1,np.max(data.magdata)+0.25], linestyle='--', color = 'm', alpha = 0.4, label='$RB$')\n else:\n for index, value in enumerate(data.detections.rotationBoundary):\n rotation, = pp.plot([dates.date2num(value), dates.date2num(value)], [np.min(data.magdata)-1,np.max(data.magdata)+0.25], linestyle='--', color = 'm', alpha = 0.4, label='$RB$')\n if len(data.detections.rotationBoundary) != 0:\n pp.legend(handles=[br,bt,bn,b0,rotation], loc='lower left', ncol=4, fancybox=True, framealpha=0.5)\n else:\n pp.legend(handles=[br,bt,bn,b0], loc='lower left', ncol=4, fancybox=True, framealpha=0.5)\n\n start, end = axes.get_xlim()\n axes.xaxis.set_ticks(np.arange(start, end, (end-start)/5))\n \n \n\n plt.subplot(212)\n axes2 = plt.gca()\n #plot formatting\n formats = dates.DateFormatter('%H:%M:%S')\n axes2.xaxis.set_major_locator(dates.MinuteLocator())\n axes2.xaxis.set_major_formatter(formats)\n axes2.set_ylabel(r'$\\theta$ (deg)' )\n rotations, = pp.plot(dates.date2num(data.detections.rotationTimeTags),data.detections.rotations)\n #pp.legend(handles=[rotations], loc='lower left', ncol=4, fancybox=True, framealpha=0.5)\n \n\n outplotname = 'Plot ' + str(len(os.listdir(destination+addendum)) + 1)+ ' ' + data.timestamps[0].strftime('%y-%j-%H%M%S') + '.pdf'\n completename = os.path.join(destination+addendum,outplotname)\n plt.savefig(completename, bboxinches='tight')\n plt.clf()\n\n outplotname = 'Plot ' + str(len(os.listdir(destination+'rawdata\\\\'+addendum)) + 1)+ ' ' + data.timestamps[0].strftime('%y-%j-%H%M%S') + ' rawdata.csv'\n completename1 = os.path.join(destination+'rawdata\\\\'+addendum,outplotname)\n generateDataFile(data.rawdata,completename1)\n\n print \"Done generating plot...\"", "def plot1D(x, y, title=\"Title\", xlab=\"x-axis\", ylab=\"y-axis\"):\n plt.plot(x, y, linewidth=2)\n plt.title(title)\n plt.xlabel(xlab)\n plt.ylabel(ylab)", "def refresh_mpl_line(self, line=None):\n if line:\n self.set_label(self.label,line=line)\n self.set_color(self.color,line=line)\n self.set_style(self.style,line=line)\n self.set_marker(self.marker,line=line)\n self.set_markersize(self.markersize,line=line)\n self.set_linewidth(self.linewidth,line=line)", "def gdraw_line1(df=data):\n gr = df.groupby(['PUBorough', 'weekday'])\\\n .agg(trip_counts=('VendorID', 'count'))\\\n .reset_index(drop=False)\n return px.line(gr, x='weekday', y='trip_counts', color='PUBorough')\\\n .update_layout(\n template='plotly_dark',\n plot_bgcolor='rgba(0, 0, 0, 0)',\n paper_bgcolor='rgba(0, 0, 0, 0)',\n )", "def plt_connecting_lines():\n\n for i in range(0, Molecule.connection_count):\n tmp1 = Molecule.right_endpt[Molecule.left_connection[i] - 1]\n tmp2 = Molecule.left_endpt[Molecule.right_connection[i] - 1]\n tmp3 = Molecule.energy[Molecule.left_connection[i] - 1]\n tmp4 = Molecule.energy[Molecule.right_connection[i] - 1]\n\n plt.plot([tmp1, tmp2], [tmp3, tmp4], color=PlotParameter.connection_line_color,\n lw=PlotParameter.connection_line_width, linestyle='--')\n\n return None", "def plot(self):\n pass", "def plot_line_graph(target_offenses, counts, year_list, filename):\n\t#this is to demonstrate line graphs but 
the data is categorical so you should actually be using bar graphs\n\tfig, ax = plt.subplots()\n\tcolors = [\"blue\",\"red\",\"orange\",\"green\",\"yellow\",\"purple\"]\n\tfor index,offense in enumerate(target_offenses):\n\t\tplt.plot(year_list, counts[index], color=colors[index], marker= 'o', label=offense)\n\tax.get_xaxis().get_major_formatter().set_useOffset(False)\t\n\tplt.xlabel('Year')\n\tplt.ylabel('Number of offenses')\n\tplt.legend()\n\tplt.savefig(filename,format=\"png\")\n\tplt.show()", "def test_make_plot_custom(self):\n print(sys._getframe().f_code.co_name)\n try:\n x = np.arange(0,6)*300000\n y = np.arange(0,6)\n pp.make_plot(x,y,plot_type='c',plot_title='test',ylabel='test',xlabel='test',xticks=[0,2,4,6],yticks=[0,2,4,6])\n except Exception as e:\n raise\n plt.close('all')", "def my_simple_line(master, name, r, c, rsp, csp, px, py) -> object:\n line = tk.Label(master=master, text=name, anchor='w')\n line.grid(row=r, column=c, rowspan=rsp, columnspan=csp, padx=px, pady=py)\n return line", "def plothusly(ax, x, y, *, xtitle='', ytitle='',\n datalabel='', title='', linestyle = '-',\n marker = ''):\n\n ax.set_xlabel(xtitle)\n ax.set_ylabel(ytitle)\n ax.set_title(title)\n out = ax.plot(x, y, zorder=1, label=datalabel, linestyle = linestyle,\n marker = marker)\n return out", "def plot_one_axes(self, fig_num: int, title: str, y_label: str, raw: np.ndarray, smoothed: np.ndarray,\n legend_entries: Sequence[str]) -> matplotlib.figure.Figure:\n fig = plt.figure(fig_num)\n ax = fig.subplots(1, 1)\n raw_lines = kine_graph_init(ax, raw, y_label, self.frame_nums, [{'ls': ':', 'lw': 2}] * 3)\n ax.set_prop_cycle(None)\n smoothed_lines = kine_graph_add(ax, smoothed, self.frame_nums, [{'ls': '-'}] * 3)\n plt.tight_layout()\n fig.suptitle(title, x=0.7)\n legend_text = ('Raw (' + legend_entries[0] + ')', 'Smoothed (' + legend_entries[1] + ')',\n 'Smoothed (' + legend_entries[2] + ')')\n fig.legend((raw_lines[0], smoothed_lines[1], smoothed_lines[2]), legend_text, ncol=3, handlelength=0.75,\n handletextpad=0.25, columnspacing=0.5, loc='lower left')\n make_interactive()\n return fig", "def create_figure(self) -> None:\n plt.ion()\n self.fig = plt.figure(1)\n self.axis = self.fig.add_subplot(111, xlim=(0, 1), ylim=(0, 1))\n self.axis.grid(True)\n plt.xticks(np.linspace(0, 1, self._param[\"n_v\"] + 1))\n plt.yticks(np.linspace(0, 1, self._param[\"n_v\"] + 1))\n a_plt, = self.axis.plot([], [], 'bx', markersize=5)\n l_plt, = self.axis.plot([], [], 'r.', markersize=15)\n self.plots = [a_plt, l_plt]", "def replot(self,ax):\n self.XP_Plotter.replot(ax)\n # theoretical lines\n self.lines_theory[0].set_xdata(self.xx)\n self.lines_theory[1].set_xdata(self.xx)\n self.lines_theory[2].set_xdata(self.xx_itpl)\n for line in self.lines_theory: \n ax.draw_artist(line)", "def simple_plot(self):\n for i in np.arange(len(self.e2)):\n self.ax.plot(self.e2[i], 'o', label=self.labels[i])", "def _draw_line(plot, hori, vert, color, text):\n plot.plot(hori, vert, '-o'+color)\n plot.text(hori[-1]-3, vert[-1]+2, text, color=color)", "def plot_insertsize():", "def line_plot(x, y=None):\n mpl_fig = plt.figure()\n if y is None:\n plt.plot(x)\n else:\n plt.plot(x, y)\n return get_div_from_data(mpl_fig)", "def advanced_line():\n\n # Make dataset specifying arguments\n dataset_a = DataSet(sine,line_style='-',line_width=1.5,marker_style='o',marker_size='4')\n\n # Make dataset changing options using setters\n dataset_b = DataSet(cosine)\n dataset_b.set_line(style='--',width=1.5)\n dataset_b.set_colour(colour='royalblue')\n\n # Make 
plot object and adjust properties using setters\n plot = Plot()\n plot.set_text(latex=True,label=12)\n plot.add_dataset(dataset_a)\n plot.add_dataset(dataset_b)\n plot.set_axes(xlim=(0,8),ylim=(-1.1,1.1),xlabel=r'$x$',ylabel=r'$f\\left(x\\right)$',xticks=(1.0,0.2),yticks=(0.2,0.05))\n\n # Plot graph and display\n plot.plot()\n plot.save(name='./figures/2d_advanced_line',fmt='png')\n plot.display()", "def plot_kine_var(self, fig_num: int, title: str, y_labels: Sequence[str], prev_filled: np.ndarray,\n smoothed: np.ndarray, filled: np.ndarray, sfs: np.ndarray) -> matplotlib.figure.Figure:\n fig = plt.figure(fig_num)\n ax = fig.subplots(3, 1)\n prev_filled_lines = marker_graph_init(ax, prev_filled, '', self.frame_nums, color='red')\n smoothed_lines = marker_graph_add(ax, smoothed, self.frame_nums, color='blue')\n smoothed_filled_lines = marker_graph_add(ax, filled, self.frame_nums, ls=':', lw=2, color='green')\n sfs_lines = marker_graph_add(ax, sfs, self.frame_nums, color='green')\n for idx, sub_ax in enumerate(ax):\n plot_utils.update_ylabel(sub_ax, y_labels[idx], font_size=10)\n sub_ax.axvline(self.vicon_endpts[0])\n sub_ax.axvline(self.vicon_endpts[1])\n sub_ax.set_xlim(left=1)\n plt.tight_layout()\n fig.suptitle(title)\n fig.legend((prev_filled_lines[0], smoothed_lines[0], smoothed_filled_lines[0], sfs_lines[0]),\n ('Prev Filled', 'Smoothed', 'Smoothed/Filled', 'SFS'),\n ncol=4, handlelength=0.75, handletextpad=0.25, columnspacing=0.5, loc='lower left')\n make_interactive()\n return fig", "def plot(self):\n\t\tself.plotOfHeatingCurrent().plot()", "def single_plot(x_data, y_data, title, x_label, y_label):\n plt.figure(1, (18, 8)) # something, plot size\n plt.subplot(111)\n plt.plot(x_data, y_data)\n plt.title(title)\n plt.xlabel(x_label, fontsize=12)\n plt.ylabel(y_label, fontsize=12)\n # plt.legend(['Train', 'Test'], loc='upper left')\n plt.show()", "def _plot(self, step, rewards, losses):\n plt.figure(figsize=(20, 5))\n plt.subplot(131)\n plt.title('Total Episode Reward')\n plt.plot(rewards)\n plt.subplot(132)\n plt.title('MSE Loss')\n plt.plot(losses)\n plt.show()", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='green')", "def plot_single_activity(\n series, name, path_out,\n title_fs=TITLE_FS,\n axis_fs=AXIS_FS,\n tick_fs=TICK_FS,\n legend_fs=LEGEND_FS,\n resolution=RESOLUTION,\n args=ARGS,\n):\n fig, ax = plt.subplots(figsize=resolution, dpi=100)\n series.plot(alpha=0.3, color=\"blue\", style=\"-\", ax=ax)\n\n # series.resample('h').mean().plot(\n # label=\"mean\", style='-', color=\"black\", linewidth=2, ax=ax)\n\n h_median = series.resample('h').median()\n h_median.plot(\n label=\"median\", style='-', color=\"red\", linewidth=2, ax=ax)\n\n # lc = multiline(xs, ys, yint, cmap='bwr', lw=2)\n #\n # axcb = fig.colorbar(lc)\n # axcb.set_label('Y-intercept')\n # ax.set_title('Line Collection with mapped colors')\n\n # if args.legend:\n # axes[0].legend( # ncol=1, # borderaxespad=0.,\n # borderaxespad=0.,\n # loc=\"upper right\", # bbox_to_anchor=(1.1, 1.0),\n # fontsize=leg_fs,\n # fancybox=True, framealpha=0.5)\n # # plt.legend(ncol=2, borderaxespad=0.)\n ax.legend()\n\n # ffn = ioh.safename(path_out / f\"{name}.png\", \"file\")\n ffn = path_out / f\"{name.lower()}.png\"\n plot_path = ioh.safesavefig(ffn)\n\n logging.debug(f\"Figure exported to {plot_path}\")\n\n return plot_path, h_median", "def draw_lists_pyplot(y_array, line_weight=3, learnig_rate=1):\n y = y_array\n plt.plot(y, lw=line_weight, 
label='cost(a={:})'.format(learnig_rate))\n plt.legend()\n\n plt.title(\"Gradient Descent Optimizing Method\\nminimize cost function\")\n plt.xlabel('time-itoration')\n plt.ylabel('cost-function')\n plt.xlim(0,)\n plt.ylim(0,)\n\n plt.grid(b=None, which='major', axis='both')\n plt.show()", "def liveplot(x, y, xlim, ylim, title):\n plt.plot(x,y,'b.')\n plt.xlim(xlim)\n plt.ylim(ylim)\n plt.xlabel('North-South Axis')\n plt.ylabel('East-West Axis')\n plt.title(title)\n plt.show()", "def graph1():\r\n sheet = workbook.sheet_by_index(0)\r\n data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)] for r in range(sheet.nrows)]\r\n\r\n for i in range(2, sheet.nrows):\r\n list_data[0].append((data[i][0], round((data[i][3]/data[i][1])*100, 2)))\r\n list_data[1].append((data[i][0], round((data[i][4]/data[i][2])*100, 2)))\r\n list_data[2].append((data[i][0], round(((data[i][3] + data[i][4])/(data[i][1] + data[i][2]))*100, 2)))\r\n\r\n line_graph = pygal.XY()\r\n line_graph.title = 'อัตราการดื่มเครื่องดื่มแอลกอฮอล์รวม และแยกตามเพศ ระหว่างปี 2544 ถึง 2557'\r\n line_graph.x_labels = (2544, 2546, 2548, 2550, 2552, 2554, 2556, 2558)\r\n for i in range(3):\r\n line_graph.add(data_name[i], list_data[i])\r\n line_graph.render_to_file('1Alcohol consumption rate by genders between 2001 and 2014.svg')", "def create_chart(conf, entries):\r\n serie_index = 0\r\n for serie in conf['series']:\r\n data = []\r\n for entry in entries:\r\n if entry is not None:\r\n data.append(entry.datatolist(str(serie['db'])))\r\n conf['series'][serie_index]['data'] = data\r\n serie_index += 1\r\n \r\n \"\"\" Add PlotBands \"\"\" \r\n plotBands = []\r\n last_entry = len(entries)-1\r\n n = 1\r\n while n < last_entry and\\\r\n entries[n].phase is not None and\\\r\n entries[n] is not None and\\\r\n entries[n].next().phase is not None:\r\n begin = entries[n].dt\r\n phase = entries[n].phase\r\n n += 1\r\n while entries[n] is not None and\\\r\n entries[n].phase is not None and\\\r\n entries[n].phase == phase and\\\r\n n < last_entry:\r\n n += 1\r\n end = entries[n].dt\r\n plotBand = {\r\n 'color': PhaseColor[phase],\r\n 'from': datetime_to_timestamp(begin),\r\n 'to': datetime_to_timestamp(end)\r\n }\r\n plotBands.append(plotBand)\r\n conf['xAxis']['plotBands'] = plotBands\r\n \r\n \"\"\" Add Labels \"\"\" \r\n condition_flag_allumage = '((prec.phase is not None) and (prec.phase is not PHASE_ALLUMAGE))'\r\n condition_next_is_not_maintien = '((next.phase is not None) and (next.phase is not PHASE_MAINTIEN))'\r\n labels = json.loads(json.dumps(ChartLabel)) #make a copy of original object\r\n labels['name'] = 'Labels'\r\n for entry in entries:\r\n if entry is not None and entry.phase is not None:\r\n #Label Allumage \r\n if entry.event is not None:\r\n data = {\r\n \"x\": datetime_to_timestamp(entry.dt),\r\n \"title\": 'Allumage'\r\n }\r\n labels['data'].append(data)\r\n \"\"\"\r\n # Label Combustion \r\n if entry.phase == PHASE_COMBUSTION and\\\r\n entry.prec() is not None and\\\r\n entry.prec().phase is not PHASE_COMBUSTION and\\\r\n entry.all_next_verify_condition(5, condition_next_is_not_maintien):\r\n data = {\r\n \"x\": datetime_to_timestamp(entry.dt),\r\n \"title\": 'Combustion'\r\n }\r\n labels['data'].append(data)\r\n \"\"\"\r\n conf['series'].append(labels)\r\n\r\n \"\"\" Add Subtitle (plotbands legend) \"\"\"\r\n #conf[\"subtitle\"] = ChartLegend\r\n\r\n \"\"\" Add Title (date begin date end) \"\"\"\r\n if len(entries) > 3:\r\n begin = pretty_date(entries[0].dt)\r\n end = pretty_date(entries[len(entries)-1].dt)\r\n 
#conf[\"title\"][\"text\"] = 'Monitoring Chaudière du {0} au {1}'.format(begin, end)\r\n conf[\"title\"][\"text\"] = 'Monitoring Chaudière'\r\n conf[\"subtitle\"][\"text\"] = ' du {0} au {1}'.format(begin, end)\r\n\r\n else:\r\n conf[\"title\"][\"text\"] = 'Monitoring Chaudière'\r\n\r\n \"\"\" Return new conf \"\"\"\r\n return conf", "def draw_lists_pyplot(y_array, line_weight=3, learnig_rate=1):\n y = y_array\n plt.plot(y, lw=line_weight, label='cost(a={:})'.format(learnig_rate))\n plt.legend()\n\n plt.title(\"Gradient Descent Optimizing Method\\nminimize cost function\")\n plt.xlabel('time-itoration')\n plt.ylabel('cost-function')\n\n plt.xlim(0,)\n plt.ylim(0,)\n\n plt.grid(b=None, which='major', axis='both')\n plt.show()", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='blue')", "def plot_single(log, remove_redundant_entries, select_string):\n\tselected_sources = _transform_select_string(select_string,log)\n\tplot_data , lines, dates, _ = log.give_plot_data(remove_redundant_entries=remove_redundant_entries, sources=selected_sources)\n\tfig, ax = plt.subplots(figsize=(11,6))\n\tfig.autofmt_xdate()\n\tl, = plt.plot(dates,lines,picker=10, color='red', marker='.', linestyle='-', linewidth=0.5, ms=5, mec='blue', label=log.name )\n\tmyFmt = DateFormatter(\"%Y %d.%b %H:%M:%S\")\n\tax.xaxis.set_major_formatter(myFmt)\n\tplt.legend(loc='upper left')\n\tannot = ax.annotate(\"\", xy=(0,0), xytext=(0.01,0.01) ,textcoords='figure fraction', bbox=dict(boxstyle=\"round\", fc=\"cyan\"), arrowprops=dict(arrowstyle=\"->\"))\n\tannot.set_visible(False)\n\tax.set_xlabel('timestamps in UTC')\n\n\tif remove_redundant_entries == 1:\n\t\tax.set_ylabel('number of sequential entry')\n\t\tplt.title('Analysis of the file ' + log.name +'\\n' + 'where all entries having the same timestamp are removed')\n\t\tplt.subplots_adjust(left=0.1, bottom=0.23, right=0.95, top=0.90)\n\telse:\n\t\tplt.title('Analysis of the file ' + log.name)\n\t\tax.set_ylabel('sequential id')\n\t\tplt.subplots_adjust(left=0.1, bottom=0.23, right=0.95, top=0.95)\n\n\tdef update_annot(ind):\n\t\tx,y = l.get_data()\n\t\tannot.xy = (x[ind[\"ind\"][0]], y[ind[\"ind\"][0]])\n\t\tif remove_redundant_entries == 1:\n\t\t\ttext = plot_data[y[ind[\"ind\"][0]]-1]\n\t\telse:\n\t\t\ttemp = [x for x in plot_data if x.id == y[ind[\"ind\"][0]]]\n\t\t\ttext = temp[0]\n\t\tannot.set_text(text)\n\t\tannot.get_bbox_patch().set_alpha(0.4)\n\n\tdef hover(event):\n\t\tvis = annot.get_visible()\n\t\tif event.inaxes == ax:\n\t\t\tcont, ind = l.contains(event)\n\t\t\tif cont:\n\t\t\t\tupdate_annot(ind)\n\t\t\t\tannot.set_visible(True)\n\t\t\t\tfig.canvas.draw_idle()\n\t\t\telse:\n\t\t\t\tif vis:\n\t\t\t\t\tannot.set_visible(False)\n\t\t\t\t\tfig.canvas.draw_idle()\n\n\tfig.canvas.mpl_connect(\"motion_notify_event\", hover)\n\tfig.canvas.mpl_connect('key_press_event', _quit_figure)", "def plot_figure(param1, param2):\n return 0", "def figure4():\n\n plot_settings = {'y_limits': [-80, -50],\n 'x_limits': None,\n 'y_ticks': [-80, -70, -60, -50],\n 'locator_size': 5,\n 'y_label': 'Voltage (mV)',\n 'x_ticks': [],\n 'scale_size': 20,\n 'x_label': \"\",\n 'scale_loc': 4,\n 'figure_name': 'figure_4',\n 'legend': ['control', 'apamin'],\n 'legend_size': 8,\n 'y_on': True}\n line_styles = ['-', 'dotted']\n\n plt.figure(figsize=(5, 3), dpi=96)\n\n plt.subplot(2, 1, 1) # Generate figure 1 (top)\n for ix, g_sk_bar in enumerate([0.3, 0]):\n t, y = solver(100, g_sk_bar=g_sk_bar)\n plt.plot(t, y[:, 0], c='k', 
linestyle=line_styles[ix])\n alter_figure(plot_settings) # Alter figure for publication\n\n plt.subplot(2, 1, 2)\n t1 = 1200\n t, y = solver(t1, t_start=50, duration=t1, i_bias_on=0.33, g_sk_bar=0.03)\n plt.plot(t, y[:, 0], 'k-')\n\n plot_settings['y_limits'] = [-100, 30]\n plot_settings['x_limits'] = [0, t1]\n plot_settings['y_ticks'] = [-80, -60, -40, -20, 0, 20]\n plot_settings['locator_size'] = 10\n plot_settings['scale_size'] = 100\n plot_settings['legend'] = None\n alter_figure(plot_settings, close=True) # Alter plot for publication", "def makeplot(x, ys, labels, xlabel, ylabel, plainlines = False, figure = None,\\\r\n filename = None, sigmas = None, logy = False, logx = False):\r\n \r\n #initialise a pyplot figure if needed\r\n if figure is None:\r\n f = plt.figure()\r\n #add axis\r\n a = f.add_subplot(111)\r\n else:\r\n a = f.axes[0]\r\n \r\n #styles for plotted data\r\n styles = ['rx-','yx-','gx-','mx-','rx-']\r\n formats = ['rx','yx','gx','mx','rx']\r\n \r\n #plain line styles\r\n if plainlines:\r\n styles = ['k-','r-','g-','y-','m-']\r\n \r\n #plot . . .\r\n for i in range(len(ys)):\r\n a.plot(x, ys[i], styles[i], label = labels[i])\r\n if sigmas is not None:\r\n for i in range(len(ys)):\r\n a.errorbar(x, ys[i],yerr = sigmas[i], fmt = formats[i], elinewidth = 1,\\\r\n ecolor = 'black', capsize = 2) \r\n if logx:\r\n a.set_xscale('log')\r\n if logy:\r\n a.set_yscale('log')\r\n \r\n #set labels\r\n a.set_xlabel(xlabel)\r\n a.set_ylabel(ylabel)\r\n \r\n #add legend\r\n a.legend(loc = 'best')\r\n \r\n #save\r\n if filename is not None:\r\n f.savefig(filename+\".svg\")\r\n \r\n return f", "def visualize_time_series(fig_ax, data, inp_color, missing_data, lag_color, first_date,\n x_label=\"Number of Days\", y_label=\"Log of Aluminium Price\", title=\"Prices over time\"):\n fig, ax = fig_ax\n ((x_train_raw, y_train_raw), y_pred_list) = data\n\n missing_x, missing_y = missing_data\n is_missing = len(missing_x) != 0\n\n first_date = datetime.strptime(first_date, '%Y-%m-%d')\n\n convert_date = lambda x: [\n np.datetime64((first_date + timedelta(days=d)).strftime('%Y-%m-%d'))\n for d in x\n ]\n convert_price = lambda x: x[\"Output\"].to_list()\n\n x_train = convert_date(x_train_raw[\"Date\"].to_list())\n y_train = convert_price(y_train_raw)\n \n cut_point = x_train[-1]\n ax.plot(x_train, y_train, color=color[inp_color])\n\n for i, y_pred in enumerate(y_pred_list):\n data, plot_name, color_code, is_bridge = y_pred\n mean_pred, x_test_raw = data[\"mean\"], data[\"x\"]\n x_test = convert_date(x_test_raw)\n\n if i == 0 and is_missing:\n missing_x = convert_date(missing_x)\n ax.axvline(x_test[0], color=color[lag_color], linestyle='--', linewidth=0.5, dashes=(5, 0), alpha=0.2)\n ax.plot([missing_x[-1], x_test[0]], [missing_y[-1], mean_pred[0]], color[lag_color], linestyle=\"dashed\")\n ax.axvspan(cut_point, x_test[0], color=color[lag_color], alpha=0.1)\n\n plot_bound(ax, data, x_test, color[color_code], plot_name)\n\n if is_bridge and (not is_missing): \n ax.plot([x_train[-1], x_test[0]], [y_train[-1], mean_pred[0]], color[color_code], linewidth=1.5)\n\n if is_missing:\n ax.plot(missing_x, missing_y, color=color[lag_color], linestyle=\"dashed\")\n ax.plot([x_train[-1], missing_x[0]], [y_train[-1], missing_y[0]], color[lag_color], linestyle=\"dashed\")\n ax.axvline(cut_point, color=color[lag_color], linestyle='--', linewidth=0.5, dashes=(5, 0), alpha=0.2)\n else:\n ax.axvline(cut_point, color=color[\"k\"], linestyle='--')\n\n ax.xaxis.set_minor_locator(AutoMinorLocator())\n ax.legend()\n\n # 
ax.set_xlabel(x_label)\n ax.set_ylabel(y_label)\n ax.set_title(title)\n\n # ax.set_xlim(left=cut_point-np.timedelta64(1, 'm'))\n plot_axis_date(ax, x_train + missing_x + x_test)\n ax.grid()\n return fig, ax", "def value_line(self):\n marks = self._get_marks(False)\n marks['Val'] = self.data['Value']\n fig = plt.figure(figsize=(4,2), dpi=200)\n fig.patch.set_facecolor('#ececec')\n ax = fig.add_subplot(111)\n ax.plot(self.data['Value'], alpha=0.8, lw=1.2, color=\"green\", label='Value')\n ax.scatter([x for x in marks[marks['Marks']>0].index], marks[marks['Marks']>0]['Val'], marker='^', s=20, c=\"b\", label=\"Buy\")\n ax.scatter([x for x in marks[marks['Marks']<0].index], marks[marks['Marks']<0]['Val'], marker='v', s=20, c=\"r\", label=\"Sell\")\n ax.set_xlabel('Time')\n ax.set_ylabel('Portfolio\\'s Value (€)')\n ax.set_title('Portfolio\\'s Value (€) - Daily')\n ax.xaxis.set_major_locator(dates.MonthLocator())\n ax.xaxis.set_major_formatter(dates.DateFormatter('%b-%Y'))\n for x, y, mark in zip(marks.index, marks['Val'], marks['Marks']):\n a = ax.get_ylim()\n if x == marks.index[0]:\n ax.annotate(str(mark) + \" €\", xy=(x + timedelta(abs((self.data.index[0] - self.data.index[-1]).days) / 80), y + (a[1]-a[0])/35), fontsize=5)\n else:\n if mark > 0:\n ax.annotate(str(mark) + \" €\", xy=(x + timedelta(abs((self.data.index[0] - self.data.index[-1]).days) / 60), y - (a[1]-a[0])/35), fontsize=5)\n else:\n ax.annotate(str(mark) + \" €\", xy=(x - timedelta(abs((self.data.index[0] - self.data.index[-1]).days) / 15), y + (a[1]-a[0])/35), fontsize=5)\n ax.grid(True)\n fig.autofmt_xdate()\n ax.legend()\n return fig, ax", "def plot(title, ylabel,data,toprange,span):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n x = [1,2,3]\n nodata = True\n for key in data.keys():\n if len(data[key]) > 0:\n nodata = False\n ax.plot(x[:len(data[key])],data[key],marker='o',label=key)\n if nodata:\n print data\n print \"none\"\n plt.close()\n return\n plt.xlabel(\"Block Number\")\n plt.ylabel(\"Performance\")\n plt.xticks(x)\n plt.legend()\n plt.axis([0,len(x)+1,0,0.1+toprange])\n plt.title(title)\n if not os.path.isdir(\"analysis\"):\n os.mkdir(\"analysis\")\n if not os.path.isdir(\"analysis/\"+span):\n os.mkdir(\"analysis/\"+span)\n plt.savefig(\"analysis/\"+span+\"/\"+title.replace(\" \",\"\").lower()+\".png\")\n plt.close()", "def peek(self, **kwargs):\n\n plt.figure()\n axes = plt.gca()\n data_lab=self.meta['OBS-FREQ'][0:2] + ' ' + self.meta['OBS-FREQ'][2:5]\n axes.plot(self.data.index,self.data,label=data_lab)\n axes.set_yscale(\"log\")\n axes.set_ylim(1e-4,1)\n axes.set_title('Nobeyama Radioheliograph')\n axes.set_xlabel('Start time: ' + self.data.index[0].strftime(TIME_FORMAT))\n axes.set_ylabel('Correlation')\n axes.legend()\n plt.show()", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='red')", "def make_1d_plot(data, fig_name, Label, Title, x_label, y_label, customDPI):\n plt.figure(1, figsize=(10.0, 6.0))\n plt.subplots_adjust(left=None, bottom=0, right=None, top=0.86, wspace=None, hspace=None)\n plt.plot(data, label=Label)\n plt.title(Title, fontsize=10, fontweight='bold', style='italic', y=1.025)\n plt.legend(loc = 'upper right', fontsize=10)\n plt.ylabel(x_label, fontsize=10, fontweight='bold')\n plt.xlabel(y_label, fontsize=10, fontweight='bold')\n plt.yticks(fontsize=8, fontweight='bold')\n plt.xticks(fontsize=8, fontweight='bold')\n pylab.savefig(fig_name, bbox_inches='tight', dpi=customDPI)\n plt.close('all')", "def show1d(self, data):\n 
self.__plot1d.clear()\n self.__plot1d.addCurve(legend=\"data\", x=range(len(data)), y=data)\n self.setCurrentIndex(self.__index1d)", "def show_custom_graph(self):\n pass", "def _draw_label(label, label_x, label_y):\n pass" ]
[ "0.6918902", "0.6715607", "0.66300875", "0.6610081", "0.6605", "0.65108675", "0.6509052", "0.6483655", "0.64417446", "0.6436465", "0.64165217", "0.64094067", "0.6303501", "0.6279875", "0.62422055", "0.6156551", "0.61285985", "0.6121611", "0.6119316", "0.61107874", "0.61096317", "0.60980076", "0.60548234", "0.6049465", "0.6039621", "0.60328615", "0.60264695", "0.5994367", "0.5988487", "0.5984225", "0.59696203", "0.59618115", "0.5961229", "0.5959394", "0.5958197", "0.5955759", "0.59155816", "0.5911805", "0.5901553", "0.58983797", "0.58844584", "0.5879867", "0.5875956", "0.58659506", "0.5865657", "0.58650845", "0.5855256", "0.5852698", "0.5850971", "0.58296555", "0.58138096", "0.5810863", "0.5801594", "0.5800015", "0.5793815", "0.57904536", "0.57812846", "0.57760024", "0.5774108", "0.5764696", "0.5762665", "0.5746514", "0.5732656", "0.573174", "0.5724806", "0.5724684", "0.572373", "0.5721437", "0.57154894", "0.570813", "0.5706475", "0.5705442", "0.56921834", "0.5681568", "0.56805235", "0.5666163", "0.5652224", "0.564851", "0.5646977", "0.5643195", "0.56429964", "0.56416047", "0.56400937", "0.56380874", "0.56258947", "0.5623015", "0.56186664", "0.5611011", "0.56054723", "0.5605238", "0.5602307", "0.5601776", "0.5597938", "0.5585491", "0.55824614", "0.5580812", "0.55800253", "0.5578112", "0.5578025", "0.55765796" ]
0.5919027
36
Given an input (instance of the BenchInput tuple), constructs and validates a disjunctive ChaumPedersen proof, returning the time (in seconds) to do each operation.
def chaum_pedersen_bench(bi: BenchInput) -> Tuple[float, float]: (keypair, r, s) = bi ciphertext = get_optional(elgamal_encrypt(0, r, keypair.public_key)) start1 = timer() proof = make_disjunctive_chaum_pedersen_zero( ciphertext, r, keypair.public_key, ONE_MOD_Q, s ) end1 = timer() valid = proof.is_valid(ciphertext, keypair.public_key, ONE_MOD_Q) end2 = timer() if not valid: raise Exception("Wasn't expecting an invalid proof during a benchmark!") return end1 - start1, end2 - end1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def part2(input):\n ps = PlanetSystem(input)\n c = ps.total_cycle_time()\n return c", "def part_2():\n input_ = parse_input() + list(range(10, 1_000_001))\n cups = turn_input_into_cups(input_)\n cups = solve(cups, first_cup=cups[input_[0]], turns=10_000_000)\n\n return cups[1].next.number * cups[1].next.next.number", "def dpTime():\n start_time = time.time()\n subjects = loadSubjects(SUBJECT_FILENAME)\n maxWork = 50\n answer = dpAdvisor(subjects, maxWork)\n end_time = time.time()\n printSubjects(answer)\n print 'Time taken: ', end_time - start_time\n return None", "def process():\r\n\t### INPUT ####\r\n\tpq = eat(str)\r\n\tp, q = map(int, pq.split('/'))\r\n\t\r\n\t### COMPUT ####\r\n\td = gcd(p, q)\r\n\tp, q = p//d, q//d\r\n\teprint(bin(q), bin(p))\r\n\tif len(bin(q).strip('0')) != 2:\r\n\t\treturn IMPOSSIBLE\r\n\t#length of p\r\n\tbinp = bin(p)\r\n\tbinq = bin(q)\r\n\treturn len(binq) - len(binp)\r\n\t\r\n\t### OUTPUT ####\r\n\treturn solve()", "def batchWallTime(cls, time, parsedCmd, numCpus):\n numTargets = 0\n for refList in parsedCmd.id.refList:\n numTargets += len(refList)\n return time*numTargets/float(numCpus)", "def part_2(puzzle_input: Tuple[Number] = p1) -> Number:\n for (noun, verb) in permutations(range(len(p1)), 2):\n # Create a fresh copy for each run\n program = list(p1)\n restore_program(memory_updates={1: noun, 2: verb}, memory=program)\n c = Computer(program)\n c.run_program()\n if c.read(0) == 19_690_720:\n return 100 * noun + verb\n raise ExecutionError(\"Could not satisfy requirement\")", "def _calcExecTime(self, migTask, dPrime):\n #print \"ae\", self\n # Let's start making U = 0.9999 (which probably causes deadline misses).\n # If we force U = 1, we won't be able to use La.\n if self.util() >= 0.9999:\n self._lastCost = 0.0\n return 0.0\n cPrime = (0.9999 - self.util())*migTask.period()\n\n # Temporarily add the slice\n tempSlice = WmSlice(-1, cPrime, dPrime, migTask)\n self._addSlice(tempSlice)\n\n L = self._L()\n min_d = self._minDeadline()\n\n #print \"L\", L\n #print self\n #print \"Calculating cost. dPrime\", dPrime\n\n # QPA\n t = self._lastDeadline(L)\n h = self._h(t)\n #print t\n while round(t,12) >= round(min_d,12): # We are checking demand only for the migratory task\n # We round the checking to 12 decimal places. Otherwise, it could make the algorithm repeat undefinedly, in\n # case new calculated cost is not 100% precise. We do the same when applying floor(). The other comparisons don't\n # need this correction, since they are not so critical.\n if round(h,12) > round(t,12):\n #print \"HIGH. t %.15f\" % t, \"h(t) %.15f\" % h, \". C was\", cPrime\n cPrime = (t - self._h_oth(t, tempSlice)) / floor(round((t + migTask.period() - dPrime)/migTask.period(), 12))\n #print \"New C is\", cPrime\n tempSlice._wcet = cPrime # Update slice cost to fix demand\n\n if cPrime <= 0.0: # Stop if the cost gets negative\n self._removeLastSlice()\n self._lastCost = 0.0\n return 0.0\n\n #print \"OK. t\", t, \"h(t)\",h, \"new t\",\n t = self._lastDeadline(t)\n #print t\n h = self._h(t)\n #print \"OK. 
t\", t, \"h(t)\",h\n\n #print self\n #print \"Final cost\", cPrime\n #if not self._qpa():\n # print self.tasks()\n #assert self._qpa()\n\n self._removeLastSlice()\n self._lastCost = cPrime\n return cPrime", "def prove_CM() -> Proof:\n # Optional Task 6.7f", "def calculateWaitingTime(self, inputs):\n CollisionCounter.CollisionCounter.getInstance().waitingTimeCalculated(self.time)\n timeUntilDepature = self.getAtt('departure_time', inputs) - self.time\n remainingLoadingTime = self.calculateLoadingTime(inputs)\n # calculates first maximum possible waiting time\n sampleTime = int((timeUntilDepature - remainingLoadingTime) / self.participants)\n\n if sampleTime >= 1:\n # result is big enough for a standard treatment\n self.waitingTime = MyRandom.RandomNumber.getInstance().getRandomNumber(sampleTime + 1)\n elif sampleTime < 1:\n # reslut is too small, special treatment necessary\n upperLimit = (10 * (1 - (math.exp(sampleTime - 1)))) + 1\n self.waitingTime = MyRandom.RandomNumber.getInstance().getRandomNumber(max((min(upperLimit,\n timeUntilDepature)) + 1, 1))\n # decides whether charging is allowed during waiting time\n if not self.stayedConnected:\n self.stayConnected = True\n self.stayedConnected = True\n else:\n self.stayConnected = False\n self.stayedConnected = False", "def part_a(puzzle_input):\r\n ec = ExperimentalCoprocessor(puzzle_input)\r\n try:\r\n while True:\r\n ec.step()\r\n except Exception:\r\n pass\r\n return str(ec.mul_counter)", "def timeThem(*args, **kwargs):\n\n funcs = []\n funcArgs = list(args[:])\n \n #filter arguments\n for arg in args:\n if callable(arg):\n funcs.append(arg)\n funcArgs.remove(arg)\n \n key = \"inNumber\"\n inNumber=10\n if key in kwargs:\n inNumber = kwargs[key]\n del kwargs[key]\n\n durations = []\n refTime = 0.0\n\n for func in funcs:\n retVal = func(*funcArgs, **kwargs)\n duration = timeit(partial(func, *funcArgs, **kwargs), number=inNumber)\n \n comparison = \"\"\n if refTime <= 0.0:\n refTime = duration\n else:\n comparison = \" ( *{:.2f})\".format(duration / refTime)\n \n print(\"{: <16} : {:.4f}\".format(func.__name__, duration) + comparison + \" returns '{}' ({})\".format(retVal, type(retVal).__name__))\n durations.append(duration)\n \n return durations", "def enter_data_for_time_calc():\n print(\"Pace & Distance -> Time\")\n print(\"=\" * 50)\n\n pace = input(\"Pace[min/km]: \")\n distance = float(input(\"Distance[km]: \"))\n\n calc_time(pace, distance)", "def benchmark(func, inputs):\n t0 = time.clock()\n results = [func(x) for x in inputs]\n t1 = time.clock()\n average_time = (t1 - t0) / len(inputs)\n return average_time, results", "def run_timings():\n\n running_times = []\n\n while recorded_time := input(f\"Enter your 10k time: \"):\n if not recorded_time:\n break\n running_times.append(float(recorded_time))\n average_pace = sum(running_times) / len(running_times)\n return average_pace", "def test_countdown_performance():\n profiler = cProfile.Profile()\n profiler.enable()\n countdown(\"0 0 0 0 0 0 1000\")\n profiler.disable()\n stats = profiler.getstats()\n tot_time = stats[0].totaltime\n assert tot_time < 3, \"Wow, your computer is really slow. 
Or is it my code?\"", "async def test(dut):\n\n dut._log.info(\"Running test...\")\n cocotb.fork(Clock(dut.clk, 1, units=\"ns\").start())\n dut.rst <= 0\n await Timer(0.2, units = \"ns\")\n dut.rst <= 1\n await Timer(1, units = \"ns\")\n dut.rst <= 0\n m = []\n map = {0: [0,0], 1:[0,1], 2:[1,1]}\n dut.en <= 1\n for i in range(350):\n m.append(randint(0,2))\n m.append(randint(0,2))\n num = map.get( m[2*i+1] ) + map.get( m[2*i] )\n dut.m_in <= int.from_bytes(bit_handle.arr_to_str(num), \"big\")\n await Timer(1, units = \"ns\")\n m.append(0)\n dut.m_in <= 0\n await Timer(1, units = \"ns\")\n dut.en <= 0\n await Timer(10, units = \"ns\")\n expect, m0_str = hrss(m)\n try:\n if dut.m1.value != expect:\n fail = 1\n report.write(\" + m1 = %X \\n + but i expect it = %X\\n\" %( int(dut.m1.value), expect ) )\n else:\n report.write(\"It is true that: + m1 = %X\\n\" %( int(dut.m1.value) ) )\n except:\n fail = 1\n report.write(\"Out is unidentified, but i expect it = %X\\n\" %( expect ) )\n\n m0 = binit(dut.m0.value, 9113)\n try:\n if m0 != m0_str:\n fail = 1\n report.write(\" + m0 = %s \\n + but i expect it = %s\\n\" %( m0, m0_str ) )\n else:\n report.write(\"It is true that: + m0 = %s\\n\" %( m0 ) )\n except:\n fail = 1\n report.write(\"Out is unidentified, but i expect it = %s\\n\" %( m0_str ) )\n\n dut._log.info(\"Running test...done\")", "async def my_test_dff(dut):\n\n #### Create and start clock with concurrent coroutine operation\n # Clock with 50% duty cycle and a period of 10ps\n cocotb.fork(Clock(dut.clk, 10, \"ps\").start())\n\n # Syncronize with the clock\n await RisingEdge(dut.clk)\n\n #### Generate transactions\n # In this case, all possible combinations in every consecutive order \n # 2 inputs (D and rstN) = 4 possible binary combinations (00, 01, 10, 11) => 2^4 = 16 possible combinations in every consecutive order\n # Declare the number of inputs\n num_of_inputs = 2\n # Create a list of permutations for those two inputs (list: [(0, 1), (1, 0)])\n transactions = list(permutations(range(num_of_inputs), 2))\n # Permutations do not account for repeat value combinations; so add those in to get (list: [(0, 1), (1, 0), [0, 0], [1, 1]]) the 4 possible binary combinations\n for i in range(num_of_inputs):\n transactions.append([i, i])\n # Create a list of permutations on top of the list of permutations to account for the \"in every consecutive order\" part\n transactions = list(permutations(transactions, 2))\n # Again, we must add in the missed repeat value combinations; there were 4 missed this time instead of the 2 above\n for i in range(num_of_inputs):\n transactions.append(([i, i], [i, i]))\n if i == 1:\n transactions.append(([i, 0], [i, 0]))\n transactions.append(([0, i], [0, i]))\n\n # Run the simulation with the transactions generated\n for i in range(len(transactions)):\n\n # Assign the stimulus to the DUT's ports\n dut.D <= transactions[i][0][0]\n dut.rstN <= transactions[i][0][1]\n \n # Simulate some small time (less than half the period) for the random integers to reach the DUT's input ports\n await Timer(1, \"ps\")\n #print(f\"The D input: {dut.D.value}\")\n #print(f\"The rstN input: {dut.rstN.value}\")\n \n # Detect the falling edge of clock \n await FallingEdge(dut.clk)\n\n # Simulate some small time (less than half the period) for the output to update accordingly after the falling edge (aka if reset is low)\n await Timer(1, \"ps\")\n #print(f\"The output after the falling edge: {dut.Q.value}\")\n\n # Detect the rising edge of clock\n await RisingEdge(dut.clk)\n\n # Simulate some 
small time (less than half the period) for the output to update accordingly after the rising edge (aka if \"D\" is different than \"Q\")\n await Timer(1, \"ps\")\n #print(f\"The output after the rising edge: {dut.Q.value}\")\n\n # Assert an error message and stop simulation if the output does not match the model's output\n assert dut.Q.value == my_dff_model(transactions[i][0][0], transactions[i][0][1]), f\"Failure: Transaction - {transactions[i][0]} failed!\"\n\n #### There is a double simulation per \"for\" loop because of how the transaction was built \n # Assign the stimulus to the DUT's ports\n dut.D <= transactions[i][1][0]\n dut.rstN <= transactions[i][1][1]\n\n #Simulate some small time (less than half the period) for the random integers to reach the DUT's input ports\n await Timer(1, \"ps\")\n #print(f\"The D input: {dut.D.value}\")\n #print(f\"The rstN input: {dut.rstN.value}\")\n\n # Detect the falling edge of clock\n await FallingEdge(dut.clk)\n\n # Simulate some small time (less than half the period) for the output to update accordingly after the falling edge (aka if reset is low)\n await Timer(1, \"ps\")\n #print(f\"The output after the falling edge: {dut.Q.value}\")\n\n # Detect the rising edge of clock\n await RisingEdge(dut.clk)\n\n # Simulate some small time (less than half the period) for the output to update accordingly after the rising edge (aka if \"D\" is different than \"Q\")\n await Timer(1, \"ps\")\n #print(f\"The output after the rising edge: {dut.Q.value}\")\n\n\n assert dut.Q.value == my_dff_model(transactions[i][1][0], transactions[i][1][1]), f\"Failure: Transaction - {transactions[i][1]} failed!\"", "def verify_implementation():\n import csv\n import time\n\n times = dict()\n max_run_times = 200\n for n in range(1, max_run_times):\n sentence = ''\n for _ in range(n):\n sentence += random.choice('01')\n sentence += 'ε'\n start = time.time()\n parse(SmallGrammar, sentence)\n end = time.time()\n times[n] = end - start\n print(n)\n with open('cyk_run_times.csv', 'w') as fout:\n writer = csv.writer(fout)\n for n in range(1, max_run_times):\n writer.writerow([n, times[n]])", "def solution(sequences):\n sorted_log = sort_log(sequences)\n guards = log(sorted_log)\n suspect = sleepy_guard(guards)\n table = guards[suspect].table()\n minute = find_minute(table)\n print(f\"sleepy guard {suspect} slept for {guards[suspect].has_slept()}\")\n print(f\"most at minute {minute}\")\n print(f\"answer: {suspect * minute}\")\n return suspect * minute", "def time(diners):\n if str(diners) in cache:\n return cache[str(diners)]\n if diners[0] <= 3:\n r = diners[0]\n cache[str(diners)] = r\n return r\n else:\n mintime = diners[0]\n for i in range(1, diners[0]//2+1):\n mintime = min(mintime, 1+time(sorted(diners[1:] + [diners[0]-i] + [i], key = lambda x: -x)))\n cache[str(diners)] = mintime\n return mintime\n # return min(diners[0], 1+time(sorted(diners[1:] + [diners[0]//2] + [diners[0]//2 + diners[0]%2], key = lambda x: -x)))\n # return min(\n # 1+time([max(0, x-1) for x in diners]),\n # 1+time(sorted(diners[1:] + [diners[0]//2] + [diners[0]//2 + diners[0]%2], key = lambda x: -x))\n # )", "def evaluate(self, time) -> float:\n ...", "def _get_timings_perinput(funcs, input_=None):\n\n global _TIMEOUT\n global _NUM_REPEATS\n\n timings_l = []\n\n from IPython import get_ipython\n if get_ipython() is None:\n iter_funcs = trange(len(funcs), desc='Loop functions', leave=False)\n else:\n iter_funcs = range(len(funcs))\n\n for j in iter_funcs:\n f = funcs[j]\n ii = 1\n process_next = True\n 
while process_next:\n for jj in 1, 2, 5:\n iter_rep = ii * jj\n if input_ is None:\n t = min(timeit.repeat(functools.partial(f), repeat=_NUM_REPEATS, number=iter_rep))\n else:\n t = min(timeit.repeat(functools.partial(f, *input_), repeat=_NUM_REPEATS, number=iter_rep))\n if t > _TIMEOUT:\n process_next = False\n break\n ii *= 10\n timings_l.append(t / iter_rep)\n return timings_l", "def test_find_parallel_duration():\n pt2_example = {\n \"C\": [],\n \"A\": [\"C\"],\n \"F\": [\"C\"],\n \"B\": [\"A\"],\n \"D\": [\"A\"],\n \"E\": [\"B\", \"D\", \"F\"],\n }\n assert find_parallel_duration(pt2_example, 2, 0) == 15", "def part1(input):\n sys = AmpSystem(input)\n return sys.max_thruster_signal([i for i in range(5)])", "def compare_cow_transport_algorithms():\n cow_set = load_cows(\"ps1_cow_data.txt\")\n \n def get_run_time(func):\n start = time.time()\n print(func(cow_set))\n end = time.time()\n return end-start\n \n greedy_time = get_run_time(greedy_cow_transport)\n brute_force_time = get_run_time(brute_force_cow_transport)\n \n print(\"---\"*20)\n print(\"greedy runtime: \",greedy_time)\n print(\"brute force runtime: \", brute_force_time)", "def bruteForceTime():\n start_time = time.time()\n subjects = loadSubjects(SUBJECT_FILENAME)\n maxWork = 8\n answer = bruteForceAdvisor(subjects, maxWork)\n end_time = time.time()\n printSubjects(answer)\n print 'Time taken: ', end_time - start_time\n return None", "def psych(output_str, input_str_1, input_1, input_str_2, input_2, pressure=29.92):\n\n dry_bulb = 0\n wet_bulb = 0\n dewpoint = 0\n rel_hum = 0\n hum_rat = 0\n spec_vol = 0\n enthalpy = 0\n pressure *= 0.491154\n\n if input_str_1 in ('db', 'DB', 'wb', 'WB', 'dp', 'DP', 'rh', 'RH', 'hr', 'HR', 'sv', 'SV', 'en', 'EN') and \\\n input_str_2 in ('db', 'DB', 'wb', 'WB', 'dp', 'DP', 'rh', 'RH', 'hr', 'HR', 'sv', 'SV', 'en', 'EN'):\n\n if input_str_1 in ('db', 'DB'):\n dry_bulb = input_1\n elif input_str_2 in ('db', 'DB'):\n dry_bulb = input_2\n\n if input_str_1 in ('wb', 'WB'):\n wet_bulb = input_1\n elif input_str_2 in ('wb', 'WB'):\n wet_bulb = input_2\n\n if input_str_1 in ('dp', 'DP'):\n dewpoint = input_1\n elif input_str_2 in ('dp', 'DP'):\n dewpoint = input_2\n\n if input_str_1 in ('rh', 'RH'):\n rel_hum = input_1 / 100\n elif input_str_2 in ('rh', 'RH'):\n rel_hum = input_2 / 100\n\n if input_str_1 in ('hr', 'HR'):\n hum_rat = input_1\n elif input_str_2 in ('hr', 'HR'):\n hum_rat = input_2\n\n if input_str_1 in ('sv', 'SV'):\n spec_vol = input_1\n elif input_str_2 in ('sv', 'SV'):\n spec_vol = input_2\n\n if input_str_1 in ('en', 'EN'):\n enthalpy = input_1\n elif input_str_2 in ('en', 'EN'):\n enthalpy = input_2\n else:\n return ValueError('Invalid input types')\n\n if hum_rat < 0:\n return ValueError('Humidity ratio less than 0')\n if rel_hum < 0 or rel_hum > 1:\n return ValueError('Relative humidity less than 0 or greater than 100')\n\n ############################################################################################\n\n if input_str_1 in ('db', 'DB') or input_str_2 in ('db', 'DB'):\n if output_str in ('db', 'DB'):\n return dry_bulb\n\n db_r = dry_bulb + 459.67\n\n if input_str_1 in ('wb', 'WB') or input_str_2 in ('wb', 'WB'):\n if output_str in ('wb', 'WB'):\n return wet_bulb\n\n db_r = dry_bulb + 459.67\n wb_r = wet_bulb + 459.67\n\n pres_wb_sat = sat_pres(wb_r)\n\n hr_wb_sat = 0.62198 * pres_wb_sat / (pressure - pres_wb_sat)\n\n hum_rat = (hr_wb_sat * (1093 - .556 * wet_bulb) - 0.24 * (dry_bulb - wet_bulb)) / \\\n (1093 + .444 * dry_bulb - wet_bulb)\n if output_str in ('hr', 
'HR'):\n return hum_rat\n\n pres_db_sat = sat_pres(db_r)\n\n hr_db_sat = 0.62198 * pres_db_sat / (pressure - pres_db_sat)\n\n mu = hum_rat / hr_db_sat\n\n rel_hum = mu / (1 - (1 - mu) * (pres_db_sat / pressure))\n if rel_hum < 0 or rel_hum > 1:\n return -1 # ValueError('Calculated relative humidity less than 0')\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n if output_str in ('sv', 'SV'):\n spec_vol = 1545.32 * db_r * (1 + 1.6078 * hum_rat) / (28.9645 * pressure * 144)\n return spec_vol\n\n if output_str in ('en', 'EN'):\n enthalpy = 0.24 * dry_bulb + hum_rat * (1061 + 0.444 * dry_bulb)\n return enthalpy\n\n if output_str in ('dp', 'DP'):\n pres_vapor = (pressure * hum_rat) / (0.62198 + hum_rat)\n\n dewpoint = calc_dewpoint(pres_vapor)\n return dewpoint\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('dp', 'DP') or input_str_2 in ('dp', 'DP'):\n if output_str in ('dp', 'DP'):\n return dewpoint\n\n dp_r = dewpoint + 459.67\n\n pres_vapor = sat_pres(dp_r)\n\n hum_rat = 0.62198 * pres_vapor / (pressure - pres_vapor)\n if hum_rat < 0:\n return -1 # ValueError('Calculated humidity ratio below 0')\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n pres_db_sat = sat_pres(db_r)\n\n hr_db_sat = 0.62198 * pres_db_sat / (pressure - pres_db_sat)\n\n mu = hum_rat / hr_db_sat\n\n rel_hum = mu / (1 - (1 - mu) * (pres_db_sat / pressure))\n if rel_hum < 0 or rel_hum > 1:\n return -1\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n if output_str in ('sv', 'SV'):\n spec_vol = 1545.32 * db_r * (1 + 1.6078 * hum_rat) / (28.9645 * pressure * 144)\n return spec_vol\n\n if output_str in ('en', 'EN'):\n enthalpy = 0.24 * dry_bulb + hum_rat * (1061 + 0.444 * dry_bulb)\n return enthalpy\n\n if output_str in ('wb', 'WB'):\n wet_bulb = calc_wetbulb(dry_bulb, hum_rat, pressure)\n return wet_bulb\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('rh', 'RH') or input_str_2 in ('rh', 'RH'):\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n pres_db_sat = sat_pres(db_r)\n\n pres_vapor = pres_db_sat * rel_hum\n\n hum_rat = 0.62198 * pres_vapor / (pressure - pres_vapor)\n if hum_rat < 0:\n return -1\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n # hr_db_sat = 0.62198 * pres_db_sat / (pressure - pres_db_sat)\n\n # mu = hum_rat / hr_db_sat\n\n if output_str in ('sv', 'SV'):\n spec_vol = 1545.32 * db_r * (1 + 1.6078 * hum_rat) / (28.9645 * pressure * 144)\n return spec_vol\n\n if output_str in ('en', 'EN'):\n enthalpy = 0.24 * dry_bulb + hum_rat * (1061 + 0.444 * dry_bulb)\n return enthalpy\n\n if output_str in ('dp', 'DP'):\n dewpoint = calc_dewpoint(pres_vapor)\n return dewpoint\n\n if output_str in ('wb', 'WB'):\n wet_bulb = calc_wetbulb(dry_bulb, hum_rat, pressure)\n return wet_bulb\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('hr', 'HR') or input_str_2 in ('hr', 'HR'):\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n pres_db_sat = sat_pres(db_r)\n\n pres_vapor = hum_rat * pressure / (hum_rat + 0.62198)\n\n rel_hum = pres_vapor / pres_db_sat\n if rel_hum < 0 or rel_hum > 1:\n return -1\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n # hr_db_sat = 0.62198 * pres_db_sat / (pressure - pres_db_sat)\n\n # mu = hum_rat / hr_db_sat\n\n if output_str in ('sv', 'SV'):\n spec_vol = 1545.32 * db_r * (1 + 1.6078 * hum_rat) / (28.9645 * pressure * 144)\n return spec_vol\n\n if output_str in ('en', 'EN'):\n enthalpy = 0.24 * dry_bulb + hum_rat * (1061 + 0.444 * dry_bulb)\n return enthalpy\n\n 
if output_str in ('dp', 'DP'):\n dewpoint = calc_dewpoint(pres_vapor)\n return dewpoint\n\n if output_str in ('wb', 'WB'):\n wet_bulb = calc_wetbulb(dry_bulb, hum_rat, pressure)\n return wet_bulb\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('sv', 'SV') or input_str_2 in ('sv', 'SV'):\n if output_str in ('sv', 'SV'):\n return spec_vol\n\n # pres_db_sat = sat_pres(db_r)\n\n hum_rat = (spec_vol * 28.9645 * (pressure * 144) / (1545.32 * db_r) - 1) / 1.6078\n if hum_rat < 0:\n return -1\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n pres_vapor = hum_rat * pressure / (hum_rat + 0.62198)\n\n rel_hum = pres_vapor / pressure\n if rel_hum < 0 or rel_hum > 1:\n return -1\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n if output_str in ('en', 'EN'):\n enthalpy = 0.24 * dry_bulb + hum_rat * (1061 + 0.444 * dry_bulb)\n return enthalpy\n\n if output_str in ('dp', 'DP'):\n dewpoint = calc_dewpoint(pres_vapor)\n return dewpoint\n\n if output_str in ('wb', 'WB'):\n wet_bulb = calc_wetbulb(dry_bulb, hum_rat, pressure)\n return wet_bulb\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('en', 'EN') or input_str_2 in ('en', 'EN'):\n if output_str in ('en', 'EN'):\n return enthalpy\n\n pres_db_sat = sat_pres(dry_bulb)\n\n hum_rat = (enthalpy - 0.24 * dry_bulb) / (1061 + 0.444 * dry_bulb)\n if hum_rat < 0:\n return -1\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n pres_vapor = hum_rat * pressure / (hum_rat + 0.62198)\n\n rel_hum = pres_vapor / pres_db_sat\n if rel_hum < 0 or rel_hum > 1:\n return -1\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n # hr_db_sat = 0.62198 * pres_db_sat / (pressure - pres_db_sat)\n\n # mu = hum_rat / hr_db_sat\n\n if output_str in ('sv', 'SV'):\n spec_vol = 1545.32 * db_r * (1 + 1.6078 * hum_rat) / (28.9645 * pressure * 144)\n return spec_vol\n\n if output_str in ('dp', 'DP'):\n dewpoint = calc_dewpoint(pres_vapor)\n return dewpoint\n\n if output_str in ('wb', 'WB'):\n wet_bulb = calc_wetbulb(dry_bulb, hum_rat, pressure)\n return wet_bulb\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('wb', 'WB') or input_str_2 in ('wb', 'WB'):\n if output_str in ('wb', 'WB'):\n return wet_bulb\n\n wb_r = wet_bulb + 459.67\n\n if input_str_1 in ('dp', 'DP') or input_str_2 in ('dp', 'DP'):\n if output_str in ('dp', 'DP'):\n return dewpoint\n\n dp_r = dewpoint + 459.67\n\n pres_vapor = sat_pres(dp_r)\n\n hum_rat = 0.62198 * pres_vapor / (pressure - pres_vapor)\n if hum_rat < 0:\n return -1\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n pres_wb_sat = sat_pres(wb_r)\n\n hr_wb_sat = 0.62198 * pres_wb_sat / (pressure - pres_wb_sat)\n\n dry_bulb = ((1093 - 0.556 * wet_bulb) * hr_wb_sat + 0.24 * wet_bulb - (1093 - wet_bulb) * hum_rat) / \\\n (0.444 * hum_rat + 0.24)\n if output_str in ('db', 'DB'):\n return dry_bulb\n\n db_r = dry_bulb + 459.67\n\n pres_db_sat = sat_pres(dry_bulb)\n\n hr_db_sat = 0.62198 * pres_db_sat / (pressure - pres_db_sat)\n mu = hum_rat / hr_db_sat\n\n rel_hum = mu / (1 - (1 - mu) * (pres_db_sat / pressure))\n if rel_hum < 0 or rel_hum > 1:\n return -1\n if output_str in ('rh', 'RH'):\n return rel_hum\n\n if output_str in ('sv', 'SV'):\n spec_vol = 1545.32 * db_r * (1 + 1.6078 * hum_rat) / (28.9645 * pressure * 144)\n return spec_vol\n\n if output_str in ('en', 'EN'):\n enthalpy = 0.24 * dry_bulb + hum_rat * (1061 + 0.444 * dry_bulb)\n return enthalpy\n else:\n return ValueError('Unknown output request')\n elif input_str_1 in ('rh', 'RH') or input_str_2 
in ('rh', 'RH'):\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n elif input_str_1 in ('hr', 'HR') or input_str_2 in ('hr', 'HR'):\n if output_str in ('hr', 'HR'):\n return hum_rat\n elif input_str_1 in ('sv', 'SV') or input_str_2 in ('sv', 'SV'):\n if output_str in ('sv', 'SV'):\n return spec_vol\n elif input_str_1 in ('en', 'EN') or input_str_2 in ('en', 'EN'):\n if output_str in ('en', 'EN'):\n return enthalpy\n return -1 # no enthalpy, wet bulb and enthalpy are too closely related to avoid problems\n elif input_str_1 in ('dp', 'DP') or input_str_2 in ('dp', 'DP'):\n if output_str in ('dp', 'DP'):\n return dewpoint\n\n dp_r = dewpoint + 459.67\n\n if input_str_1 in ('rh', 'RH') or input_str_2 in ('rh', 'RH'):\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n elif input_str_1 in ('sv', 'SV') or input_str_2 in ('sv', 'SV'):\n if output_str in ('sv', 'SV'):\n return spec_vol\n elif input_str_1 in ('en', 'EN') or input_str_2 in ('en', 'EN'):\n if output_str in ('en', 'EN'):\n return enthalpy\n elif input_str_1 in ('hr', 'HR') or input_str_2 in ('hr', 'HR'):\n if output_str in ('hr', 'HR'):\n return hum_rat\n return -1 # no humidity ratio - it is the dew point more or less\n elif input_str_1 in ('hr', 'HR') or input_str_2 in ('hr', 'HR'):\n if output_str in ('hr', 'HR'):\n return hum_rat\n\n if input_str_1 in ('rh', 'RH') or input_str_2 in ('rh', 'RH'):\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n elif input_str_1 in ('sv', 'SV') or input_str_2 in ('sv', 'SV'):\n if output_str in ('sv', 'SV'):\n return spec_vol\n elif input_str_1 in ('en', 'EN') or input_str_2 in ('en', 'EN'):\n if output_str in ('en', 'EN'):\n return enthalpy\n elif input_str_1 in ('rh', 'RH') or input_str_2 in ('rh', 'RH'):\n if output_str in ('rh', 'RH'):\n return rel_hum * 100\n\n if input_str_1 in ('sv', 'SV') or input_str_2 in ('sv', 'SV'):\n if output_str in ('sv', 'SV'):\n return spec_vol\n elif input_str_1 in ('en', 'EN') or input_str_2 in ('en', 'EN'):\n if output_str in ('en', 'EN'):\n return enthalpy\n elif input_str_1 in ('sv', 'SV') or input_str_2 in ('sv', 'SV'):\n if output_str in ('sv', 'SV'):\n return spec_vol\n\n if input_str_1 in ('en', 'EN') or input_str_2 in ('en', 'EN'):\n if output_str in ('en', 'EN'):\n return enthalpy", "def part_1():\n input_ = parse_input()\n cups = turn_input_into_cups(input_)\n cups = solve(cups, first_cup=cups[input_[0]], turns=100)\n\n answer = []\n current_cup = cups[1].next\n while current_cup != cups[1]:\n answer.append(str(current_cup.number))\n current_cup = current_cup.next\n\n return \"\".join(answer)", "def compute_duty_factor():\n [time,\n ankle_l_trajectory,\n ankle_r_trajectory,\n foot_l_contact,\n foot_r_contact,\n muscle_lh_activations,\n muscle_rh_activations,\n muscle_lh_forces,\n muscle_rh_forces,\n joint_lh_positions,\n joint_rh_positions] = load_data()\n \n print(np.sum(foot_l_contact)/len(foot_l_contact))\n print(np.sum(foot_r_contact)/len(foot_r_contact))\n\n return np.sum(foot_l_contact)/len(foot_l_contact)*0.5 + np.sum(foot_r_contact)/len(foot_r_contact)*0.5", "def run_inference(self, input):\n #TODO(142164990): Add support for io.BytesIO heavily used on Raspberry Pi.\n #TODO(142164990): Add benchmarks for all supported types to catch regressions.\n if isinstance(input, bytes):\n result = self._engine.RunInferenceBytes(input)\n elif _is_valid_ctypes_input(input):\n pointer, size = input\n result = self._engine.RunInferenceRaw(pointer.value, size)\n elif _libgst and isinstance(input, Gst.Buffer):\n with _gst_buffer_map(input) 
as (pointer, size):\n result = self._engine.RunInferenceRaw(pointer.value, size)\n else:\n result = self._engine.RunInference(input)\n latency = self._engine.get_inference_time()\n return (latency, result)", "def timeit_compare(funcs, inputs, setups='pass', **kwargs):\n number = kwargs.get('number', 100000)\n print_conditions = kwargs.get('print_conditions', False)\n performance = defaultdict(list)\n if isinstance(setups, list): \n # user specifies their own list of setups corresponding to funcs\n pass\n elif setups == 'pass':\n # specify no setups for built-in functions like join\n setups = ['pass' for f in funcs]\n elif setups == 'main': \n # uniformly import all setups from the local environment\n fnames = [f[:f.find(\"(\")] for f in funcs]\n setups = [\"from __main__ import \" + fname for fname in fnames]\n \n # convert the input ranges to a set of conditions\n conditions = get_conditions(inputs)\n if print_conditions: \n print \"conditions: \" + conditions\n \n def timer(func, value, setup):\n return timeit.Timer(func.format(*value), setup=setup)\n\n for i, f in enumerate(funcs):\n print \"testing \" + f + \"...\"\n for value in conditions:\n test = timer(f, value, setups[i])\n result = test.timeit(number=number)\n performance[f].append(list(value) + [result])\n return performance", "def main():\n assert how_many_seconds(2) == 7200\n assert how_many_seconds(10) == 36000\n assert how_many_seconds(24) == 86400\n assert how_many_seconds(36) == 129600\n print('Passed.')", "def solution1(inp):\n inp = get_lines(inp)\n earliest = int(inp[0])\n notes = inp[1].split(',')\n min_bus = None\n for bus in notes:\n if bus == 'x':\n continue\n bus = int(bus)\n wait_time = bus - earliest % bus\n if min_bus == None or wait_time < (min_bus - earliest % min_bus):\n min_bus = bus\n return min_bus * (min_bus - earliest % min_bus)", "def part2(input):\n sys = AmpSystem(input)\n return sys.max_thruster_signal([i for i in range(5, 10)])", "def part1(puzzle_input):\n\n puzzle_input_arr = puzzle_input.strip().split('\\n')\n seconds_to_action = {} # {timestamp: (datetime, action)}\n for line in puzzle_input_arr:\n m = re.match(r'\\[(\\d+)-(\\d+)-(\\d+) (\\d+):(\\d+)\\](.*)', line)\n dt = datetime(1970, int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5)))\n seconds = dt.timestamp()\n seconds_to_action[seconds] = (dt, m.group(6))\n seconds_sorted = sorted(seconds_to_action.keys())\n guard_to_minutes_slept = {} # {guard ID: number of minutes slept}\n guard_to_minute = {} # {Guard ID: [5, 6, 7, 8, 9...24, 30, 31...54, 24, 25, 26, 27, 28]}\n for second in seconds_sorted:\n dt, action = seconds_to_action[second]\n if \"begins\" in action:\n guard_id = int(re.match(r' Guard #(\\d+)', action).group(1))\n if guard_id not in guard_to_minutes_slept: # I could use a default dictionary for this\n guard_to_minutes_slept[guard_id] = 0\n guard_to_minute[guard_id] = []\n elif \"falls\" in action:\n sleep_time = dt\n elif \"wakes\" in action:\n difference_in_minutes = int((dt.timestamp() - sleep_time.timestamp()) // 60)\n guard_to_minutes_slept[guard_id] += difference_in_minutes\n guard_to_minute[guard_id] += [(sleep_time.minute + i) % 60 for i in range(difference_in_minutes)]\n guard_with_longest_sleep = max(guard_to_minutes_slept, key=guard_to_minutes_slept.get)\n most_common_minute = max(guard_to_minute[guard_with_longest_sleep],\n key=guard_to_minute[guard_with_longest_sleep].count)\n return guard_with_longest_sleep * most_common_minute", "def diffWaysToCompute(self, input):\n ops = {'+': lambda a,b: a+b,\n 
'-': lambda a, b: a-b,\n '*': lambda a, b: a*b}\n\n def _ways(string):\n res = []\n if not string:\n return res\n\n for i, c in enumerate(string):\n if c in \"+-*\":\n left = _ways(string[0:i])\n right = _ways(string[i + 1:])\n res += [ops[c](l, r) for l, r in itertools.product(left, right)]\n if not res:\n res.append(int(string))\n return res\n\n return _ways(input)", "def cipher_execution(op, input, output, password):\n command = [\n EXEC_NAME,\n op,\n input,\n '-o',\n output,\n '-k',\n password\n ]\n start_time = time.time()\n subprocess.call(command, 1)\n end_time = time.time() - start_time\n print(\"%s took %f seconds\" % (input, end_time))\n return end_time", "def test_task1_with_example_input():\n distance = task1(input_stream())\n assert distance == 25", "def part_2(puzzle_input):\n nr_players, nr_marbles = parse_input(puzzle_input)\n nr_marbles *= 100\n puzzle_string= (f\"{nr_players} players; last marble is worth {nr_marbles}\"\n \" points\")\n return part_1b(puzzle_string)", "def evaluate(inp, coll = False):\n distance = 0\n errors = 0\n waypoints = [[0,0]] #The robot originates in 0,0\n waypoints.extend(inp) #Add the generated waypoints inbetween start and goal\n waypoints.append([goal[0],goal[1]]) #Add the goal waypoint to complete the route\n for i in range(0, len(waypoints) -1):\n distance += sqrt(pow(waypoints[i][0] - waypoints[i+1][0], 2) + pow(waypoints[i][1] - waypoints[i+1][1], 2)) #Calculates the distance between this waypoint ond the next\n \n errors += collisions(waypoints[i][0], waypoints[i][1], \n waypoints[i+1][0], waypoints[i+1][1])\n\n if collisions:\n return distance + errors*total_distance*2, errors\n return distance + errors*total_distance*2", "def computePID(setpoint,\n _input, last_error, prev_time):\n kp = 0.5\n kd = 5\n ki = 0.02\n\n current_time = time.time()\n d_time = current_time - prev_time\n \n error = setpoint - _input\n #print(\"error\", error)\n\n #error_sum += (error * d_time)\n \n d_error = (error - last_error) / d_time\n\n output = kp * error #+ kd * d_error#ki * error_sum + kd * d_error\n last_error = error\n prev_time = current_time\n \n return output, last_error, prev_time", "def strategy_cheap(cookies, cps, time_left, build_info):\n items = build_info.build_items()\n result = None\n cost = float('+inf')\n overall = cookies + time_left * cps\n for item in items:\n temp_cost = build_info.get_cost(item)\n if temp_cost <= overall and cost > temp_cost:\n result = item\n cost = temp_cost\n return result", "def main():\n counter = 0\n inputs = ['','','']\n score_one = 0\n score_two = 0\n\n for line in sys.stdin:\n # Line 1 is the number of cards to expect for each player (1 <= N <= 1000)\n if counter == 0:\n inputs[counter] = line.strip()\n else:\n inputs[counter] = list(line.strip().replace(' ', ''))\n for each in range(int(inputs[0])):\n inputs[counter][each] = inputs[counter][each].replace('A', '13')\n inputs[counter][each] = inputs[counter][each].replace('K', '12')\n inputs[counter][each] = inputs[counter][each].replace('Q', '11')\n inputs[counter][each] = inputs[counter][each].replace('J', '10')\n\n counter += 1\n\n for card in range(int(inputs[0])):\n # if they're the same, do nothing\n if int(inputs[1][card]) == int(inputs[2][card]):\n continue\n\n # if A is greater, plus one:\n if int(inputs[1][card]) > int(inputs[2][card]):\n score_one += 1\n else:\n score_two += 1\n\n if score_one > score_two:\n print \"PLAYER 1 WINS\"\n elif score_two > score_one:\n print \"PLAYER 2 WINS\"\n else:\n print \"TIE\"", "def run(self, desired_result, input, 
limit):\n cfg = desired_result.configuration.data\n\n opt_seq = ''\n\n for flag in OPT_FLAGS:\n if cfg[flag] == 'on':\n opt_seq += ' {0}'.format(flag)\n\n tmp_dir = \"./tmp\" \n compd_opt_cmd = 'opt -S {0} mcsema/test.proposed.inline.ll -o {1}/test.proposed.opt.ll'.format(\n opt_seq, tmp_dir)\n compd_opt_result = self.call_program(compd_opt_cmd)\n if compd_opt_result['returncode'] != 0:\n print(compd_opt_result)\n assert 0\n\n mcsema_opt_cmd = 'opt -S {0} ../binary/test.mcsema.inline.ll -o {1}/test.mcsema.opt.ll'.format(\n opt_seq, tmp_dir)\n mcsema_opt_result = self.call_program(mcsema_opt_cmd)\n if mcsema_opt_result['returncode'] != 0:\n print(mcsema_opt_result)\n assert 0\n\n matcher = args.matcher\n if(matcher == ''):\n matcher = home + '/Github//validating-binary-decompilation/source/build/bin//matcher'\n\n matcher_run_cmd = '{0} --file1 {1}/test.mcsema.opt.ll:{2} --file2 {1}/test.proposed.opt.ll:{2} --potential-match-accuracy'.format(\n matcher, tmp_dir, args.func)\n\n matcher_run_result = self.call_program(matcher_run_cmd)\n if matcher_run_result['returncode'] != 0:\n print(matcher_run_result)\n assert 0\n\n matcher_stderr = matcher_run_result['stderr']\n z = re.findall(\n r\"^Accuracy:(\\d+\\.[\\deE+-]+)\",\n matcher_stderr,\n re.MULTILINE)\n cost = 1 - float(z[0])\n\n log.debug('[Run] Cost:{0} [{1}]'.format(cost, opt_seq))\n\n # Early exit\n outfile = args.outdir + '/' + 'normalizer_final_config.json'\n if cost == 0:\n log.info(\n \"run: Early Exit: Optimal pass sequence written to {0}: [{1}]\".format(\n outfile, opt_seq))\n with open(outfile, 'a') as fd:\n fd.write('{0}\\n'.format(opt_seq))\n\n return Result(time=cost)", "def estimateCt(y, inp):\n\treturn getK2(inp) * (1 - math.exp(-getLambda(inp) * y / getY90(inp)))", "def test_task2_with_example_input():\n distance = task2(input_stream())\n assert distance == 286", "def time_complexities():\n return \"Best Case: O(n), Average Case: O(n), Worst Case: O(n)\"", "def task6_arithmetic_progression_check(list_input):\n difference = list_input[1] - list_input[0]\n for i in range(2, len(list_input)):\n if difference != list_input[i] - list_input[i - 1]:\n return False\n return True", "def usable_numbers(time):\n curr_val = time\n index = 0\n\n while index+1 < len(unit_key):\n unit_test = converter[unit_key[index+1]]\n if time // unit_test < 1:\n break\n index += 1\n\n return time//converter[unit_key[index]], unit_order[index]", "def run_performance():\n # Create a Struct data instance from config\n inputs = Struct(config)\n inputs.throttle = throttle\n # Get oxidizer properties at the given temperature\n n2o = n2o_properties(inputs.ox.T_tank)\n # Our integration variables are oxidizer mass and liquid oxidizer volume\n Mox = n2o.rho_l*(inputs.ox.liquid_V) + n2o.rho_g*(inputs.ox.tank_V-inputs.ox.liquid_V)\n if inputs.options.output_on:\n print(\"Initial oxidizer mass: {} kg.\".format(Mox))\n\n start = time.perf_counter() # Start timer for integration\n\n time, record = integration(inputs) # Time = time for integration, record = output data\n F_thrust = record.F_thrust\n p_cc = record.p_cc\n p_oxtank = record.p_oxtank\n p_oxpresstank = record.p_oxpresstank\n p_fueltank = record.p_fueltank\n p_fuelpresstank = record.p_fuelpresstank\n p_oxmanifold = record.p_oxmanifold\n T_oxtank = record.T_oxtank\n T_cc = record.T_cc\n area_core = record.area_core\n OF = record.OF_i\n gamma_ex = record.gamma_ex\n m_dot_ox = record.m_dot_ox\n m_dot_fuel = record.m_dot_fuel\n p_crit = record.p_crit\n m_dot_ox_crit = record.m_dot_ox_crit\n M_e = record.M_e\n 
p_exit = record.p_exit\n p_shock = record.p_shock\n\n time_elapsed = start-time.perf_counter() # Stop the timer and print elapsed time\n if inputs.options.output_on:\n print(\"Time elapsed for this timestep: {} sec.\".format(time_elapsed))", "def part2(puzzle_input):\n\n puzzle_input_arr = puzzle_input.split('\\n')\n seconds_to_action = {} # {timestamp: (datetime, action)\n for line in puzzle_input_arr:\n m = re.match(r'\\[(\\d+)-(\\d+)-(\\d+) (\\d+):(\\d+)\\](.*)', line)\n dt = datetime(1970, int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5)))\n seconds = dt.timestamp()\n seconds_to_action[seconds] = (dt, m.group(6))\n seconds_sorted = sorted(seconds_to_action.keys())\n guard_to_minutes_slept = {} # {Guard ID: number of minutes slept}\n guard_to_minute = {} # {Guard Id: [5, 6, 7, 8, 9...24, 30, 31...54, 24, 25, 26, 27, 28]}\n minute_to_guard_slept= {} # {minute: [guard IDs]}\n guard_id = 0\n sleep_time = None\n for second in seconds_sorted:\n dt, action = seconds_to_action[second]\n if \"begins\" in action:\n guard_id = int(re.match(r' Guard #(\\d+)', action).group(1))\n if guard_id not in guard_to_minutes_slept:\n guard_to_minutes_slept[guard_id] = 0\n guard_to_minute[guard_id] = []\n elif \"falls\" in action:\n sleep_time = dt\n elif \"wakes\" in action:\n difference_in_minutes = int((dt.timestamp() - sleep_time.timestamp()) // 60)\n guard_to_minutes_slept[guard_id] += difference_in_minutes\n for i in range(difference_in_minutes):\n if (sleep_time.minute + i) % 60 not in minute_to_guard_slept:\n minute_to_guard_slept[(sleep_time.minute + i) % 60] = [guard_id]\n else:\n minute_to_guard_slept[(sleep_time.minute + i) % 60].append(guard_id)\n most_frequent_number_of_occurrences, sleepiest_guard_id = (0, 0)\n sleepiest_minute = 0\n for minute in minute_to_guard_slept:\n c = collections.Counter(minute_to_guard_slept[minute])\n if c.most_common(1)[0][1] > most_frequent_number_of_occurrences:\n sleepiest_guard_id, most_frequent_number_of_occurrences = c.most_common(1)[0]\n sleepiest_minute = minute\n return sleepiest_guard_id * sleepiest_minute", "def test_compare_difference_string_faster(self):\n test_algorithm = 'bubble'\n test_algorithm_time = 2\n test_sorted_time = 4\n result = calculate_compare_time_difference(test_algorithm_time, test_sorted_time, test_algorithm)\n self.assertEqual('bubble was 2 seconds faster.', result)", "def enter_data_for_pace_calc():\n print(\"Distance & Time -> Pace\")\n print(\"=\" * 50)\n\n distance = float(input(\"Distance[km]: \"))\n time = input(\"Time[hh:mm:ss]: \")\n\n print(calc_pace(time, distance))", "def test_binary_hamming_distance_dtype_cpu(self, inputs, dtype):\n preds, target = inputs\n if (preds < 0).any() and dtype == torch.half:\n pytest.xfail(reason=\"torch.sigmoid in metric does not support cpu + half precision\")\n self.run_precision_test_cpu(\n preds=preds,\n target=target,\n metric_module=BinaryHammingDistance,\n metric_functional=binary_hamming_distance,\n metric_args={\"threshold\": THRESHOLD},\n dtype=dtype,\n )", "def calculateLoadingTime(self, inputs):\n neededCharge = BATTERY_CAPACITY * (1 - (self.getAtt('current_soc', inputs) / 100))\n return int((neededCharge / (NORM_VOLTAGE * CHARGE_SPEED)) * 60)", "def __call__(self, input_, dt=None):\n if not self.auto_mode:\n return self._last_output\n\n now = self._current_time()\n if dt is None:\n dt = now - self._last_time if now - self._last_time else 1e-16\n elif dt <= 0:\n raise ValueError('dt has nonpositive value {}. 
Must be positive.'.format(dt))\n\n if self.sample_time is not None and dt < self.sample_time and self._last_output is not None:\n # only update every sample_time seconds\n return self._last_output\n\n # compute error terms\n error = self.setpoint - input_\n d_input = input_ - (self._last_input if self._last_input is not None else input_)\n\n # compute the proportional term\n if not self.proportional_on_measurement:\n # regular proportional-on-error, simply set the proportional term\n self._proportional = self.Kp * error\n else:\n # add the proportional error on measurement to error_sum\n self._proportional -= self.Kp * d_input\n\n # compute integral and derivative terms\n self._integral += self.Ki * error * dt\n self._integral = self._clamp(self._integral, self.output_limits) # avoid integral windup\n\n self._derivative = -self.Kd * d_input / dt\n\n # compute final output\n # print(\"Proportional: {}, Integral: {}, Derivative: {}\".format(self._proportional,self._integral,self._derivative))\n output = self._proportional + self._integral + self._derivative\n output = self._clamp(output, self.output_limits)\n\n # keep track of state\n self._last_output = output\n self._last_input = input_\n self._last_time = now\n\n return output", "def run_precompiled(self, desired_result, input, limit, compile_result,\n result_id):\n opt_seq = ''\n\n cfg = desired_result.configuration.data\n for flag in OPT_FLAGS:\n if cfg[flag] == 'on':\n opt_seq += ' {0}'.format(flag)\n\n tmp_dir = self.get_tmpdir(result_id)\n\n matcher = args.matcher\n if(matcher == ''):\n matcher = home + '/Github//validating-binary-decompilation/source/build/bin//matcher'\n\n matcher_run_cmd = '{0} --file1 {1}/test.mcsema.opt.ll:{2} --file2 {1}/test.proposed.opt.ll:{2} --potential-match-accuracy'.format(\n matcher, tmp_dir, args.func)\n\n matcher_run_result = self.call_program(matcher_run_cmd)\n if matcher_run_result['returncode'] != 0:\n print(matcher_run_result['stderr'])\n assert 0\n\n matcher_stderr = matcher_run_result['stderr']\n z = re.findall(\n r\"^Accuracy:(\\d+\\.[\\deE+-]+)\",\n matcher_stderr,\n re.MULTILINE)\n cost = 1 - float(z[0])\n\n log.debug('[RunPreC] Cost:{0} [{1}]'.format(cost, opt_seq))\n\n # Early exit\n outfile = args.outdir + '/' + 'normalizer_final_config.json'\n if cost == 0:\n log.info(\n \"run_precompiled: Early Exit: Optimal pass sequence written to {0}: [{1}]\".format(\n outfile, opt_seq))\n\n#shutil.rmtree(\"./tmp\")\n# os.remove(\"opentuner.log\")\n#\n with open(outfile, 'a') as fd:\n fd.write('{0}\\n'.format(opt_seq))\n\n return Result(time=cost)", "def cpu_time(self):", "def advantage(cpu_dt, gpu_dt):\n assert gpu_dt >= 0 and cpu_dt >= 0\n if gpu_dt == 0 and cpu_dt == 0:\n return numpy.nan\n elif gpu_dt == 0:\n return numpy.inf\n else:\n return cpu_dt / gpu_dt", "def advantage(cpu_dt, gpu_dt):\r\n assert gpu_dt >= 0 and cpu_dt >= 0\r\n if gpu_dt == 0 and cpu_dt == 0:\r\n return numpy.nan\r\n elif gpu_dt == 0:\r\n return numpy.inf\r\n else:\r\n return cpu_dt / gpu_dt", "def bench_report(t1, t2):\n print \"\\n\\n Time taken: {0}\".format(t2 - t1)", "def _omori_time(integ, c, p):\n if p == 1:\n return c*(exp(integ) - 1)\n else:\n return (integ*(1 - p) + c**(1 - p))**(1/(1 - p)) - c", "def _check_m_contr_fixed_phase(\n factor1, factor2, normal, inv, shared_dumms\n):\n\n factors = [factor1, factor2]\n indices1 = factor1.indices\n indices2 = factor2.indices\n\n # Free dummies are dummies whose both appearance are on undecided slots.\n normal_dumms = set()\n free_normal_dumms = set()\n inv_dumms = set()\n 
free_inv_dumms = set()\n for dumm in shared_dumms:\n i1, i2 = [j.m_dumms[dumm] for j in factors]\n m1, m2 = indices1[i1].m, indices2[i2].m\n if m1 == m2:\n dumms = normal_dumms\n frees = free_normal_dumms\n elif m1 == -m2:\n dumms = inv_dumms\n frees = free_inv_dumms\n else:\n assert False\n dumms.add(dumm)\n if not factor1.is_decided(i1) and not factor2.is_decided(i2):\n frees.add(dumm)\n continue\n\n if len(normal_dumms) != len(normal) or len(inv_dumms) != len(inv):\n return None\n\n phase = _UNITY\n for to_proc, dumms, frees in [\n (normal, normal_dumms, free_normal_dumms),\n (inv, inv_dumms, free_inv_dumms)\n ]:\n for i1, i2 in to_proc:\n decided1 = factor1.is_decided(i1)\n decided2 = factor2.is_decided(i2)\n m1 = indices1[i1].m_symb\n m2 = indices2[i2].m_symb\n if m1 == m2 and m1 in dumms:\n # When we found the contraction is already satisfied.\n factor1.decide(i1)\n factor2.decide(i2)\n if m1 in frees:\n frees.remove(m1)\n # Now the contraction must be currently unsatisfied.\n elif decided1 and decided2:\n return None\n elif decided1:\n # Try refill slot 2 to match the m in slot 1.\n if m1 not in dumms:\n return None\n curr_i2 = factor2.m_dumms[m1]\n if factor2.is_decided(curr_i2):\n return None\n phase *= factor2.swap(curr_i2, i2)\n elif decided2:\n # Try refill slot 1 to match the m in slot 2.\n if m2 not in dumms:\n return None\n curr_i1 = factor1.m_dumms[m2]\n if factor1.is_decided(curr_i1):\n return None\n phase *= factor1.swap(curr_i1, i1)\n else:\n # Try to move a free contraction to the two required slots.\n if len(frees) == 0:\n return None\n # Try to get an m already touching.\n if indices1[i1].m_symb in frees:\n new_m = indices1[i1].m_symb\n frees.remove(new_m)\n elif indices2[i2].m_symb in frees:\n new_m = indices2[i2].m_symb\n frees.remove(new_m)\n else:\n new_m = frees.pop()\n curr_i1 = factor1.m_dumms[new_m]\n curr_i2 = factor2.m_dumms[new_m]\n assert not factor1.is_decided(curr_i1)\n assert not factor2.is_decided(curr_i2)\n phase *= factor1.swap(curr_i1, i1)\n phase *= factor2.swap(curr_i2, i2)\n\n # Continue to next contraction.\n continue\n # Continue to inverted contractions.\n continue\n\n return phase", "def character_time(character):\n\tcheck_dot = len(password) + 1\n\t\n\ttest = subprocess.Popen([PROCESS, PASSWORD_FILE, password + character + '&'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\t\n\ttest.stderr.read(33) # Throw out \"Welcome to the password checker!\"\n\t\n\tstart_time = 0.0\n\tend_time = 0.0\n\t\n\twhile test.stderr.read(1) != '':\n\t\tif check_dot == 1:\n\t\t\tstart_time = time.time()\n\t\t\n\t\telif check_dot == 0:\n\t\t\tend_time = time.time()\n\t\t\tbreak\n\t\t\n\t\tcheck_dot -= 1\n\t\n\treturn end_time - start_time", "def prove_CP() -> Proof:\n # Optional Task 6.7d", "def main():\n string_count = int(input().strip())\n for _ in range(string_count):\n string = input().strip()\n result = calc_min_cost(string)\n print(result)", "def test_benchmark1(capsys):\n student_1 = Student('114007245','Mario Castillo',2018, [\"INST 201\",\"INST 326\"])\n student_2 = Student('117006012', 'Joe Rogan', 2018, [\"MATH 115\",\"PSYC 100\"])\n student_3 = Student(\"117008490\", \"Kanye West\", 2018, [\"MATH 120\",\"STAT 003\"])\n student_4 = Student('118009044', \"Elon Musk\", 2018, [\"PSYC 100\",\"MATH 003\"])\n \n student_1.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out\n assert out == ('You have not completed the Benchmark I requirements.\\n'\n 'You have not taken MATH 115 or higher.\\n'\n 'You have not taken PSYC 100.\\n')\n\n 
student_2.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out\n assert out == (f'You have completed all of your Benchmark I courses! '\n f'Congratulations, {student_2.student_name}!\\n')\n\n student_3.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out \n assert out == ('You have not completed the Benchmark I requirements.\\n'\n 'You have not taken PSYC 100.\\n')\n\n student_4.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out\n assert out == ('You have not completed the Benchmark I requirements.\\n'\n 'You have not taken MATH 115 or higher.\\n')", "def speed():\r\n\r\n algo = ['logistic_sgd', 'logistic_cg', 'mlp', 'convolutional_mlp',\r\n 'dA', 'SdA', 'DBN', 'rbm', 'rnnrbm']\r\n to_exec = [True] * len(algo)\r\n# to_exec = [False] * len(algo)\r\n# to_exec[-1] = True\r\n do_float64 = True\r\n do_float32 = True\r\n do_gpu = True\r\n\r\n algo_executed = [s for idx, s in enumerate(algo) if to_exec[idx]]\r\n #Timming expected are from the buildbot that have an i7-920 @\r\n # 2.67GHz with hyperthread enabled for the cpu, 12G of ram. An GeForce GTX\r\n # 285 for the GPU. OS=Fedora 14, gcc=4.5.1, python/BLAS from EPD\r\n # 7.1-2 (python 2.7.2, mkl unknow). BLAS with only 1 thread.\r\n\r\n expected_times_64 = numpy.asarray([10.0, 22.5, 76.1, 73.7, 116.4,\r\n 346.9, 381.9, 558.1, 186.3])\r\n expected_times_32 = numpy.asarray([11.6, 29.6, 42.5, 66.5, 71,\r\n 191.2, 226.8, 432.8, 176.2])\r\n\r\n # Number with just 1 decimal are new value that are faster with\r\n # the Theano version 0.5rc2 Other number are older. They are not\r\n # updated, as we where faster in the past!\r\n # TODO: find why and fix this!\r\n\r\n# Here is the value for the buildbot on February 3th 2012.\r\n# sgd, cg mlp conv da\r\n# sda dbn rbm\r\n# gpu times[3.72957802, 9.94316864, 29.1772666, 9.13857198, 25.91144657,\r\n# 18.30802011, 53.38651466, 285.41386175]\r\n# expected [3.076634879, 7.555234910, 18.99226785, 9.58915591, 24.130070450,\r\n# 24.77524018, 92.66246653, 322.340329170]\r\n# sgd, cg mlp conv da\r\n# sda dbn rbm\r\n#expected/get [0.82492841, 0.75984178, 0.65092691, 1.04930573, 0.93125138\r\n# 1.35324519 1.7356905 1.12937868]\r\n expected_times_gpu = numpy.asarray([3.07663488, 7.55523491, 18.99226785,\r\n 9.6, 24.13007045,\r\n 20.4, 56, 302.6, 315.4])\r\n expected_times_64 = [s for idx, s in enumerate(expected_times_64)\r\n if to_exec[idx]]\r\n expected_times_32 = [s for idx, s in enumerate(expected_times_32)\r\n if to_exec[idx]]\r\n expected_times_gpu = [s for idx, s in enumerate(expected_times_gpu)\r\n if to_exec[idx]]\r\n\r\n def time_test(m, l, idx, f, **kwargs):\r\n if not to_exec[idx]:\r\n return\r\n print algo[idx]\r\n ts = m.call_time\r\n try:\r\n f(**kwargs)\r\n except Exception, e:\r\n print >> sys.stderr, 'test', algo[idx], 'FAILED', e\r\n l.append(numpy.nan)\r\n return\r\n te = m.call_time\r\n l.append(te - ts)\r\n\r\n def do_tests():\r\n m = theano.compile.mode.get_default_mode()\r\n l = []\r\n time_test(m, l, 0, logistic_sgd.sgd_optimization_mnist, n_epochs=30)\r\n time_test(m, l, 1, logistic_cg.cg_optimization_mnist, n_epochs=30)\r\n time_test(m, l, 2, mlp.test_mlp, n_epochs=5)\r\n time_test(m, l, 3, convolutional_mlp.evaluate_lenet5, n_epochs=5,\r\n nkerns=[5, 5])\r\n time_test(m, l, 4, dA.test_dA, training_epochs=2,\r\n output_folder='tmp_dA_plots')\r\n time_test(m, l, 5, SdA.test_SdA, pretraining_epochs=1,\r\n training_epochs=2, batch_size=300)\r\n time_test(m, l, 6, DBN.test_DBN, pretraining_epochs=1,\r\n training_epochs=2, batch_size=300)\r\n time_test(m, l, 7, 
rbm.test_rbm, training_epochs=1, batch_size=300,\r\n n_chains=1, n_samples=1, output_folder='tmp_rbm_plots')\r\n time_test(m, l, 8, rnnrbm.test_rnnrbm, num_epochs=1)\r\n return numpy.asarray(l)\r\n\r\n #test in float64 in FAST_RUN mode on the cpu\r\n import theano\r\n if do_float64:\r\n theano.config.floatX = 'float64'\r\n theano.config.mode = 'FAST_RUN'\r\n float64_times = do_tests()\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'float64 times', float64_times\r\n print >> sys.stderr, 'float64 expected', expected_times_64\r\n print >> sys.stderr, 'float64 % expected/get', (\r\n expected_times_64 / float64_times)\r\n\r\n #test in float32 in FAST_RUN mode on the cpu\r\n theano.config.floatX = 'float32'\r\n if do_float32:\r\n float32_times = do_tests()\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'float32 times', float32_times\r\n print >> sys.stderr, 'float32 expected', expected_times_32\r\n print >> sys.stderr, 'float32 % expected/get', (\r\n expected_times_32 / float32_times)\r\n\r\n if do_float64:\r\n print >> sys.stderr, 'float64/float32', (\r\n float64_times / float32_times)\r\n print >> sys.stderr\r\n print >> sys.stderr, 'Duplicate the timing to have everything in one place'\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'float64 times', float64_times\r\n print >> sys.stderr, 'float64 expected', expected_times_64\r\n print >> sys.stderr, 'float64 % expected/get', (\r\n expected_times_64 / float64_times)\r\n print >> sys.stderr, 'float32 times', float32_times\r\n print >> sys.stderr, 'float32 expected', expected_times_32\r\n print >> sys.stderr, 'float32 % expected/get', (\r\n expected_times_32 / float32_times)\r\n\r\n print >> sys.stderr, 'float64/float32', (\r\n float64_times / float32_times)\r\n print >> sys.stderr, 'expected float64/float32', (\r\n expected_times_64 / float32_times)\r\n\r\n #test in float32 in FAST_RUN mode on the gpu\r\n import theano.sandbox.cuda\r\n if do_gpu:\r\n theano.sandbox.cuda.use('gpu')\r\n gpu_times = do_tests()\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'gpu times', gpu_times\r\n print >> sys.stderr, 'gpu expected', expected_times_gpu\r\n print >> sys.stderr, 'gpu % expected/get', (\r\n expected_times_gpu / gpu_times)\r\n\r\n if do_float64:\r\n print >> sys.stderr, 'float64/gpu', float64_times / gpu_times\r\n\r\n if (do_float64 + do_float32 + do_gpu) > 1:\r\n print >> sys.stderr\r\n print >> sys.stderr, 'Duplicate the timing to have everything in one place'\r\n print >> sys.stderr, algo_executed\r\n if do_float64:\r\n print >> sys.stderr, 'float64 times', float64_times\r\n print >> sys.stderr, 'float64 expected', expected_times_64\r\n print >> sys.stderr, 'float64 % expected/get', (\r\n expected_times_64 / float64_times)\r\n if do_float32:\r\n print >> sys.stderr, 'float32 times', float32_times\r\n print >> sys.stderr, 'float32 expected', expected_times_32\r\n print >> sys.stderr, 'float32 % expected/get', (\r\n expected_times_32 / float32_times)\r\n if do_gpu:\r\n print >> sys.stderr, 'gpu times', gpu_times\r\n print >> sys.stderr, 'gpu expected', expected_times_gpu\r\n print >> sys.stderr, 'gpu % expected/get', (\r\n expected_times_gpu / gpu_times)\r\n\r\n if do_float64 and do_float32:\r\n print >> sys.stderr, 'float64/float32', (\r\n float64_times / float32_times)\r\n print >> sys.stderr, 'expected float64/float32', (\r\n expected_times_64 / float32_times)\r\n if do_float64 and do_gpu:\r\n print >> sys.stderr, 'float64/gpu', float64_times / gpu_times\r\n print >> sys.stderr, 
'expected float64/gpu', (\r\n expected_times_64 / gpu_times)\r\n if do_float32 and do_gpu:\r\n print >> sys.stderr, 'float32/gpu', float32_times / gpu_times\r\n print >> sys.stderr, 'expected float32/gpu', (\r\n expected_times_32 / gpu_times)\r\n\r\n def compare(x, y):\r\n ratio = x / y\r\n # If there is more then 5% difference between the expected\r\n # time and the real time, we consider this an error.\r\n return sum((ratio < 0.95) + (ratio > 1.05))\r\n\r\n if do_float64:\r\n err = compare(expected_times_64, float64_times)\r\n print >> sys.stderr, 'speed_failure_float64=' + str(err)\r\n if do_float32:\r\n err = compare(expected_times_32, float32_times)\r\n print >> sys.stderr, 'speed_failure_float32=' + str(err)\r\n if do_gpu:\r\n err = compare(expected_times_gpu, gpu_times)\r\n print >> sys.stderr, 'speed_failure_gpu=' + str(err)\r\n\r\n assert not numpy.isnan(gpu_times).any()", "def cpt_calc():\n\n if request.method == \"POST\":\n testmin = float(request.form.get(\"techTestMin\"))\n scoremin = float(request.form.get(\"techScoreMin\"))\n computerTestCheckBox = request.form.get(\"computer-test-checkbox\")\n\n # If the \"Computer Testing\" prompt is selected, indicate as such\n if computerTestCheckBox:\n compCheckBox = \"✓\"\n else:\n compCheckBox = \"\"\n\n testhr = testmin / 60\n scorehr = scoremin / 60\n totalmin = testmin + scoremin\n totalhr = totalmin / 60\n\n # Calculate time for 96138 (\"eight\") and work towards calculating 96139 (\"nine\")\n eight_min = 30\n remaining = totalmin - 30\n\n # Calcuate the technician's remaining time divided by 30 to determine whether the person meets the cutoff for >50% of unit 96138\n remaining_30 = remaining / 30\n\n # Round the whole number down\n remaining_floor = math.floor(remaining_30)\n fractional, whole = math.modf(remaining_30)\n\n # Cutoff is set at 16 out of 30 minutes\n cutoff = 0.53\n\n # Add an extra unit to 96139 if user input meets the cutoff\n if fractional >= cutoff:\n extra = 1\n else:\n extra = 0\n\n if eight_min == 30:\n eight = 1\n\n nine = remaining_floor + extra\n\n return render_template('/index.html', techTestMin=testmin, techScoreMin=scoremin, techTestHr=round(testhr, 2),\n testScoreHr=round(scorehr, 2),techTotalHr=round(totalhr, 2), techTotalMin=round(totalmin, 2),\n eight=eight, nine=nine, neurCheckBox=compCheckBox)\n else:\n return render_template(\"index.html\")", "def PhenomPCore(\n flow,\n fhigh,\n n_freq,\n write2stdout,\n write2bin,\n check,\n timing,\n chi1,\n chi2,\n m1,\n m2,\n chip,\n thetaj,\n alpha0,\n distance,\n phic,\n fref,\n use_buffer,\n n_streams,\n legacy):\n\n # Parse the 'timing' option. If it is given,\n # then assume that it specifies a range of frequencies\n # to test, the number of frequencies to test, and the\n # number of calls to average results over\n if(timing[0] is not None):\n flag_timing = True\n n_freq_lo, n_freq_hi, n_n_freq, n_avg = timing\n # ... 
if it isn't given, just perform one run\n else:\n flag_timing = False\n n_freq_lo = n_freq\n n_freq_hi = n_freq\n n_n_freq = 1\n n_avg = 0\n\n # Generate timing tests\n if(flag_timing):\n\n # Generate the list of n_freq's that we are going to time\n n_freq_list = [int(10**(log_n_freq_i))\n for log_n_freq_i in np.linspace(np.log10(n_freq_lo), np.log10(n_freq_hi), n_n_freq)]\n\n # Generate timing results for each n_freq\n n_burn = 1\n for i_n_freq, n_freq_i in enumerate(n_freq_list):\n\n # Initialize buffer (saves time for repeated calls)\n if(use_buffer and not legacy):\n buf = lalsimulation.PhenomPCore_buffer(int(n_freq_i), n_streams)\n else:\n buf = None\n\n # Initialize the model call (apply some unit conversions here)\n lal_inputs = model.inputs(\n chi1=chi1,\n chi2=chi2,\n m1=m1,\n m2=m2,\n chip=chip,\n thetaJ=thetaj,\n alpha0=alpha0,\n distance=distance,\n phic=phic,\n fref=fref,\n freqs=[\n flow,\n fhigh,\n n_freq_i])\n\n # Create a timing callable\n t = timeit.Timer(lambda: lal_inputs.run(buf, legacy))\n\n # Burn a number of calls (to avoid contamination from Cuda context initialization if buf=None, for example)\n if(n_burn > 0):\n if(n_burn == 1):\n lal_cuda.log.comment(\"Burning a call: %f seconds.\" % (t.timeit(number=n_burn)))\n else:\n lal_cuda.log.comment(\"Burning %d calls: %f seconds.\" % (n_burn, t.timeit(number=n_burn)))\n n_burn = 0\n\n # Call the model n_avg times to generate the timing result\n wallclock_i = t.timeit(number=n_avg)\n\n # Print timing result\n if(len(n_freq_list) == 1):\n lal_cuda.log.comment(\"Average timing of %d calls: %.5f seconds.\" % (n_avg, wallclock_i / float(n_avg)))\n else:\n if(i_n_freq == 0):\n print(\"# Column 01: Iteration\")\n print(\"# 02: No. of frequencies\")\n print(\"# 03: Total time for %d calls [s]\" % (n_avg))\n print(\"# 04: Avg. time per call [s]\")\n print(\"%3d %8d %10.3le %10.3le\" % (i_n_freq, n_freq_i, wallclock_i, wallclock_i / float(n_avg)))\n\n # Clean-up buffer\n if(buf):\n lalsimulation.free_PhenomPCore_buffer(buf)\n\n # ... if n_avg<=1, then just run the model and exit.\n else:\n # Don't bother with a buffer (saves no time with just one call)\n buf = None\n\n # Initialize model call\n lal_inputs = model.inputs(\n chi1=chi1,\n chi2=chi2,\n m1=m1,\n m2=m2,\n chip=chip,\n thetaJ=thetaj,\n alpha0=alpha0,\n distance=distance,\n phic=phic,\n fref=fref,\n freqs=[\n flow,\n fhigh,\n n_freq])\n\n # Perform call\n lal_outputs = lal_inputs.run(buf=buf, legacy=legacy)\n\n # Write results to stdout &/or binary files\n if(write2bin):\n model.to_binary(lal_inputs, lal_outputs)\n if(write2stdout):\n print(model.to_string(lal_inputs, lal_outputs))\n\n # Check results against standards (if parameters match)\n if(check):\n model.calc_difference_from_reference(lal_inputs, lal_outputs)", "def timecalc():\n print(\"timecalc started. 
Ain't nobody got TIME for: \\n\")\n if len(sys.argv) == 2:\n print(\"single input argument, assuming this is a UTC epoch timestamp in ms\")\n dt = int(sys.argv[1])\n dt = datetime.datetime.utcfromtimestamp(dt / 1000.0)\n else:\n if \":\" in sys.argv[2]:\n dt = sys.argv[1] + \" \" + sys.argv[2]\n dt = datetime.datetime.strptime(dt, DATETIME_FORMAT)\n else:\n print(\"timecalc requires time in either UTC epoch time or datetime in {}\".format(DATETIME_FORMAT))\n raise ValueError('UTC datetime needs to be {}'.format(DATETIME_FORMAT))\n\n gpstime = utctoweekseconds(dt)\n towsec = gpstime[2] + (gpstime[3] / 1000000.0)\n\n print(\"UTC DATETIME: {} \\nGPS WEEK: {}, TOW: {}\".format(dt, gpstime[0], towsec))", "def get(input=None):\n if isinstance(input, datetime.datetime):\n return Elapsed((datetime.datetime.now() - input).total_seconds())\n if not input or isinstance(input, int):\n pid = input if input else os.getpid()\n output = os.popen(\"ps -p %s -o etime | grep -v ELAPSED\" %\n pid).read().strip()\n if output:\n return Elapsed(_parse_ps_output(output))", "def test_run_profile(dpa_model_spec, dpa_init):\n test_times = np.array(\n DateTime(['2019:001:00:00:00', '2019:001:12:00:00', '2019:002:00:00:00', '2019:003:00:00:00']).secs)\n pitch = np.array([150, 90, 156, 156])\n roll = np.array([0, -5, 10, 0])\n test_schedule = {'pitch': pitch, 'roll': roll}\n results = run_profile(test_times, test_schedule, '1dpamzt', dpa_model_spec, dpa_init)\n dpa_results = results['1dpamzt']\n assert np.all(dpa_results.mvals > -10)\n assert len(dpa_results.times) > 1", "def check_cflcushion(delt=0.1, cfl_cushion_upper=0.5, cfl_cushion_lower=0.1, code_dt_max=0.1, nstep=100):\n \n # Define some characteristic delta t's as log10()\n vec_cfl_dt_discrete = [-1., -2., -3., -3., -3., -3., -2., -3., -1., -1] \n vec_code_dt = [delt]; changes_in_delt = []\n print(0.1/0.22)\n print(0.1, 0.1/0.22*0.5)\n \n # Construct a continues vector of time steps\n vec_cfl_dt = []\n for i in range(len(vec_cfl_dt_discrete)-1):\n vec_cfl_dt += list(vec_cfl_dt_discrete[i] + np.array(range(nstep))/nstep*(vec_cfl_dt_discrete[i+1]-vec_cfl_dt_discrete[i]))\n vec_cfl_dt = 10**np.array(vec_cfl_dt) \n vec_step = range(len(vec_cfl_dt))\n \n # Mimic the CFL decrease condition\n for i, cfl_dt in enumerate(vec_cfl_dt):\n if (vec_code_dt[-1] > cfl_dt*cfl_cushion_upper):\n print(10**((np.log10(cfl_cushion_upper)+np.log10(cfl_cushion_lower))/2))\n vec_code_dt.append(cfl_dt*10**((np.log10(cfl_cushion_upper)+np.log10(cfl_cushion_lower))/2))\n changes_in_delt.append(i)\n print()\n print(f\"DECREASE! Because {vec_code_dt[-2]:6.2e} > {cfl_dt*cfl_cushion_upper:6.2e}\")\n print(f\" {cfl_dt*cfl_cushion_upper:6.2e} = cfl_dt*cfl_cushion_upper\")\n print(f\" {cfl_dt:6.2e} = cfl_dt\")\n print(f\" {vec_code_dt[-2]:6.2e} = code_dt\") \n print(f\" ==> code_dt = {vec_code_dt[-1]}\")\n elif (vec_code_dt[-1] < np.min([cfl_dt*cfl_cushion_lower, code_dt_max])):\n vec_code_dt.append(np.min([cfl_dt*10**((np.log10(cfl_cushion_upper)+np.log10(cfl_cushion_lower))/2), code_dt_max]))\n changes_in_delt.append(i)\n print()\n print(f\"INCREASE! 
Because {vec_code_dt[-2]:6.2e} < {np.min([cfl_dt*cfl_cushion_lower, code_dt_max]):6.2e}\")\n print(f\" {cfl_dt*cfl_cushion_lower:6.2e} = cfl_dt*cfl_cushion/delt_adjust\")\n print(f\" {cfl_dt:6.2e} = cfl_dt\")\n print(f\" {vec_code_dt[-2]:6.2e} = code_dt\") \n print(f\" ==> code_dt = {vec_code_dt[-1]}\")\n else:\n vec_code_dt.append(vec_code_dt[-1])\n \n # Create a figure\n fig = plt.figure(figsize=(18, 9)); fig.set_tight_layout(False)\n grid_specifications = gridspec.GridSpec(1,1)\n grid_specifications.update(top=0.98, left=0.05, right=0.95, bottom=0.06, wspace=0.35, hspace=0.45)\n ax = plt.subplot(grid_specifications[0])\n \n # Plot dt(istep)\n ax.plot(vec_step, vec_cfl_dt, color='black', label='CFL dt')\n ax.plot(vec_step, vec_cfl_dt*cfl_cushion_upper, color='black', alpha=0.5, label='CFL dt*CFL cushion upper')\n ax.plot(vec_step, vec_cfl_dt*cfl_cushion_lower, color='black', alpha=0.2, label='CFL dt*CFL cushion lower')\n ax.plot(vec_step, vec_code_dt[1:], color='maroon', label='code dt')\n \n # Highlight the changes \n if False:\n for change in changes_in_delt:\n ax.axvline(x=change, color='maroon', alpha=0.5, zorder=1)\n \n # Show figure\n ax.set_yscale('log')\n ax.autoscale()\n ax.legend(labelspacing=0.0, handlelength=1, shadow=True)\n plt.show()\n return", "def pc_work_time(self):\n return _TestA_swig.cleanslate_sptr_pc_work_time(self)", "def get_perf(target, output, mask):\n\n\toutput = np.stack(output, axis=0)\n\tmk = mask*np.reshape(target[:,:,0] == 0, (par['num_time_steps'], par['batch_size']))\n\n\ttarget = np.argmax(target, axis = 2)\n\toutput = np.argmax(output, axis = 2)\n\n\treturn np.sum(np.float32(target == output)*np.squeeze(mk))/np.sum(mk)", "def fuzz(binary, sample, verbose,loop_limit, prog):\n # PLACEHOLDER\n # Check magic bytes / struct of sample input -> make best guess for input format\n # This should be moved into mutation logic -> this is a shortcut for midpoint\n sample_processed = SampleParser(sample)\n \n try:\n # data: that one plaintext file\n # ASCII text: plaintext\n # JSON data: json\n # CSV text: csv\n # HTML document, ASCII text: xml2\n mutations = { # walmart switch statement\n 'JSON' : lambda sample_processed:JsonMutator(sample_processed.data, min=2, max=10),\n 'CSV': lambda sample_processed:CsvMutator(sample_processed.csv(), min=2, max=10),\n 'JFIF': lambda sample_processed:JpgMutator(sample_processed.jpg(), min=2, max=10),\n 'XML': lambda sample_processed:XmlMutator(sample_processed.xml(), min=2, max=10),\n 'HTML document, ASCII text': lambda sample_processed:XmlMutator(sample_processed.xml(), min=2, max=10)\n }[sample_processed.guess](sample_processed)\n except KeyError as e:\n print('Unmatched data type: {}, defaulting to generic mutator'.format(e))\n mutations = Mutator(sample_processed.data)\n # need a default: ascii\n except Exception as e:\n print(\"mutator fucked up: {}\".format(e))\n \n print('Running fuzzer with a {} second limit...'.format(TIME_LIMIT))\n\n # nevermind\n strategy = mutations.complex_mutate\n \n # Loop for whole timelimit \n # In future - try multiple strategies in time limit\n cov = float(0)\n last = 0\n while(1):\n prog.iterations += 1 \n if (prog.iterations - last >= loop_limit):\n prog.reinit_breakpoints\n cov = float(0)\n mutations.reset()\n\n # in future, call parent method -> give me a mutation.. \n current_input = strategy()\n\n # Spawn process - should be stopped after exec. 
\n pid, status = prog.spawn_process(stdout=False)\n prog.getregs()\n # Now that the process has been spawned, we can populate the breakpoints\n prog.populate_breakpoints()\n if verbose:\n print(current_input)\n print(\"coverage: {}, this run: {}\".format(prog.coverage(), cov))\n print(\"pid {}\".format(pid))\n #prog.breakpoint_status()\n\n # Start the process proper \n prog.cont()\n prog.send(current_input) \n\n # simulate EOF \n prog.close_input() \n # why in the everloving fuck does RESIZING A TERMINAL alter the behaviour of waitpid ????????\n # sigwinch. thats why. \n \n if prog.coverage() > cov:\n cov = prog.coverage()\n mutations.add_pop(current_input)\n last = prog.iterations\n # Wait for something to happen. \n while(1):\n # sigsegv doesn't count as a termination signal.\n # since it gets caught by ptrace (only sigkill goes through ptrace) \n # WSTOPSIG == 11 == SIGSEGV -> segfault\n\n \n\n pid, status = prog.wait()\n if(os.WIFSTOPPED(status) and (os.WSTOPSIG(status) == signal.SIGSEGV)):\n # Placeholder -> Need to create file with crash input and integrate \n # fuzzing engine. \n\n # Update stats\n prog.getregs()\n prog.crash_eips.append(prog.registers.eip) \n #if verbose:\n # print(\"Input crashed program with signal: {}\".format(os.WSTOPSIG(status)))\n\n with open(\"bad.txt\", \"ab+\") as f:\n # write the byte string\n # since most formats have newlines in them\n f.write(str(current_input).encode(\"unicode-escape\") + b\"\\n\")\n break\n # we have hit one of our basic block breakpoints\n elif(os.WIFSTOPPED(status) and (os.WSTOPSIG(status) == signal.SIGTRAP)):\n # we need to decrement eip, replace the breakpoint with its saved value\n prog.restore_current_bp() \n\n elif(os.WIFEXITED(status)):\n break\n\n #prog.step()\n prog.cont()", "def simple_output_equals_input_single():\n examples = [\n benchmark.Example(\n inputs={\n 'tens': [10, 20, 30, 40, 50],\n },\n output=[10, 20, 30, 40, 50],\n ),\n ]\n constants = [999]\n description = 'find the right input variable'\n target_program = 'tens'\n source = 'handwritten task'\n return benchmark.Benchmark(examples=examples,\n constants=constants,\n description=description,\n target_program=target_program,\n source=source,\n name='simple_output_equals_input_single')", "def parse_peal_speed(peal_speed: str) -> int:\n\n def exit_with_message(error_text: str) -> NoReturn:\n \"\"\"Raise an exception with a useful error message.\"\"\"\n raise PealSpeedParseError(peal_speed, error_text)\n\n # Strip whitespace from the argument, so that if the user is in fact insane enough to pad their\n # CLI arguments with whitespace then they can do so and not crash the program. 
This also has\n # the side effect of cloning the input string so we can freely modify it.\n stripped_peal_speed = peal_speed.strip()\n\n # Remove the 'm' from the end of the peal speed - it doesn't add any clarity\n if stripped_peal_speed.endswith(\"m\"):\n stripped_peal_speed = stripped_peal_speed[:-1]\n\n if \"h\" in stripped_peal_speed:\n # Split the peal speed into its hour and minute components, and print a helpful message\n # if there are too many parts\n split_parts = stripped_peal_speed.split(\"h\")\n if len(split_parts) > 2:\n exit_with_message(\"The peal speed should contain at most one 'h'.\")\n\n hour_string, minute_string = split_parts\n # Strip the input values so that the user can put whitespace into the input if they want\n hour_string = hour_string.strip()\n minute_string = minute_string.strip()\n\n # Parse the hours value, and print messages if it is invalid\n try:\n hours = int(hour_string)\n except ValueError:\n exit_with_message(f\"The hour value '{hour_string}' is not an integer.\")\n\n if hours < 0:\n exit_with_message(f\"The hour value '{hour_string}' must be a positive integer.\")\n\n # Parse the minute value, and print messages if it is invalid\n try:\n minutes = 0 if minute_string == \"\" else int(minute_string)\n except ValueError:\n exit_with_message(f\"The minute value '{minute_string}' is not an integer.\")\n\n if minutes < 0:\n exit_with_message(f\"The minute value '{minute_string}' must be a positive integer.\")\n if minutes > 59:\n exit_with_message(f\"The minute value '{minute_string}' must be smaller than 60.\")\n\n return hours * 60 + minutes\n\n # If the user doesn't put an 'h' in their string, then we assume it's just a minute value that\n # may or may not be bigger than 60\n try:\n minutes = int(stripped_peal_speed)\n except ValueError:\n exit_with_message(f\"The minute value '{stripped_peal_speed}' is not an integer.\")\n\n if minutes < 0:\n exit_with_message(f\"The minute value '{stripped_peal_speed}' must be a positive integer.\")\n\n return minutes", "def compute_time(size, data1, data2):\n\n print(\"List size:\", size)\n my_list = list(range(size)) # Make list [0, 1, 2, 3, ..., size - 1]\n start = clock() # Start the clock\n ans = is_ascending(my_list) # Compute answer\n elapsed1 = clock() - start # Stop the clock\n print(\" is_ascending: {} Elapsed: {:12.7f}\".format(ans, elapsed1))\n start = clock() # Start the clock\n ans = is_ascending2(my_list) # Compute answer\n elapsed2 = clock() - start # Stop the clockt\n print(\" is_ascending2: {} Elapsed: {:12.7f}\".format(ans, elapsed2))\n print(\" Speedup: {:6.1f}\".format(elapsed1/elapsed2)) # Compute speedup\n print()\n data1.append((size, elapsed1))\n data2.append((size, elapsed2))", "def compare_cow_transport_algorithms():\r\n cows_test = load_cows(\"ps1_cow_data.txt\")\r\n time0 = time.time()\r\n greedy = greedy_cow_transport(cows_test)\r\n time1 = time.time()\r\n brute = brute_force_cow_transport(cows_test)\r\n time2 = time.time()\r\n print (\"# of Trips by Greedy Algorithm:\", len(greedy))\r\n print (\"# of Trips by Brute Force Algo:\", len(brute))\r\n \r\n print (\"Greedy Algo took time:\", time1-time0)\r\n print (\"Brute Force Algo took time:\", time2-time1)\r\n pass", "def test_multilabel_hamming_distance_dtype_cpu(self, inputs, dtype):\n preds, target = inputs\n if (preds < 0).any() and dtype == torch.half:\n pytest.xfail(reason=\"torch.sigmoid in metric does not support cpu + half precision\")\n self.run_precision_test_cpu(\n preds=preds,\n target=target,\n 
metric_module=MultilabelHammingDistance,\n metric_functional=multilabel_hamming_distance,\n metric_args={\"num_labels\": NUM_CLASSES, \"threshold\": THRESHOLD},\n dtype=dtype,\n )", "def _calc_estimated_chops_from_timepoints(self):\n if self.verbose:\n logger.info(\"Start calc Chop Times: length: {} raw time: {}\".format(self.length,self.times[-1]))\n \n self._chops = None\n self._indices = None\n self._estimated_chops = None\n self._estimated_indices = None\n \n #--- warn if times less than chop length\n if self.times[-1] <= self.length:\n logger.warning(\"<Raw Times> : {} smaler than <Chop Times> : {}\\n\\n\".format(self.times[-1],self.length))\n \n self._estimated_chops,self._estimated_indices = get_chop_times_indices(self.times,\n chop_length=self.length,\n exit_on_error=self.exit_on_error) \n \n if self.verbose:\n self.GetInfo()\n \n return self._estimated_chops,self._estimated_indices", "def test_unit_clebsch_delta_m(j1, j2):\n for _ in range(10):\n m1 = np.random.choice(np.arange(-j1, j1+1))\n m1p = np.random.choice(np.arange(-j1, j1+1))\n m2 = np.random.choice(np.arange(-j2, j2+1))\n m2p = np.random.choice(np.arange(-j2, j2+1))\n\n sum_match = 0\n sum_differ = 0\n for j3 in np.arange(abs(j1-j2), j1+j2+1):\n for m3 in np.arange(-j3, j3+1):\n c1 = clebsch(j1, j2, j3, m1, m2, m3)\n c2 = clebsch(j1, j2, j3, m1p, m2p, m3)\n sum_match += c1**2\n sum_differ += c1*c2\n assert sum_match == pytest.approx(1)\n assert sum_differ == pytest.approx(int(m1 == m1p and m2 == m2p))", "def test_check_cost():", "def simple_output_equals_input_multiple():\n examples = [\n benchmark.Example(\n inputs={\n 'ones': [1, 2, 3, 4, 5],\n 'tens': [10, 20, 30, 40, 50],\n 'hundreds': [100, 200, 300],\n },\n output=[10, 20, 30, 40, 50],\n ),\n ]\n constants = [999]\n description = 'find the right input variable'\n target_program = 'tens'\n source = 'handwritten task'\n return benchmark.Benchmark(examples=examples,\n constants=constants,\n description=description,\n target_program=target_program,\n source=source,\n name='simple_output_equals_input_multiple')", "def evaluate_clauses(self, input_: ByteTensor) -> ByteTensor:\n # Check that all set action bits are also set in the input.\n input_ = input_.expand_as(self.action)\n matches, _ = torch.min((self.action & input_).eq(self.action), 3)\n\n # Same check for inv_action and inv_input.\n inv_input = (~input_).expand_as(self.action)\n inv_matches, _ = torch.min((self.inv_action & inv_input).\n eq(self.inv_action), 3)\n\n # Clause is true if both tests pass.\n clause_result = matches & inv_matches\n return clause_result.view(*self.clause_shape)", "def approximateTime(meal):\n RATE = 4.2535969274764765e-05 # seconds per character.\n time = len(meal)**1 * RATE\n return time", "def question05(input):\r\n n_threat = input[0] # number of threats\r\n threats = [(input[i * 2 + 1], input[i * 2 + 2]) for i in range(n_threat)] # list of threats in (t, f) format\r\n threats.sort(key=lambda thr: thr[0]) # sort the list of threats by t\r\n instances = [{'t': threats[0][0], 'f': threats[0][1]}] # initial list of instances in {'t': t, 'f': f} format\r\n for threat in threats:\r\n counter = False\r\n instances.sort(key=lambda inst: inst['t'], reverse=True) # sort the list of instances by t in reverse order\r\n for instance in instances:\r\n if abs(instance['f'] - threat[1]) <= threat[0] - instance['t']: # can counter\r\n counter = True\r\n instance['t'] = threat[0] # update the instance's current t\r\n instance['f'] = threat[1] # update the instance's current f\r\n break\r\n if not 
counter: # cannot counter\r\n instances.append({'t': threat[0], 'f': threat[1]}) # add a new instance\r\n return len(instances)", "def run_timing():\n time_log = []\n while True:\n one_run = input(\"Enter your time for this 10 km: \")\n if not one_run:\n break\n try:\n time_log.append(float(one_run))\n except ValueError:\n print(\n \"Hey, you enter something strange, \"\n \"please enter a valid number\")\n avg_time = sum(time_log) / len(time_log)\n return f\"Your average time is about: {avg_time:.1f} \" \\\n f\"over {len(time_log)} runs\"", "def test_compare_difference_string_slower(self):\n test_algorithm = 'bubble'\n test_algorithm_time = 5\n test_sorted_time = 1\n result = calculate_compare_time_difference(test_algorithm_time, test_sorted_time, test_algorithm)\n self.assertEqual('bubble was 4 seconds slower.', result)", "def tests() -> None:\n assert input_parser(\"123\") == '123'\n assert input_parser(\"(add 12 12)\") == '24'\n assert input_parser(\"(add 0 (add 3 4))\") == '7'\n assert input_parser(\"(add 3 (add (add 3 3) 3))\") == '12'\n assert input_parser(\"(multiply 3 (multiply (multiply 3 3) 3))\") == '81'\n assert input_parser(\"(multiply 2 (multiply 3 4))\") == '24'\n assert input_parser(\"(multiply 0 (multiply 3 4))\") == '0'\n\n assert input_parser(\"(add 4 1)\") == '5'\n assert input_parser(\"(multiply 4 1)\") == '4'\n \n assert input_parser(\"(add 4 (add 1 8))\") == '13'\n assert input_parser(\"(add (add 1 8) 4)\") == '13'\n assert input_parser(\"(multiply (multiply 1 2) 12)\") == '24'\n assert input_parser(\"(multiply 4 (multiply 8 12))\") == '384'\n\n assert input_parser(\"(add (multiply 4 5) (multiply 10 10))\") == '120'\n assert input_parser(\"(add (multiply (add 4 (add 3 (add 3 (add 3 (add 1 (multiply 4 5)))))) 5) (multiply 10 10))\") == '270'\n \n assert input_parser(\"(add (multiply 4 5) (multiply 10 10) (add 1 2 3 4 5 6 7 (add 4 4) 9) (multiply 4 5))\") == '185'\n\n assert input_parser('(subtract 2 1)') == '1'\n assert input_parser(\"(divide 55 5)\") == '11'", "def benchmark_tsp(problem_source, num_tries, ortools_config, dwave_config):\n dwave_engine = DWaveEngine.default()\n qbsolv_engine = QBSolvEngine.default()\n results = []\n ortools_calculation_time = ortools_config.get('calculation_time', 5)\n for i, problem in enumerate(problem_source, 1):\n for j in range(1, num_tries+1):\n print('Benchmarking problem {0}, try {1}.'.format(i, j))\n start = time()\n dwave_config['annealing_time'] = 20\n dwave_solution_1, number_of_samples_1, info_1 = dwave_engine.solve(problem, **dwave_config)\n dwave_time_1 = time() - start\n\n start = time()\n dwave_config['annealing_time'] = 40\n dwave_solution_2, number_of_samples_2, info_2 = dwave_engine.solve(problem, **dwave_config)\n dwave_time_2 = time() - start\n\n start = time()\n dwave_config['annealing_time'] = 60\n dwave_solution_3, number_of_samples_3, info_3 = dwave_engine.solve(problem, **dwave_config)\n dwave_time_3 = time() - start\n\n start = time()\n qbsolv_solution = qbsolv_engine.solve(problem, **dwave_config)\n qbsolv_time = time() - start\n\n if number_of_samples_1 is not None:\n sample_percentage_1 = number_of_samples_1 / dwave_config['num_reads'] * 100\n else:\n sample_percentage_1 = np.nan\n\n if number_of_samples_2 is not None:\n sample_percentage_2 = number_of_samples_2 / dwave_config['num_reads'] * 100\n else:\n sample_percentage_2 = np.nan\n\n if number_of_samples_3 is not None:\n sample_percentage_3 = number_of_samples_3 / dwave_config['num_reads'] * 100\n else:\n sample_percentage_3 = np.nan\n\n\n # start = 
time()\n # ortools_solution = calculate_routes(outposts=problem.outposts,\n # vehicles=problem.vehicles,\n # graph=problem.graph,\n # starting_point=problem.starting_point,\n # calculation_time=ortools_calculation_time)\n # ortools_time = time() - start\n optimal_cost = utilities.compute_all_tsp_solutions(problem.graph)[0][1]\n record = Record(\n problem_num=i,\n try_num=j,\n dwave_time_1=dwave_time_1,\n dwave_qpu_time_1=info_1['timing']['total_real_time']/10e6,\n dwave_time_2=dwave_time_2,\n dwave_qpu_time_2=info_2['timing']['total_real_time']/10e6,\n dwave_time_3=dwave_time_3,\n dwave_qpu_time_3=info_3['timing']['total_real_time']/10e6,\n\n qbsolv_time=qbsolv_time,\n sample_percentage_1=sample_percentage_1,\n sample_percentage_2=sample_percentage_2,\n sample_percentage_3=sample_percentage_3,\n\n dwave_cost_1=np.nan if dwave_solution_1 is None else dwave_solution_1.total_cost(),\n dwave_cost_2=np.nan if dwave_solution_2 is None else dwave_solution_2.total_cost(),\n dwave_cost_3=np.nan if dwave_solution_3 is None else dwave_solution_3.total_cost(),\n\n qbsolv_cost=np.nan if qbsolv_solution is None else qbsolv_solution.total_cost(),\n optimal_cost=optimal_cost)\n results.append(record)\n return pd.DataFrame(results)", "def test_handcrafted_examples(self):\n for i in range(1000):\n self.assertEqual(perfectd(0), True)\n self.assertEqual(prime(0), False)\n self.assertEqual(prime(2), True)\n self.assertEqual(prime(7), True)\n self.assertEqual(prime(15), False)\n self.assertEqual(perfectd(6), True)\n self.assertEqual(perfectd(15), False)", "def sim_chimes(scenarios: str, params: Parameters = None,\n current_hospitalized: int = None,\n doubling_time: float = None,\n known_infected: int = None,\n relative_contact_rate: float = None,\n susceptible: int = None,\n hospitalized: RateLos = None,\n icu: RateLos = None,\n ventilated: RateLos = None,\n market_share: float = None,\n n_days: int = None,\n recovery_days: float = None,\n ):\n\n if params is not None:\n params_dict = vars(params)\n else:\n params_dict = {\"current_hospitalized\": None,\n \"doubling_time\": None,\n \"known_infected\": None,\n \"relative_contact_rate\": None,\n \"susceptible\": None,\n \"hospitalized\": None,\n \"icu\": None,\n \"ventilated\": None,\n \"market_share\": None,\n \"n_days\": None,\n \"recovery_days\": None,\n }\n\n # Check for parameter updates passed\n vals_passed = {key: value for (key, value) in vars().items()\n if key not in ['scenario', 'params']}\n\n for key, value in vals_passed.items():\n if value is not None:\n params_dict[key] = value\n\n # Create Parameters object\n p = Parameters(\n current_hospitalized=params_dict['current_hospitalized'],\n doubling_time=params_dict['doubling_time'],\n known_infected=params_dict['known_infected'],\n market_share=params_dict['market_share'],\n n_days=params_dict['n_days'],\n relative_contact_rate=params_dict['relative_contact_rate'],\n susceptible=params_dict['susceptible'],\n hospitalized=params_dict['hospitalized'],\n icu=params_dict['icu'],\n ventilated=params_dict['ventilated'],\n )\n\n base_input_params_dict = vars(p)\n\n # Create a range of social distances\n\n soc_dists = np.arange(0.05, 0.60, 0.05)\n # array([0.05, 0.1 , 0.15, 0.2 , 0.25, 0.3 , 0.35, 0.4 , 0.45, 0.5 , 0.55,\n # 0.6 , 0.65, 0.7 , 0.75, 0.8 , 0.85])\n\n num_scenarios = len(soc_dists)\n\n # We can store outputs any way we want. For this demo, just going to\n # use a master list. 
# This will be a list of dicts of the\n # result dataframes (+ 1 dict containing the scenario inputs)\n\n results_list = []\n\n for sdpct in soc_dists:\n sim_scenario = '{}{:.0f}'.format(scenarios, 100 * sdpct)\n\n # Update the parameters for this scenario\n p.relative_contact_rate = sdpct\n input_params_dict = vars(p)\n\n # Run the model\n m = SimSirModel(p)\n\n # Gather results\n results = gather_sim_results(m, sim_scenario, input_params_dict)\n\n # Append results to results list\n\n results_list.append(results.copy())\n\n return results_list", "def benchmark(self, **kwargs):\n num_iterations = kwargs.get(\"benchmark_iterations\")\n\n start_time = time.time()\n\n # store how far off we are\n deviations = []\n\n for _ in xrange(num_iterations):\n kwargs[\"roll\"] = decimal.Decimal(random.uniform(\n self.MIN_BENCHMARK_ROLL, self.MAX_BENCHMARK_ROLL))\n kwargs[\"pitch\"] = decimal.Decimal(random.uniform(\n self.MIN_BENCHMARK_PITCH, self.MAX_BENCHMARK_PITCH))\n\n _, deviation = self.find_closest_trajectory(**kwargs)\n deviations.append(deviation)\n\n # calculate results from the benchmarking\n total_time = time.time() - start_time\n average_time = total_time / num_iterations\n average_deviation = sum(deviations) / len(deviations)\n\n print \"AVERAGE TIME: %s AVERAGE DEVIATION: %s\" \\\n % (average_time, average_deviation)", "def test_binary_hamming_distance_differentiability(self, inputs):\n preds, target = inputs\n self.run_differentiability_test(\n preds=preds,\n target=target,\n metric_module=BinaryHammingDistance,\n metric_functional=binary_hamming_distance,\n metric_args={\"threshold\": THRESHOLD},\n )", "def test_solver1c(N, version='scalar'):\n def I(x): return sin(2*x*pi/L)\n def f(x, t): return 0\n\n class Action:\n def __init__(self):\n self.solutions = []\n self.time_level_counter = 0\n\n def __call__(self, u, x, t):\n if self.time_level_counter % N == 0:\n self.solutions.append(u.copy())\n self.time_level_counter += 1\n\n action = Action()\n n = 100; tstop = 6; L = 10\n dt, x, cpu = solver(I, f, 1.0, lambda t: 0, lambda t: 0,\n L, n, 0, tstop,\n user_action=action, version=version)\n print 'CPU time:', cpu\n print 'Max value in final u:', arrmax(action.solutions[-1])", "def test_cfu(self):\n DATA = [\n # byte add\n ((0, 0, 0), 0),\n ((0, 0x01020304, 0x01020304), 20),\n ((0, 0x01010101, 0xffffffff), 1024),\n # byte swap\n ((1, 0x01020304, 0xffffffff), 0x04030201),\n ((1, 0x0102ff00, 0xffffffff), 0x00ff0201),\n # bit swap\n ((2, 0x01020304, 0xffffffff), 0x20c04080),\n ((2, 0xffffffff, 0xffffffff), 0xffffffff),\n ((2, 0x10203040, 0xffffffff), 0x020c0408),\n # Factorial\n ((3, 1, 0), 1),\n ((3, 2, 0), 2),\n ((3, 3, 0), 6),\n ((3, 4, 0), 24),\n ]\n\n def process():\n for n, (inputs, expected_output) in enumerate(DATA):\n func, i0, i1 = inputs\n yield self.dut.cmd_function_id.eq(func)\n yield self.dut.cmd_in0.eq(i0)\n yield self.dut.cmd_in1.eq(i1)\n yield self.dut.cmd_valid.eq(1)\n yield self.dut.rsp_ready.eq(0)\n yield\n yield self.dut.cmd_valid.eq(0)\n yield self.dut.rsp_ready.eq(1)\n yield Delay(0.1)\n assert (yield from self.wait_response_valid()), (\n \"op{func}({i0:08X}, {i1:08X}) failed to complete\")\n actual_output = (yield self.dut.rsp_out)\n assert actual_output == expected_output, (\n f\"\\nHEX: op{func}(0x{i0:08X}, 0x{i1:08X}) expected: {expected_output:08X} got: {actual_output:08X}\" +\n f\"\\nDEC: op{func}(0x{i0}, 0x{i1}) expected: {expected_output} got: {actual_output}\")\n yield\n self.run_sim(process, True)", "def test_unit_clebsch_delta_j(j1, j2):\n for _ in 
range(10):\n j3 = np.random.choice(np.arange(abs(j1-j2), j1+j2+1))\n j3p = np.random.choice(np.arange(abs(j1-j2), j1+j2+1))\n m3 = np.random.choice(np.arange(-j3, j3+1))\n m3p = np.random.choice(np.arange(-j3p, j3p+1))\n\n sum_match = 0\n sum_differ = 0\n for m1 in np.arange(-j1, j1+1):\n for m2 in np.arange(-j2, j2+1):\n c1 = clebsch(j1, j2, j3, m1, m2, m3)\n c2 = clebsch(j1, j2, j3p, m1, m2, m3p)\n sum_match += c1**2\n sum_differ += c1*c2\n assert sum_match == pytest.approx(1)\n assert sum_differ == pytest.approx(int(j3 == j3p and m3 == m3p))" ]
[ "0.57208085", "0.54804933", "0.5131725", "0.50099516", "0.49980843", "0.49875277", "0.4948482", "0.49447292", "0.49406472", "0.4887707", "0.4879715", "0.48475006", "0.48428223", "0.4833638", "0.48149478", "0.48010787", "0.4800391", "0.4798947", "0.47890905", "0.47886792", "0.4788516", "0.47770703", "0.47671026", "0.47664678", "0.47606117", "0.47534144", "0.47014824", "0.46926782", "0.46814704", "0.46700004", "0.46557334", "0.46546942", "0.46458036", "0.46278036", "0.45954975", "0.4593754", "0.45876306", "0.4587425", "0.4568385", "0.456452", "0.45572197", "0.4555816", "0.4545524", "0.45353657", "0.45334008", "0.45333233", "0.45187634", "0.4514715", "0.4513632", "0.45134088", "0.4512085", "0.45108056", "0.45064178", "0.44932055", "0.44908252", "0.44838363", "0.44752672", "0.4464844", "0.44614592", "0.44579023", "0.4457767", "0.4456906", "0.44520134", "0.44426873", "0.44345674", "0.44338778", "0.4433609", "0.4429844", "0.44294816", "0.44275144", "0.44266087", "0.4426093", "0.4423696", "0.44231915", "0.4419912", "0.44174173", "0.44149548", "0.4414671", "0.44129348", "0.44109744", "0.44077477", "0.4403352", "0.44020587", "0.4401605", "0.44010636", "0.43997493", "0.43954477", "0.43931076", "0.4388858", "0.43831238", "0.437797", "0.43737864", "0.4373318", "0.43643582", "0.43643305", "0.43630597", "0.43621457", "0.43587366", "0.43477738", "0.43442103" ]
0.60869694
0
Placeholder function used just to warm up the parallel mapper prior to benchmarking.
def identity(x: int) -> int: return x
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def warmup():\n return ''", "def warmup():\n return ''", "def warmup():\n return ''", "def parallelizer(func, arg=False):\n if arg:\n func(arg)\n else:\n func()", "def pfmap(func, workers=8):\n return fmap(func)", "def mapper(fun: Callable[[str], Pin], /) -> None:", "def warmup():\n\treturn ''", "def warmup():\n\treturn ''", "def test_map_args_include_time():\n pass", "def _map_fn(self):\n raise NotImplementedError", "def par(func):\n opt[\"par\"].add(key(func))\n return func", "def mapper() -> Callable[[str], Pin]:", "def map(self, function):\n pass", "def parallel_map(work_func, *sequences, **kwargs):\n # kwargs\n cores = kwargs.get('cores', None)\n ordered = kwargs.get('ordered', False)\n buffer_factor = kwargs.get('buffer_factor', 2.0)\n use_multiprocessing = kwargs.get('use_multiprocessing', False)\n heart_beat = kwargs.get('heart_beat', 0.001)\n fill_activate = 'fill_void' in kwargs\n fill_value = kwargs.get('fill_void', None)\n name = kwargs.get('name', None)\n\n if name:\n log = logging.getLogger(__name__ + '[%s]' % name)\n else:\n log = logging.getLogger(__name__)\n\n if heart_beat <= 0:\n raise ValueError(\"heart_beat must be >0.\")\n\n if cores is None or cores <= 0:\n cores = multiprocessing.cpu_count()\n log.debug(\"Using all cores (%d)\", cores)\n else:\n log.debug(\"Only using %d cores\", cores)\n\n # Choose parallel types\n if use_multiprocessing:\n queue_t = multiprocessing.Queue\n worker_t = _WorkerProcess\n else:\n queue_t = queue.Queue\n worker_t = _WorkerThread\n\n queue_work = queue_t(int(cores * buffer_factor))\n queue_results = queue_t(int(cores * buffer_factor))\n\n log.log(1, \"Constructing worker processes\")\n workers = [worker_t(name, i, work_func, queue_work, queue_results,\n heart_beat)\n for i in range(cores)]\n\n log.log(1, \"Constructing feeder thread\")\n feeder_thread = _FeedQueueThread(name, sequences, queue_work,\n len(workers), heart_beat, fill_activate,\n fill_value)\n\n return ParallelResultsIterator(name, ordered, use_multiprocessing,\n heart_beat, queue_work,\n queue_results, feeder_thread, workers)", "def map():", "def _doMap(self, func, iterable):\n name = \"Mapper\"\n sys.stderr.write(\"Master[%s phase]: starting\\n\" % name)\n pipes = [mp.Pipe() for _ in range(self.num_workers)]\n proc = [mp.Process(target=spawn_mapper(func), name=name, args=(q,)) for q in pipes]\n for p in proc:\n p.daemon = True\n p.start()\n for output_p, input_p in pipes:\n input_p.close() # we don't need to read from the pipes\n qi = 0\n for item in iterable:\n pipes[qi][0].send(item)\n qi = (qi+1) % self.num_workers\n for q,_ in pipes:\n q.send(None) # add termination tokens\n q.close()\n for p in proc:\n p.join()\n sys.stderr.write(\"Master[%s phase]: ended..\\n\" % name)", "def test_parallel_kwargs():\r\n lst = range(10)\r\n for n_jobs in (1, 4):\r\n yield (nose.tools.assert_equal,\r\n [f(x, y=1) for x in lst],\r\n Parallel(n_jobs=n_jobs)(delayed(f)(x, y=1) for x in lst)\r\n )", "def foreach(function):\n return partial(map, function)", "def __init__(self):\n self.map = [None] * 1000", "def task_mapper(task_function, task_iterable, parallel_procs=None):\n\n num_procs = get_num_processors(parallel_procs)\n\n if num_procs == 0:\n LOG.debug('Using serial task processor...')\n return serial_pc(task_function, task_iterable)\n else:\n LOG.debug('Using %d-parallel task processors...', num_procs)\n return parallel_pc(task_function, task_iterable, num_procs)", "def example5(name):\n print(\"main started\") # main thread\n with 
concurrent.futures.ThreadPoolExecutor(max_workers=3) as e:\n e.map(my_func, ['realpython', 'bar', 'haha'])\n print(\"main ended\")", "def test_map_args_all_none():\n pass", "def map(_, params):\n import numpy as np\n from itertools import product\n from random import shuffle\n\n if 'param_set' in params:\n parameter_sets = params['param_set']\n else:\n alphas = params['alphas']\n Vs = params['Vs']\n gammas = params['gammas']\n parameter_sets = [item for item in product(alphas, gammas, Vs)]\n shuffle(parameter_sets)\n\n ## discretize the parameter configurations and equitably distribute\n ## them for the next map instance to deal with.\n chunk_length = len(parameter_sets)/params['nprocs']\n leftover = len(parameter_sets) % params['nprocs']\n for n in xrange(params['nprocs']):\n if n < leftover:\n left = n*(1+chunk_length)\n to_yield = parameter_sets[left:left+1+chunk_length]\n else:\n left = leftover*(1+chunk_length) + (n-leftover)*chunk_length\n to_yield = parameter_sets[left:left+chunk_length]\n #print n, to_yield, len(to_yield)\n yield (n, to_yield)", "def test_map_args_no_kwargs():\n pass", "def benchmark(func):\n def wrapper(*args, **kwargs):\n t = time.process_time()\n res = func(*args, **kwargs)\n t_sec = round((time.process_time()-t) % 60,1)\n t_min = int((time.process_time()-t)/ 60)\n ls.logger.info(f'Application function {func.__name__} execution time {t_min} [min] {t_sec} [sec]')\n return res\n return wrapper", "def fastMap(mapper, data):\n i = 0\n ans = []\n while i < len(data):\n with Pool(MAX_POOL_SIZE) as pool:\n ans.extend(pool.map(mapper, data[i:i+MAX_POOL_SIZE]))\n i += MAX_POOL_SIZE\n\n return ans", "def __init__(\r\n self,\r\n mapper_grids: MapperGrids,\r\n regularization: Optional[AbstractRegularization],\r\n run_time_dict: Optional[Dict] = None,\r\n ):\r\n super().__init__(\r\n mapper_grids=mapper_grids,\r\n regularization=regularization,\r\n run_time_dict=run_time_dict,\r\n )", "def preprocess_func(cls, func):\n return DaskWrapper.put(func, hash=False, broadcast=True)", "def jit(func):\n return func", "def mock_grid_map(fn, args_list, **kwargs):\n assert callable(fn)\n assert \"mem_free\" in kwargs and kwargs[\"mem_free\"] == \"2G\"\n assert \"queue\" in kwargs and kwargs[\"queue\"] == \"cognition-all.q\"\n assert \"require_cluster\" in kwargs and kwargs[\"require_cluster\"]\n assert \"add_env\" in kwargs\n add_env = kwargs[\"add_env\"]\n assert \"USE_MEM_FREE\" in add_env\n assert \"CREATE_PLOTS\" in add_env\n for agent_1, agent_2 in args_list:\n yield GameResult(AgentResult(agent_1), AgentResult(agent_2))", "def _map_task(\n fn: Callable[[Iterator[Block]], Iterator[Block]], *blocks: Block\n) -> Iterator[Block]:\n output_metadata = []\n stats = BlockExecStats.builder()\n for b_out in fn(iter(blocks)):\n m_out = BlockAccessor.for_block(b_out).get_metadata([], None)\n m_out.exec_stats = stats.build()\n output_metadata.append(m_out)\n yield b_out\n stats = BlockExecStats.builder()\n yield output_metadata", "def _post_proc_para_wrapper(pred_map_mmap_path, tile_info, func, func_kwargs):\n idx, tile_tl, tile_br = tile_info\n wsi_pred_map_ptr = np.load(pred_map_mmap_path, mmap_mode=\"r\")\n tile_pred_map = wsi_pred_map_ptr[tile_tl[0] : tile_br[0], tile_tl[1] : tile_br[1]]\n tile_pred_map = np.array(tile_pred_map) # from mmap to ram\n return func(tile_pred_map, **func_kwargs), tile_info", "def iterate_mproc_map(wrap_func, iterate_vals, nb_workers=CPU_COUNT, desc='', ordered=True):\n iterate_vals = list(iterate_vals)\n nb_workers = 1 if not nb_workers else int(nb_workers)\n 
nb_workers = CPU_COUNT if nb_workers < 0 else nb_workers\n\n if desc is not None:\n pbar = tqdm.tqdm(total=len(iterate_vals), desc=str('%r @%i-threads' % (desc, nb_workers)))\n else:\n pbar = None\n\n if nb_workers > 1:\n logging.debug('perform parallel in %i threads', nb_workers)\n # Standard mproc.Pool created a demon processes which can be called\n # inside its children, cascade or multiprocessing\n # https://stackoverflow.com/questions/6974695/python-process-pool-non-daemonic\n\n # pool = mproc.Pool(nb_workers)\n # pool = NonDaemonPool(nb_workers)\n pool = ProcessPool(nb_workers)\n # pool = Pool(nb_workers)\n mapping = pool.imap if ordered else pool.uimap\n else:\n logging.debug('perform sequential')\n pool = None\n mapping = map\n\n for out in mapping(wrap_func, iterate_vals):\n pbar.update() if pbar else None\n yield out\n\n if pool:\n pool.close()\n pool.join()\n pool.clear()\n\n pbar.close() if pbar else None", "def benchmark(func):\n start = time.time()\n\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n rc = func(*args, **kwargs)\n print('Running time: {}'.format(time.time() - start))\n return rc\n return wrapper", "def __init__(self, set_args, load_sensor_names,\n sensor_names, \n cnt_preprocessors, marker_def):\n self.__dict__.update(locals())\n del self.self\n if self.load_sensor_names == 'all':\n self.load_sensor_names = None", "def pre_processor(self):", "def test_mapper_overoptimization(self):\n self.qp.load_qasm_file(self._get_resource_path('qasm/overoptimization.qasm'), name='test')\n coupling_map = {0: [2], 1: [2], 2: [3], 3: []}\n result1 = self.qp.execute([\"test\"], backend=\"local_qasm_simulator\", coupling_map=coupling_map)\n count1 = result1.get_counts(\"test\")\n result2 = self.qp.execute([\"test\"], backend=\"local_qasm_simulator\", coupling_map=None)\n count2 = result2.get_counts(\"test\")\n self.assertEqual(count1.keys(), count2.keys(), )", "def test_workon_with_parallel_backend(self):\n\n def foo(x):\n return [dict(name=\"result\", type=\"objective\", value=x * 2)]\n\n import joblib\n\n with joblib.parallel_backend(\"loky\"):\n experiment = workon(\n foo, space={\"x\": \"uniform(0, 10)\"}, max_trials=5, name=\"voici\"\n )\n\n assert experiment.name == \"voici\"\n assert len(experiment.fetch_trials()) == 5\n\n with joblib.parallel_backend(\"loky\", n_jobs=-1):\n experiment = workon(\n foo, space={\"x\": \"uniform(0, 10)\"}, max_trials=3, name=\"voici\"\n )\n\n assert experiment.name == \"voici\"\n assert len(experiment.fetch_trials()) == 3", "def _optimise(self):\n pass", "def preprocess(generator):\n\n def preprocess_decorator(method):\n\n @wrapper(method)\n def preprocess_wrapper(self, *args, **kwargs):\n self.increment_pc()\n pc = tuple(self.program_counter)\n try:\n return self._pool.pop(pc), True\n except KeyError:\n key = (generator, args)\n pcs = self._needed_data.setdefault(key, [])\n pcs.append(pc)\n self.fork_pc()\n try:\n return method(self, *args, **kwargs), False\n finally:\n self.unfork_pc()\n\n return preprocess_wrapper\n return preprocess_decorator", "def precompute(self, features, mode, params):\n return None", "def __init__(self, mapper, workers, date, logger):\n \n self.mapper = mapper\n self.workers = workers\n self.date = date\n self.log = logger", "def do_benchmark(items, function_to_test, benchmark=None):\n def do():\n for _ in function_to_test(items):\n pass\n if benchmark is None:\n do()\n else:\n benchmark(do)", "def warmUpExercise():\n\n return np.identity(5)", "def translation_rule_cpu(func):\n # functions to call before 
running the translation rule\n setup_funcs = (\n functools.partial(ensure_platform_flush, \"cpu\"),\n ensure_omnistaging,\n )\n\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n for f in setup_funcs:\n f()\n return func(*args, **kwargs)\n\n return wrapped", "def dummy_wrapper(func):\n return func", "def post_instrument_class(self, mapper):\n pass", "def test_pmap_jit_disabled(self, mock_fake_pmap_and_jit):\n\n with self.subTest(\"PmapJitNotDisabled\"):\n with flagsaver.flagsaver(jaxline_disable_pmap_jit=False):\n utils.disable_pmap_jit(lambda: None)()\n mock_fake_pmap_and_jit.assert_not_called()\n\n with self.subTest(\"PmapJitDisabled\"):\n with flagsaver.flagsaver(jaxline_disable_pmap_jit=True):\n utils.disable_pmap_jit(lambda: None)()\n mock_fake_pmap_and_jit.assert_called_once()", "def precache_context_to_samples(source_path, target_path, num_parallel_precache=1):\n pass", "def test_initialized() -> None:\n MapieRegressor()", "def reduce_run():", "def with_cpu(ops, model):\n ...", "def profiled(func):\n @functools.wraps(func)\n def inner(*args, **kwargs):\n inner.ncalls += 1\n return func(*args, **kwargs)\n\n inner.ncalls = 0\n return inner", "def __init__(self, map):\n self.map = map\n self.fast_validate = (6, map)", "def define_parallel_training_functions(self):\n if self.post_lst is not None:\n self.p_train_step = jax.pmap(\n functools.partial(\n train_step_post,\n train_step_fn=self.train_step_fn,\n learning_rate_fn=self.lr_schedule,\n criterion=self.criterion,\n metrics_fn=self.metrics_fn,\n post_lst=self.post_lst,\n ),\n axis_name=\"batch\",\n )\n else:\n self.p_train_step = jax.pmap(\n functools.partial(\n self.train_step_fn,\n learning_rate_fn=self.lr_schedule,\n criterion=self.criterion,\n metrics_fn=self.metrics_fn,\n ),\n axis_name=\"batch\",\n )\n self.p_eval_step = jax.pmap(\n functools.partial(\n self.eval_step_fn, criterion=self.criterion, metrics_fn=self.metrics_fn\n ),\n axis_name=\"batch\",\n )", "def benchmark(func):\n import time\n @wraps(func)\n def wrapper(*args, **kwargs):\n t = time.clock()\n res = func(*args, **kwargs)\n print(func.__name__, time.clock()-t)\n return res\n return wrapper", "def applyMapping(self):\n pass", "def map_and_batch_with_legacy_function(map_func,\n batch_size,\n num_parallel_batches=None,\n drop_remainder=False,\n num_parallel_calls=None):\n\n if num_parallel_batches is None and num_parallel_calls is None:\n num_parallel_calls = batch_size\n elif num_parallel_batches is not None and num_parallel_calls is None:\n num_parallel_calls = batch_size * num_parallel_batches\n elif num_parallel_batches is not None and num_parallel_calls is not None:\n raise ValueError(\n \"`map_and_batch_with_legacy_function` allows only one of \"\n \"`num_parallel_batches` and \"\n \"`num_parallel_calls` to be set, but \"\n f\"`num_parallel_batches` was set to {num_parallel_batches} \"\n f\"and `num_parallel_calls` as set to {num_parallel_calls}.\")\n\n def _apply_fn(dataset):\n return _MapAndBatchDataset(dataset, map_func, batch_size,\n num_parallel_calls, drop_remainder,\n use_legacy_function=True)\n\n return _apply_fn", "def map(func, iterable, chunksize=None, ncpu=0, limit=True, progress=False):\n if (ncpu == 0):\n if (not progress):\n return _map(func, iterable)\n else:\n r = []\n if isinstance(progress, str):\n txt = progress\n else:\n txt = func.__name__\n for k in _PBar(desc=txt).iterover(iterable):\n r.append(func(k))\n return r\n elif progress:\n _n = _mp.cpu_count()\n if (ncpu <= 0):\n # use all available cpus\n p = _mp.Pool(_n)\n elif (ncpu 
> _n) & (limit is True):\n p = _mp.Pool(_n)\n else:\n p = _mp.Pool(ncpu)\n\n if not hasattr(iterable, '__len__'):\n iterable = list(iterable)\n ntasks = len(iterable)\n\n if isinstance(progress, str):\n txt = progress\n else:\n txt = func.__name__\n\n with _PBar(ntasks, desc=txt) as pb:\n # get the pool working asynchronously\n if islambda(func):\n amap = p.map_async(PicklableLambda(func), iterable, chunksize)\n else:\n amap = p.map_async(func, iterable, chunksize)\n left = 1\n while left > 0:\n _time.sleep(0.1)\n left = amap._number_left\n pb.update(ntasks - left)\n return amap.get()\n else:\n return map_async(func, iterable, chunksize, ncpu=ncpu, limit=limit).get()", "def multiprocess_callback(self, func):\n\n self.mul_func_map[func.__name__] = func", "def init():\n for task_kind, tasks in my_async.tasks.items():\n for task in tasks:\n my_async.async_func_map[task.origin_func] = task.origin_func", "def __init__(self, *args, **kwargs):\n # count the cores available on the local machine\n self.tasks = mp.cpu_count()\n super(ParallelPreprocessor, self).__init__(*args, **kwargs)", "def test_lazy_evaluation(self):\n pass", "def optimize_parameters(self):\n pass", "def optimize_parameters(self):\n pass", "def optimize_parameters(self):\n pass", "def test_inter_process_cache():\r\n\r\n x, y = theano.tensor.dvectors('xy')\r\n f = theano.function([x, y], [MyOp()(x), MyOp()(y)])\r\n f(numpy.arange(60), numpy.arange(60))\r\n if theano.config.mode == 'FAST_COMPILE' or theano.config.cxx == \"\":\r\n assert MyOp.nb_called == 0\r\n else:\r\n assert MyOp.nb_called == 1\r\n\r\n # What if we compile a new function with new variables?\r\n x, y = theano.tensor.dvectors('xy')\r\n f = theano.function([x, y], [MyOp()(x), MyOp()(y)])\r\n f(numpy.arange(60), numpy.arange(60))\r\n if theano.config.mode == 'FAST_COMPILE' or theano.config.cxx == \"\":\r\n assert MyOp.nb_called == 0\r\n else:\r\n assert MyOp.nb_called == 1", "def ParallelToserial(self):\n pass", "def _dummy(*args, **kwargs):\n pass", "def _get_executor_init(self, workers):\n def pool_fn(seqs):\n pool = get_pool_class(True)(\n workers, initializer=init_pool_generator,\n initargs=(seqs, self.random_seed, get_worker_id_queue()))\n _DATA_POOLS.add(pool)\n return pool\n return pool_fn", "def test_parallel_pickling():\r\n def g(x):\r\n return x ** 2\r\n nose.tools.assert_raises(PickleError,\r\n Parallel(),\r\n (delayed(g)(x) for x in range(10))\r\n )", "def get_task_mapper(parallel_procs=ALL_PROCESSORS):\n\n # Get the number of processes to use\n num_procs = get_num_processors(parallel_procs)\n\n # Set up the task mapper\n if num_procs:\n LOG.info('Attempting parallel processing with %d processes.', num_procs)\n if check_multiprocessing:\n import multiprocessing\n pool = multiprocessing.Pool(processes=num_procs)\n mapper = pool.map\n else:\n LOG.warning('Failed to initialize parallel processing.')\n LOG.warning('Falling back to serial mode.')\n mapper = map\n else:\n LOG.info('Using serial processing.')\n mapper = map\n\n return mapper", "def __init__(self, func, args_list, kwargs_dict, setup_line_list, check_too_fast, run_sec, name, perf_counter_reference_time):\n self.func = func\n self.orig_func_name = getattr(self.func, \"__name__\", self.func)\n self.args_list = args_list.copy()\n self.kwargs_dict = kwargs_dict.copy()\n self.setup_line_list = setup_line_list\n self.check_too_fast = check_too_fast\n self.run_sec = run_sec\n self.name = name\n self.perf_counter_reference_time = perf_counter_reference_time\n if callable(self.func):\n _ns = {}\n self.src 
= self.__get_final_inner_function()\n if self.run_sec is not None and self.run_sec != -1 and self.run_sec < 0.1:\n raise Err('_TimeIT.__init__()', 'run_sec: <{:.1f}> must be at least <0.1 second> or <-1 to run it once> or <None to print the `func code block`>'.format(self.run_sec))\n\n _code = compile(self.src, 'benchmarkit-src', \"exec\")\n exec(_code, globals(), _ns)\n self.inner = _ns[\"inner\"]\n else:\n raise ValueError('<func>: is not a `callable` type: <{}>'.format(self.func))", "def _do_mapping(self):\n pass", "def map_values_c(fun):\n return partial(map_values, fun)", "def custom_processing(self, funct: callable, data_tmp: np.ndarray, **kwargs) -> np.ndarray:\n tic = time.time()\n data_tmp = funct(data_tmp, **kwargs)\n self.process_time.append(time.time() - tic)\n return data_tmp", "def _ConstructDefaultProcessor(self):\n return generate_legacy_perf_dashboard_json.LegacyResultsProcessor()", "def __call__(self):\n if self.numbatches is None:\n pool = self.pooler()\n if self.batchsize is None:\n self.batchsize = self.pooler.nInPool()\n self.numbatches = self.pooler.nInPool()//self.batchsize\n for i in xrange(self.numbatches):\n pool = self.pooler()\n self._reset_batch()\n if self.samplemethod == 'balance' and len(self.keysamplers)>0:\n batchinds,keyids = self._samplebalanced(pool)\n elif self.samplemethod == 'uniform':\n batchinds,keyids = self._sampleuniform(pool)\n else:\n batchinds,keyids = self._samplesequential(i)\n batch = self._extractInds(pool,batchinds,keyids)\n for k in batch:\n batch[k][np.isnan(batch[k])] = self.nanreplacement\n yield batch", "def dummy_fn(self, *args, **kwargs):", "def profile_map_reduce(func, args, kwargs, func_result):\n (collection, map_fn, reduce_fn) = args[:3]\n\n report_kvs = _profile_query(collection)\n report_kvs['Map_Function'] = map_fn\n report_kvs['Reduce_Function'] = reduce_fn\n\n return report_kvs", "def _sequential_minimal_optimization(self):\n qpmin, qpmin_tmp = 99999999, 99999999\n while qpmin != qpmin_tmp:\n qpmin = qpmin_tmp\n qpmin_tmp = 123", "def preprocessing(pairs, nb=4):\n generated = Parallel(n_jobs=nb, verbose=5)(delayed(_load_brick)(*p) for p in pairs)\n return generated", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def dummy_func(*args, **kwargs):\r\n pass", "def preprocess_func(cls, func):\n pass", "def _shared_params(self):\n return BenchmarkBase._shared_params(self)._replace(\n model='trivial',\n num_gpus=8,\n distortions=False,\n variable_update='independent',\n data_dir=self.fake_data_dir)", "def _shared_params(self):\n return BenchmarkBase._shared_params(self)._replace(\n num_gpus=1,\n model='resnet50',\n num_warmup_batches=5,\n num_batches=50,\n distortions=False,\n forward_only=True,\n device='cpu',\n data_format='NHWC',\n num_intra_threads=0)", "def apply_only(self, function, worker, *args, **kwargs):\n pass", "def _precompute(self, A):\n return ()", "def _shared_params(self):\n return BenchmarkBase._shared_params(self)._replace(\n model='alexnet', batch_size=512, distortions=False)", "def benchmarkFunc(iter, args=()):\n def decorator(func):\n benchmarkFuncs.append((func, args, iter))\n return func\n return decorator", "def 
test_parallel_database_mapper_blat(self):\r\n\r\n params = {'refseqs_fp': self.refseqs1_fp,\r\n 'min_percent_id': 0.97,\r\n 'evalue': 1e-10,\r\n 'max_accepts': 1,\r\n 'max_rejects': 32,\r\n 'queryalnfract': 0.35,\r\n 'targetalnfract': 0.0,\r\n 'observation_metadata_fp': None\r\n }\r\n\r\n app = ParallelDatabaseMapperBlat()\r\n r = app(self.inseqs1_fp,\r\n self.test_out,\r\n params,\r\n job_prefix='PTEST',\r\n poll_directly=True,\r\n suppress_submit_jobs=False)\r\n observation_map_fp = glob(\r\n join(self.test_out, 'observation_map.txt'))[0]\r\n omap = parse_otu_map(open(observation_map_fp, 'U'))\r\n self.assertEqual(len(omap[0]), 3)\r\n self.assertItemsEqual(\r\n omap[1],\r\n ['eco:b0015',\r\n 'eco:b0122',\r\n 'eco:b0015:duplicate'])\r\n self.assertItemsEqual(omap[2], ['eco:b0015-pr', 'eco:b0122-pr'])", "def dummy_fn(self):\n\t\tpass", "def test_result_reduce_ddp():\n tutils.reset_seed()\n tutils.set_random_master_port()\n\n worldsize = 2\n mp.spawn(_ddp_test_fn, args=(worldsize,), nprocs=worldsize)", "def __init__(self, *args, wick_parallel=0, **kwargs):\n super().__init__(*args, **kwargs)\n self._wick_parallel = wick_parallel", "def map(self, mapper):\n def _map(iterator):\n return mapper(next(iterator))\n return self.__class__(self, _map)", "def measure_mp_speedup():\n modes = [\n # name, function\n ('dSMC', ana.d_smc),\n ('dAMC', ana.d_amc),\n ('EDF-VD', ana.d_edf_vd),\n ('pSMC', ana.p_smc),\n ('pAMC-BB', ana.p_amc_bb),\n ('pAMC-BB+', ft.partial(ana.p_amc_bb, ignore_hi_mode=True))\n ]\n times_seq = {}\n task_sets_list = pickle.load(open(task_sets_path + 'task_sets_fairgen', 'rb'))\n start_total_seq = time()\n for name, func in modes:\n start_mode_seq = time()\n rates = []\n for task_sets in task_sets_list:\n results = []\n for task_set in task_sets:\n results.append(func(task_set))\n rates.append(100 * np.average(results))\n stop_mode_seq = time()\n times_seq[name] = stop_mode_seq - start_mode_seq\n stop_total_seq = time()\n times_seq['Overall'] = stop_total_seq - start_total_seq\n\n times_par = {}\n start_total_par = time()\n pool = mp.Pool()\n for name, func in modes:\n start_mode_par = time()\n rates = []\n for task_sets in task_sets_list:\n rates.append(100 * np.average(pool.map(func, task_sets)))\n stop_mode_par = time()\n times_par[name] = stop_mode_par - start_mode_par\n stop_total_par = time()\n times_par['Overall'] = stop_total_par - start_total_par\n\n speedups = {}\n for name, _ in modes:\n speedups[name] = times_seq[name] / times_par[name]\n speedups['Overall'] = times_seq['Overall'] / times_par['Overall']\n\n print(\"PERFORMANCE MEASUREMENTS\")\n print(\"Number of cores: %d\" % mp.cpu_count())\n print(\"Scheme: Sequential time / Parallel time / Speedup\")\n for name, _ in modes:\n print(\"%s: %.3fs / %.3fs / %.3f\" % (name, times_seq[name], times_par[name], speedups[name]))\n print(\"Overall: %.3fs / %.3fs / %.3f\" % (times_seq['Overall'], times_par['Overall'], speedups['Overall']))" ]
[ "0.58914787", "0.58914787", "0.58914787", "0.58523786", "0.58267385", "0.5818383", "0.5795961", "0.5795961", "0.5604408", "0.5542775", "0.5542228", "0.54952943", "0.5452233", "0.5393733", "0.5384684", "0.5382945", "0.5378095", "0.5368914", "0.53356403", "0.52627987", "0.52194774", "0.521712", "0.52095616", "0.5207109", "0.5178269", "0.51757306", "0.51605755", "0.51558554", "0.5147155", "0.5138068", "0.5128684", "0.5128141", "0.51181626", "0.5106964", "0.5095863", "0.5085945", "0.5073469", "0.507111", "0.5061525", "0.5053407", "0.505303", "0.50372636", "0.5023689", "0.50227517", "0.502119", "0.5020518", "0.5013476", "0.50040865", "0.5003651", "0.49963206", "0.49939936", "0.49824837", "0.49822894", "0.4980795", "0.49799633", "0.49628013", "0.49595362", "0.49539798", "0.49356404", "0.4928633", "0.49283075", "0.49259174", "0.49257535", "0.49189115", "0.49189115", "0.49189115", "0.49178863", "0.49176154", "0.49038345", "0.490035", "0.4895003", "0.4887661", "0.48814005", "0.48764324", "0.48706633", "0.48616454", "0.48589334", "0.48543397", "0.4847153", "0.48467773", "0.48457953", "0.48358005", "0.4831652", "0.4831652", "0.4831652", "0.4831652", "0.4831652", "0.4831036", "0.4829977", "0.48269972", "0.48256132", "0.48179224", "0.4810127", "0.48063248", "0.4805966", "0.4804674", "0.48024392", "0.48020083", "0.47908297", "0.4790087", "0.4787841" ]
0.0
-1
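A minimal sketch of how a placeholder like identity is typically used to warm up a process pool before timing a parallel map. The pool size, workload, and timing scaffolding here are illustrative assumptions, not taken from the row above:

from multiprocessing import Pool
import time

def identity(x: int) -> int:
    return x

if __name__ == "__main__":
    with Pool(processes=4) as pool:
        pool.map(identity, range(4))        # warm-up pass: fork workers and import code before timing
        start = time.perf_counter()
        pool.map(identity, range(100_000))  # the parallel map actually being benchmarked
        print(f"elapsed: {time.perf_counter() - start:.3f} s")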
Average of a list of numbers
def average(l: List[float]) -> float:
    n = len(l)
    if n == 0:
        return 0
    return sum(l) / n
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def number_list_average(numbers):\n return sum(numbers)/len(numbers)", "def avg(list):\n return sum(list) / len(list)", "def avg(l):\n return (sum(l)/float(len(l)))", "def mean(num_list):\n i = 0\n num_sum = 0.0\n for item in num_list:\n num_sum += item\n i += 1\n return num_sum/i", "def average(l):\n return float(sum(l)/len(l))", "def calc_average(numbers):\n return sum(numbers) // len(numbers) # integer division //", "def average(lst):\n return sum(lst)/len(lst)", "def average(num_list):\n nums_average = None\n nums_sum = 0\n if len(num_list) != 0:\n for num in num_list:\n nums_sum = nums_sum + num\n nums_average = nums_sum / len(num_list) # average formula\n return nums_average", "def func(lst):\n tot = 0\n for i in lst:\n tot = tot + i\n avg = tot / len(lst)\n return avg", "def CalculateListAverage(values):\n if not values:\n return 0\n return sum(values) / float(len(values))", "def mean(numbers):\n return float(sum(numbers)) / float(len(numbers))", "def average(nums):\n\n return sum(nums) / float(len(nums))", "def avg(lst: list):\n return sum(lst) / len(lst)", "def mean(list_of_numbers):\n return 1. * sum(list_of_numbers) / len(list_of_numbers)", "def average(numbers):\n numbers_sum = 0\n numbers_count = 0\n for number in numbers:\n numbers_sum += number\n numbers_count += 1\n\n return numbers_sum / numbers_count", "def average(nums):\n avg = sum(nums) / len(nums)\n return avg", "def average(numlist):\n numlist_sum = 0 # initialize sum to zero\n # Iterate over NUMLIST and add each element to the sum\n for num in numlist:\n numlist_sum += num\n\n # Return NUMLIST_SUM divided by LEN(NUMLIST) to calculate average\n return numlist_sum / len(numlist)", "def get_mean(numlist):\n return np.mean(numlist)", "def average(self, num_list):\n try:\n total = 0\n accumulator = 0\n\n for number in num_list:\n try:\n total += number\n accumulator += 1\n except Exception as e:\n print (\"Error: \", e)\n\n average = total / accumulator\n except Exception as e:\n print(\"Error: \", e)\n\n return average", "def _avg(cls, l):\n\n return sum(l) / float(len(l))", "def avg(iterable: Iterable[Num]) -> float:\n\t\n\tsum_nums = 0\n\tnums = 0\n\t\n\tfor num in iterable:\n\t\tsum_nums += num\n\t\tnums += 1\n\t\n\treturn sum_nums / nums", "def avg(values):\n return sum(values) / float(len(values))", "def mean(xs):\n ave = 0\n for xs_split in xs:\n num = float(xs_split)\n print(xs_split)\n ave = ave+num\n average = ave/len(xs)\n return average", "def average(values):\n return sum(values) / len(values)", "def average(values):\n return sum(values) / len(values)", "def mean(numbers):\n return int(sum(numbers)) / max(len(numbers), 1)", "def avg():\n\n # call sum method to add up the values in the collection & div by the num of items\n # call len method to compute the # of vals in collection which is divided by sum total \n mean = sum(inlist) / len(inlist)\n return mean \n\n # alternate method would be calling the reduce method with lamda \n # return reduce(lambda a, b: a + b, inlist) / len(inlist)", "def average(values):\n\treturn sum(values)/len(values)", "def list_mean(lst):\n return list_sum(lst) / len(lst)", "def mean(array_list):\n count = 0.0\n for value in array_list:\n count += value\n return count/len(array_list)", "def price_average(lst):\n\n return sum(lst) / len(lst)", "def average(num):\n return sum(num) / len(num)", "def average(some_list):\n # This function will take a list and return average of value of element in list.\n result = 0 # Define result to contain sum of element in list.\n for i in 
some_list:\n result += i \n return result/len(some_list)", "def _mean(listvalue):\n\treturn sum(listvalue)/len(listvalue)", "def getMean(list):\n return sum(list) / len(list)", "def average(data):\r\n sum =0\r\n for i in data:\r\n sum+=i\r\n return sum/len(data)", "def mean(mean_numbers):\n return sum(mean_numbers) / float(len(mean_numbers))", "def calc_mean(data: list) -> float:\n if len(data) == 0:\n return 0.0\n acc = 0.0\n for n in data:\n acc += n\n return acc / len(data)", "def mean(items):\n\n return float(sum(items)) / len(items)", "def lmean (inlist):\r\n sum = 0\r\n for item in inlist:\r\n sum = sum + item\r\n return sum/float(len(inlist))", "def average(ls):\n\n if len(ls) == 0:\n return 0.0\n\n sm = sum(ls)\n return sm / len(ls)", "def _mean(items):\n return sum(items) / len(items)", "def mean_list(data):\n return sum(data) / len(data)", "def avg(arr):\n return sum(arr) / float(len(arr))", "def mean(my_list):\n the_mean = sum(my_list) / len(my_list)\n return the_mean", "def compute_average(vec_list):\r\n return np.sum(vec_list, axis = 0)/len(vec_list)", "def mean(array: list) -> float:\n\n arr_sum = 0\n\n for element in array:\n arr_sum = arr_sum + element\n\n return arr_sum/len(array)", "def generate_mean(list):\n sum = 0\n for i in list:\n sum += i\n return sum / len(list)", "def mean(list_of_values):\r\n # Explicit float(...) to allow for Python 2 division.\r\n return sum(list_of_values) / float(len(list_of_values))", "def mean(l):\n return sum(f) / max(1, len(f))", "def calculate_average(array):\n result = 0\n for item in array:\n result += float(item)\n final_result = result/len(array)\n return final_result", "def calcAverage(dat):\n return sum(dat)/len(dat)", "def mean_list(arr):\n if not isinstance(arr, list):\n return 'Use only list with numbers for this function'\n\n if not len(arr):\n return None\n\n m = reduce(lambda x,y: x + y, arr, 0) / len(arr)\n return m", "def mean(values):\r\n return sum(values) / float(len(values))", "def find_mean(values):\n return sum(values) / len(values)", "def mean(x):\n return sum(x)/len(x)", "def mean(items):\n return float(sum(items)) / len(items) if len(items) > 0 else 0", "def mean(v):\n return sum(v)/len(v)", "def mean(l):\n if l:\n return sum(l)/len(l)\n else:\n raise ValueError(\"list empty\")", "def mean(x):\n return sum(x) / len(x)", "def get_mean(iterable):\n return sum(iterable) / len(iterable)", "def mean(L):\n\treturn sum(L) / len(L)", "def mean(l):\n return sum(l) / len(l) if len(l) != 0 else 0.0", "def mean(arr) -> float:\n return sum(arr) / len(arr)", "def mean(vals):", "def get_average(data):\n average = sum(data) / len(data)\n\n return average", "def AVERAGE(nums):\n if type(nums) == list or type(nums) == np.ndarray:\n return(np.mean(nums))\n else:\n print('Invalid type: nums needs to be a list or numpy array.')", "def mean(numbers):\n\n total = 0 # we will add to it\n for i in numbers:\n\n # The += is shorthand notation for total = total + i\n total += i\n\n \"\"\"\n Get the number of elements in the sum, using the built in len()\n function. 
Check out all of pythons built in functions at:\n http://docs.python.org/2/library/functions.html\n \"\"\"\n N = len(numbers)\n \n # Do the mean\n mean = total / N\n\n return mean", "def mean(List):\n if len(List) != 0:\n return sum(List)/len(List)\n else:\n return 0.0", "def mean(data):\n n = len(data)\n return sum(data)/float(n)", "def get_mean(lst):\n if len(lst) == 0:\n return None\n else:\n return float(sum(lst)) / len(lst)", "def get_average(array):\n total = sum(array)\n count = len(array)\n average = total / count\n return average", "def averageOfList(num):\n sumOfNumbers = 0\n numberOfNumbers = 0\n for t in num: #num denotes the input - a list containing numbers and empty elements\n for x in range(1, 2): #additional for loop that is only executed if the respective input value is not empty\n if t == '':\n break\n else:\n sumOfNumbers = sumOfNumbers + t #sum up all the numbers in list\n numberOfNumbers = numberOfNumbers + 1 #count the numbers\n \n avg = float(sumOfNumbers / numberOfNumbers) #average value is calculated\n return avg", "def centered_average(some_list):\n # Tihs funstion will return average value of list but ignore the largest and smallest number.\n sorted(some_list)\n some_list.remove(some_list[0])\n some_list.remove(some_list[-1])\n result = 0\n for i in some_list:\n result += i \n return result/len(some_list)", "def find_mean(values):\n mean = sum(values) / len(values)\n return mean", "def main():\n print(average([2, 4, 6, 8, 20, 50, 70]))", "def mean_value( values ):\n return sum( values ) / len( values )", "def dishlist_avg(n:list)->float:\r\n all_prices = dishlist_prices(n)\r\n return sum(all_prices)/len(all_prices)", "def mean(values):\n total = sum(values)\n len_values = len(values)\n return total/len_values", "def geo_mean(num_list):\n np_array = np.array(num_list)\n return np_array.prod() ** (1.0 / len(np_array))", "def fmean(items):\n if len(items) == 0:\n return 0.\n\n return fsum(items) / float(len(items))", "def _average_list(self, row_list):\n\n if not row_list:\n return row_list\n else:\n N = len(row_list)\n d = len(row_list[0])\n avg = [0 for _ in range(d)]\n for i in range(d):\n avg[i] = (sum([row_list[j][i] for j in range(1, N)],\n start=row_list[0][i]) * 1.0) / N\n return avg", "def find_arithmetic_mean( *args):\n sum = 0\n for value in args:\n sum += value\n return sum/len(args)", "def dishlist_avg_cal(n:list)->float:\r\n all_cal = dishlist_cal(n)\r\n return sum(all_cal)/len(all_cal)", "def average(data):\n return np.average(data)", "def average(data, number=None):\n if number is None:\n return numpy.mean(data)\n return numpy.sum(data) / number", "def average(sequence):\n try:\n # first assume that numpy is installed for the fastest approach\n return sequence.mean()\n\n except AttributeError:\n # no numpy available, fall back to support regular list\n return sum(sequence) / len(sequence)", "def mean_stat(list):\n answer = 0 #Final value\n count = 0 #number of values\n\n ##Mean calculation\n for item in list:\n answer += item\n count +=1\n\n if(answer == 0):\n answer = \"List is Empty\"\n return answer\n else:\n answer = answer/count\n return answer", "def absoluteAverageOfList(num):\n sumOfNumbers = 0\n numberOfNumbers = 0\n for t in num:\n for x in range(1, 2): #additional for loop that is only executed if the respective input value is not empty\n if t == '':\n break\n else:\n sumOfNumbers = sumOfNumbers + abs(t) #use of absolute values instead of the 'real' values\n numberOfNumbers = numberOfNumbers + 1\n \n avg = float(sumOfNumbers / 
numberOfNumbers) \n return avg", "def calc_average():\r\n total = 0\r\n count = 0\r\n for i in records:\r\n total+=int(i[i.find(',')+1:])\r\n count+=1\r\n average = total/count\r\n return average", "def average_grade(lst):\r\n res = []\r\n for stdnt in lst:\r\n name, avg = stdnt[0], mean(conv_to_num(stdnt[1:]))\r\n res.append([name, avg])\r\n\r\n\r\n return(res)", "def mean(data):\n n = len(data)\n if n < 1:\n return 0\n return sum(data)/float(n)", "def mean(list_of_values):\n # so don't have to worry about getting the divisor.\n # Explicit float(...) to allow for Python 2 division.\n try:\n mean = sum(list_of_values) / float(len(list_of_values))\n return mean\n except:\n return False", "def mean(series):\n return fsum(series) / len(series)", "def avg(vector):\n if len(vector) == 0:\n return 0\n return sum(vector) / len(vector)", "def running_avg (mylist, N):\n import numpy as np\n \n cumsum = np.cumsum(np.insert(mylist, 0, 0))\n return (cumsum[N:] - cumsum[:-N]) / float(N)", "def get_avg_over_samples(sampleList, first=0, last=-1):\n assert isinstance(sampleList, list)\n assert len(sampleList) > 0\n assert isinstance(sampleList[0], numbers.Number)\n\n if (last == -1):\n last = len(sampleList)\n\n return sum(sampleList[first:last]) / (last - 1 - first)", "def running_mean(sequence: list):\n if not sequence:\n return []\n\n mean = []\n \"\"\"\n [1] = 1 / 1\n [1,2] = 3 / 2 \n [1,2,3] = 6 / 3\n \"\"\"\n for idx, num in enumerate(sequence):\n\n sum_total = sum(sequence[:(idx + 1)])\n result = sum_total / (idx + 1)\n\n mean.append(round(result, 2))\n\n return mean", "def computeMeans(list_of_lists):\n # Find length of longest list\n longest = 0\n for lst in list_of_lists:\n if len(lst) > longest:\n longest = len(lst)\n # Get totals\n tots = [0]*(longest)\n for lst in list_of_lists:\n for i in range(longest):\n if i < len(lst):\n tots[i] += lst[i]\n else:\n tots[i] += lst[-1]\n # Convert tots to an array to make averaging across each index easier\n tots = pylab.array(tots)\n # Compute means\n means = tots/float(len(list_of_lists))\n return means", "def computeMeans(list_of_lists):\n # Find length of longest list\n longest = 0\n for lst in list_of_lists:\n if len(lst) > longest:\n longest = len(lst)\n # Get totals\n tots = [0]*(longest)\n for lst in list_of_lists:\n for i in range(longest):\n if i < len(lst):\n tots[i] += lst[i]\n else:\n tots[i] += lst[-1]\n # Convert tots to an array to make averaging across each index easier\n tots = pylab.array(tots)\n # Compute means\n means = tots/float(len(list_of_lists))\n return means" ]
[ "0.88870585", "0.848456", "0.8434734", "0.8398049", "0.83776134", "0.8336529", "0.8316348", "0.83155566", "0.82814676", "0.8269709", "0.8267124", "0.8266494", "0.8249835", "0.82441944", "0.8243413", "0.8158804", "0.815238", "0.81390256", "0.80972695", "0.80799866", "0.8048972", "0.8039542", "0.8032305", "0.7997561", "0.7997561", "0.799329", "0.7940992", "0.79215646", "0.7895254", "0.7870623", "0.7860512", "0.78582853", "0.78486145", "0.78352636", "0.7809779", "0.7807029", "0.780701", "0.7803326", "0.780322", "0.77936506", "0.77872723", "0.7781405", "0.7780395", "0.77789223", "0.777834", "0.7775166", "0.77111655", "0.7706554", "0.76921517", "0.76653045", "0.7618387", "0.7605139", "0.75820863", "0.7574383", "0.7549764", "0.7502697", "0.74940944", "0.748964", "0.748883", "0.74645054", "0.7433976", "0.74252784", "0.7407558", "0.73963517", "0.73836327", "0.7377767", "0.7360119", "0.7340309", "0.7326542", "0.7312785", "0.73087156", "0.7307768", "0.7300593", "0.72974837", "0.72973484", "0.7286436", "0.7280514", "0.72737074", "0.7260473", "0.7256841", "0.7236745", "0.7210709", "0.72024626", "0.7199866", "0.718974", "0.718887", "0.71825814", "0.7176331", "0.71628433", "0.71206427", "0.71131444", "0.7104283", "0.70933145", "0.70848066", "0.7053034", "0.7032037", "0.7026784", "0.70041007", "0.69999045", "0.69999045" ]
0.8089239
19
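A quick worked check of the average document above, assuming the definition from that row is in scope; the values are chosen arbitrarily for illustration:

values = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]
print(average(values))  # 5.0, since the eight values sum to 40.0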
Standard deviation of a list of numbers
def std(l: List[float]) -> float:
    n = len(l)
    if n == 0:
        return 0
    avg = average(l)
    return sqrt(sum([(avg - i) * (avg - i) for i in l]) / n)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))", "def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))", "def standard_deviation(list):\n num_items = len(list)\n mean = sum(list) / num_items\n differences = [x - mean for x in list]\n sq_differences = [d ** 2 for d in differences]\n ssd = sum(sq_differences)\n\n\n variance = ssd / num_items\n\n sd = sqrt(variance)\n\n return sd", "def StandardDeviation(numlist):\n\tv = Variance(numlist)\n\t#print v\n\treturn math.sqrt(v)", "def deviation(xs):\n\ta = avg(xs)\n\treturn sqrt(sum([(x - a) ** 2 for x in xs]) / (len(xs) - 1))", "def stddev(self, num_list):\n try:\n mean = self.average(num_list)\n\n minus_mean = []\n\n for number in num_list:\n try:\n minus_mean.append((number - mean) ** 2)\n except Exception as e:\n print(\"Error: \", e)\n\n meany_mean = self.average(minus_mean)\n\n meany_mean = meany_mean ** .5\n\n except Exception as e:\n print(\"Error: \", e)\n\n return meany_mean", "def standard_deviation(lst):\n\tnum_items = len(lst)\n\tif num_items == 0:\n\t\treturn -1\n\tmean = sum(lst) / num_items\n\tdifferences = [x - mean for x in lst]\n\tsq_differences = [d ** 2 for d in differences]\n\tssd = sum(sq_differences)\n\treturn ssd", "def stdev(items):\n return Series.std(Series(items))", "def calc_standard_deviation(data: list) -> float:\n mean = calc_mean(data)\n acc = 0.0\n for n in data:\n acc += (n - mean) ** 2\n acc /= len(data) - 1\n return math.sqrt(acc)", "def std_dev(l):\n return variance(l)**.5", "def sampleStandardDeviation(numlist):\n\tv = sampleVariance(numlist)\n\t#print v\n\treturn math.sqrt(v)", "def std_dev(list_num):\n\n # Calculate the mean of the numbers\n mean = sum(list_num)/len(list_num)\n\n # Initialise a variable to hold the sum of the squared distance to the mean\n sum_sqrd_dist = 0\n \n # Iterate over the numbers\n for num in list_num:\n # Subtract the mean from the number and square the result\n sqrd_dist = (num - mean)**2\n # Add the number to the sum of the squared distances \n sum_sqrd_dist = sum_sqrd_dist + sqrd_dist\n\n # return the square root of the sum of squared distances divided by the length of the list\n return (sum_sqrd_dist/len(list_num))**(1/2)", "def stddev(std_numbers):\n mean = sum(std_numbers) / float(len(std_numbers))\n sum_std = 0.0\n\n for x in std_numbers:\n sum_std += (mean - x) * (mean - x)\n\n variance = sum_std / float(len(std_numbers))\n stddev = math.sqrt(variance)\n\n return stddev", "def stdDev(data):\r\n sum = 0\r\n ave = average(data)\r\n for i in data:\r\n sum += (i-ave)**2\r\n return math.sqrt(sum/len(data))", "def _std(listvalue,ddof=1):\n\tmean=_mean(listvalue)\n\ttemp=[math.pow(i-mean,2) for i in listvalue]\n\tres=math.sqrt(sum(temp)/(len(listvalue)-ddof))\n\treturn res", "def GetStandardDeviation(vals_l, mean):\n\n\n sum_deviations_squared = 0\n\n for x in vals_l:\n sum_deviations_squared += (x - mean)**2\n\n return math.sqrt(float(sum_deviations_squared)/float(len(vals_l)))", "def zstddev(list) -> float:\n\n var = zvariance.zvariance(list)\n std_dev = math.sqrt(var)\n return std_dev", "def stddev(r):\n avg = average(r)\n sdsq = sum([(i - avg) ** 2 for i in r])\n return (sdsq / (len(r) - 1 or 1)) ** 0.5", "def standard_deviation( values, sample=False ):\n return ma.sqrt( variance( values, sample ) )", "def get_std_dev(self, data):\n mean = 0\n data_arr = []\n for i in data:\n data_arr.append(i[1])\n return statistics.stdev(data_arr)", "def std_dev(L, is_sample=0):\n\treturn math.sqrt(variance(L, 
is_sample))", "def stdev(values):\n mean = avg(values)\n diffs = [(value - mean) ** 2 for value in values]\n return avg(diffs) ** 0.5", "def sd(vals):", "def standardize( num_list):\n\n standard_dev = np.std(num_list)\n mean = np.mean(num_list)\n\n print(standard_dev)\n print(mean)\n\n result = list()\n\n for xx in num_list:\n result.append( (xx-mean)/standard_dev )\n\n return result", "def std(mean, vals):\n return sqrt(sum([(i-mean)**2 for i in vals])/len(vals))", "def sd(x):\n x_mean = mean(x)\n return (\n sum((x_i - x_mean) ** 2 for x_i in x) / (len(x) - 1)\n ) ** 0.5", "def lsamplestdev (inlist):\r\n return math.sqrt(samplevar(inlist))", "def pooled_standard_deviation(input_variances):\r\n # compute and return pooled standard deviation\r\n return sqrt(mean(square([float(i) for i in input_variances])))", "def lstdev (inlist):\r\n return math.sqrt(var(inlist))", "def std_deviation(array):\n if not array or len(array) == 1:\n return 0\n\n average = AGGREGATES['mean_arithmetic'](array)\n variance = map(lambda x: (x-average)**2,array)\n stdev = AGGREGATES['mean_arithmetic'](variance)\n return math.sqrt(stdev)", "def std(values, ave):\n return math.sqrt(float(sum((value-ave)**2 for value in values))/len(values))", "def get_std_dev(data, n = -1):\n mean = get_mean(data, n =n)\n\n deviations = []\n\n for i in range(0,n):\n deviations.append( (data[i] - mean)**2 )\n\n std_dev = sqrt( sum(deviations)/n )\n\n return std_dev", "def std (nums,n_mean=None):\r\n if not n_mean:\r\n n_mean = mean(nums)\r\n n = len(nums)\r\n if n == 1:\r\n return 0.0\r\n variance = 0.0\r\n for i in xrange(n):\r\n tmp = (nums[i]-n_mean)\r\n variance += (tmp*tmp)\r\n \r\n variance /= n-1\r\n return sqrt(variance)", "def stdev(headers, data):\n\tcolumn_matrix=data.get_data(headers)\n\tmean_values=column_matrix.std(0)\n\tstd_values=mean_values.tolist()\n\treturn std_values", "def std(self):\n stds = [(x.m-self.mean)**2 + x.std**2 for x in self.xs]\n return np.sqrt(np.dot(self.a, np.array(stds)))", "def standard_deviation(data):\n\n return np.sqrt(variance(data))", "def standard_deviation(data):\n\n return np.sqrt(variance(data))", "def standard_deviation(scores):\n num_scores = len(scores)\n if num_scores == 0: return 0\n\n mean_score = mean(scores, False)\n sum_x2 = sum(score**2 for score in scores)\n std_dev_score = (sum_x2/num_scores - mean_score ** 2) ** 0.5\n return round(std_dev_score, 2)", "def stdev_from_mean(x):\r\n x = array(x)\r\n return (x - mean(x)) / std(x)", "def test_stddev(self):\n self.assertEqual(stddev(list1, sample=False), np.std(list1))\n self.assertEqual(stddev(list1), np.std(list1, ddof=1))", "def std(self, dset):\n avg = self.mean(dset)\n variance = sum([math.pow(x - avg, 2) for x in dset])\n std = math.sqrt(variance)\n return std", "def calc_std_deviation(average):\r\n sqr_sum = 0\r\n count = len(records)\r\n for i in records:\r\n value = int(i[i.find(',')+1:])\r\n sqr_sum+=(value-average)**2 \r\n std_deviation = math.sqrt(sqr_sum/count)\r\n return std_deviation", "def stat(lst):\n n = float(len(lst))\n mean = sum(lst) / n\n stdev = sqrt((sum(x * x for x in lst) / n) - (mean * mean))\n return mean, stdev", "def deviationAvg(xs):\n\treturn deviation(xs) / sqrt(len(xs))", "def stddev(data, ddof=0):\n n = len(data)\n if n < 2:\n return 0\n ss = _ss(data)\n pvar = ss/(n-ddof)\n return pvar**0.5", "def get_stddev(self):\r\n for i in range(1,len(self.data[0])):\r\n self.stddev.append(np.std(self.data[:,i]))", "def calculate_std_dev(X):\n\tstd_dev = np.sqrt(calculate_variance(X))\n\treturn std_dev", "def 
get_stdev(cls, data: tuple or list, is_population=False) -> float:\n cls._data_validation(data)\n from math import sqrt\n return sqrt(cls.get_var(data, is_population))", "def _std(self, data):\n var = stats.var(data)\n if var>0.0:\n sd = math.sqrt(var)\n else:\n sd = 0.0\n return sd", "def test_stdev_from_mean(self):\r\n x = [2.1, 4.2, 5.9, 8.4, 9.6]\r\n result = stdev_from_mean(x)\r\n self.assertFloatEqual(\r\n result,\r\n [-1.292463399014413,\r\n -0.60358696806764478,\r\n -0.045925095396451399,\r\n 0.77416589382589174,\r\n 1.1678095686526162])", "def calc_stdev(a, b, c, d, e):\n mean_of_num = (a + b + c + d + e) / 5\n return (((a - mean_of_num)**2 + (b - mean_of_num)**2 + (c - mean_of_num)**2\n + (d - mean_of_num)**2 + (e - mean_of_num)**2) / 5) ** 0.5", "def MeanAndStandardDeviation(data):\n n = len(data)\n if n == 0:\n return 0.0, 0.0\n mean = float(sum(data)) / n\n variance = sum([(element - mean)**2 for element in data]) / n\n return mean, math.sqrt(variance)", "def calculate_std_dev(temps, temp_average):\n\n variance_sum = 0\n for temp in temps:\n variance = (temp - temp_average) ** 2\n variance_sum += variance\n\n variance = variance_sum / len(temps)\n standard_deviation = variance ** 0.5\n\n return standard_deviation", "def variance(xs: List[float]) -> float:\n assert len(xs) >= 2, \"variance requires at least two elements\"\n\n n = len(xs)\n deviations = de_mean(xs)\n return sum_of_squares(deviations) / (n - 1)", "def variance(xs: List[float]) -> float:\n assert len(xs) >= 2, \"variance requires at least two elements\"\n\n n = len(xs)\n deviations = de_mean(xs)\n return sum_of_squares(deviations) / (n - 1)", "def lsterr(inlist):\r\n return stdev(inlist) / float(math.sqrt(len(inlist)))", "def std_dev(self) -> float:\n return math.sqrt(self.variance())", "def calc_std(sig):\n return np.std(sig)", "def stddev(x: pd.Series, d: int or float) -> pd.Series:\n if isinstance(d, float):\n d = math.floor(d)\n\n if isinstance(x.index, pd.MultiIndex):\n return x.groupby(level=1).rolling(d).std()\n else:\n return x.rolling(d).std()", "def sampleStandardDeviation(num):\n numberList = []\n numberOfNumbers = 0\n for t in num:\n for x in range(1, 2): #additional for loop that is only executed if the respective input value is not empty\n if t == '':\n break\n else:\n numberList.append(t)\n numberOfNumbers = numberOfNumbers + 1\n\n if numberOfNumbers > 1:\n sd = statistics.stdev(numberList)\n else:\n sd = \"n must be >= 2\" #if number of elements is less than two, show this message (as sample st.dev. cannot be calculated)\n return sd", "def empirical_std_deviation(x):\n import numpy as np\n x = np.array(x)\n M = np.size(x)\n xm = np.mean(x)\n\n #return np.sqrt(1./(M-1.)*np.sum((x-xm)**2))\n return np.sqrt( M/(M-1.) 
* ( (1./M*np.sum(x**2)) - xm**2 ) )", "def stddev(data, ddof=0):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n pvar = ss/(n-ddof)\n return pvar**0.5", "def standard_dev(self):\n return self.variance()**0.5", "def std(self):\n\n return self._reduce_for_stat_function(F.stddev, only_numeric=True)", "def deviation(values, val):\n\tm = mean(values)\n\tdev = abs(val-m)\n\tsd = standard_deviation(values)\n\treturn float(dev)/sd if sd!=0 else 0.0", "def calculate_std_deviation(value_arr):\n avg_balanced_acc = 0\n for value in value_arr:\n avg_balanced_acc += value\n avg_balanced_acc /= len(value_arr)\n\n balanced_accuracy_std_deviation = 0\n for value in value_arr:\n balanced_accuracy_std_deviation += pow(abs(value - avg_balanced_acc), 2)\n balanced_accuracy_std_deviation /= len(value_arr)\n return math.sqrt(balanced_accuracy_std_deviation)", "def standard_deviation(self):\r\n\t\treturn self.variance()**(1/2)", "def std(dfs):\n df_mean = mean(dfs)\n df_sq = sum([(df - df_mean)*(df - df_mean) for df in dfs])\n return df_sq / len(dfs)", "def std(self):\r\n return np.std(self.data_array)", "def pstdev(data):\n n = len(data)\n c = mean(data)\n ss = sum((x-c)**2 for x in data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n pvar = ss/n # the population variance\n return round(pvar**0.5, 1)", "def avg_std_dev(positions):\n # print(\"len pos = \", len(positions))\n # print(positions)\n if sum(positions) == 0:\n the_mean = 0\n standard_dev = 0\n return the_mean, standard_dev \n try:\n the_mean = sum(positions) / float(len(positions))\n standard_dev = numpy.std(positions)\n except ValueError:\n the_mean = 0\n standard_dev = 0\n return the_mean, standard_dev", "def std_deviation_of_mean_value(x):\n\n import numpy as np\n x = np.array(x)\n M = np.size(x)\n\n if M == 1: return 0\n\n return empirical_std_deviation(x)/np.sqrt(M)", "def std( x, weights ):\n weights[weights!=weights] = 0\n std2 = np.average( x**2, weights = weights ) - np.average( x, weights = weights )**2\n return np.sqrt( np.abs(std2) )", "def stdev(data, xbar=None):\n return math.sqrt(variance(data, xbar))", "def standarddeviation_of_distances(self, distances, mean=None):\n if len(distances) == 1:\n mean = 3.50\n self.stddev = 0.2\n else:\n sum = 0\n for dis in distances:\n sum = sum + dis\n # finding mean\n mean = sum / len(distances)\n if mean > 4.00:\n mean = 3.50\n self.distsquares = 0\n self.midval = 0\n self.midval1 = 0\n # Now, subtract the mean individually from each of the numbers and square it\n for dist in distances:\n self.distsquares = 0\n self.distsquares = math.pow((dist - mean), 2)\n self.midval = self.midval + self.distsquares\n if len(distances) > 1:\n self.midval1 = self.midval / (len(distances) - 1)\n if len(distances) == 1:\n mean = distances[0]\n self.stddev = 0\n self.stddev = math.sqrt(self.midval1)\n\n return self.stddev, mean", "def stddev(self, sample=True):\n distance_squared = list(map(lambda x: (x - sum(self.data)/self.size)**2, self.data))\n\n if sample == True:\n variance = sum(distance_squared)/(self.size - 1)\n stddev = variance**(1/2)\n if sample == False:\n variance = sum(distance_squared)/(self.size)\n stddev = variance**(1/2)\n return stddev", "def _get_standard_deviation(intermediate_normalization_dict):\n\n num_values = float(intermediate_normalization_dict[NUM_VALUES_KEY])\n multiplier = num_values / (num_values - 1)\n\n return numpy.sqrt(multiplier * (\n intermediate_normalization_dict[MEAN_OF_SQUARES_KEY] 
-\n intermediate_normalization_dict[MEAN_VALUE_KEY] ** 2\n ))", "def atstdev(a,limits=None,inclusive=(1,1)):\r\n return N.sqrt(tvar(a,limits,inclusive))", "def stdev(requestContext, seriesList, points, windowTolerance=0.1):\n\n # For this we take the standard deviation in terms of the moving average\n # and the moving average of series squares.\n for (seriesIndex,series) in enumerate(seriesList):\n stddevSeries = TimeSeries(\"stddev(%s,%d)\" % (series.name, int(points)), series.start, series.end, series.step, [])\n stddevSeries.pathExpression = \"stddev(%s,%d)\" % (series.name, int(points))\n\n validPoints = 0\n currentSum = 0\n currentSumOfSquares = 0\n for (index, newValue) in enumerate(series):\n # Mark whether we've reached our window size - dont drop points out otherwise\n if index < points:\n bootstrapping = True\n droppedValue = None\n else:\n bootstrapping = False\n droppedValue = series[index - points]\n\n # Track non-None points in window\n if not bootstrapping and droppedValue is not None:\n validPoints -= 1\n if newValue is not None:\n validPoints += 1\n\n # Remove the value that just dropped out of the window\n if not bootstrapping and droppedValue is not None:\n currentSum -= droppedValue\n currentSumOfSquares -= droppedValue**2\n\n # Add in the value that just popped in the window\n if newValue is not None:\n currentSum += newValue\n currentSumOfSquares += newValue**2\n\n if validPoints > 0 and \\\n float(validPoints)/points >= windowTolerance:\n\n try:\n deviation = math.sqrt(validPoints * currentSumOfSquares - currentSum**2)/validPoints\n except ValueError:\n deviation = None\n stddevSeries.append(deviation)\n else:\n stddevSeries.append(None)\n\n seriesList[seriesIndex] = stddevSeries\n\n return seriesList", "def std(self) -> \"Stream[float]\":\n return self.agg(lambda x: np.std(x, ddof=1)).astype(\"float\")", "def calculate_std(self) -> float:\n\n if self.data:\n return np.std(self.data)\n else:\n return self.sigma", "def std(x):\n return sqrt(TinyStatistician.var(x))", "def _avg_sd_from_list(lst):\n arr = flex.double(lst)\n avg = round(flex.mean(arr), 5)\n std = round(arr.standard_deviation_of_the_sample(), 5)\n return avg, std", "def standard_deviation(self):\n clean, total = self._prepare_for_stats()\n if not total:\n return None\n\n return math.sqrt(clean.variance())", "def normalize_standard_deviation(dataset):\n return dataset*(1/np.std(dataset))", "def weighted_std(values, weights):\n average = np.average(values, weights=weights)\n variance = np.average((values-average)**2, weights=weights) # Fast and numerically precise\n return np.sqrt(variance)", "def weighted_std(values, weights_in):\r\n average = numpy.average(values, weights=weights_in)\r\n # Fast and numerically precise:\r\n values2 = (values-average)**2\r\n variance = numpy.average(values2, weights=weights_in)\r\n return math.sqrt(variance)", "def weighted_std(values, weights):\n average = np.average(values, weights=weights)\n variance = np.average((values - average) ** 2, weights=weights) # Fast and numerically precise\n return math.sqrt(variance)", "def dev_mean(x):\r\n x_bar = mean(x)\r\n return [x_i - x_bar for x_i in x]", "def std(self):\n return self.data.std(axis=-1, keepdims=True)", "def pstdev(data):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n pvar = ss/n # the population variance\n return pvar**0.5", "def pstdev(data):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n 
pvar = ss/n # the population variance\n return pvar**0.5", "def pstdev(data):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n pvar = ss/n # the population variance\n return pvar**0.5", "def pstdev(data):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n pvar = ss/n # the population variance\n return pvar**0.5", "def meanstd(self):\n\t\tmean = [125.3, 123.0, 113.9] # R,G,B\n\t\tstddev = [63.0, 62.1, 66.7] # R,G,B\n\t\treturn [mean, stddev]", "def get_sterr(cls, data: tuple or list, is_population=False) -> float:\n cls._data_validation(data)\n from math import sqrt\n return cls.get_stdev(data, is_population) / sqrt(cls.get_n(data))", "def F_std(d, N):\n # memoize specht() and weyl() results (but only for current call)\n specht_mem, weyl_mem = memoize(specht), memoize(weyl)\n\n return sum(\n d ** (-N - 2)\n * sum(sqrt(specht_mem(mu) * weyl_mem(d, mu)) for mu in box_added(alpha, d)) ** 2\n for alpha in Partitions(n=N - 1, max_length=d)\n )", "def std(x, axis=None):\r\n try:\r\n sample_variance = var(x, axis=axis)\r\n except IndexError as e: # just to avoid breaking the old test code\r\n raise IndexOrValueError(e)\r\n return sqrt(sample_variance)", "def std_params(params):\n std_params = []\n for i in range(len(params[0])):\n std_params.append(np.std(np.array(params)[:, i]))\n return std_params", "def std(self) -> float:\n return self._data.std()" ]
[ "0.85239553", "0.85239553", "0.82795185", "0.8250837", "0.811858", "0.8115698", "0.81116855", "0.80819494", "0.80761546", "0.8022738", "0.79791003", "0.7964581", "0.7951416", "0.7936604", "0.79159623", "0.7855638", "0.7839232", "0.7821115", "0.7801466", "0.7798936", "0.77946174", "0.776713", "0.7687553", "0.76753116", "0.76526123", "0.75997555", "0.75525045", "0.7540391", "0.75388014", "0.75349355", "0.75319797", "0.75209016", "0.74994653", "0.7409773", "0.7397619", "0.7392503", "0.7392503", "0.7358977", "0.7334169", "0.7333875", "0.73203534", "0.73100346", "0.73048025", "0.7277804", "0.7239219", "0.7225381", "0.71595806", "0.7157571", "0.7155688", "0.71534485", "0.71479785", "0.71163154", "0.71138436", "0.7102075", "0.7102075", "0.7077567", "0.7040635", "0.7034908", "0.703048", "0.7026017", "0.7024782", "0.7016066", "0.70119756", "0.69963515", "0.6988673", "0.6978718", "0.696168", "0.6931392", "0.68663937", "0.68654937", "0.68470544", "0.68437636", "0.68411183", "0.6837199", "0.6818392", "0.68103", "0.6807749", "0.6788163", "0.6722685", "0.6702308", "0.66761094", "0.66708916", "0.6669045", "0.6645692", "0.6607483", "0.657805", "0.6564529", "0.65633446", "0.655861", "0.65542394", "0.65396863", "0.65396863", "0.65396863", "0.65396863", "0.6534559", "0.65331775", "0.6528028", "0.65244865", "0.6519332", "0.65098536" ]
0.7768153
21
Test of the function that decides whether log rotation is needed
def test_need_to_rotate_log(self): self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time') self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time') self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size') self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def log(a):", "def log2(a):", "def test_log_con():\n c=14\n assert {'diff':EF.log(c).der, 'value': EF.log(c).val}=={'diff':0, 'value': math.log(c)}", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_log():\n c=14\n def myfunc(x):\n f1=EF.log(x)\n return f1\n\n f_obj=ADiff(myfunc)\n res=f_obj.Jac(c)\n\n expectAns={'diff': 1/c, 'value': math.log(c)}\n\n assert res==expectAns", "def log_Schechter_log(self, logl, alpha, logls, logl0):\n phi = (logl - logls) * (alpha+1) * np.log(10.) - np.power(10., logl-logls)\n lik = phi.copy()\n lik [logl < logl0] = -1e99\n return lik", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def _logcheck(self, t, y):\n\t\t#print y\n\t\tif (t>0)&(y[0]>-2*self.phi0): self.r, self._y = numpy.r_[self.r, t], numpy.c_[self._y, y]\n\n\t\treturn 0", "def log_cust(x):\n if type(x) != str:\n if x < 0:\n return 0\n elif x == 0:\n return 0\n elif x > 0:\n return np.log(x)", "def log_inplace(a):", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_logistic():\n r=np.random.normal(size=20)\n assert np.isclose( ilogistic(logistic(r)),r ).all()", "def add_log_if_improves_skew(feature, df) :\r\n featureData = df[feature] \r\n logged = np.log(featureData)\r\n if abs(logged.skew()) >= abs(featureData.skew()) :\r\n return False\r\n df[feature+\"_log\"] = logged\r\n return True", "def test_loglike(dlm,Cl,noise,beam):\n lmax = Cl.shape[0]\n tt_exp = -1./2 * np.real(np.vdot(dlm.T,hp.almxfl(dlm,1/(beam[:lmax]**2*Cl[:,1]+noise[:lmax]))))\n #plt.plot(Cl[:,1])\n tt_det = - 1./2 
*(np.arange(1,lmax+1)*np.log((noise[:lmax]+Cl[:,1]*beam[:lmax]**2))).sum() \n tt_f = tt_exp + tt_det\n return tt_exp,tt_det,tt_f#,Cl[:,1]", "def _loglike(self, y, f):\n raise NotImplementedError('Abstract base class only.')", "def test_function_log2(self):\r\n self.assertEquals(preview.latex_preview('log2(3)'), r'\\log_2(3)')", "def test_get_log(self):\n result = log_lib.get_log(True)\n self.assertTrue(callable(result))\n result(\"dummy-message\")\n\n result = log_lib.get_log(False)\n self.assertTrue(callable(result))\n result(\"dummy-message\")", "def ilog(x,delta):\n if(delta < x and x < 1.0 - delta):\n return np.log( -np.log(x) )\n elif(x < delta):\n return np.log( -np.log(delta) )\n else: \n return np.log( -np.log(1.0 - delta) )", "def test_transform(self):\n t = Linearize()\n assert t.transform(numpy.e) == numpy.log(numpy.e)\n t.transform(0)", "def ga_log(R):\n phiP, t_normal_n, t_perpendicular_n = extractRotorComponents(R)\n return phiP + t_normal_n + t_perpendicular_n", "def check( log = False):\n return True", "def HasRotated(logfile, hash):\n timestamp = utcnow()\n cursor.execute('''SELECT hash, date FROM rotate\n WHERE logfile = \"%s\"''' % (logfile,))\n result = cursor.fetchone()\n # If the database doesn't have an entry for our logfile then we need to\n # create one for it using the passed logfile hash and the current\n # timestamp.\n if not result:\n print \"New logfile, adding hash and date.\"\n cursor.execute('''INSERT INTO rotate (logfile, hash, date)\n VALUES (\"%s\", \"%s\", \"%s\")''' % (logfile, hash, timestamp))\n con.commit()\n return timestamp\n if result[0] == hash:\n # The current logfile hash matches the recorded one at last rotation,\n # we just return the old timestamp.\n return result[1]\n # If we get here, the logfile hash is different, indicating that rotation\n # has occured. 
We therefore set and return a new timestamp.\n print logfile, \"has rotated\"\n cursor.execute('''UPDATE rotate SET hash = \"%s\", date = \"%s\"\n WHERE logfile = \"%s\"''' % (hash, timestamp, logfile))\n con.commit()\n return timestamp", "def log2(x):\n raise NotImplementedError", "def log1p(x):\r\n # see decorator for function body\r", "def log2_inplace(a):", "def log_prob(self):", "def test_transform(self):\n\n # Known constants tests\n for i, direction in enumerate(OCIO.ColorSpaceDirection.__members__.values()):\n self.colorspace.setTransform(self.log_tr, direction)\n log_transform = self.colorspace.getTransform(direction)\n self.assertIsInstance(log_transform, OCIO.LogTransform)\n self.assertEquals(self.log_tr.getBase(), log_transform.getBase())", "def test_log():\n x, y = fwd.Variable(), fwd.Variable()\n f = fwd.log(fwd.sin(x)+y**2)\n dfdx = lambda x, y: np.cos(x) / (np.sin(x)+y**2)\n dfdy = lambda x, y: 2*y / (np.sin(x)+y**2)\n d2fdxdy = lambda x, y: -2*y*np.cos(x) / (np.sin(x)+y**2)**2\n assert equals(f.evaluation_at({x: 1.5, y:2.5}), np.log(np.sin(1.5)+2.5**2))\n assert equals(f.derivative_at(x, {x: 1.5, y:2.5}), dfdx(1.5, 2.5))\n assert equals(f.derivative_at(y, {x: 1.5, y:2.5}), dfdy(1.5, 2.5))\n assert equals(f.derivative_at((x, y), {x: 1.5, y:2.5}), d2fdxdy(1.5, 2.5))\n with pytest.raises(NotImplementedError):\n f.derivative_at(x, {x:1.0, y: 2.0}, order=3)", "def m(loglvl):\n global LOG_LEVEL\n return (loglvl & LOG_LEVEL) != 0x0", "def setLogFunction(function):\n None", "def log10(a):", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def safelog(x):\n #return np.log(x)\n return np.log(np.clip(x,floor,np.inf))", "def logistic(scale, shift, stretch, t):\r\n return scale / (1 + np.power(np.e, -1.0*(t - shift )/ stretch))", "def test_function_log10(self):\r\n self.assertEquals(preview.latex_preview('log10(3)'), r'\\log_{10}(3)')", "def log(x):\n raise", "def _get_logrotated_log(self):\n file_lst = glob.glob(self.rotation_pattern)\n file_lst.remove(self.log_filename)\n\n if len(file_lst) == 0:\n return None\n\n stat_lst = [(os.stat(x).st_mtime, x) for x in file_lst]\n sorted_stat_lst = sorted(stat_lst, key=lambda x: x[1])\n sorted_stat_lst.reverse()\n\n r_tuple = reduce(lambda a,b: a if (a[0] > b[0]) else b, sorted_stat_lst)\n return r_tuple[1]", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def check(log=False):\n return True", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_logarithmic_utility(self):\n self.preferences.init(\n 
utility_params_by_good_id=self.utility_params,\n exchange_params_by_currency_id=self.exchange_params,\n tx_fee=self.tx_fee,\n )\n log_utility = self.preferences.logarithmic_utility(\n quantities_by_good_id=self.good_holdings\n )\n assert log_utility is not None, \"Log_utility must not be none.\"", "def _loglike(self, y, f):\n ll = -0.5 * (tf.log(2 * self.variance * np.pi) +\n (y - f)**2 / self.variance)\n return ll", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def log_operator(SE3): \n #print('SE3 log: ', SE3)\n R = SE3[:3,:3]\n t = SE3[:3,3]\n theta = arccos(0.5*(trace(R)-1)) # radians\n lnR = 0.5*(theta/sin(theta))*(R-R.T)\n omega = vee(lnR) # vee operator\n omega_skew_sym = lnR#skew_symmetric(omega.reshape(-1,))\n \n if theta <= 1e-10:\n V = eye(3)\n else:\n V = eye(3) + \\\n (theta**-2)*(1-cos(theta))*omega_skew_sym + \\\n (theta**-3)*(theta-sin(theta))*(omega_skew_sym @ omega_skew_sym)\n neu = inv(V) @ t\n\n # if theta <= 1e-10:\n # Vinv = eye(3)\n # else:\n # theta_half = 0.5*theta \n # Vinv = eye(3) - 0.5*omega_skew_sym + \\\n # (theta**-2)*(1- (theta_half*cos(theta_half)/sin(theta_half)))*(omega_skew_sym @ omega_skew_sym)\n # neu = Vinv @ t\n\n return np.hstack((neu, omega)).reshape(-1,1)", "def log_check(w_in: np.ndarray, w_log: np.ndarray) -> None:\n w_log[:] = np.nan\n\n if np.isnan(w_in).any():\n return\n\n if np.any(w_in <= 0):\n return\n\n w_log[:] = np.log(w_in[:])", "def __convert_to_log(self):\n for i in range(self.nStates):\n if self.pi[i]>0:\n self.pi[i]=log(self.pi[i])\n else:\n self.pi[i]=float('-inf')\n for j in range(self.nStates):\n if self.t[i][j]>0:\n self.t[i][j]=log(self.t[i][j])\n else:\n self.t[i][j]=float('-inf')\n for j in range(self.nObs):\n if self.e[i][j]>0:\n self.e[i][j]=log(self.e[i][j])\n else:\n self.e[i][j]=float('-inf')\n self.logdomain=True", "def logp(self, x):\n pass", "def LogPrior(cube):\n\tif (cube<0)+(cube>265):\n\t\treturn -np.inf\n\telse:\n\t\treturn np.log10(1./265)", "def _log_util(chips: float,\n bet_size: float,\n payout: float) -> float:\n if chips <= 0 or chips + payout*bet_size <= 0:\n return MIN_REWARD\n return max(math.log(1.0 + chips + payout*bet_size) - math.log(1.0 + chips),\n MIN_REWARD)", "def my_log(num):\n\n if num == 0.0:\n return -9999999999\n return math.log(num)", "def log2(a,b):\n try:\n return np.log(a) / np.log(b)\n except:\n print('tried to take log of negative value. 
Returning last value')\n return b", "def is_logged(function=None):\n\tdef _dec(view_func):\n\t\tdef _view(request, *args, **kwargs):\n\t\t\tif request.session.get('login',None):\n\t\t\t\treturn view_func(request, *args, **kwargs)\n\t\t\telse:\n\t\t\t\treturn redirect('idx')\n\n\t\t_view.__name__ = view_func.__name__\n\t\t_view.__dict__ = view_func.__dict__\n\t\t_view.__doc__ = view_func.__doc__\n\n\t\treturn _view\n\n\tif function is None:\n\t\treturn _dec\n\telse:\n\t\treturn _dec(function)", "def log2(tensor):\n return log(tensor, base=2)", "def logbarrierfunc(delta, z, use_sigma):\n if use_sigma:\n z = np.where(z >= 0, np.tanh(z), z)\n k = 2\n return np.where(z > delta, -np.log(np.abs(z)),\n ((k - 1) / k) * (((z - k * delta) / ((k - 1) * delta)) ** k - 1) - np.log(delta))", "def logit_link(x):\n\n return 1 / (1 + math.exp(-0.05 * x))\n # return 1 / (1 + math.exp(-0.01 * x))", "def log(x, base=math.e):\n return 0.0", "def _rotatelog(self,newlgf,newname):\n modlogger.debug( \"rl:%s\"%newname)\n if self.logf: \n start_new_thread(self._waitlog,(self.logf,self.logname))\n self.logsync.acquire()\n\n if newname: self.in_use_logs += [ newname ] \n try:\n self.logf, self.logname = newlgf , newname\n except Exception:\n if newname:\n self.in_use_logs.remove(newname)\n raise", "def compute_loglike(self, tools: ModelingTools) -> float:\n return -1.5", "def logP(self):\n raise NotImplementedError", "def log_method(f):\n return log(f, ignore_first=True)", "def logit_transform(params, bounds):\n with np.errstate(divide=\"ignore\", invalid=\"ignore\"):\n ret_array = np.ma.array(\n [np.ma.log(np.true_divide((x - a), (b - x))) for x, (a, b) in zip(params, bounds)])\n ret_array.set_fill_value(0)\n return np.ma.filled(ret_array)", "def lg(x: Union[int, float]) -> float:\n res = 0.0\n try:\n res = log(x, 2)\n except ValueError:\n pass\n return res", "def mpf_log(x, prec, rnd=round_fast):\n sign, man, exp, bc = x\n #------------------------------------------------------------------\n # Handle special values\n if not man:\n if x == fzero: return fninf\n if x == finf: return finf\n if x == fnan: return fnan\n if sign:\n raise ComplexResult(\"logarithm of a negative number\")\n wp = prec + 20\n #------------------------------------------------------------------\n # Handle log(2^n) = log(n)*2.\n # Here we catch the only possible exact value, log(1) = 0\n if man == 1:\n if not exp:\n return fzero\n return from_man_exp(exp*ln2_fixed(wp), -wp, prec, rnd)\n mag = exp+bc\n abs_mag = abs(mag)\n #------------------------------------------------------------------\n # Handle x = 1+eps, where log(x) ~ x. We need to check for\n # cancellation when moving to fixed-point math and compensate\n # by increasing the precision. 
Note that abs_mag in (0, 1) <=>\n # 0.5 < x < 2 and x != 1\n if abs_mag <= 1:\n # Calculate t = x-1 to measure distance from 1 in bits\n tsign = 1-abs_mag\n if tsign:\n tman = (MPZ_ONE<<bc) - man\n else:\n tman = man - (MPZ_ONE<<(bc-1))\n tbc = bitcount(tman)\n cancellation = bc - tbc\n if cancellation > wp:\n t = normalize(tsign, tman, abs_mag-bc, tbc, tbc, 'n')\n return mpf_perturb(t, tsign, prec, rnd)\n else:\n wp += cancellation\n # TODO: if close enough to 1, we could use Taylor series\n # even in the AGM precision range, since the Taylor series\n # converges rapidly\n #------------------------------------------------------------------\n # Another special case:\n # n*log(2) is a good enough approximation\n if abs_mag > 10000:\n if bitcount(abs_mag) > wp:\n return from_man_exp(exp*ln2_fixed(wp), -wp, prec, rnd)\n #------------------------------------------------------------------\n # General case.\n # Perform argument reduction using log(x) = log(x*2^n) - n*log(2):\n # If we are in the Taylor precision range, choose magnitude 0 or 1.\n # If we are in the AGM precision range, choose magnitude -m for\n # some large m; benchmarking on one machine showed m = prec/20 to be\n # optimal between 1000 and 100,000 digits.\n if wp <= LOG_TAYLOR_PREC:\n m = log_taylor_cached(lshift(man, wp-bc), wp)\n if mag:\n m += mag*ln2_fixed(wp)\n else:\n optimal_mag = -wp//LOG_AGM_MAG_PREC_RATIO\n n = optimal_mag - mag\n x = mpf_shift(x, n)\n wp += (-optimal_mag)\n m = -log_agm(to_fixed(x, wp), wp)\n m -= n*ln2_fixed(wp)\n return from_man_exp(m, -wp, prec, rnd)", "def log(base, real):\n return math.log(real, base)", "def _determine_rotated_logfile(self):\n rotated_filename = self._check_rotated_filename_candidates()\n if rotated_filename and exists(rotated_filename):\n if stat(rotated_filename).st_ino == self._offset_file_inode:\n return rotated_filename\n\n # if the inode hasn't changed, then the file shrank; this is expected with copytruncate,\n # otherwise print a warning\n if stat(self.filename).st_ino == self._offset_file_inode:\n if self.copytruncate:\n return rotated_filename\n else:\n sys.stderr.write(\n \"[pygtail] [WARN] file size of %s shrank, and copytruncate support is \"\n \"disabled (expected at least %d bytes, was %d bytes).\\n\" %\n (self.filename, self._offset, stat(self.filename).st_size))\n\n return None", "def log_so3(R):\n theta = np.arccos((np.trace(R) - 1) / 2)\n return (R - R.T) * theta / (2*np.sin(theta))", "def log(self): # just use base?\n return Factor().__build( VarSet(self.v) , np.log(self.t) )", "def _loglike(self, y, f):\n ll = y * tf.log(pos(f)) + (1 - y) * tf.log(pos(1 - f))\n return ll", "def x_flag(self):\n if self.calibrationlogtime and (\n self.calibrationlogtime != DEFAULT_CALIBRATIONLOG_TIME):\n self.calibrationlog = DEFAULT_CALIBRATIONLOG_D3S\n self.calibrationlogflag = True", "def logp(self, value: TensorType, **kwargs) -> TensorType:", "def logp(self, value: TensorType, **kwargs) -> TensorType:", "def logg_trigomonetric(teff, mass, v, bc, par, dpar, dteff, dmass):\n #np.geterr()\n if mass == 'nan':\n logg, dlogg = 'nan', 'nan'\n else:\n e = 2.718281828\n logg = 4.44 + np.log10(mass) + (4.0*np.log10(teff/5777.)) + (0.4*(v + bc)) + (2.0*np.log10(par/1000.0)) + 0.108\n logg = np.round(logg, 2)\n dlogg = np.sqrt(((dmass*np.log10(e))/mass)**2 + ((4.*dteff*np.log10(e))/teff)**2 + ((2.*0.05*np.log10(e))/par)**2)\n dlogg = np.round(dlogg, 2)\n return logg, dlogg", "def setLogFunction(function):\n _setLogFunction(function)", "def get_log(p):\n if p==0:\n return 0.\n return 
p*np.log2(p)", "def toeplitz_slogdet(r):\n n = len(r)\n r_0 = r[0]\n \n r = np.concatenate((r, np.array([r_0])))\n r /= r_0 # normalize the system so that the T matrix has diagonal of ones\n \n logdet = n*np.log(np.abs(r_0))\n sign = np.sign(r_0)**n\n \n if n == 1:\n return (sign, logdet)\n \n # now on is a modification of Levinson algorithm\n y = zeros((n,))\n x = zeros((n,))\n\n b = -r[1:n+1] \n r = r[:n]\n \n y[0] = -r[1]\n x[0] = b[0]\n beta = 1\n alpha = -r[1]\n \n d = 1 + dot(-b[0], x[0])\n sign *= np.sign(d)\n logdet += np.log(np.abs(d))\n \n for k in range(0,n-2):\n \n beta = (1 - alpha*alpha)*beta\n mu = (b[k+1] - dot(r[1:k+2], x[k::-1])) /beta\n x[0:k+1] = x[0:k+1] + mu*y[k::-1]\n x[k+1] = mu\n \n d = 1 + dot(-b[0:k+2], x[0:k+2])\n sign *= np.sign(d)\n logdet += np.log(np.abs(d))\n \n if k < n-2:\n alpha = -(r[k+2] + dot(r[1:k+2], y[k::-1]))/beta\n y[0:k+1] = y[0:k+1] + alpha * y[k::-1]\n y[k+1] = alpha \n\n return(sign, logdet)", "def log_t(u, t):\n\n def _internal_log_t(u, t):\n return (u ** (1.0 - t) - 1.0) / (1.0 - t)\n\n return tf.cond(\n tf.math.equal(t, 1.0), lambda: tf.math.log(u),\n functools.partial(_internal_log_t, u, t))", "def create_timed_rotating_log(path_log, when=\"midnight\", last=3, interval=1):\n logger = logging.getLogger(\"Test rotating file\")\n formatter = logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')\n if path_log:\n hdlr = TimedRotatingFileHandler(\n path_log, when=when, interval=interval, backupCount=last\n )\n hdlr.setFormatter(formatter)\n logger.addHandler(hdlr)\n logger.debug(\"[LOG] Criou arquivo de log\")\n else:\n hdlr2 = logging.StreamHandler()\n hdlr2.setFormatter(formatter)\n logger.addHandler(hdlr2)\n return logger", "def Log(A, B):\n return logm(inv(A).dot(B))", "def logarithmic():\n return Equivalency(\n [(dimensionless_unscaled, function_units.dex, np.log10, lambda x: 10.0**x)],\n \"logarithmic\",\n )", "def log10_inplace(a):", "def log_deriv(error):\n return logistic(error) * (1 - logistic(error))", "def logbasechange(a,b):\n return np.log(b)/np.log(a)", "def test_sc_logger(self):\n log_path = self.log_paths['tcs.codec.spatial_codec']\n log = logging.getLogger('tcs.codec.spatial_codec')\n ctrl = self.md5(log_path)\n log.debug(\"test\")\n assert self.md5(log_path) != ctrl", "def compute_loglike(self, tools: ModelingTools) -> float:\n return -3.0 * self.placeholder", "def log_wrap(fun_name):\n assert fun_name in ['log', 'log10', 'log2']\n torch_fun = getattr(torch, fun_name)\n base_lookup = {'log': torch.log2(torch.exp(torch.ones(()))), \n 'log10': torch.log2(torch.tensor(10.)), \n 'log2': torch.ones(())}\n base_coeff = base_lookup[fun_name]\n\n # The new logarithm function\n @wraps(torch_fun)\n def log_fun(input):\n assert isinstance(input, STensor)\n output = torch_fun(input.data) + input.scale/base_coeff\n return stensor(output)\n\n # Register the new logarithm function\n STABLE_FUNCTIONS[torch_fun] = log_fun", "def logistic(val):\n return 1.0 / (1.0 + np.exp(-val))", "def logits_or(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:\n f = -(x + y) / 2\n t = logaddexp(logaddexp((x - y) / 2, (y - x) / 2), -f)\n return t - f", "def test_func(self):\n self.rol_nu, self.functie_nu = rol_get_huidige_functie(self.request)\n return self.rol_nu == Rollen.ROL_RCL", "def test_func(self):\n self.rol_nu, self.functie_nu = rol_get_huidige_functie(self.request)\n return self.rol_nu == Rollen.ROL_RCL", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# 
(16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def log2(self):\n return Factor().__build( VarSet(self.v) , np.log2(self.t) )", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def statePosteriors(log_alpha, log_beta):", "def log1p(x):\n return 0.0", "def rotatelog(self,**kwargs):\n newname = self._newname()\n newlgf = LogFile(newname,**kwargs)\n with self.id_lock:\n self._rotatelog(newlgf,newname)", "def test_cache_logger(self):\n log_path = self.log_paths['tcs.codec.cache']\n log = logging.getLogger('tcs.codec.cache')\n ctrl = self.md5(log_path)\n log.debug(\"test\")\n assert self.md5(log_path) != ctrl", "def _logprob(self, sample):\n return 0, 0", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def test_cloglog_transform_deriv_v(self):\n # Note the index has a value that is <= -40 to test whether or not\n # the function correctly uses L'Hopital's rule to deal with underflow\n # and calculating the derivative. When the index is <= -40, the\n # derivative should be 1.\n test_index = np.array([-40, 1, 7])\n # Note we use a compressed sparse-row matrix so that we can easily\n # convert the output matrix to a numpy array using the '.A' attribute.\n test_output = diags(np.ones(test_index.shape[0]),\n 0, format='csr')\n\n # Bundle the arguments needed for the function\n # Not all elements except for test_index are completely fake and only\n # needed because the function requires a given number of arguments.\n # This is for api compatibility with other models.\n args = [test_index,\n np.ones(3),\n diags(np.ones(3), 0, format='csr'),\n None]\n\n # Get the derivative using the function defined in clog_log.py.\n derivative = clog._cloglog_transform_deriv_v(*args,\n output_array=test_output)\n\n # Calculate, 'by hand' what the results should be\n correct_derivatives = np.diag(np.array([1,\n 2.910328703250801,\n 1096.6331584284585]))\n\n self.assertIsInstance(derivative, type(test_output))\n self.assertEqual(len(derivative.shape), 2)\n self.assertEqual(derivative.shape, (3, 3))\n npt.assert_allclose(correct_derivatives, derivative.A)\n\n return None" ]
[ "0.62220013", "0.61344224", "0.58783966", "0.58642936", "0.5808232", "0.5750104", "0.5715419", "0.5713662", "0.5678804", "0.56212765", "0.55951023", "0.5573171", "0.5567551", "0.5551365", "0.55413747", "0.5514498", "0.5495516", "0.5494864", "0.54912615", "0.5475333", "0.5475077", "0.5449099", "0.5437804", "0.5417328", "0.5414975", "0.54093736", "0.5400344", "0.53598154", "0.5358943", "0.53586483", "0.5354937", "0.53520936", "0.5340343", "0.5338366", "0.5337326", "0.533291", "0.5323507", "0.5321488", "0.53192765", "0.5316818", "0.5294323", "0.52867794", "0.5284803", "0.52654046", "0.52403826", "0.5229486", "0.5220716", "0.5219278", "0.52121156", "0.52070296", "0.52017206", "0.519376", "0.5177182", "0.5176024", "0.5169045", "0.5153236", "0.5151645", "0.5149186", "0.51484853", "0.5133445", "0.51310164", "0.5130206", "0.51233745", "0.5117391", "0.5116884", "0.5115901", "0.5114885", "0.51145834", "0.5109968", "0.50957805", "0.5093419", "0.5093419", "0.50928813", "0.50903004", "0.5088276", "0.5087044", "0.5071569", "0.5065414", "0.5061743", "0.50528145", "0.5050967", "0.504826", "0.50472295", "0.50434273", "0.50351775", "0.5029445", "0.50237167", "0.5020634", "0.5011352", "0.5011352", "0.50062805", "0.49962157", "0.4989527", "0.4986397", "0.49855956", "0.49813825", "0.498124", "0.49762127", "0.49646083", "0.4961941" ]
0.7284713
0
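Note on the row above: only the test for need_to_rotate_log is present in this record; the implementation under test is not. A minimal sketch that satisfies the four assertions is given below. The parameter names (min_size, max_size, rotation_period, current_size, due_period) and the use of >= rather than strict comparisons are assumptions read off the assertion messages, not the actual function.

def need_to_rotate_log(min_size, max_size, rotation_period, current_size, due_period):
    # Rotate unconditionally once the file has outgrown its maximum size.
    if current_size >= max_size:
        return True
    # Otherwise rotate only when the scheduled period is due and the file
    # has at least reached the minimum size worth rotating.
    return due_period == rotation_period and current_size >= min_size

Read this way, the test covers both triggers: rotation forced by size, rotation allowed by schedule only once the file reaches the minimum size, and no rotation when the schedule is not due.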
Test of conversion of human-readable file size units to an integer
def test_human_size_units_to_base(self): self.assertEqual(human_size_units_to_base(1), 1) self.assertEqual(human_size_units_to_base('1'), 1) self.assertEqual(human_size_units_to_base('1b'), 1) self.assertEqual(human_size_units_to_base('1k'), 1000) self.assertEqual(human_size_units_to_base('1kb'), 1000) self.assertEqual(human_size_units_to_base('1kib'), 1024) self.assertEqual(human_size_units_to_base('1KiB'), 1024) self.assertEqual(human_size_units_to_base('1KiB 1b'), 1025) self.assertEqual(human_size_units_to_base('1M'), 1000000) self.assertEqual(human_size_units_to_base('1Mi'), 1024*1024) self.assertEqual(human_size_units_to_base('1G'), 1000000000) self.assertEqual(human_size_units_to_base('1T'), 1000000000000) self.assertRaisesRegex(ValueError, 'Bad unit "a" in size parameter "1a".', human_size_units_to_base, '1a')
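As with the previous row, only the test for human_size_units_to_base appears here, not the function itself. A rough sketch consistent with the assertions follows; the full unit table (decimal k/M/G/T as powers of 1000, binary Ki/Mi/Gi/Ti as powers of 1024), case-insensitive matching, summing of space-separated terms, and the wording of the error for a completely unparsable term go beyond what the test pins down and are assumptions.

import re

# Multipliers keyed by lower-cased unit suffix; '' and 'b' mean plain bytes.
_UNITS = {
    '': 1, 'b': 1,
    'k': 1000, 'kb': 1000, 'ki': 1024, 'kib': 1024,
    'm': 1000**2, 'mb': 1000**2, 'mi': 1024**2, 'mib': 1024**2,
    'g': 1000**3, 'gb': 1000**3, 'gi': 1024**3, 'gib': 1024**3,
    't': 1000**4, 'tb': 1000**4, 'ti': 1024**4, 'tib': 1024**4,
}

def human_size_units_to_base(size):
    # Integers are already in base units.
    if isinstance(size, int):
        return size
    total = 0
    # Space-separated terms such as '1KiB 1b' are summed.
    for term in str(size).split():
        match = re.fullmatch(r'(\d+)([A-Za-z]*)', term)
        if match is None:
            # Guessed message: the test never exercises this branch.
            raise ValueError('Bad size parameter "%s".' % size)
        number, unit = match.group(1), match.group(2)
        if unit.lower() not in _UNITS:
            raise ValueError('Bad unit "%s" in size parameter "%s".' % (unit, size))
        total += int(number) * _UNITS[unit.lower()]
    return total

Under these assumptions the sketch reproduces every asserted value, including '1KiB 1b' == 1025 and the ValueError message 'Bad unit "a" in size parameter "1a".' for the input '1a'.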
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _file_scale(fn):\n s = utils.file_word(fn)\n try:\n n = int(s, 0)\n except ValueError:\n n = float(s)\n return n", "def test_human_size(self):\n\n human_size = pyCompressor.human_size\n\n self.assertEqual(human_size(2), \"2.0 bytes\")\n self.assertEqual(human_size(2050), \"2.0 KB\")\n self.assertEqual(human_size(3565158), \"3.4 MB\")\n self.assertEqual(human_size(6120328397), \"5.7 GB\")", "def convert_unit(size_in_bytes, unit):\n if unit == 'KB':\n return size_in_bytes/1024\n elif unit == 'MB':\n return size_in_bytes/(1024*1024)\n elif unit == 'GB':\n return size_in_bytes/(1024*1024*1024)\n else:\n return size_in_bytes", "def __convert_file_size(self, file_size:float)->float:\n return file_size * 1000000", "def convert_file_size_to_int(size: Union[int, str]):\n if isinstance(size, int):\n return size\n if size.upper().endswith(\"GIB\"):\n return int(size[:-3]) * (2**30)\n if size.upper().endswith(\"MIB\"):\n return int(size[:-3]) * (2**20)\n if size.upper().endswith(\"KIB\"):\n return int(size[:-3]) * (2**10)\n if size.upper().endswith(\"GB\"):\n int_size = int(size[:-2]) * (10**9)\n return int_size // 8 if size.endswith(\"b\") else int_size\n if size.upper().endswith(\"MB\"):\n int_size = int(size[:-2]) * (10**6)\n return int_size // 8 if size.endswith(\"b\") else int_size\n if size.upper().endswith(\"KB\"):\n int_size = int(size[:-2]) * (10**3)\n return int_size // 8 if size.endswith(\"b\") else int_size\n raise ValueError(\"`size` is not in a valid format. Use an integer followed by the unit, e.g., '5GB'.\")", "def test_size_pretty_intonly(self):\n assert smdba.basegate.BaseGate.size_pretty(size=str(0x19000000000), int_only=True) == \"2 TB\"\n assert smdba.basegate.BaseGate.size_pretty(size=str(0x19000000000), int_only=False) == \"1.56 TB\"", "def parse_size(text, unit):\n\n text = text.strip()\n text = text.upper()\n unit = unit.upper()\n\n # First, handle the suffixes\n if text.endswith('B'):\n text = text[:-1]\n if text.endswith('I'):\n text = text[:-1]\n\n if not text:\n return ValueError('Empty size')\n\n if text[-1] in _SIZE_FACTORS:\n factor = _SIZE_FACTORS[text[-1]]\n text = text[:-1]\n else:\n factor = _SIZE_FACTORS[unit]\n\n try:\n value = float(text) * factor\n except ValueError:\n raise ValueError(\n 'Cannot parse \"{}\" as {}iB value.'.format(text, unit)\n )\n\n if value % _SIZE_FACTORS[unit]:\n raise ValueError('Value must be multiple of 1 {}iB'.format(unit))\n return int(value / _SIZE_FACTORS[unit])", "def parse_size(text, unit):\n\n text = text.strip()\n text = text.upper()\n unit = unit.upper()\n\n # First, handle the suffixes\n if text.endswith('B'):\n text = text[:-1]\n if text.endswith('I'):\n text = text[:-1]\n\n if not text:\n return ValueError('Empty size')\n\n if text[-1] in _SIZE_FACTORS:\n factor = _SIZE_FACTORS[text[-1]]\n text = text[:-1]\n else:\n factor = _SIZE_FACTORS[unit]\n\n try:\n value = float(text) * factor\n except ValueError:\n raise ValueError(\n 'Cannot parse \"{}\" as {}iB value.'.format(text, unit)\n )\n\n if value % _SIZE_FACTORS[unit]:\n raise ValueError('Value must be multiple of 1 {}iB'.format(unit))\n return int(value / _SIZE_FACTORS[unit])", "def test_filesize_to_str_int(size_num):\n assert isinstance(filesize_to_str(size_num), str)", "def get_size(size):\n if size.isdigit():\n return int(size)\n\n def do_get_size(num, unit):\n u = units[unit]\n if num.find('.') == -1:\n return int(num) * u\n return int(float(num) * u)\n\n s = size.strip().upper()\n if s.find(' ') == -1:\n num, unit = re.sub(r\"([\\d.]+)\", r\"\\1 \", s).split()\n 
else:\n num, unit = s.split()\n\n try:\n return do_get_size(num, unit)\n except KeyError:\n\traise Exception('unknown size unit[%s]' % size)", "def testSize(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_CS, parent=self._gpt_path_spec,\n volume_index=0)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n\n self.assertIsNotNone(file_entry)\n self.assertEqual(file_entry.size, 167772160)", "def test_size_pretty_round(self):\n for size, res in ((0x40, \"64 Bytes\"), (0xff, \"255 Bytes\"),\n (0x400, \"1.00 KB\"), (0x800, \"2.00 KB\"), (0x10000, \"64.00 KB\"),\n (0xffff, \"64.00 KB\"), (0xfffff, \"1024.00 KB\"),\n (0x100000, \"1.00 MB\"), (0x40000000, \"1.00 GB\"),\n (0x10000000000, \"1.00 TB\"), (0x19000000000, \"1.56 TB\")):\n assert smdba.basegate.BaseGate.size_pretty(size=str(size)) == res", "def convertFromBytes(size, unit):\n\tif (unit == 'kb'):\n\t\treturn size / 10000\n\telif (unit == 'mb'):\n\t\treturn size / 1000000\n\telif (size == 'gb'):\n\t\treturn size / 1000000000", "def ensure_size(value):\n return int(round(value * 1.0 / base)) * base", "def to_units(number):\n unit = 0\n while number >= 1024.:\n unit += 1\n number = number / 1024.\n if unit == len(UNITS) - 1:\n break\n if unit:\n return '%.2f%s' % (number, UNITS[unit])\n return '%d' % number", "def _parse_human_filesize(m):\n try:\n return int(m)\n except ValueError as e:\n match = re.match(\"^(\\\\d+)([kmgtp])$\", m)\n if match:\n digits = match[1]\n suffix = match[2]\n multiplier = 1\n for letter in [\"k\", \"m\", \"g\", \"t\", \"p\"]:\n multiplier *= 1024\n if suffix == letter:\n return multiplier * int(digits)\n\n raise e", "def size_in_mb(size_in_bytes):\n if size_in_bytes < 10**6:\n return size_in_bytes // 1000\n else:\n return size_in_bytes // 10**6", "def test_pretty_size(self):\n cases = [\n (0, '0 B'),\n (233, '233 B'),\n (1023, '1023 B'),\n (1024, '1 KB'),\n (1024 ** 2 - 1, '1 MB'),\n (12345678, '12 MB'),\n (1024 ** 3 + 100, '1 GB'),\n (1024 ** 4 + 1, '1 TB'),\n (1024 ** 5 * 2, '2048 TB'),\n ]\n\n for v, expected in cases:\n actual = utils.pretty_size(v)\n self.assertEqual(actual, expected)", "def get_filesize(string):\r\n string = get_sra_xml('SRR3403834')\r\n pattern = re.compile(r'size.*?([0-9.-]+)')\r\n size = re.search(pattern,string)\r\n\r\n return float(size.group(1))/(10**9)", "def convert_file_size_string(value):\n # list of file format sizes\n file_format_sizes = (\"kB\", \"MB\", \"GB\", \"TB\", \"PB\", \"EB\", \"ZB\", \"YB\")\n # dictionary mapping to multiplier\n file_format_scale = {\"B\" : 1,\n \"kB\" : 1e3,\n \"MB\" : 1e6,\n \"GB\" : 1e9,\n \"TB\" : 1e12,\n \"EB\" : 1e15,\n \"ZB\" : 1e18,\n \"YB\" : 1e21}\n if isinstance(value, str):\n if value.endswith(file_format_sizes):\n suffix = value[-2:]\n size = int(value[:-2])\n elif value[-1] == \"B\":\n suffix = \"B\"\n size = int(value[:-1])\n else:\n suffix = \"B\"\n size = int(value)\n # multiply by scalar\n size *= file_format_scale[suffix]\n return size\n else:\n return value", "def getSize(amt):\n fmt = lambda x: \"{:,}\".format(x)\n fstr = lambda x, y: float(\"%.1f\" % (x / float(y)))\n kb = 1024\n mb = (1024 * 1024)\n \n if amt >= mb:\n amt = fmt(fstr(amt, mb)) + \" mb\"\n return amt \n \n if amt >= kb:\n amt = fmt(fstr(amt, kb)) + \" kb\"\n return amt\n \n return fmt(amt) + \" bytes\"", "def bytes_to_size(size):\n if not size >> 10 or size < 0:\n return str(size)\n elif not size >> 20:\n return '{:.2f}KB'.format(size / 1024.0)\n elif not size >> 30:\n return '{:.2f}MB'.format(size / (1024.0 
** 2))\n elif not size >> 40:\n return '{:.2f}GB'.format(size / (1024.0 ** 3))\n else:\n return '{:.2f}TB'.format(size / (1024.0 ** 4))", "def get_size(bytes, suffix=\"B\"):\n factor = 1024\n for unit in [\"\", \"K\", \"M\", \"G\", \"T\", \"P\"]:\n if bytes < factor:\n return f\"{bytes:.2f}{unit}{suffix}\"\n bytes /= factor", "def get_size(bytes, suffix=\"B\"):\n factor = 1024\n for unit in [\"\", \"K\", \"M\", \"G\", \"T\", \"P\"]:\n if bytes < factor:\n return f\"{bytes:.2f}{unit}{suffix}\"\n bytes /= factor", "def parse_size(size,b=1024,u='B',pre=['']+[p for p in'KMGTPEZY']):\n intsize, unit = extract_num_unit(size)\n\n # Account for 10B vs 10KB when looking for base\n if len(unit) == len(u):\n base = unit\n else:\n base = unit[1:]\n\n # Check if we know this unit's base, otherwise use default\n if base in unit_base:\n b = unit_base[base]\n pow = { k+base:v for v, k in enumerate(pre) }\n\n return float(intsize)*(b**pow[unit])", "def _parseDiskSize(self, diskSizeParam): \\\n # pylint: disable=no-self-use\n if diskSizeParam.endswith('TB'):\n return int(float(diskSizeParam[:-2]) * 1000000)\n\n if diskSizeParam.endswith('GB'):\n return int(float(diskSizeParam[:-2]) * 1000)\n elif diskSizeParam.endswith('MB'):\n # Must be an integer\n return int(diskSizeParam[:-2])\n\n return int(diskSizeParam)", "def readable_size(n: int) -> str:\n sizes = ['K', 'M', 'G']\n fmt = ''\n size = n\n for i, s in enumerate(sizes):\n nn = n / (1000 ** (i + 1))\n if nn >= 1:\n size = nn\n fmt = sizes[i]\n else:\n break\n return '%.2f%s' % (size, fmt)", "def human_readable_size(value):\n hummanize_suffixes = ('KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB')\n base = 1024\n bytes_int = float(value)\n\n if bytes_int == 1:\n return '1 Byte'\n elif bytes_int < base:\n return '%d Bytes' % bytes_int\n\n for i, suffix in enumerate(hummanize_suffixes):\n unit = base ** (i+2)\n if round((bytes_int / unit) * base) < base:\n return '%.1f %s' % ((base * bytes_int / unit), suffix)", "def format_size(size):\n size = float(size)\n for unit in ['bit','Kibit','Mibit','Gibit']:\n if size < 1024.0:\n return \"{size:3.2f}{unit}\".format(size=size, unit=unit)\n size /= 1024.0\n return \"{size:.2f}{unit}\".format(size=size, unit='TiB')", "def humanvalue(self, value):\n if value > 1024 * 1024 * 1024:\n return \"%d\" % (value / 1024 / 1024 / 1024 / 1024)\n if value > 1024 * 1024:\n return \"%d\" % (value / 1024 / 1024)\n if value > 1024:\n return \"%d\" % (value / 1024 / 1024)", "def from_units(text):\n match = re.match(r'^([0-9\\.]+)(|[' + ''.join(UNITS[1:]) + r'])$', text)\n if not match:\n return None\n\n number = float(match.group(1))\n unit = match.group(2)\n return int(number * 1024**UNITS.index(unit))", "def _disk_size_in_gb(_string):\n try:\n value = int(_string)\n except ValueError as e:\n raise argparse.ArgumentTypeError(str(e))\n if value <= 0:\n raise argparse.ArgumentTypeError('Size must be positive value')\n return value", "def get_file_size(file: Path) -> str:\n return (\n humanize.naturalsize(file.stat().st_size) if file.stat().st_size else \"unknown\"\n )", "def mbsize(x):\n return str(int(x / (1024 ** 2)))", "def anySizeToBytes(size_string):\n # separate integer from unit\n try:\n size, unit = size_string.split()\n except Exception:\n try:\n size = size_string.strip()\n unit = ''.join([c for c in size if c.isalpha()])\n if len(unit) > 0:\n size = size[:-len(unit)]\n except Exception:\n return -1\n if len(size) == 0:\n return -1\n size = float(size)\n if len(unit) == 0:\n return int(size)\n short_unit = unit.upper()[0]\n\n # 
convert\n units_dict = {'T': 40, 'G': 30, 'M': 20, 'K': 10}\n if short_unit in units_dict:\n size = size * 2**units_dict[short_unit]\n return int(size)", "def cvtFromKMG(str):\n\n # remember, we already verify sizeset[]\n match = re.match('(\\d+)([kmg]?\\Z)', str, re.I)\n size = int(match.group(1))\n type = match.group(2).lower()\n if type == '':\n objsize = size\n if type == 'k':\n objsize = size * 1024\n elif type == 'm':\n objsize = size * 1024 * 1024\n elif type == 'g':\n objsize = size * 1024 * 1024 * 1024\n return(objsize)", "def filter_storage_size_num(size_str):\n\n # pattern: '^[1-9][\\d\\.]*[MGT]B?$', multiplier=1000 (not KiB)\n if size_str.endswith('B'):\n size_str = size_str[:-1]\n try:\n size_num = 1000000\n for multiplier in ['M', 'G', 'T']:\n if size_str.endswith(multiplier):\n return '{:.2f}'.format(size_num * float(size_str[:-1]))\n size_num = size_num * 1000\n return '{:.2f}'.format(float(size_str))\n except ValueError as ex:\n logging.error(size_str + \" is not a valid size string\")\n raise", "def size_to_gb(self, value):\n nb = re.search(\"[0-9]+\", value)\n if nb:\n nb = int(re.search(\"[0-9]+\", value).group())\n else:\n return 0\n if \"MB\" in value:\n return nb / 1024 if nb else 0\n elif \"GB\" in value:\n return nb\n else:\n return 0", "def unitsize(self, unit):\n\t\treturn self._unitsize[unit]", "def parse_size(size_str):\n try:\n return int(size_str)\n except ValueError, e:\n pass\n\n try:\n num = int(size_str[:-1])\n except ValueError, e:\n raise VMBuilderUserError(\"Invalid size: %s\" % size_str)\n\n if size_str[-1:] == 'g' or size_str[-1:] == 'G':\n return num * 1024\n if size_str[-1:] == 'm' or size_str[-1:] == 'M':\n return num\n if size_str[-1:] == 'k' or size_str[-1:] == 'K':\n return num / 1024", "def human_size(size_bytes):\n if size_bytes is 0:\n return \"0B\"\n\n def ln(x):\n n = 99999999\n return n * ((x ** (1/n)) - 1)\n\n def log(x, base):\n result = ln(x)/ln(base)\n return result\n\n exp = int(log(size_bytes, 1024))\n try:\n unit = (\"B\", \"KB\", \"MB\", \"GB\", \"TB\", \"PB\", \"EB\", \"ZB\", \"YB\")[exp]\n except KeyError:\n unit = \"YB\"\n return \"{} {}\".format(round(size_bytes / (1024 ** exp), 2), unit)", "def format_byte_size(fmt):\n b = 0\n for m in RE_UNIT.finditer(fmt):\n count = int(m.group(1)) if m.group(1) else 1\n b += count * UNIT_SIZE[m.group(2)]\n return b", "def bytesto(self, bytes, to, bsize=1024):\n a = {'k': 1, 'm': 2, 'g': 3, 't': 4, 'p': 5, 'e': 6}\n r = float(bytes)\n for i in range(a[to]):\n r = r / bsize\n r = round(r, 1)\n return(r)", "def baseSize_convert(baseSize_string): \r\n # Convert input genome size to int\r\n if baseSize_string[-1].upper() == 'K':\r\n baseSize = float(baseSize_string[0:-1]) * 1000\r\n elif baseSize_string[-1].upper() == 'M':\r\n baseSize = float(baseSize_string[0:-1]) * 1000000\r\n elif baseSize_string[-1].upper() == 'G':\r\n baseSize = float(baseSize_string[0:-1]) * 1000000000\r\n else:\r\n baseSize = float(baseSize)\r\n \r\n return int(baseSize)", "def is_valid_volsize(self,volsize):\n \n if type(volsize) is int:\n size_temp = divmod(volsize, 8192)\n if size_temp[1] > 0: # If not on 8GB boundary\n return int((1 + size_temp[0]) * 8192) # Always round to next 8GB increment\n else:\n return int(volsize)", "def test_size_too_small(self):\n min_size = min(settings.MISAGO_AVATARS_SIZES)\n too_small = min_size / 2\n\n self.assertEqual(clean_size(too_small), min_size)", "def get_friendly_file_size(b):\n i = 0\n check = b\n size1024 = b\n size1000 = b\n while check // 1024 != 0:\n size1000 /= 1000\n 
size1024 /= 1024\n check //= 1024\n i += 1\n return f'{size1000:.2f} {file_sizes[i]}B', f'{size1024:.2f} {file_sizes[i]}iB'", "def test_size(self) -> int:\n return int(self.data_size * self.__test_fraction)", "def size_kb(path):\n size = os.path.getsize(path)\n\n return '{:.2f}'.format(size / 1000.0)", "def getsize(f):\n size = os.path.getsize(f) / MEGABYTE\n return size", "def get_file_size_in_megabytes(file_path):\n try:\n return os.path.getsize(file_path)*(10**-6)\n except FileNotFoundError:\n print(\"No file found in path: \", file_path)\n return 0 #Return 0 if file specified does not exist or path given is mistaken", "def size_human2byte(s_str):#{{{\n s_byte = None\n if s_str.isdigit():\n s_byte = int(s_str)\n else:\n s_str = s_str.upper()\n match = re.match(r\"([0-9]+)([A-Z]+)\", s_str, re.I)\n if match:\n items = match.groups()\n size = int(items[0])\n if items[1] in [\"B\"]:\n s_byte = size\n elif items[1] in [\"K\", \"KB\"]:\n s_byte = size*1024\n elif items[1] in [\"M\", \"MB\"]:\n s_byte = size*1024*1024\n elif items[1] in [\"G\", \"GB\"]:\n s_byte = size*1024*1024*1024\n else:\n print(\"Bad maxsize argument:\", s_str, file=sys.stderr)\n return -1\n else:\n print(\"Bad maxsize argument:\", s_str, file=sys.stderr)\n return -1\n return s_byte", "def pretty_size(number):\n float_bytes = float(number)\n scale_factor = 0\n while float_bytes >= 1000 and scale_factor < len(SCALE_UNITS) - 1:\n scale_factor += 1\n float_bytes /= 1000\n return \"{0:,.2f} {1:}Bytes\".format(float_bytes, SCALE_UNITS[scale_factor])", "def human_size(size_bytes):\n if (size_bytes == 1):\n # because I really hate unnecessary plurals\n return \"1 byte\"\n\n suffixes_table = [('bytes',0),('KB',0),('MB',1),('GB',2),('TB',2), ('PB',2)]\n\n num = float(size_bytes)\n for suffix, precision in suffixes_table:\n if (num < 1024.0):\n break\n num /= 1024.0\n\n if (precision == 0):\n formatted_size = \"%d\" % num\n else:\n formatted_size = str(round(num, ndigits=precision))\n\n return \"%s %s\" % (formatted_size, suffix)", "def sizeof(num, suffix=\"B\"):\n for unit in [\"\", \"Ki\", \"Mi\", \"Gi\", \"Ti\", \"Pi\", \"Ei\", \"Zi\"]:\n if abs(num) < 1024.0:\n return f\"{num:3.1f} {unit}{suffix}\"\n num /= 1024.0\n return f\"{num:.1f} {'Yi'}{suffix}\"", "def get_size_in_mb(file_size):\n return round(file_size / (1024 * 1024), 2)", "def byte_size(x):\n\n suffixes = ['bytes', 'KB', 'MB', 'GB', 'TB']\n index = 0\n while x > 1024 and index < 4:\n index += 1\n x /= 1024\n return \"{} {}\".format(int(round(x)), suffixes[index])", "def format(self,size):\n if self.__units is None:\n # Human-readable\n return bcf_utils.format_file_size(size)\n elif self.__units == 'bytes':\n # Raw bytes\n return size\n else:\n return bcf_utils.format_file_size(size,units=self.__units)[0:-1]", "def _size_to_bytes(size):\n\tunits = 'KMGTPEZY' # note that position of letter is same as power - 1\n\tmatch = re.search(r'^\\s*([-+]?\\s*[0-9]*\\.?[0-9]*)\\s*([' + units + r']?\\s*B?\\s*S?)\\s*', size, re.IGNORECASE)\n\tif match is None or match.group(1) == '':\n\t\traise ValueError(\"size string not in proper format 'number [kmgtpezy]': \" + size)\n\tmem_size = float(re.sub(r'\\s*', '', match.group(1)))\n\tunit = re.sub(r'\\s*', '', match.group(2)).upper()\n\tunit = re.sub(r'B?S?$', '', unit) # remove trailing units symbol\n\tif unit == '':\n\t\tunit_pow = 0\n\telse:\n\t\tunit_pow = units.find(unit) + 1\n\tbyte_size = int(round(mem_size * (1024 ** unit_pow)))\n\treturn byte_size", "def clean_size(size):\n size = size.replace(\"M\",\"\")\n if 
size.endswith(\"k\"):\n size = float(size[:-1])/1000\n elif size == \"Varies with device\":\n size = np.NaN\n else:\n size = float(size)\n return size", "def human_size(size_bytes):\n if size_bytes == 1:\n # because I really hate unnecessary plurals\n return \"1 byte\"\n\n suffixes_table = [('bytes', 0), ('KB', 0), ('MB', 1), ('GB', 2), ('TB', 2), ('PB', 2)]\n\n num = float(0 if size_bytes is None else size_bytes)\n for suffix, precision in suffixes_table:\n if num < 1024.0:\n break\n num /= 1024.0\n\n if precision == 0:\n formatted_size = \"%d\" % num\n else:\n formatted_size = str(round(num, ndigits=precision))\n\n return \"%s %s\" % (formatted_size, suffix)", "def disk_size_format(cls, num):\n\n for attr in ['bytes', 'KB', 'MB', 'GB', 'TB']:\n if num < 1024.0:\n return '%3.2f %s' % (num, attr)\n num /= 1024.0\n return None", "def get_file_size_str(size):\n if size < 128: return f'{size}B' # only show bytes for really small < 0.1 KB sizes\n if size < (1024*1024): return f'{size/1024:.1f}KB'\n if size < (1024*1024*1024): return f'{size/(1024*1024):.1f}MB'\n return f'{size/(1024*1024):.2}GB'", "def bytesto(bytes, to, bsize=1024):\n\n a = {'K' : 1, 'M': 2, 'G' : 3, 'T' : 4, 'P' : 5, 'E' : 6 }\n r = float(bytes)\n for i in range(a[to]):\n r = r * bsize\n\n return(r)", "def bytesto(bytes, to, bsize=1024):\n\n a = {'k' : 1, 'm': 2, 'g' : 3, 't' : 4, 'p' : 5, 'e' : 6 }\n r = float(bytes)\n for i in range(a[to]):\n r = r / bsize\n return(r)", "def cm2inch(size):\n return size / 2.54", "def cm2inch(size):\n return size / 2.54", "def cm2inch(size):\n return size / 2.54", "def storage_size_to_kb(self,storage_size):\n multiplier = 1\n val = float(storage_size[0:-2])\n meter = storage_size[-2:]\n if \"kb\" == meter:\n multiplier = 1\n elif \"mb\" == meter:\n multiplier = 1024\n elif \"gb\" == meter:\n multiplier = 1024*1024\n return val*multiplier", "def unit_converter(val, from_u, to_u):\n\tconverter = {'b':0, 'k':1, 'm':2, 'g':3, 't':4}\n\tif converter[from_u] < converter[to_u]:\n\t\tval = float(val)\n\t\tfor _ in range(converter[to_u] - converter[from_u]):\n\t\t\tval = val/1024\n\telse:\n\t\tfor _ in range(converter[from_u] - converter[to_u]):\n\t\t\tval = val * 1024\n\t\t\t\n\treturn val", "def size_str(num):\n if num > 2 ** 30:\n return \"%0.2fGB\" % (num / 2 ** 30)\n elif num > 2 ** 20:\n return \"%0.2fMB\" % (num / 2 ** 20)\n elif num > 2 ** 10:\n return \"%0.2fkB\" % (num / 2 ** 10)\n else:\n return \"%d bytes\" % num", "def _num(s, unit=1):\r\n if s[-2:]=='cm':\r\n unit=cm\r\n s = s[:-2]\r\n if s[-2:]=='in':\r\n unit=inch\r\n s = s[:-2]\r\n if s[-2:]=='pt':\r\n unit=1\r\n s = s[:-2]\r\n if s[-1:]=='i':\r\n unit=inch\r\n s = s[:-1]\r\n if s[-2:]=='mm':\r\n unit=mm\r\n s = s[:-2]\r\n if s[-4:]=='pica':\r\n unit=pica\r\n s = s[:-4]\r\n return _convnum(s,unit)", "def pretty_print_file_size(size: float) -> str:\n for x in [\"bytes\", \"KB\", \"MB\", \"GB\", \"TB\"]:\n if size < 1024.0:\n return f\"{size:3.1f} {x}\"\n size /= 1024.0\n return str(size)", "def test_size_too_big(self):\n max_size = max(settings.MISAGO_AVATARS_SIZES)\n too_big = max_size * 2\n\n self.assertEqual(clean_size(too_big), max_size)", "def approximate_size(size, a_kilobyte_is_1024_bytes=True):\n if size < 0:\n raise ValueError('number must be non-negative')\n\n multiple = 1024 if a_kilobyte_is_1024_bytes else 1000\n for suffix in SUFFIXES[multiple]:\n if size < multiple:\n return '{0:.1f} {1}'.format(size, suffix)\n size = 1.0 * size / multiple\n\n raise ValueError('number too large')", "def convert_bytes_size(size_str):\n\n 
m = BYTES_REGEX.fullmatch(size_str.lower())\n if m:\n number = int(m.group(1))\n\n if m.group(2) is not None:\n unit = m.group(2)\n conversion = SIZE_UNITS.get(unit)\n if conversion:\n return conversion * number\n return number\n else:\n raise ValueError(\"Invalid size string: {}\".format(size_str))", "def parse_memsize(conf):\n\n def is_int(x):\n try:\n int(x)\n return True\n except ValueError:\n return False\n\n return int([x[:-1] for x in conf.split('-') if x.endswith('g') and is_int(x[:-1])][0])", "def make_human_readable_size(size, decimals=2):\n for unit in [\"B\",\"KB\",\"MB\",\"GB\",\"TB\"]:\n if size < 1024.0:\n break\n size /= 1024.0\n return f\"{size:.{decimals}f} {unit}\"", "def part1():\n program = read_input()\n root = build_filesystem(program)\n all_sizes = root.make_size_list()\n return sum(size for size in all_sizes if size <= 100000)", "def fix_size(value):\n try:\n obj_size = int(float(value) * wx.GetApp().settings.size_coeff)\n except AttributeError:\n obj_size = int(value)\n return obj_size", "def testSize(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_HFS,\n identifier=self._IDENTIFIER_ANOTHER_FILE,\n location='/a_directory/another_file',\n parent=self._raw_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n\n self.assertIsNotNone(file_entry)\n self.assertEqual(file_entry.size, 22)", "def human_readable(size):\n if size < 1024**2:\n hreadable = float(size)/1024.0\n return \"%.0fK\" % hreadable\n elif size < (1024**3):\n hreadable = float(size)/(1024**2)\n return \"%.1fM\" % round(hreadable, 1)\n else:\n hreadable = float(size)/(1024.0**3)\n return \"%.2fG\" % round(hreadable, 2)", "def unitsDetector(self, num):\n try:\n num = int(num)\n except:\n sys.exit('Invalid input! 
Method only takes ints or floats.')\n \n digits = 0\n while num > 1:\n num /= 10\n digits += 1\n \n digits -= 1\n ind = digits // 3\n units = {3: 'B', 2: 'M', 1: 'K', 0: ''}[ind]\n \n return 10 ** (ind * 3), units", "def convert_unit_size_to_num(size, unit=None):\n if unit:\n unit = MemoryUnit.validate_unit(unit)\n else:\n unit = MemoryUnit.UNIT_SIZE_DEFAULT\n log.info(_('A memory unit is not provided for size; using the '\n 'default unit %(default)s.') % {'default': 'B'})\n regex = re.compile('(\\d*)\\s*(\\w*)')\n result = regex.match(str(size)).groups()\n if result[1]:\n unit_size = MemoryUnit.validate_unit(result[1])\n converted = int(str_to_num(result[0])\n * MemoryUnit.UNIT_SIZE_DICT[unit_size]\n * math.pow(MemoryUnit.UNIT_SIZE_DICT\n [unit], -1))\n log.info(_('Given size %(size)s is converted to %(num)s '\n '%(unit)s.') % {'size': size,\n 'num': converted, 'unit': unit})\n else:\n converted = (str_to_num(result[0]))\n return converted", "def sizeof_fmt(num):\n for x in [\"bytes\", \"KB\", \"MB\", \"GB\"]:\n if num < 1024.0 and num > -1024.0:\n return \"%3.1f %s\" % (num, x)\n num /= 1024.0\n return \"%3.1f %s\" % (num, \"TB\")", "def _SizeCalculator(partition_size):\n # Max image size grows less than partition size, which means\n # footer size grows faster than partition size.\n return int(math.pow(partition_size, 0.95))", "def state(self):\n decimals = 2\n size_mb = round(self._size/1e6, decimals)\n return size_mb", "def __human_size(size, padding = True):\n # the spaces after the base unit 'B' are for alignment\n powers = { 0 : \"B \", 1 : \"KiB\", 2 : \"MiB\", 3 : \"GiB\", 4 : \"TiB\", 5 : \"PiB\", 6 : \"EiB\" }\n i = 0\n orig_size = size\n size = float(size)\n while size >= 1024:\n i += 1\n size /= 1024\n if i > 6:\n print(\"Not prepared to handle this number: {:d}\".format(orig_size))\n return \"NaN\"\n val = str(round(size, 1)) + ' ' + powers[i]\n if padding: val = \" \"*(MAX_SIZE-len(val)) + val\n return val", "def approximate_size(size, a_kilobyte_is_1024_bytes=True):\r\n if size < 0:\r\n raise ValueError('number must be non-negative')\r\n\r\n multiple = 1024 if a_kilobyte_is_1024_bytes else 1000\r\n for suffix in SUFFIXES[multiple]:\r\n size /= multiple\r\n if size < multiple:\r\n return '{0:.1f} {1}'.format(size, suffix)\r\n\r\n raise ValueError('number too large')", "def h_size(size: int, units: list = None):\n if units is None:\n units = H_SIZE_ABBREVIATIONS\n return f'{size} {units[0]}' if size < 1024 else h_size(size >> 10, units[1:])", "def test_length_to_metric(self):\n self.assertEqual(\n 100,\n METRIC_SYSTEM.length(100, METRIC_SYSTEM.length_unit)\n )\n self.assertEqual(\n 8.04672,\n METRIC_SYSTEM.length(5, IMPERIAL_SYSTEM.length_unit)\n )", "def validate_size(size):\n if not size.lower().endswith(('kb', 'mb', 'gb', 'tb'\n )) or not size[:-2].isdigit():\n msg = ('Invalid format for size. \\n'\n 'Valid sizes must be of form X[kKmMgGtT]b where X is an'\n 'integer. 
Default = 100mb')\n raise ValidationError(msg)", "def get_size(file):\n size_in_bytes = os.path.getsize(file)\n size_in_megabytes = size_in_bytes / 1000000\n return size_in_megabytes", "def testSize (self):\r\n \r\n perpixel = bytes_per_pixel [self.bih_vals [bih_BitCount]]\r\n width = self.bih_vals [bih_Width]\r\n height = self.bih_vals [bih_Height]\r\n expected = self.bih_vals [bih_SizeImage]\r\n\r\n # Rows always have multiples of 4 bytes\r\n \r\n padding = 3 - ((perpixel * width + 3) % 4)\r\n size = (width * perpixel + padding) * height\r\n\r\n if not size == expected:\r\n print \"Calculated size = %d (<> %d)\" % (size, expected)\r\n print \"***** File size error *****\"", "def parse_size(size):\n if isinstance(size, int):\n return size\n elif isinstance(size, str):\n if size.isdigit():\n return int(size)\n return None", "def file_size(file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n # return humanize.naturalsize(file_info.st_size)\n return file_info.st_size", "def units(self, size=\"G\", transfer='GB/s'): # YAML", "def formatSize(val, sf=1):\n\tif val<1e3:\n\t\treturn \"%d Bytes\"%int(val)\n\telif val<1e6:\n\t\treturn \"%.*fkiB\"%(sf, val/1024.)\n\telif val<1e9:\n\t\treturn \"%.*fMiB\"%(sf, val/1024./1024.)\n\telse:\n\t\treturn \"%.*fGiB\"%(sf, val/1024./1024./1024)", "def test_integer_magnitude(self):\n command_line = [\"filesystem\", \"create\", \"pn\", \"fn\", '--size=\"32.2GiB\"']\n for prefix in [[], [\"--propagate\"]]:\n self.check_system_exit(prefix + command_line, _PARSE_ERROR)", "def format_bytes_size(val):\n\tif not val:\n\t\treturn '0 bytes'\n\tfor sz_name in ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']:\n\t\tif val < 1024.0:\n\t\t\treturn \"{0:.2f} {1}\".format(val, sz_name)\n\t\tval /= 1024.0\n\traise OverflowError()" ]
[ "0.7208111", "0.70847607", "0.70484537", "0.7023813", "0.692497", "0.6893805", "0.6853747", "0.6853747", "0.68404585", "0.6745593", "0.66911465", "0.66774565", "0.6614509", "0.6607619", "0.659698", "0.65692264", "0.6559572", "0.6522625", "0.6506409", "0.65063375", "0.647743", "0.64737296", "0.6466713", "0.6466713", "0.6462866", "0.64279944", "0.64116746", "0.6406531", "0.6383015", "0.6372377", "0.6360011", "0.6349384", "0.6344186", "0.6340048", "0.63386226", "0.63385904", "0.63374346", "0.6334753", "0.63242567", "0.6316175", "0.6301661", "0.62999374", "0.62981033", "0.6270855", "0.6267816", "0.62423193", "0.62386787", "0.6228349", "0.62277275", "0.6224815", "0.6221496", "0.6199926", "0.6198461", "0.6197871", "0.61913747", "0.61824983", "0.6179892", "0.6166864", "0.6147196", "0.6141879", "0.61402017", "0.6116602", "0.610961", "0.609436", "0.6089349", "0.60886246", "0.60886246", "0.60886246", "0.6072388", "0.60624576", "0.60614955", "0.6054792", "0.60509974", "0.6048474", "0.6047", "0.60394686", "0.6036255", "0.60211265", "0.6019396", "0.60116446", "0.6007126", "0.6000654", "0.5999265", "0.59934455", "0.5991698", "0.59815574", "0.59764135", "0.59694004", "0.5965383", "0.5962693", "0.5961431", "0.59611577", "0.59608644", "0.596042", "0.5959777", "0.5948731", "0.5934953", "0.592793", "0.59235334", "0.59138304" ]
0.6409324
27
Tests of try rotation without configuration
def test_process_log_without_configuration(self): with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout: compressors = process_log( datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30), {}, 'hourly', '/tmp/pokus.log', 10 ) self.assertEqual(compressors, []) self.assertEqual(fake_stdout.getvalue(), 'Checking "/tmp/pokus.log"... rotation not needed.\n')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_g_asignar_rol(self):", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n 
t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def _check_rotation(spec_nest, path):\n spec = _get_from_nest(spec_nest, path)\n if spec is not None and not isinstance(spec, primitives_pb2.RotationType):\n raise InvalidSpecError(\n f'{\"/\".join(path)} was expected to be of type Rotation, but is instead '\n f'{type(spec)}')", "def test_retry_run(self):\n pass", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_rot(self):\n\n print(\"rot()\")\n obs = self.fixture\n\n # rotation(0) = identity\n for axis in [1, 2, 3]:\n # theta = 0.0\n rotation = obs.rot(0.0, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n # theta = 2*pi\n rotation = obs.rot(2.0 * np.pi, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n\n # perform many randomized tests\n num_tests = 100\n num_products = 10\n for _test_counter in range(num_tests):\n thetas = []\n axes = []\n base = np.eye(3)\n # we will multiply a series of rotations into \"base\"\n rot_all = base\n for _rot_counter in range(num_products):\n theta = np.random.uniform(2 * np.pi) # in [0,2 pi]\n axis = np.random.randint(3) + 1 # in {1,2,3}\n axes.append(axis)\n thetas.append(theta)\n rotation = obs.rot(theta, axis)\n # multiply rot1 into the cumulative rotation\n rot_all = np.dot(rot_all, rotation)\n # now, back all the rotations out\n for _rot_counter in range(num_products):\n theta = thetas.pop()\n axis = axes.pop()\n # apply the inverse rotation\n rotation = obs.rot(-theta, axis)\n rot_all = np.dot(rot_all, rotation)\n # find || base - rot1 * rot2 ||\n diff = np.linalg.norm(base - rot_all)\n self.assertAlmostEqual(diff, 0.0, delta=1e-10 * num_products)", "def 
test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def rotate(self):\n pass", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_recheck_fails(self):\n raise NotImplementedError", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if 
center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_default_parameters(self):\n\n # assert self.test_shape.rotation_angle == 360\n assert self.test_shape.start_angle == 0", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def test_extract_rot_angle():\n v = np.zeros((4,2))\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Zero velocities not allowed.\"\n \n v[:,1] = 1.\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Failed to get both forward and backward directions.\"\n\n # Forwards-backwards motion.\n v[:,1] = 0.\n v[:2,0] = -1.1\n v[2:,0] = 1.2\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,np.pi)\n\n # Forwards-backwards motion.\n v[:,0] = 0.\n v[:2,1] = -.9\n v[2:,1] = .8\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2)\n\n # Forwards-backwards motion with noise.\n v[:2,1] += (np.random.rand(2)*2-1)/10\n v[2:,1] += (np.random.rand(2)*2-1)/10\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2,atol=.1)", "def test_verification_failed(self):\n pass", "def verify_legal_rotation(self, direction):\n test_figure = None\n if direction == \"CW\":\n test_figure = self.get_block_positions(self.active_piece.get_cw_rotation())\n elif direction == \"CCW\":\n test_figure = self.get_block_positions(self.active_piece.get_ccw_rotation())\n\n for b_x, b_y in test_figure:\n if b_x < 0 or b_x >= self.WIDTH:\n return False\n\n if b_y < 0 or b_y >= self.HEIGHT:\n return False\n\n if self.board[b_y][b_x] != 0:\n return False\n return True", "def rotate(self, *args, **kwargs): # real signature unknown\n pass", "def test_call_bad_perms(self):\r\n self.assertRaises(ValueError, self.single_dms, -1)", "def test_rotation_isometry(self):\n import numpy\n\n # test for all kinds of curvatures K\n for k in (0, 1, -1, 1/11, -1/11, 11, -2):\n \n s = space(curvature=k)\n\n # use a small enough magnitude to not break math for very negative K\n magic = 0.33377777373737737777\n # 1/sqrt(2)\n s2_ref = 0.707106781186547524400844362104785\n\n o = s.make_origin(2)\n p = s.make_point((1, 0), magic)\n q = s.make_point((s2_ref, s2_ref), magic)\n\n 
rot = space_point_transform(\n numpy.array([[1,0,0],[0,s2_ref,-s2_ref],[0,s2_ref,s2_ref]]),\n curvature=k,\n math = common_math\n )\n\n f, g, i = map(space_point_transform, (p, q, o))\n\n def check_transform_eq(t1, t2, invert=False):\n for ref in (\n s.make_point((5/13, 12/13), magic),\n s.make_point((-3/5, 4/5), magic)\n ):\n self.assertTrue(invert ^ point_isclose(\n t1(ref),\n t2(ref),\n abs_tol = 1e-12\n ))\n\n # 1/8 turn, times 8\n check_transform_eq(rot*8, i)\n\n # rotate, shift, rotate\n check_transform_eq(g, rot + f + rot * -1)\n\n # the other way\n check_transform_eq(f, rot * -1 + g + rot)", "def testCalculateRotationDiff(self):\n # Test identity\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertEqual(result, 0.0)\n # Test arbitrary rotation\n rot1 = numpy.array(\n [[0.0, 1.0, 0.0], [-1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])\n rot2 = numpy.array(\n [[0.0, 0.0, -1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])\n transform1[0:3, 0:3] = numpy.matmul(transform1[0:3, 0:3], rot1)\n transform2[0:3, 0:3] = numpy.matmul(transform2[0:3, 0:3], rot2)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Order shouldn't matter\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Test when the angle is pi\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n transform2[0, 0] = -1.0\n transform2[1, 1] = -1.0\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n # It might wrap to -pi, so check the absolute value\n self.assertAlmostEqual(abs(result), numpy.pi, 8)\n # Test an extreme value\n transform2 = -1.0 * numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(abs(result), numpy.pi)", "def test_modePreservation(self):\n open(self.path, \"w\").close()\n os.chmod(self.path, 0o707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n log.rotate()\n self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360\n assert self.test_shape.extrude_both", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir,\"xxx\"), \"w\")\n except (OSError, IOError):\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEquals(f.tell(), 6)\n f.seek(0, 0)\n self.assertEquals(f.read(), \"abcdef\")\n log.close()", "def test_xform_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1, aef=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_far_out_coordinates(rotationangle):\n\n eps = 1e-7\n\n # Get the limits\n lim = rotationangle[\"cs\"].limits()[2:]\n lim0 = max(lim)\n lim1 = min(lim)\n\n # Setting c2 and c3 to zero\n c3 = lim0 - eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # Setting c2 and c3 to zero\n c3 = lim1 + eps\n\n # A large value which is 
still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # A large value which is raises an exception\n with pytest.raises(RuntimeError):\n c3 = lim0 + eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n print(phi_dash)\n\n with pytest.raises(RuntimeError):\n c3 = lim1 - eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0o555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir, \"xxx\"), \"w\")\n except OSError:\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEqual(f.tell(), 6)\n f.seek(0, 0)\n self.assertEqual(f.read(), b\"abcdef\")", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def verify_no_snapshot_reingestion(c: Composition) -> None:\n c.run(\"testdrive\", \"wait-for-snapshot.td\", \"postgres-disable-select-permission.td\")\n\n restart_mz(c)\n\n c.run(\n \"testdrive\",\n \"delete-rows-t1.td\",\n \"delete-rows-t2.td\",\n \"alter-table.td\",\n \"alter-mz.td\",\n )", "def test_stickers(self):\n rotation, _ = list(self.cube.scramble_cube(15))\n\n unique, counts = np.unique(rotation, return_counts=True)\n dictionary = dict(zip(unique, counts))\n\n self.assertEqual(all(value == 4 for value in dictionary.values()), True)", "def validate_orientation():\r\n ui.click_and_WAIT_for_item_with_retries('/tray/', 'Settings', True)\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Settings', role='button')\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Displays', role='link')\r\n time.sleep(WAIT)", "def test_to_rotation(self):\r\n q = np.array([-1, 1, 3, 2])\r\n q = q / np.linalg.norm(q)\r\n R_gt = np.array([\r\n [-1/3., -14/15., -2/15.],\r\n [2/3., -1/3., 2/3.],\r\n [-2/3., 2/15., 11/15.]]).T\r\n R = to_rotation(q)\r\n\r\n zero_matrix = R - R_gt\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n for _ in range(20):\r\n q = np.random.randn(4)\r\n q /= np.linalg.norm(q)\r\n q_inv = quaternion_conjugate(q)\r\n\r\n R = to_rotation(q)\r\n R_inv = to_rotation(q_inv)\r\n\r\n zero_matrix = R @ R_inv - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n # orthogonal matrix\r\n zero_matrix = R @ R.T - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)", "def test_restore_backup():", "def test_arc_smear(self):", "def test_can_info_does_not_exist(self):\n fake_user = User(username='Fake', password='')\n self.assertFalse(send_rotate_to_can(fake_user, self.BIN_NUM))", "def test_active_rotation_is_default():\n Rx = pr.active_matrix_from_angle(0, 0.5 * np.pi)\n ax = np.array([1, 0, 0, 0.5 * 
np.pi])\n qx = pr.quaternion_from_axis_angle(ax)\n assert_array_almost_equal(Rx, pr.matrix_from_axis_angle(ax))\n assert_array_almost_equal(Rx, pr.matrix_from_quaternion(qx))\n Ry = pr.active_matrix_from_angle(1, 0.5 * np.pi)\n ay = np.array([0, 1, 0, 0.5 * np.pi])\n qy = pr.quaternion_from_axis_angle(ay)\n assert_array_almost_equal(Ry, pr.matrix_from_axis_angle(ay))\n assert_array_almost_equal(Ry, pr.matrix_from_quaternion(qy))\n Rz = pr.active_matrix_from_angle(2, 0.5 * np.pi)\n az = np.array([0, 0, 1, 0.5 * np.pi])\n qz = pr.quaternion_from_axis_angle(az)\n assert_array_almost_equal(Rz, pr.matrix_from_axis_angle(az))\n assert_array_almost_equal(Rz, pr.matrix_from_quaternion(qz))", "def test_not_rxx_equivalent(self):\n gate = SwapGate\n with self.assertRaises(QiskitError) as exc:\n TwoQubitControlledUDecomposer(gate)\n self.assertIn(\n \"Equivalent gate needs to take exactly 1 angle parameter.\", exc.exception.message\n )", "def test_conversions_matrix_euler_zyx():\n random_state = np.random.RandomState(0)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n pr.assert_rotation_matrix(R)\n\n e_zyx = pr.euler_zyx_from_matrix(R)\n R2 = pr.matrix_from_euler_zyx(e_zyx)\n assert_array_almost_equal(R, R2)\n pr.assert_rotation_matrix(R2)\n\n e_zyx2 = pr.euler_zyx_from_matrix(R2)\n pr.assert_euler_zyx_equal(e_zyx, e_zyx2)\n\n # Gimbal lock\n for _ in range(5):\n e_zyx = random_state.rand(3)\n e_zyx[1] = np.pi / 2.0\n R = pr.matrix_from_euler_zyx(e_zyx)\n e_zyx2 = pr.euler_zyx_from_matrix(R)\n pr.assert_euler_zyx_equal(e_zyx, e_zyx2)\n\n e_zyx[1] = -np.pi / 2.0\n R = pr.matrix_from_euler_zyx(e_zyx)\n e_zyx2 = pr.euler_zyx_from_matrix(R)\n pr.assert_euler_zyx_equal(e_zyx, e_zyx2)", "def test_rotated(self):\n d = np.random.random((100, 3))\n d_emb = tadasets.embed(d, 10)\n assert np.all(np.var(d_emb, axis=0) > 0)", "def test_serialize_circuit_rotations_tape(self, monkeypatch, tmpdir, test_batch_result):\n qml.enable_tape()\n dev = QeQiskitDevice(wires=1, shots=1000, backend=\"qasm_simulator\", analytic=False)\n\n circuit_history = []\n\n with qml.tape.QuantumTape() as tape1:\n qml.Hadamard(wires=[0])\n qml.expval(qml.Hadamard(0))\n\n with monkeypatch.context() as m:\n m.setattr(pennylane_orquestra.cli_actions, \"user_data_dir\", lambda *args: tmpdir)\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"gen_expval_workflow\",\n lambda component, backend_specs, circuits, operators, **kwargs: circuit_history.extend(\n circuits\n ),\n )\n\n # Disable submitting to the Orquestra platform by mocking Popen\n m.setattr(subprocess, \"Popen\", lambda *args, **kwargs: MockPopen())\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"loop_until_finished\",\n lambda *args, **kwargs: test_batch_result, # The exact results are not considered in the test\n )\n\n dev.execute(tape1)\n\n expected = 'OPENQASM 2.0;\\ninclude \"qelib1.inc\";\\nqreg q[1];\\ncreg c[1];\\nh q[0];\\nry(-0.7853981633974483) q[0];\\n'\n assert circuit_history[0] == expected\n qml.disable_tape()", "def testQuestionFour(self):\n self.assertTrue(os.path.exists(\"./mandelbrot.png\"), \"Question 4's output (mandelbrot.png) does not exist.\")", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n 
self.assertEqual(mock_shutil.move.call_count, 2)", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def test_rotate_down(self):\n # Testing 'down' rotation clockwise\n side = 'D'\n rotation = list(self.cube.rotate_cube(side))\n result = [np.array([['g', 'g'], ['r', 'r']], dtype='<U1'),\n np.array([['y', 'y'], ['y', 'y']], dtype='<U1'),\n np.array([['o', 'o'], ['g', 'g']], dtype='<U1'),\n np.array([['w', 'w'], ['w', 'w']], dtype='<U1'),\n np.array([['b', 'b'], ['o', 'o']], dtype='<U1'),\n np.array([['r', 'r'], ['b', 'b']], dtype='<U1')]\n\n np.testing.assert_array_equal(rotation, result)", "def testModePreservation(self):\n f = open(self.path, \"w\").close()\n os.chmod(self.path, 0707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n log.rotate()\n self.assertEquals(mode, os.stat(self.path)[stat.ST_MODE])", "def testrotconsts(self):\r\n assert self.data.rotconsts.shape == (len(self.data.atomcoords), 3)", "def trial(self):\n pass", "def test_revolute(self):\n # Rotate around the z axis\n r = Joint.revolute(np.array([0, 0, 1]))\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat, np.array([1, 0, 0, 1]))[:3]\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))", "def test_id_rot():\n assert_array_almost_equal(pr.R_id, pr.matrix_from_axis_angle(pr.a_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_quaternion(pr.q_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_euler_xyz(pr.e_xyz_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_euler_zyx(pr.e_zyx_id))", "def test_calibrate(get_touchmat):\n touchmat = get_touchmat\n touchmat_model = check_device_types.get_device_model(touchmat)\n\n if touchmat_model == Devices.touchmat_g1:\n with pytest.raises(PySproutError) as execinfo:\n touchmat.calibrate()\n assert 'Functionality not available' in str(execinfo.value)\n return\n\n touchmat.calibrate()", "def test_simulationRun(self):\n self.opt = { 'temperature' : 300.0, 'friction' : 1, 'dt' : 0.00002,\n 
'nIter' : 2, 'nstepsNC' : 2, 'nstepsMD' : 1, 'nprop' : 1,\n 'nonbondedMethod' : 'NoCutoff', 'constraints': 'HBonds',\n 'trajectory_interval' : 1, 'reporter_interval' : 1,\n 'outfname' : 'mc-test',\n 'platform' : None,\n 'constraints' : 'HBonds',\n 'mc_per_iter' : 2 }\n\n structure = self.full_struct\n class SetRotationMove(RandomLigandRotationMove):\n def __init__(self, structure, resname='LIG'):\n super(SetRotationMove, self).__init__(structure, resname)\n\n def move(self, context):\n \"\"\"Function that performs a random rotation about the\n center of mass of the ligand.\n \"\"\"\n #TODO: check if we need to deepcopy\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n\n self.positions = positions[self.atom_indices]\n self.center_of_mass = self.getCenterOfMass(self.positions, self.masses)\n reduced_pos = self.positions - self.center_of_mass\n\n # Define random rotational move on the ligand\n #set rotation so that test is reproducible\n set_rotation_matrix = np.array([[-0.62297988, -0.17349253, 0.7627558 ],\n [ 0.55082352, -0.78964857, 0.27027502],\n [ 0.55541834, 0.58851973, 0.58749893]])\n\n\n #set_rotation_matrix = np.array([[1, 0, 0],\n # [0, 1, 0],\n # [0, 0, 1]])\n\n #multiply lig coordinates by rot matrix and add back COM translation from origin\n rot_move = np.dot(reduced_pos, set_rotation_matrix) * positions.unit + self.center_of_mass\n\n # Update ligand positions in nc_sim\n for index, atomidx in enumerate(self.atom_indices):\n positions[atomidx] = rot_move[index]\n context.setPositions(positions)\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n self.positions = positions[self.atom_indices]\n return context\n\n\n self.model = SetRotationMove(structure, resname='ALA')\n #self.model = RandomLigandRotationMove(structure, resname='ALA')\n\n self.model.atom_indices = range(22)\n self.model.topology = structure[self.model.atom_indices].topology\n self.model.positions = structure[self.model.atom_indices].positions\n self.model.calculateProperties()\n\n self.mover = MoveEngine(self.model)\n #Initialize the SimulationFactory object\n sims = SimulationFactory(structure, self.mover, **self.opt)\n #print(sims)\n system = sims.generateSystem(structure, **self.opt)\n simdict = sims.createSimulationSet()\n alch_system = sims.generateAlchSystem(system, self.model.atom_indices)\n self.nc_sim = sims.generateSimFromStruct(structure, self.mover, alch_system, ncmc=True, **self.opt)\n self.model.calculateProperties()\n self.initial_positions = self.nc_sim.context.getState(getPositions=True).getPositions(asNumpy=True)\n mc_sim = Simulation(sims, self.mover, **self.opt)\n #monkeypatch to access acceptance value\n def nacceptRejectMC(self, temperature=300, **opt):\n \"\"\"Function that chooses to accept or reject the proposed move.\n \"\"\"\n md_state0 = self.current_state['md']['state0']\n md_state1 = self.current_state['md']['state1']\n log_mc = (md_state1['potential_energy'] - md_state0['potential_energy']) * (-1.0/self.nc_sim.context._integrator.kT)\n randnum = math.log(np.random.random())\n\n if log_mc > randnum:\n self.accept += 1\n print('MC MOVE ACCEPTED: log_mc {} > randnum {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state1['positions'])\n else:\n self.reject += 1\n print('MC MOVE REJECTED: log_mc {} < {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state0['positions'])\n self.log_mc = log_mc\n self.md_sim.context.setVelocitiesToTemperature(self.opt['temperature'])\n mc_sim.acceptRejectMC = 
nacceptRejectMC\n nacceptRejectMC.__get__(mc_sim)\n mc_sim.acceptRejectMC = types.MethodType(nacceptRejectMC, mc_sim)\n mc_sim.runMC(self.opt['nIter'])\n #get log acceptance\n print(mc_sim.log_mc)\n #if mc is working, should be around -24.1\n assert mc_sim.log_mc <= -23.8 and mc_sim.log_mc >= -24.3", "def test_user_is_none(self):\n self.assertFalse(send_rotate_to_can(None, self.BIN_NUM))", "def test_T4():", "def test_T4():", "def rotate_in_place(angle):\n action = easy_cozmo._robot.turn_in_place(degrees(-1*angle),speed=degrees(df_rotate_speed))\n try:\n action.wait_for_completed()\n if action.has_succeeded:\n return True\n else:\n code, reason = action.failure_reason\n result = action.result\n print(\"WARNING RotateInPlace: code=%s reason='%s' result=%s\" % (code, reason, result))\n say_error(\"I couldn't rotate, sorry\")\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"I can't rotate, sorry\")\n try:\n while action.is_running:\n action.abort()\n time.sleep(.5)\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"Wheels faulty\")\n\n return False", "def compute_rotation(self):\n if self.predictions[self.iteration][0] == 90.0 or self.predictions[self.iteration][0] == 270.0:\n self.rotation = 20\n self.initial_adjust = True\n return\n\n if self.iteration == 0 or (self.iteration == 1 and self.initial_adjust):\n self.rotation = rotate.get_90_deg_rotation(self.predictions[self.iteration])\n elif self.iteration == 1 or (self.iteration == 2 and self.initial_adjust):\n self.rotation = rotate.get_45_deg_rotation(self.predictions, self.current_position)\n elif self.iteration >= 2 or (self.iteration > 2 and self.initial_adjust):\n self.rotation = rotate.get_fine_rotation(self.iteration)", "def test_serialize_operator_needs_rotation(self, obs, expected):\n dev = QeQiskitDevice(wires=3, shots=1000, backend=\"qasm_simulator\", analytic=False)\n op_str = dev.serialize_operator(obs)\n assert op_str == expected", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def doRotation(self, delta):\n self.correctPending()\n self.rotation = (self.rotation + delta) % self.possibleRotations", "def test_minvar_rotation(self):\n vrot, v, w = minvar(self.rdata)\n # Determinant of rotation matrix should be = 1\n self.assertTrue((np.linalg.det(v) - 1) < self.tol)", "def test_wrong_mode(self):\n self.assertRaises(ComponentErrorsEx, self.dp.setRewindingMode, 'FOO')", "def test_untar(self):", "def fix_rotation(self):\n self.rotate(self.rotation)\n self.annotations.rotate(self.rotation)\n self.rotation = 0", "def test_helioviewer_rotation(lasco, lasco_helioviewer):\n np.testing.assert_allclose(lasco.rotation_matrix,\n [[0.999966, -0.008296], [0.008296, 0.999966]], rtol=1e-6)\n np.testing.assert_array_equal(lasco_helioviewer.rotation_matrix, [[1., 0.], [0., 1.]])", "def test_d_3():\n rs = 20\n d = 3\n np.random.seed(rs)\n number_rotations = 3\n\n theta_1 = np.random.uniform(0, 2 * math.pi)\n rotation_1 = np.identity(d)\n pos_1 = np.random.randint(0, d - 1)\n pos_2 = np.random.randint(pos_1 + 1, d)\n rotation_1[pos_1, pos_1] = math.cos(theta_1)\n rotation_1[pos_1, pos_2] = - math.sin(theta_1)\n rotation_1[pos_2, pos_1] = math.sin(theta_1)\n rotation_1[pos_2, pos_2] = 
math.cos(theta_1)\n\n theta_2 = np.random.uniform(0, 2 * math.pi)\n rotation_2 = np.identity(d)\n pos_3 = np.random.randint(0, d - 1)\n pos_4 = np.random.randint(pos_3 + 1, d)\n rotation_2[pos_3, pos_3] = math.cos(theta_2)\n rotation_2[pos_3, pos_4] = - math.sin(theta_2)\n rotation_2[pos_4, pos_3] = math.sin(theta_2)\n rotation_2[pos_4, pos_4] = math.cos(theta_2)\n\n theta_3 = np.random.uniform(0, 2 * math.pi)\n rotation_3 = np.identity(d)\n pos_5 = np.random.randint(0, d - 1)\n pos_6 = np.random.randint(pos_5 + 1, d)\n rotation_3[pos_5, pos_5] = math.cos(theta_3)\n rotation_3[pos_5, pos_6] = - math.sin(theta_3)\n rotation_3[pos_6, pos_5] = math.sin(theta_3)\n rotation_3[pos_6, pos_6] = math.cos(theta_3)\n\n final_rotation = rotation_1 @ rotation_2 @ rotation_3\n np.random.seed(rs)\n rotation_function = (mt_obj.calculate_rotation_matrix\n (d, number_rotations))\n assert(np.all(final_rotation == rotation_function))", "def test_comp_angle_opening(self, test_dict):\n test_obj = test_dict[\"test_obj\"]\n a = test_obj.slot.comp_angle_opening()\n self.assertEqual(a, 2 * pi / test_obj.slot.Zs)\n\n b = comp_angle_opening(test_obj.slot)\n msg = \"Return \" + str(a) + \" expected \" + str(b)\n self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg)", "def test_conversions_matrix_euler_xyz():\n random_state = np.random.RandomState(0)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n pr.assert_rotation_matrix(R)\n\n e_xyz = pr.euler_xyz_from_matrix(R)\n R2 = pr.matrix_from_euler_xyz(e_xyz)\n assert_array_almost_equal(R, R2)\n pr.assert_rotation_matrix(R2)\n\n e_xyz2 = pr.euler_xyz_from_matrix(R2)\n pr.assert_euler_xyz_equal(e_xyz, e_xyz2)\n\n # Gimbal lock\n for _ in range(5):\n e_xyz = random_state.rand(3)\n e_xyz[1] = np.pi / 2.0\n R = pr.matrix_from_euler_xyz(e_xyz)\n e_xyz2 = pr.euler_xyz_from_matrix(R)\n pr.assert_euler_xyz_equal(e_xyz, e_xyz2)\n\n e_xyz[1] = -np.pi / 2.0\n R = pr.matrix_from_euler_xyz(e_xyz)\n e_xyz2 = pr.euler_xyz_from_matrix(R)\n pr.assert_euler_xyz_equal(e_xyz, e_xyz2)", "def test_calculate_angle():\n r1 = np.array([0, 0, -1])\n r2 = np.array([0, 0, 0])\n r3 = np.array([1, 0, 0])\n\n expected_angle = 90\n calculated_angle = molecool.calculate_angle(r1, r2, r3, degrees = True)\n\n assert expected_angle == calculated_angle", "def test_archive_run(self):\n pass", "def test_rotate_right(self):\n # Testing 'down' rotation clockwise\n side = 'R'\n rotation = list(self.cube.rotate_cube(side))\n result = [np.array([['g', 'g'], ['g', 'g']], dtype='<U1'),\n np.array([['y', 'o'], ['y', 'o']], dtype='<U1'),\n np.array([['o', 'w'], ['o', 'w']], dtype='<U1'),\n np.array([['w', 'r'], ['w', 'r']], dtype='<U1'),\n np.array([['b', 'b'], ['b', 'b']], dtype='<U1'),\n np.array([['y', 'r'], ['y', 'r']], dtype='<U1')]\n\n np.testing.assert_array_equal(rotation, result)", "def test_rendezvous(self):\n\n utils.rendezvous()", "def match(cube):\n \n #M1'\n M1 = (cube[1,1,0] & cube[1,1,1] & \n (not cube[0,0,2]) & (not cube[1,0,2]) & (not cube[2,0,2]) &\n (not cube[0,1,2]) & (not cube[1,1,2]) & (not cube[2,1,2]) &\n (not cube[0,2,2]) & (not cube[1,2,2]) & (not cube[2,2,2]));\n if M1:\n return True;\n \n # gerate rotations around z/vertical axis\n cuberots = [rotate(cube, axis = 2, steps = rot) for rot in range(4)];\n #print('Cube rotations:');\n #[printCube(c) for c in cuberots] \n \n # M2' and all rotations\n for curo in cuberots:\n M2 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not 
curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]));\n if M2:\n return True;\n \n # M3' and all rotations\n for curo in cuberots:\n M3 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] & curo[2,1,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]));\n if M3:\n return True;\n \n # M4' and all rotations\n for curo in cuberots:\n M4 = (curo[1,1,0] & curo[1,1,1] & curo[2,2,1] & curo[2,2,2] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]));\n if M4:\n return True;\n \n # M5' and all rotations\n for curo in cuberots:\n M5 = (curo[1,2,0] & curo[1,1,1] & \n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M5:\n return True;\n \n # M6' and all rotations\n for curo in cuberots:\n M6 = (curo[2,1,0] & curo[1,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) &\n (not curo[0,1,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M6:\n return True;\n \n # M7' and all rotations\n for curo in cuberots:\n M7 = (curo[2,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) & (not curo[2,1,0]) &\n (not curo[0,2,0]) & (not curo[1,2,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,1,1]) & (not curo[2,1,1]) &\n (not curo[0,2,1]) & (not curo[1,2,1]) & (not curo[2,2,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M7:\n return True;\n \n return False;", "def fix_rotation(self):\n cube_helper = Cube()\n cube_helper.scramble = self.scramble.split()\n cube_helper.solve = self.solve.split()\n\n rotations = []\n for move in cube_helper.scramble:\n cube_helper.exe_move(move)\n for move in cube_helper.solve:\n if move not in cube_helper.rotation:\n if not self.currently_parsing_smart_cube:\n break\n cube_helper.exe_move(move)\n\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n flag = False\n for i in range (4):\n if (up == \"5\"):\n flag = True\n break\n rotations.append(\"x\")\n cube_helper.exe_move(\"x\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n\n if (front != \"23\" and not flag):\n rotations.append(\"z\")\n cube_helper.exe_move(\"z\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n\n while (up != \"5\" or front != \"23\"):\n rotations.append(\"y\")\n cube_helper.exe_move(\"y\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n front = str_perm[22]\n\n final_rot = []\n while len(rotations) >= 3:\n if rotations[0] == rotations[1] == rotations[2]:\n r_fix = \"{}'\".format(rotations[0]).replace(\"''\",\"\")\n final_rot.append(r_fix)\n rotations.pop(0)\n rotations.pop(0)\n 
rotations.pop(0)\n else:\n final_rot.append(rotations[0])\n rotations.pop(0)\n if final_rot:\n return final_rot\n return rotations", "def detect_orientation(image):\n custom_oem_psm_config = r'--oem 1--psm 7'\n newdata = pytesseract.image_to_osd(image,config= custom_oem_psm_config)\n rotation = int(re.search('(?<=Rotate: )\\\\d+', newdata).group(0))\n # print(\"Rotation degrees : \", rotation)\n return rotate_img(image, rotation)", "def test_rotate_back(self):\n # Testing 'back' rotation clockwise\n side = 'B'\n rotation = list(self.cube.rotate_cube(side))\n result = [np.array([['y', 'g'], ['y', 'g']], dtype='<U1'),\n np.array([['b', 'b'], ['y', 'y']], dtype='<U1'),\n np.array([['o', 'o'], ['o', 'o']], dtype='<U1'),\n np.array([['w', 'w'], ['g', 'g']], dtype='<U1'),\n np.array([['b', 'w'], ['b', 'w']], dtype='<U1'),\n np.array([['r', 'r'], ['r', 'r']], dtype='<U1')]\n\n np.testing.assert_array_equal(rotation, result)", "def test_perspective_transform():\n # TODO: write this\n assert(True)", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def check_orientation(self) -> None:\n if self.compute_volume() > 0:\n raise (\n BaseException(\n \"The volume within the surface is negative. It seems that you faces\"\n \"are not oriented correctly according to the clockwise flag\"\n )\n )", "def verify():", "def _retry_occurred(self):" ]
[ "0.7053024", "0.6514648", "0.6510062", "0.64673233", "0.6354083", "0.6268674", "0.62312365", "0.6228066", "0.6227956", "0.6155046", "0.6153381", "0.61482006", "0.6102702", "0.6070507", "0.60431606", "0.6037593", "0.60291284", "0.60207057", "0.59471196", "0.593792", "0.5898857", "0.5885746", "0.58734226", "0.5840074", "0.58111167", "0.5781484", "0.57409203", "0.5709362", "0.570395", "0.56991065", "0.56785375", "0.5638217", "0.5599993", "0.5598657", "0.5535958", "0.5508222", "0.5503783", "0.5481766", "0.5479953", "0.54511607", "0.5447012", "0.54455227", "0.54344994", "0.5432657", "0.54256725", "0.5424957", "0.5422659", "0.541641", "0.5406966", "0.53816146", "0.5372758", "0.53724825", "0.5353713", "0.5352897", "0.53403753", "0.533406", "0.53326166", "0.5310905", "0.53028685", "0.52968353", "0.5283715", "0.52770925", "0.5267043", "0.5260376", "0.5259665", "0.5253185", "0.52508634", "0.5250269", "0.5243787", "0.52294165", "0.52213126", "0.52199554", "0.520257", "0.5202082", "0.5202082", "0.5196443", "0.5194918", "0.5193599", "0.5191439", "0.5188869", "0.51858675", "0.5177402", "0.51723194", "0.5168211", "0.51636094", "0.5157626", "0.5153896", "0.5149956", "0.5149221", "0.51435745", "0.513603", "0.51312196", "0.5127578", "0.51268554", "0.51255625", "0.5123626", "0.5123369", "0.51195425", "0.51165164", "0.51054484", "0.5101011" ]
0.0
-1
Tests of try rotation without configuration
def test_process_log_without_target_configuration(self):
        with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:
            compressors = process_log(
                datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),
                {'max_size': 0},
                'hourly',
                '/tmp/pokus.log',
                10
            )
            self.assertEqual(compressors, [])
            self.assertEqual(fake_stdout.getvalue(), 'Checking "/tmp/pokus.log"... missing target in configuration.\n')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_g_asignar_rol(self):", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n 
t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def _check_rotation(spec_nest, path):\n spec = _get_from_nest(spec_nest, path)\n if spec is not None and not isinstance(spec, primitives_pb2.RotationType):\n raise InvalidSpecError(\n f'{\"/\".join(path)} was expected to be of type Rotation, but is instead '\n f'{type(spec)}')", "def test_retry_run(self):\n pass", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_rot(self):\n\n print(\"rot()\")\n obs = self.fixture\n\n # rotation(0) = identity\n for axis in [1, 2, 3]:\n # theta = 0.0\n rotation = obs.rot(0.0, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n # theta = 2*pi\n rotation = obs.rot(2.0 * np.pi, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n\n # perform many randomized tests\n num_tests = 100\n num_products = 10\n for _test_counter in range(num_tests):\n thetas = []\n axes = []\n base = np.eye(3)\n # we will multiply a series of rotations into \"base\"\n rot_all = base\n for _rot_counter in range(num_products):\n theta = np.random.uniform(2 * np.pi) # in [0,2 pi]\n axis = np.random.randint(3) + 1 # in {1,2,3}\n axes.append(axis)\n thetas.append(theta)\n rotation = obs.rot(theta, axis)\n # multiply rot1 into the cumulative rotation\n rot_all = np.dot(rot_all, rotation)\n # now, back all the rotations out\n for _rot_counter in range(num_products):\n theta = thetas.pop()\n axis = axes.pop()\n # apply the inverse rotation\n rotation = obs.rot(-theta, axis)\n rot_all = np.dot(rot_all, rotation)\n # find || base - rot1 * rot2 ||\n diff = np.linalg.norm(base - rot_all)\n self.assertAlmostEqual(diff, 0.0, delta=1e-10 * num_products)", "def 
test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def rotate(self):\n pass", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_recheck_fails(self):\n raise NotImplementedError", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if 
center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_default_parameters(self):\n\n # assert self.test_shape.rotation_angle == 360\n assert self.test_shape.start_angle == 0", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def test_extract_rot_angle():\n v = np.zeros((4,2))\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Zero velocities not allowed.\"\n \n v[:,1] = 1.\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Failed to get both forward and backward directions.\"\n\n # Forwards-backwards motion.\n v[:,1] = 0.\n v[:2,0] = -1.1\n v[2:,0] = 1.2\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,np.pi)\n\n # Forwards-backwards motion.\n v[:,0] = 0.\n v[:2,1] = -.9\n v[2:,1] = .8\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2)\n\n # Forwards-backwards motion with noise.\n v[:2,1] += (np.random.rand(2)*2-1)/10\n v[2:,1] += (np.random.rand(2)*2-1)/10\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2,atol=.1)", "def test_verification_failed(self):\n pass", "def verify_legal_rotation(self, direction):\n test_figure = None\n if direction == \"CW\":\n test_figure = self.get_block_positions(self.active_piece.get_cw_rotation())\n elif direction == \"CCW\":\n test_figure = self.get_block_positions(self.active_piece.get_ccw_rotation())\n\n for b_x, b_y in test_figure:\n if b_x < 0 or b_x >= self.WIDTH:\n return False\n\n if b_y < 0 or b_y >= self.HEIGHT:\n return False\n\n if self.board[b_y][b_x] != 0:\n return False\n return True", "def rotate(self, *args, **kwargs): # real signature unknown\n pass", "def test_call_bad_perms(self):\r\n self.assertRaises(ValueError, self.single_dms, -1)", "def test_rotation_isometry(self):\n import numpy\n\n # test for all kinds of curvatures K\n for k in (0, 1, -1, 1/11, -1/11, 11, -2):\n \n s = space(curvature=k)\n\n # use a small enough magnitude to not break math for very negative K\n magic = 0.33377777373737737777\n # 1/sqrt(2)\n s2_ref = 0.707106781186547524400844362104785\n\n o = s.make_origin(2)\n p = s.make_point((1, 0), magic)\n q = s.make_point((s2_ref, s2_ref), magic)\n\n 
rot = space_point_transform(\n numpy.array([[1,0,0],[0,s2_ref,-s2_ref],[0,s2_ref,s2_ref]]),\n curvature=k,\n math = common_math\n )\n\n f, g, i = map(space_point_transform, (p, q, o))\n\n def check_transform_eq(t1, t2, invert=False):\n for ref in (\n s.make_point((5/13, 12/13), magic),\n s.make_point((-3/5, 4/5), magic)\n ):\n self.assertTrue(invert ^ point_isclose(\n t1(ref),\n t2(ref),\n abs_tol = 1e-12\n ))\n\n # 1/8 turn, times 8\n check_transform_eq(rot*8, i)\n\n # rotate, shift, rotate\n check_transform_eq(g, rot + f + rot * -1)\n\n # the other way\n check_transform_eq(f, rot * -1 + g + rot)", "def testCalculateRotationDiff(self):\n # Test identity\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertEqual(result, 0.0)\n # Test arbitrary rotation\n rot1 = numpy.array(\n [[0.0, 1.0, 0.0], [-1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])\n rot2 = numpy.array(\n [[0.0, 0.0, -1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])\n transform1[0:3, 0:3] = numpy.matmul(transform1[0:3, 0:3], rot1)\n transform2[0:3, 0:3] = numpy.matmul(transform2[0:3, 0:3], rot2)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Order shouldn't matter\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Test when the angle is pi\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n transform2[0, 0] = -1.0\n transform2[1, 1] = -1.0\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n # It might wrap to -pi, so check the absolute value\n self.assertAlmostEqual(abs(result), numpy.pi, 8)\n # Test an extreme value\n transform2 = -1.0 * numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(abs(result), numpy.pi)", "def test_modePreservation(self):\n open(self.path, \"w\").close()\n os.chmod(self.path, 0o707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n log.rotate()\n self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360\n assert self.test_shape.extrude_both", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir,\"xxx\"), \"w\")\n except (OSError, IOError):\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEquals(f.tell(), 6)\n f.seek(0, 0)\n self.assertEquals(f.read(), \"abcdef\")\n log.close()", "def test_xform_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1, aef=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_far_out_coordinates(rotationangle):\n\n eps = 1e-7\n\n # Get the limits\n lim = rotationangle[\"cs\"].limits()[2:]\n lim0 = max(lim)\n lim1 = min(lim)\n\n # Setting c2 and c3 to zero\n c3 = lim0 - eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # Setting c2 and c3 to zero\n c3 = lim1 + eps\n\n # A large value which is 
still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # A large value which is raises an exception\n with pytest.raises(RuntimeError):\n c3 = lim0 + eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n print(phi_dash)\n\n with pytest.raises(RuntimeError):\n c3 = lim1 - eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0o555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir, \"xxx\"), \"w\")\n except OSError:\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEqual(f.tell(), 6)\n f.seek(0, 0)\n self.assertEqual(f.read(), b\"abcdef\")", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def verify_no_snapshot_reingestion(c: Composition) -> None:\n c.run(\"testdrive\", \"wait-for-snapshot.td\", \"postgres-disable-select-permission.td\")\n\n restart_mz(c)\n\n c.run(\n \"testdrive\",\n \"delete-rows-t1.td\",\n \"delete-rows-t2.td\",\n \"alter-table.td\",\n \"alter-mz.td\",\n )", "def test_stickers(self):\n rotation, _ = list(self.cube.scramble_cube(15))\n\n unique, counts = np.unique(rotation, return_counts=True)\n dictionary = dict(zip(unique, counts))\n\n self.assertEqual(all(value == 4 for value in dictionary.values()), True)", "def validate_orientation():\r\n ui.click_and_WAIT_for_item_with_retries('/tray/', 'Settings', True)\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Settings', role='button')\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Displays', role='link')\r\n time.sleep(WAIT)", "def test_to_rotation(self):\r\n q = np.array([-1, 1, 3, 2])\r\n q = q / np.linalg.norm(q)\r\n R_gt = np.array([\r\n [-1/3., -14/15., -2/15.],\r\n [2/3., -1/3., 2/3.],\r\n [-2/3., 2/15., 11/15.]]).T\r\n R = to_rotation(q)\r\n\r\n zero_matrix = R - R_gt\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n for _ in range(20):\r\n q = np.random.randn(4)\r\n q /= np.linalg.norm(q)\r\n q_inv = quaternion_conjugate(q)\r\n\r\n R = to_rotation(q)\r\n R_inv = to_rotation(q_inv)\r\n\r\n zero_matrix = R @ R_inv - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n # orthogonal matrix\r\n zero_matrix = R @ R.T - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)", "def test_restore_backup():", "def test_arc_smear(self):", "def test_can_info_does_not_exist(self):\n fake_user = User(username='Fake', password='')\n self.assertFalse(send_rotate_to_can(fake_user, self.BIN_NUM))", "def test_active_rotation_is_default():\n Rx = pr.active_matrix_from_angle(0, 0.5 * np.pi)\n ax = np.array([1, 0, 0, 0.5 * 
np.pi])\n qx = pr.quaternion_from_axis_angle(ax)\n assert_array_almost_equal(Rx, pr.matrix_from_axis_angle(ax))\n assert_array_almost_equal(Rx, pr.matrix_from_quaternion(qx))\n Ry = pr.active_matrix_from_angle(1, 0.5 * np.pi)\n ay = np.array([0, 1, 0, 0.5 * np.pi])\n qy = pr.quaternion_from_axis_angle(ay)\n assert_array_almost_equal(Ry, pr.matrix_from_axis_angle(ay))\n assert_array_almost_equal(Ry, pr.matrix_from_quaternion(qy))\n Rz = pr.active_matrix_from_angle(2, 0.5 * np.pi)\n az = np.array([0, 0, 1, 0.5 * np.pi])\n qz = pr.quaternion_from_axis_angle(az)\n assert_array_almost_equal(Rz, pr.matrix_from_axis_angle(az))\n assert_array_almost_equal(Rz, pr.matrix_from_quaternion(qz))", "def test_not_rxx_equivalent(self):\n gate = SwapGate\n with self.assertRaises(QiskitError) as exc:\n TwoQubitControlledUDecomposer(gate)\n self.assertIn(\n \"Equivalent gate needs to take exactly 1 angle parameter.\", exc.exception.message\n )", "def test_conversions_matrix_euler_zyx():\n random_state = np.random.RandomState(0)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n pr.assert_rotation_matrix(R)\n\n e_zyx = pr.euler_zyx_from_matrix(R)\n R2 = pr.matrix_from_euler_zyx(e_zyx)\n assert_array_almost_equal(R, R2)\n pr.assert_rotation_matrix(R2)\n\n e_zyx2 = pr.euler_zyx_from_matrix(R2)\n pr.assert_euler_zyx_equal(e_zyx, e_zyx2)\n\n # Gimbal lock\n for _ in range(5):\n e_zyx = random_state.rand(3)\n e_zyx[1] = np.pi / 2.0\n R = pr.matrix_from_euler_zyx(e_zyx)\n e_zyx2 = pr.euler_zyx_from_matrix(R)\n pr.assert_euler_zyx_equal(e_zyx, e_zyx2)\n\n e_zyx[1] = -np.pi / 2.0\n R = pr.matrix_from_euler_zyx(e_zyx)\n e_zyx2 = pr.euler_zyx_from_matrix(R)\n pr.assert_euler_zyx_equal(e_zyx, e_zyx2)", "def test_rotated(self):\n d = np.random.random((100, 3))\n d_emb = tadasets.embed(d, 10)\n assert np.all(np.var(d_emb, axis=0) > 0)", "def test_serialize_circuit_rotations_tape(self, monkeypatch, tmpdir, test_batch_result):\n qml.enable_tape()\n dev = QeQiskitDevice(wires=1, shots=1000, backend=\"qasm_simulator\", analytic=False)\n\n circuit_history = []\n\n with qml.tape.QuantumTape() as tape1:\n qml.Hadamard(wires=[0])\n qml.expval(qml.Hadamard(0))\n\n with monkeypatch.context() as m:\n m.setattr(pennylane_orquestra.cli_actions, \"user_data_dir\", lambda *args: tmpdir)\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"gen_expval_workflow\",\n lambda component, backend_specs, circuits, operators, **kwargs: circuit_history.extend(\n circuits\n ),\n )\n\n # Disable submitting to the Orquestra platform by mocking Popen\n m.setattr(subprocess, \"Popen\", lambda *args, **kwargs: MockPopen())\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"loop_until_finished\",\n lambda *args, **kwargs: test_batch_result, # The exact results are not considered in the test\n )\n\n dev.execute(tape1)\n\n expected = 'OPENQASM 2.0;\\ninclude \"qelib1.inc\";\\nqreg q[1];\\ncreg c[1];\\nh q[0];\\nry(-0.7853981633974483) q[0];\\n'\n assert circuit_history[0] == expected\n qml.disable_tape()", "def testQuestionFour(self):\n self.assertTrue(os.path.exists(\"./mandelbrot.png\"), \"Question 4's output (mandelbrot.png) does not exist.\")", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n 
self.assertEqual(mock_shutil.move.call_count, 2)", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def test_rotate_down(self):\n # Testing 'down' rotation clockwise\n side = 'D'\n rotation = list(self.cube.rotate_cube(side))\n result = [np.array([['g', 'g'], ['r', 'r']], dtype='<U1'),\n np.array([['y', 'y'], ['y', 'y']], dtype='<U1'),\n np.array([['o', 'o'], ['g', 'g']], dtype='<U1'),\n np.array([['w', 'w'], ['w', 'w']], dtype='<U1'),\n np.array([['b', 'b'], ['o', 'o']], dtype='<U1'),\n np.array([['r', 'r'], ['b', 'b']], dtype='<U1')]\n\n np.testing.assert_array_equal(rotation, result)", "def testModePreservation(self):\n f = open(self.path, \"w\").close()\n os.chmod(self.path, 0707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n log.rotate()\n self.assertEquals(mode, os.stat(self.path)[stat.ST_MODE])", "def testrotconsts(self):\r\n assert self.data.rotconsts.shape == (len(self.data.atomcoords), 3)", "def trial(self):\n pass", "def test_revolute(self):\n # Rotate around the z axis\n r = Joint.revolute(np.array([0, 0, 1]))\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat, np.array([1, 0, 0, 1]))[:3]\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))", "def test_id_rot():\n assert_array_almost_equal(pr.R_id, pr.matrix_from_axis_angle(pr.a_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_quaternion(pr.q_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_euler_xyz(pr.e_xyz_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_euler_zyx(pr.e_zyx_id))", "def test_calibrate(get_touchmat):\n touchmat = get_touchmat\n touchmat_model = check_device_types.get_device_model(touchmat)\n\n if touchmat_model == Devices.touchmat_g1:\n with pytest.raises(PySproutError) as execinfo:\n touchmat.calibrate()\n assert 'Functionality not available' in str(execinfo.value)\n return\n\n touchmat.calibrate()", "def test_simulationRun(self):\n self.opt = { 'temperature' : 300.0, 'friction' : 1, 'dt' : 0.00002,\n 
'nIter' : 2, 'nstepsNC' : 2, 'nstepsMD' : 1, 'nprop' : 1,\n 'nonbondedMethod' : 'NoCutoff', 'constraints': 'HBonds',\n 'trajectory_interval' : 1, 'reporter_interval' : 1,\n 'outfname' : 'mc-test',\n 'platform' : None,\n 'constraints' : 'HBonds',\n 'mc_per_iter' : 2 }\n\n structure = self.full_struct\n class SetRotationMove(RandomLigandRotationMove):\n def __init__(self, structure, resname='LIG'):\n super(SetRotationMove, self).__init__(structure, resname)\n\n def move(self, context):\n \"\"\"Function that performs a random rotation about the\n center of mass of the ligand.\n \"\"\"\n #TODO: check if we need to deepcopy\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n\n self.positions = positions[self.atom_indices]\n self.center_of_mass = self.getCenterOfMass(self.positions, self.masses)\n reduced_pos = self.positions - self.center_of_mass\n\n # Define random rotational move on the ligand\n #set rotation so that test is reproducible\n set_rotation_matrix = np.array([[-0.62297988, -0.17349253, 0.7627558 ],\n [ 0.55082352, -0.78964857, 0.27027502],\n [ 0.55541834, 0.58851973, 0.58749893]])\n\n\n #set_rotation_matrix = np.array([[1, 0, 0],\n # [0, 1, 0],\n # [0, 0, 1]])\n\n #multiply lig coordinates by rot matrix and add back COM translation from origin\n rot_move = np.dot(reduced_pos, set_rotation_matrix) * positions.unit + self.center_of_mass\n\n # Update ligand positions in nc_sim\n for index, atomidx in enumerate(self.atom_indices):\n positions[atomidx] = rot_move[index]\n context.setPositions(positions)\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n self.positions = positions[self.atom_indices]\n return context\n\n\n self.model = SetRotationMove(structure, resname='ALA')\n #self.model = RandomLigandRotationMove(structure, resname='ALA')\n\n self.model.atom_indices = range(22)\n self.model.topology = structure[self.model.atom_indices].topology\n self.model.positions = structure[self.model.atom_indices].positions\n self.model.calculateProperties()\n\n self.mover = MoveEngine(self.model)\n #Initialize the SimulationFactory object\n sims = SimulationFactory(structure, self.mover, **self.opt)\n #print(sims)\n system = sims.generateSystem(structure, **self.opt)\n simdict = sims.createSimulationSet()\n alch_system = sims.generateAlchSystem(system, self.model.atom_indices)\n self.nc_sim = sims.generateSimFromStruct(structure, self.mover, alch_system, ncmc=True, **self.opt)\n self.model.calculateProperties()\n self.initial_positions = self.nc_sim.context.getState(getPositions=True).getPositions(asNumpy=True)\n mc_sim = Simulation(sims, self.mover, **self.opt)\n #monkeypatch to access acceptance value\n def nacceptRejectMC(self, temperature=300, **opt):\n \"\"\"Function that chooses to accept or reject the proposed move.\n \"\"\"\n md_state0 = self.current_state['md']['state0']\n md_state1 = self.current_state['md']['state1']\n log_mc = (md_state1['potential_energy'] - md_state0['potential_energy']) * (-1.0/self.nc_sim.context._integrator.kT)\n randnum = math.log(np.random.random())\n\n if log_mc > randnum:\n self.accept += 1\n print('MC MOVE ACCEPTED: log_mc {} > randnum {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state1['positions'])\n else:\n self.reject += 1\n print('MC MOVE REJECTED: log_mc {} < {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state0['positions'])\n self.log_mc = log_mc\n self.md_sim.context.setVelocitiesToTemperature(self.opt['temperature'])\n mc_sim.acceptRejectMC = 
nacceptRejectMC\n nacceptRejectMC.__get__(mc_sim)\n mc_sim.acceptRejectMC = types.MethodType(nacceptRejectMC, mc_sim)\n mc_sim.runMC(self.opt['nIter'])\n #get log acceptance\n print(mc_sim.log_mc)\n #if mc is working, should be around -24.1\n assert mc_sim.log_mc <= -23.8 and mc_sim.log_mc >= -24.3", "def test_user_is_none(self):\n self.assertFalse(send_rotate_to_can(None, self.BIN_NUM))", "def test_T4():", "def test_T4():", "def rotate_in_place(angle):\n action = easy_cozmo._robot.turn_in_place(degrees(-1*angle),speed=degrees(df_rotate_speed))\n try:\n action.wait_for_completed()\n if action.has_succeeded:\n return True\n else:\n code, reason = action.failure_reason\n result = action.result\n print(\"WARNING RotateInPlace: code=%s reason='%s' result=%s\" % (code, reason, result))\n say_error(\"I couldn't rotate, sorry\")\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"I can't rotate, sorry\")\n try:\n while action.is_running:\n action.abort()\n time.sleep(.5)\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"Wheels faulty\")\n\n return False", "def compute_rotation(self):\n if self.predictions[self.iteration][0] == 90.0 or self.predictions[self.iteration][0] == 270.0:\n self.rotation = 20\n self.initial_adjust = True\n return\n\n if self.iteration == 0 or (self.iteration == 1 and self.initial_adjust):\n self.rotation = rotate.get_90_deg_rotation(self.predictions[self.iteration])\n elif self.iteration == 1 or (self.iteration == 2 and self.initial_adjust):\n self.rotation = rotate.get_45_deg_rotation(self.predictions, self.current_position)\n elif self.iteration >= 2 or (self.iteration > 2 and self.initial_adjust):\n self.rotation = rotate.get_fine_rotation(self.iteration)", "def test_serialize_operator_needs_rotation(self, obs, expected):\n dev = QeQiskitDevice(wires=3, shots=1000, backend=\"qasm_simulator\", analytic=False)\n op_str = dev.serialize_operator(obs)\n assert op_str == expected", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def doRotation(self, delta):\n self.correctPending()\n self.rotation = (self.rotation + delta) % self.possibleRotations", "def test_minvar_rotation(self):\n vrot, v, w = minvar(self.rdata)\n # Determinant of rotation matrix should be = 1\n self.assertTrue((np.linalg.det(v) - 1) < self.tol)", "def test_wrong_mode(self):\n self.assertRaises(ComponentErrorsEx, self.dp.setRewindingMode, 'FOO')", "def test_untar(self):", "def fix_rotation(self):\n self.rotate(self.rotation)\n self.annotations.rotate(self.rotation)\n self.rotation = 0", "def test_helioviewer_rotation(lasco, lasco_helioviewer):\n np.testing.assert_allclose(lasco.rotation_matrix,\n [[0.999966, -0.008296], [0.008296, 0.999966]], rtol=1e-6)\n np.testing.assert_array_equal(lasco_helioviewer.rotation_matrix, [[1., 0.], [0., 1.]])", "def test_d_3():\n rs = 20\n d = 3\n np.random.seed(rs)\n number_rotations = 3\n\n theta_1 = np.random.uniform(0, 2 * math.pi)\n rotation_1 = np.identity(d)\n pos_1 = np.random.randint(0, d - 1)\n pos_2 = np.random.randint(pos_1 + 1, d)\n rotation_1[pos_1, pos_1] = math.cos(theta_1)\n rotation_1[pos_1, pos_2] = - math.sin(theta_1)\n rotation_1[pos_2, pos_1] = math.sin(theta_1)\n rotation_1[pos_2, pos_2] = 
math.cos(theta_1)\n\n theta_2 = np.random.uniform(0, 2 * math.pi)\n rotation_2 = np.identity(d)\n pos_3 = np.random.randint(0, d - 1)\n pos_4 = np.random.randint(pos_3 + 1, d)\n rotation_2[pos_3, pos_3] = math.cos(theta_2)\n rotation_2[pos_3, pos_4] = - math.sin(theta_2)\n rotation_2[pos_4, pos_3] = math.sin(theta_2)\n rotation_2[pos_4, pos_4] = math.cos(theta_2)\n\n theta_3 = np.random.uniform(0, 2 * math.pi)\n rotation_3 = np.identity(d)\n pos_5 = np.random.randint(0, d - 1)\n pos_6 = np.random.randint(pos_5 + 1, d)\n rotation_3[pos_5, pos_5] = math.cos(theta_3)\n rotation_3[pos_5, pos_6] = - math.sin(theta_3)\n rotation_3[pos_6, pos_5] = math.sin(theta_3)\n rotation_3[pos_6, pos_6] = math.cos(theta_3)\n\n final_rotation = rotation_1 @ rotation_2 @ rotation_3\n np.random.seed(rs)\n rotation_function = (mt_obj.calculate_rotation_matrix\n (d, number_rotations))\n assert(np.all(final_rotation == rotation_function))", "def test_comp_angle_opening(self, test_dict):\n test_obj = test_dict[\"test_obj\"]\n a = test_obj.slot.comp_angle_opening()\n self.assertEqual(a, 2 * pi / test_obj.slot.Zs)\n\n b = comp_angle_opening(test_obj.slot)\n msg = \"Return \" + str(a) + \" expected \" + str(b)\n self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg)", "def test_conversions_matrix_euler_xyz():\n random_state = np.random.RandomState(0)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n pr.assert_rotation_matrix(R)\n\n e_xyz = pr.euler_xyz_from_matrix(R)\n R2 = pr.matrix_from_euler_xyz(e_xyz)\n assert_array_almost_equal(R, R2)\n pr.assert_rotation_matrix(R2)\n\n e_xyz2 = pr.euler_xyz_from_matrix(R2)\n pr.assert_euler_xyz_equal(e_xyz, e_xyz2)\n\n # Gimbal lock\n for _ in range(5):\n e_xyz = random_state.rand(3)\n e_xyz[1] = np.pi / 2.0\n R = pr.matrix_from_euler_xyz(e_xyz)\n e_xyz2 = pr.euler_xyz_from_matrix(R)\n pr.assert_euler_xyz_equal(e_xyz, e_xyz2)\n\n e_xyz[1] = -np.pi / 2.0\n R = pr.matrix_from_euler_xyz(e_xyz)\n e_xyz2 = pr.euler_xyz_from_matrix(R)\n pr.assert_euler_xyz_equal(e_xyz, e_xyz2)", "def test_calculate_angle():\n r1 = np.array([0, 0, -1])\n r2 = np.array([0, 0, 0])\n r3 = np.array([1, 0, 0])\n\n expected_angle = 90\n calculated_angle = molecool.calculate_angle(r1, r2, r3, degrees = True)\n\n assert expected_angle == calculated_angle", "def test_archive_run(self):\n pass", "def test_rotate_right(self):\n # Testing 'down' rotation clockwise\n side = 'R'\n rotation = list(self.cube.rotate_cube(side))\n result = [np.array([['g', 'g'], ['g', 'g']], dtype='<U1'),\n np.array([['y', 'o'], ['y', 'o']], dtype='<U1'),\n np.array([['o', 'w'], ['o', 'w']], dtype='<U1'),\n np.array([['w', 'r'], ['w', 'r']], dtype='<U1'),\n np.array([['b', 'b'], ['b', 'b']], dtype='<U1'),\n np.array([['y', 'r'], ['y', 'r']], dtype='<U1')]\n\n np.testing.assert_array_equal(rotation, result)", "def test_rendezvous(self):\n\n utils.rendezvous()", "def match(cube):\n \n #M1'\n M1 = (cube[1,1,0] & cube[1,1,1] & \n (not cube[0,0,2]) & (not cube[1,0,2]) & (not cube[2,0,2]) &\n (not cube[0,1,2]) & (not cube[1,1,2]) & (not cube[2,1,2]) &\n (not cube[0,2,2]) & (not cube[1,2,2]) & (not cube[2,2,2]));\n if M1:\n return True;\n \n # gerate rotations around z/vertical axis\n cuberots = [rotate(cube, axis = 2, steps = rot) for rot in range(4)];\n #print('Cube rotations:');\n #[printCube(c) for c in cuberots] \n \n # M2' and all rotations\n for curo in cuberots:\n M2 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not 
curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]));\n if M2:\n return True;\n \n # M3' and all rotations\n for curo in cuberots:\n M3 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] & curo[2,1,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]));\n if M3:\n return True;\n \n # M4' and all rotations\n for curo in cuberots:\n M4 = (curo[1,1,0] & curo[1,1,1] & curo[2,2,1] & curo[2,2,2] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]));\n if M4:\n return True;\n \n # M5' and all rotations\n for curo in cuberots:\n M5 = (curo[1,2,0] & curo[1,1,1] & \n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M5:\n return True;\n \n # M6' and all rotations\n for curo in cuberots:\n M6 = (curo[2,1,0] & curo[1,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) &\n (not curo[0,1,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M6:\n return True;\n \n # M7' and all rotations\n for curo in cuberots:\n M7 = (curo[2,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) & (not curo[2,1,0]) &\n (not curo[0,2,0]) & (not curo[1,2,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,1,1]) & (not curo[2,1,1]) &\n (not curo[0,2,1]) & (not curo[1,2,1]) & (not curo[2,2,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M7:\n return True;\n \n return False;", "def fix_rotation(self):\n cube_helper = Cube()\n cube_helper.scramble = self.scramble.split()\n cube_helper.solve = self.solve.split()\n\n rotations = []\n for move in cube_helper.scramble:\n cube_helper.exe_move(move)\n for move in cube_helper.solve:\n if move not in cube_helper.rotation:\n if not self.currently_parsing_smart_cube:\n break\n cube_helper.exe_move(move)\n\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n flag = False\n for i in range (4):\n if (up == \"5\"):\n flag = True\n break\n rotations.append(\"x\")\n cube_helper.exe_move(\"x\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n\n if (front != \"23\" and not flag):\n rotations.append(\"z\")\n cube_helper.exe_move(\"z\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n\n while (up != \"5\" or front != \"23\"):\n rotations.append(\"y\")\n cube_helper.exe_move(\"y\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n front = str_perm[22]\n\n final_rot = []\n while len(rotations) >= 3:\n if rotations[0] == rotations[1] == rotations[2]:\n r_fix = \"{}'\".format(rotations[0]).replace(\"''\",\"\")\n final_rot.append(r_fix)\n rotations.pop(0)\n rotations.pop(0)\n 
rotations.pop(0)\n else:\n final_rot.append(rotations[0])\n rotations.pop(0)\n if final_rot:\n return final_rot\n return rotations", "def detect_orientation(image):\n custom_oem_psm_config = r'--oem 1--psm 7'\n newdata = pytesseract.image_to_osd(image,config= custom_oem_psm_config)\n rotation = int(re.search('(?<=Rotate: )\\\\d+', newdata).group(0))\n # print(\"Rotation degrees : \", rotation)\n return rotate_img(image, rotation)", "def test_rotate_back(self):\n # Testing 'back' rotation clockwise\n side = 'B'\n rotation = list(self.cube.rotate_cube(side))\n result = [np.array([['y', 'g'], ['y', 'g']], dtype='<U1'),\n np.array([['b', 'b'], ['y', 'y']], dtype='<U1'),\n np.array([['o', 'o'], ['o', 'o']], dtype='<U1'),\n np.array([['w', 'w'], ['g', 'g']], dtype='<U1'),\n np.array([['b', 'w'], ['b', 'w']], dtype='<U1'),\n np.array([['r', 'r'], ['r', 'r']], dtype='<U1')]\n\n np.testing.assert_array_equal(rotation, result)", "def test_perspective_transform():\n # TODO: write this\n assert(True)", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def check_orientation(self) -> None:\n if self.compute_volume() > 0:\n raise (\n BaseException(\n \"The volume within the surface is negative. It seems that you faces\"\n \"are not oriented correctly according to the clockwise flag\"\n )\n )", "def verify():", "def _retry_occurred(self):" ]
[ "0.7053024", "0.6514648", "0.6510062", "0.64673233", "0.6354083", "0.6268674", "0.62312365", "0.6228066", "0.6227956", "0.6155046", "0.6153381", "0.61482006", "0.6102702", "0.6070507", "0.60431606", "0.6037593", "0.60291284", "0.60207057", "0.59471196", "0.593792", "0.5898857", "0.5885746", "0.58734226", "0.5840074", "0.58111167", "0.5781484", "0.57409203", "0.5709362", "0.570395", "0.56991065", "0.56785375", "0.5638217", "0.5599993", "0.5598657", "0.5535958", "0.5508222", "0.5503783", "0.5481766", "0.5479953", "0.54511607", "0.5447012", "0.54455227", "0.54344994", "0.5432657", "0.54256725", "0.5424957", "0.5422659", "0.541641", "0.5406966", "0.53816146", "0.5372758", "0.53724825", "0.5353713", "0.5352897", "0.53403753", "0.533406", "0.53326166", "0.5310905", "0.53028685", "0.52968353", "0.5283715", "0.52770925", "0.5267043", "0.5260376", "0.5259665", "0.5253185", "0.52508634", "0.5250269", "0.5243787", "0.52294165", "0.52213126", "0.52199554", "0.520257", "0.5202082", "0.5202082", "0.5196443", "0.5194918", "0.5193599", "0.5191439", "0.5188869", "0.51858675", "0.5177402", "0.51723194", "0.5168211", "0.51636094", "0.5157626", "0.5153896", "0.5149956", "0.5149221", "0.51435745", "0.513603", "0.51312196", "0.5127578", "0.51268554", "0.51255625", "0.5123626", "0.5123369", "0.51195425", "0.51165164", "0.51054484", "0.5101011" ]
0.0
-1
Tests of try rotation with ignore in configuration
def test_process_log_with_ignore_in_configuration(self):
        with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:
            compressors = process_log(
                datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),
                {'ignore': True},
                'hourly',
                '/tmp/pokus.log',
                10
            )
            self.assertEqual(compressors, [])
            self.assertEqual(fake_stdout.getvalue(), '')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def test_g_asignar_rol(self):", "def test_retry_run(self):\n pass", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n 
with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def test_skip_with_decorator_and_reason():\n pass", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_skipif_false():\n pass", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the 
test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_default_parameters(self):\n\n # assert self.test_shape.rotation_angle == 360\n assert self.test_shape.start_angle == 0", "def test_recheck_fails(self):\n raise NotImplementedError", "def _check_rotation(spec_nest, path):\n spec = _get_from_nest(spec_nest, path)\n if spec is not None and not isinstance(spec, primitives_pb2.RotationType):\n raise InvalidSpecError(\n f'{\"/\".join(path)} was expected to be of type Rotation, but is instead '\n f'{type(spec)}')", "def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_not_rxx_equivalent(self):\n gate = SwapGate\n with self.assertRaises(QiskitError) as exc:\n TwoQubitControlledUDecomposer(gate)\n self.assertIn(\n \"Equivalent gate needs to take exactly 1 angle parameter.\", exc.exception.message\n )", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_skipif_true():\n pass", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def test_grid_scans_failing(RE, hw, plan):\n\n # Multiple instance of the same motor in 'args'\n args_list = [\n # New style\n (hw.motor, 1, 2, 3,\n hw.motor1, 4, 5, 6,\n hw.motor1, 7, 8, 9),\n # Old style\n (hw.motor, 1, 2, 3,\n hw.motor1, 4, 5, 6, True,\n hw.motor1, 7, 8, 9, False)\n ]\n for args in args_list:\n with pytest.raises(ValueError,\n match=\"Some motors are listed multiple times in the argument list 'args'\"):\n RE(plan([hw.det], *args))\n\n # 'snake_axes' contains repeated elements\n with pytest.raises(ValueError,\n match=\"The list of axes 'snake_axes' contains repeated elements\"):\n args = (hw.motor, 1, 2, 3,\n hw.motor1, 4, 5, 6,\n hw.motor2, 7, 8, 9)\n snake_axes = [hw.motor1, hw.motor2, hw.motor1]\n RE(plan([hw.det], *args, snake_axes=snake_axes))\n\n # Snaking is enabled for the slowest motor\n with pytest.raises(ValueError,\n match=\"The list of axes 'snake_axes' contains the slowest motor\"):\n args = (hw.motor, 1, 2, 3,\n hw.motor1, 4, 5, 6,\n hw.motor2, 7, 8, 9)\n snake_axes = [hw.motor1, hw.motor]\n RE(plan([hw.det], *args, snake_axes=snake_axes))\n\n # Attempt to enable snaking for motors that are not controlled during the scan\n with pytest.raises(ValueError,\n match=\"The list of axes 'snake_axes' contains motors \"\n \"that are not controlled during the scan\"):\n args = (hw.motor, 1, 2, 3,\n hw.motor1, 4, 5, 6,\n hw.motor2, 7, 8, 9)\n snake_axes = [hw.motor1, hw.motor3]\n RE(plan([hw.det], *args, snake_axes=snake_axes))\n\n # Mix deprecated and new API ('snake_axes' is used while snaking is set in 'args'\n with pytest.raises(ValueError,\n match=\"Mixing of deprecated and new API interface is not allowed\"):\n args = (hw.motor, 1, 2, 3,\n 
hw.motor1, 4, 5, 6, True,\n hw.motor2, 7, 8, 9, False)\n RE(plan([hw.det], *args, snake_axes=False))\n\n # The type of 'snake_axes' parameter is not allowed\n for snake_axes in (10, 50.439, \"some string\"):\n with pytest.raises(ValueError,\n match=\"Parameter 'snake_axes' is not iterable, boolean or None\"):\n args = (hw.motor, 1, 2, 3,\n hw.motor1, 4, 5, 6,\n hw.motor2, 7, 8, 9)\n RE(plan([hw.det], *args, snake_axes=snake_axes))", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def rotate(self):\n pass", "def test_config_wrong_config(self):\n test_data_1 = (\"[gnupg_missing]\\n\"\n \"recipients = [email protected]\\n\"\n \"signer = [email protected]\\n\"\n \"[amazon-s3]\\n\"\n \"access_key = ACCESSKEY\\n\"\n \"secret_access_key = SECRETACCESSKEY\\n\"\n \"[data]\\n\"\n \"bucket = DATABUCKET\\n\"\n \"[metadata]\\n\"\n \"bucket = METADATABUCKET\\n\")\n test_data_2 = (\"[gnupg]\\n\"\n \"recipients_missing = [email protected]\\n\"\n \"signer = [email protected]\\n\"\n \"[amazon-s3]\\n\"\n \"access_key = ACCESSKEY\\n\"\n \"secret_access_key = SECRETACCESSKEY\\n\"\n \"[data]\\n\"\n \"bucket = DATABUCKET\\n\"\n \"[metadata]\\n\"\n \"bucket = METADATABUCKET\\n\")\n if os.path.isfile(\"test_config.conf\"):\n os.remove(\"test_config.conf\")\n file(\"test_config.conf\", \"wb\").write(test_data_1)\n config = Config(\"test_config.conf\")\n self.assertRaises(\n ConfigError, config.check, \"gnupg\", [\"recipients\", \"signer\"])\n file(\"test_config.conf\", \"wb\").write(test_data_2)\n config = Config(\"test_config.conf\")\n self.assertRaises(\n ConfigError, config.check, \"gnupg\", [\"recipients\", \"signer\"])\n os.remove(\"test_config.conf\")", "def test_wrong_mode(self):\n self.assertRaises(ComponentErrorsEx, self.dp.setRewindingMode, 'FOO')", "def test_modePreservation(self):\n open(self.path, \"w\").close()\n os.chmod(self.path, 0o707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n log.rotate()\n self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])", "def setupAuto(self) :\n\t\tself.rotateDir = -1\n\t\tself.rotateDuration = -1\n\t\tself.moveDir = -1\n\t\tself.moveDuration = -1\n\t\tself.isAvoidingCollision = False\n\t\tself.inBigRotate = False # if True, do not move forward;\n\t\t\t\t\t # only rotate\n\t\treturn", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n\n # change permissions so rotation would 
fail\n os.chmod(self.dir, 0o555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir, \"xxx\"), \"w\")\n except OSError:\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEqual(f.tell(), 6)\n f.seek(0, 0)\n self.assertEqual(f.read(), b\"abcdef\")", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360\n assert self.test_shape.extrude_both", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir,\"xxx\"), \"w\")\n except (OSError, IOError):\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEquals(f.tell(), 6)\n f.seek(0, 0)\n self.assertEquals(f.read(), \"abcdef\")\n log.close()", "def test_rot(self):\n\n print(\"rot()\")\n obs = self.fixture\n\n # rotation(0) = identity\n for axis in [1, 2, 3]:\n # theta = 0.0\n rotation = obs.rot(0.0, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n # theta = 2*pi\n rotation = obs.rot(2.0 * np.pi, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n\n # perform many randomized tests\n num_tests = 100\n num_products = 10\n for _test_counter in range(num_tests):\n thetas = []\n axes = []\n base = np.eye(3)\n # we will multiply a series of rotations into \"base\"\n rot_all = base\n for _rot_counter in range(num_products):\n theta = np.random.uniform(2 * np.pi) # in [0,2 pi]\n axis = np.random.randint(3) + 1 # in {1,2,3}\n axes.append(axis)\n thetas.append(theta)\n rotation = obs.rot(theta, axis)\n # multiply rot1 into the cumulative rotation\n rot_all = np.dot(rot_all, rotation)\n # now, back all the rotations out\n for _rot_counter in range(num_products):\n theta = thetas.pop()\n axis = axes.pop()\n # apply the inverse rotation\n rotation = obs.rot(-theta, axis)\n rot_all = np.dot(rot_all, rotation)\n # find || base - rot1 * rot2 ||\n diff = np.linalg.norm(base - rot_all)\n self.assertAlmostEqual(diff, 0.0, delta=1e-10 * num_products)", "def test_skip_container_false(self):\n result = utils.skip_container('sam', 'admin,bob,liz')\n\n self.assertFalse(result)", "def test_tap_config_json_validation_retry_with_invalid_config_and_then_fix(self):\n self._assert_retry_validation_of_json_file(config=self.invalid_json_file,\n properties=self.valid_json_file,\n state=self.valid_json_file)", "def test_itar_restrict_asset(self):\n pass", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def test_far_out_coordinates(rotationangle):\n\n eps = 1e-7\n\n # Get the limits\n lim = rotationangle[\"cs\"].limits()[2:]\n lim0 = max(lim)\n lim1 = min(lim)\n\n # Setting c2 and c3 to 
zero\n c3 = lim0 - eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # Setting c2 and c3 to zero\n c3 = lim1 + eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # A large value which is raises an exception\n with pytest.raises(RuntimeError):\n c3 = lim0 + eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n print(phi_dash)\n\n with pytest.raises(RuntimeError):\n c3 = lim1 - eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)", "def test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def test_arc_smear(self):", "def testModePreservation(self):\n f = open(self.path, \"w\").close()\n os.chmod(self.path, 0707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n log.rotate()\n self.assertEquals(mode, os.stat(self.path)[stat.ST_MODE])", "def test_itar_restrict_test_asset(self):\n pass", "def test_case_matrix(self, mock_config, mock_access):\n # Matrix of tests\n # config: T F\n # access\n # T X X\n # F X O\n mock_config.read_config_option.side_effect = [\n True, True, False, False\n ]\n mock_access.side_effect = [\n True, False, True, False\n ]\n\n self.assertEqual(False, upload_helpers.directory_has_readonly_conflict(\"\"))\n self.assertEqual(False, upload_helpers.directory_has_readonly_conflict(\"\"))\n self.assertEqual(False, upload_helpers.directory_has_readonly_conflict(\"\"))\n self.assertEqual(True, upload_helpers.directory_has_readonly_conflict(\"\"))", "def skip_test(n):\n return k > 0 and magic * n * k**0.5 >= t4_ref", "def verify_legal_rotation(self, direction):\n test_figure = None\n if direction == \"CW\":\n test_figure = self.get_block_positions(self.active_piece.get_cw_rotation())\n elif direction == \"CCW\":\n test_figure = self.get_block_positions(self.active_piece.get_ccw_rotation())\n\n for b_x, b_y in test_figure:\n if b_x < 0 or b_x >= self.WIDTH:\n return False\n\n if b_y < 0 or b_y >= self.HEIGHT:\n return False\n\n if self.board[b_y][b_x] != 0:\n return False\n return True", "def test_skip_container_true(self):\n result = utils.skip_container('bob', 'admin,bob,liz')\n\n self.assertTrue(result)", "def test_tap_config_raise_exception_if_invalid_config_yet_after_retries(self):\n self._assert_raise_exception_on_invalid_file_content(\n test_case_invalid='config',\n invalid_file_contents=('', ' ', 'foo', '{\"foo\": 1')\n )", "def test_tap_config_raise_exception_if_invalid_properties_yet_after_retries(self):\n self._assert_raise_exception_on_invalid_file_content(\n test_case_invalid='properties',\n invalid_file_contents=('', ' ', 'foo', '{\"foo\": 1')\n )", "def _check_rotated_filename_candidates(self):\n # savelog(8)\n candidate = \"%s.0\" % self.filename\n if (exists(candidate) and exists(\"%s.1.gz\" % self.filename) and\n (stat(candidate).st_mtime > stat(\"%s.1.gz\" % self.filename).st_mtime)):\n return candidate\n\n # logrotate(8)\n # with delaycompress\n candidate = \"%s.1\" % self.filename\n if exists(candidate):\n return candidate\n\n # without delaycompress\n 
candidate = \"%s.1.gz\" % self.filename\n if exists(candidate):\n return candidate\n\n rotated_filename_patterns = (\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\",\n # for TimedRotatingFileHandler\n \".[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\",\n )\n for rotated_filename_pattern in rotated_filename_patterns:\n candidates = glob.glob(self.filename + rotated_filename_pattern)\n if candidates:\n candidates.sort()\n return candidates[-1] # return most recent\n\n # no match\n return None", "def test__create_excl_file_2(self):\n rsync = RsyncMethod(self.settings, self.meta, self.log, self.comms, False)\n rsync.settings.set('debug-level', 1)\n rsync.exclude_file = os.path.join(os.environ['HOME'],\"temp/myocp_excl\")\n rsync._create_exclude_file()\n self.assertTrue(os.path.exists(rsync.exclude_file))\n self.assertEqual(self.log.getVal('info').split('|')[0], 'Settings file loaded.')\n self.assertEqual(self.log.getVal('info').split('|')[1], 'Settings file verified.')\n #self.assertEqual(self.log.getVal('info').split('|')[2], 'rsync exclusions file created at %s/temp/myocp_excl' % os.environ['HOME'])\n with open(rsync.exclude_file, 'r') as fp:\n self.assertEqual(fp.read(), \".a\\n.b\\nc\\nd\")\n os.unlink(rsync.exclude_file)\n self.assertEqual(self.log.getVal('debug').split('|')[0], \"_create_exclude_file() - EXCLUDES = \\n['.a', '.b', 'c', 'd']\")\n self.assertEqual(self.log.getVal('debug').split('|')[1], '_create_exclude_file() - %s/temp/myocp_excl exists True' % os.environ['HOME'])", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_patch_none():", "def test_can_info_does_not_exist(self):\n fake_user = User(username='Fake', password='')\n self.assertFalse(send_rotate_to_can(fake_user, self.BIN_NUM))", "def test_tap_config_json_validation_retry_with_invalid_properties_and_then_fix(self):\n self._assert_retry_validation_of_json_file(config=self.valid_json_file,\n properties=self.invalid_json_file,\n state=self.valid_json_file)", "def _retry_occurred(self):", "def rotate_in_place(angle):\n action = easy_cozmo._robot.turn_in_place(degrees(-1*angle),speed=degrees(df_rotate_speed))\n try:\n action.wait_for_completed()\n if action.has_succeeded:\n return True\n else:\n code, reason = action.failure_reason\n result = action.result\n print(\"WARNING RotateInPlace: code=%s reason='%s' result=%s\" % (code, reason, result))\n say_error(\"I couldn't rotate, sorry\")\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"I can't rotate, sorry\")\n try:\n while action.is_running:\n action.abort()\n 
time.sleep(.5)\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"Wheels faulty\")\n\n return False", "def test__remove_excl_file_2(self):\n rsync = RsyncMethod(self.settings, self.meta, self.log, self.comms, False)\n self.assertEqual(rsync.exclude_file, os.path.join(os.environ['HOME'],\"test_myocp\",\"myocp_excl\"))\n rsync.exclude_file = os.path.join(os.environ['HOME'],\"temp/myocp_excl\")\n with open(rsync.exclude_file, 'w') as fp:\n fp.write('{}')\n rsync._remove_exclude_file()\n self.assertFalse(os.path.exists(rsync.exclude_file))\n self.assertEqual(self.log.getVal('info').split('|')[0], 'Settings file loaded.')\n self.assertEqual(self.log.getVal('info').split('|')[1], 'Settings file verified.')\n #self.assertEqual(self.log.getVal('info').split('|')[2], 'rsync exclusions file removed.')", "def skip_or_run_auth_type_test_call(self):\n\n return skip_or_run_test_tarantool_call(self, '2.11.0',\n 'does not support auth type')", "def test_verification_failed(self):\n pass", "def validate_orientation():\r\n ui.click_and_WAIT_for_item_with_retries('/tray/', 'Settings', True)\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Settings', role='button')\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Displays', role='link')\r\n time.sleep(WAIT)", "def rotate(self, *args, **kwargs): # real signature unknown\n pass", "def test_overwrite_corrupted_files(overwrite_on_tape_topology, core_config_mock, caches_mock):\n rse1_id, rse2_id, rse3_id, did1, did2 = overwrite_on_tape_topology(did1_corrupted=True, did2_corrupted=True)\n all_rses = [rse1_id, rse2_id, rse3_id]\n\n class _FTSWrapper(FTSWrapper):\n @staticmethod\n def on_receive(job_params):\n for job in (job_params if isinstance(job_params, list) else [job_params]):\n for file in job.get('files', []):\n if (file.get('file_metadata', {}).get('dst_type') == 'TAPE'\n and file.get('file_metadata', {}).get('dst_file', {}).get('file_on_tape') is not None):\n # Fake that dst_file metadata contains file_on_tape == True\n # As we don't really have tape RSEs in our tests, file_on_tape is always false\n file['file_metadata']['dst_file']['file_on_tape'] = True\n return job_params\n\n with patch('rucio.daemons.conveyor.poller.FTS3Transfertool', _FTSWrapper):\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Both transfers must be marked as failed because the file size is incorrect\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit the failed requests. 
They must fail again, because overwrite_corrupted_files is False\n # 2 runs: for multihop, finisher works one hop at a time\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Set overwrite to True before running the poller or finisher\n core_config.set('transfers', 'overwrite_corrupted_files', True)\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit one more time. Now the destination file must be overwritten\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'", "def test_stickers(self):\n rotation, _ = list(self.cube.scramble_cube(15))\n\n unique, counts = np.unique(rotation, return_counts=True)\n dictionary = dict(zip(unique, counts))\n\n self.assertEqual(all(value == 4 for value in dictionary.values()), True)", "def test__create_excl_file_1(self):\n rsync = RsyncMethod(self.settings, self.meta, self.log, self.comms, False)\n rsync.exclude_file = os.path.join(os.environ['HOME'],\"temp/myocp_excl\")\n rsync._create_exclude_file()\n self.assertTrue(os.path.exists(rsync.exclude_file))\n self.assertEqual(self.log.getVal('info').split('|')[0], 'Settings file loaded.')\n self.assertEqual(self.log.getVal('info').split('|')[1], 'Settings file verified.')\n #self.assertEqual(self.log.getVal('info').split('|')[2], 'rsync exclusions file created at %s/temp/myocp_excl' % os.environ['HOME'])\n with open(rsync.exclude_file, 'r') as fp:\n self.assertEqual(fp.read(), \".a\\n.b\\nc\\nd\")\n os.unlink(rsync.exclude_file)", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def test_restart(self):\n restart_path = os.path.join(arc_path, 'arc', 'testing', 'restart(H,H2O2,N2H3,CH3CO2).yml')\n project = 'arc_project_for_testing_delete_after_usage2'\n project_directory = os.path.join(arc_path, 'Projects', project)\n arc1 = ARC(project=project, input_dict=restart_path, 
project_directory=project_directory)\n arc1.execute()\n\n with open(os.path.join(project_directory, 'output', 'thermo.info'), 'r') as f:\n thermo_sft_ccsdtf12_bac = False\n for line in f.readlines():\n if 'thermo_DFT_CCSDTF12_BAC' in line:\n thermo_sft_ccsdtf12_bac = True\n break\n self.assertTrue(thermo_sft_ccsdtf12_bac)\n\n with open(os.path.join(project_directory, 'arc_project_for_testing_delete_after_usage2.info'), 'r') as f:\n sts, n2h3, oet, lot, ap = False, False, False, False, False\n for line in f.readlines():\n if 'Considered the following species and TSs:' in line:\n sts = True\n elif 'Species N2H3' in line:\n n2h3 = True\n elif 'Overall time since project initiation:' in line:\n oet = True\n elif 'Levels of theory used:' in line:\n lot = True\n elif 'ARC project arc_project_for_testing_delete_after_usage2' in line:\n ap = True\n self.assertTrue(sts)\n self.assertTrue(n2h3)\n self.assertTrue(oet)\n self.assertTrue(lot)\n self.assertTrue(ap)\n\n with open(os.path.join(project_directory, 'arc.log'), 'r') as f:\n aei, ver, git, spc, rtm, ldb, therm, src, ter =\\\n False, False, False, False, False, False, False, False, False\n for line in f.readlines():\n if 'ARC execution initiated on' in line:\n aei = True\n elif '# Version:' in line:\n ver = True\n elif 'The current git HEAD for ARC is:' in line:\n git = True\n elif 'Considering species: CH3CO2_rad' in line:\n spc = True\n elif 'All jobs for species N2H3 successfully converged. Run time' in line:\n rtm = True\n elif 'Loading the RMG database...' in line:\n ldb = True\n elif 'Thermodynamics for H2O2' in line:\n therm = True\n elif 'Sources of thermoproperties determined by RMG for the parity plots:' in line:\n src = True\n elif 'ARC execution terminated on' in line:\n ter = True\n self.assertTrue(aei)\n self.assertTrue(ver)\n self.assertTrue(git)\n self.assertTrue(spc)\n self.assertTrue(rtm)\n self.assertTrue(ldb)\n self.assertTrue(therm)\n self.assertTrue(src)\n self.assertTrue(ter)\n\n self.assertTrue(os.path.isfile(os.path.join(project_directory, 'output', 'thermo_parity_plots.pdf')))\n\n with open(os.path.join(project_directory, 'output', 'Species', 'H2O2', 'species_dictionary.txt'), 'r') as f:\n lines = f.readlines()\n adj_list = ''\n for line in lines:\n if 'H2O2' not in line:\n adj_list += line\n if line == '\\n':\n break\n mol1 = Molecule().fromAdjacencyList(str(adj_list))\n self.assertEqual(mol1.toSMILES(), str('OO'))\n\n thermo_library_path = os.path.join(project_directory, 'output', 'RMG libraries', 'thermo',\n 'arc_project_for_testing_delete_after_usage2.py')\n new_thermo_library_path = os.path.join(settings['database.directory'], 'thermo', 'libraries',\n 'arc_project_for_testing_delete_after_usage2.py')\n # copy the generated library to RMG-database\n shutil.copyfile(thermo_library_path, new_thermo_library_path)\n db = RMGDatabase()\n db.load(\n path=settings['database.directory'],\n thermoLibraries=[str('arc_project_for_testing_delete_after_usage2')],\n transportLibraries=[],\n reactionLibraries=[],\n seedMechanisms=[],\n kineticsFamilies='none',\n kineticsDepositories=[],\n statmechLibraries=None,\n depository=False,\n solvation=False,\n testing=True,\n )\n\n spc2 = Species().fromSMILES(str('CC([O])=O'))\n spc2.generate_resonance_structures()\n spc2.thermo = db.thermo.getThermoData(spc2)\n self.assertAlmostEqual(spc2.getEnthalpy(298), -178003.44650359568, 1)\n self.assertAlmostEqual(spc2.getEntropy(298), 283.5983103176096, 1)\n self.assertAlmostEqual(spc2.getHeatCapacity(1000), 118.99753808225603, 1)\n 
self.assertTrue('arc_project_for_testing_delete_after_usage2' in spc2.thermo.comment)\n\n # delete the generated library from RMG-database\n os.remove(new_thermo_library_path)", "def testPluginUnexpectedError(self):\n self.config.plugins[self.algName].flux0 = 0.0 # this causes a divide by zero\n schema = self.dataset.makeMinimalSchema()\n task = lsst.meas.base.SingleFrameMeasurementTask(schema=schema, config=self.config)\n exposure, cat = self.dataset.realize(noise=100.0, schema=schema, randomSeed=1)\n task.log.setLevel(task.log.FATAL)\n task.run(cat, exposure)\n source = cat[0]\n self.assertTrue(source.get(self.algName + \"_flag\"))\n self.assertFalse(source.get(self.algName + \"_flag_containsNan\"))\n self.assertFalse(source.get(self.algName + \"_flag_edge\"))", "def fix_rotation(self):\n self.rotate(self.rotation)\n self.annotations.rotate(self.rotation)\n self.rotation = 0", "def test_ignore():\n config = get_config(\"ignore.conf\")\n path = get_config_path(config)\n ignored_file = make_test_file(path, \"ignored.txt\")\n not_ignored_file = make_test_file(path, \"not_ignored.txt\")\n\n console.pushbroom(config)\n assert ignored_file.exists()\n assert not not_ignored_file.exists()\n\n ignored_file.unlink()\n path.rmdir()", "def testOperationsWithoutLock(self):\n self.assertRaises(RuntimeError, self._lock.Unlock)\n self.assertRaises(RuntimeError, self._lock.SetInUse, True)\n self.assertRaises(RuntimeError, self._lock.SetInUse, False)", "def test_restore_backup():", "async def test_skipped_already_silenced(self):\n subtests = (\n (\n False,\n MockTextChannel(),\n PermissionOverwrite(\n send_messages=False,\n add_reactions=False,\n create_private_threads=False,\n create_public_threads=False,\n send_messages_in_threads=False\n )\n ),\n (\n True,\n MockTextChannel(),\n PermissionOverwrite(\n send_messages=True,\n add_reactions=True,\n create_private_threads=True,\n create_public_threads=True,\n send_messages_in_threads=True\n )\n ),\n (\n True,\n MockTextChannel(),\n PermissionOverwrite(\n send_messages=False,\n add_reactions=False,\n create_private_threads=False,\n create_public_threads=False,\n send_messages_in_threads=False\n )\n ),\n (False, MockVoiceChannel(), PermissionOverwrite(connect=False, speak=False)),\n (True, MockVoiceChannel(), PermissionOverwrite(connect=True, speak=True)),\n (True, MockVoiceChannel(), PermissionOverwrite(connect=False, speak=False)),\n )\n\n for contains, channel, overwrite in subtests:\n with self.subTest(contains=contains, is_text=isinstance(channel, MockTextChannel), overwrite=overwrite):\n self.cog.scheduler.__contains__.return_value = contains\n channel.overwrites_for.return_value = overwrite\n\n self.assertFalse(await self.cog._set_silence_overwrites(channel))\n channel.set_permissions.assert_not_called()", "def test_ignore(self):\n parser = hhsuite.FastaParser(ignore={\"foo\"})\n results = parser.run(self.pipeline)\n self.assertNotIn(\"foo\", results[\"templates\"][1][\"sequence_alignments\"])", "def verify_no_snapshot_reingestion(c: Composition) -> None:\n c.run(\"testdrive\", \"wait-for-snapshot.td\", \"postgres-disable-select-permission.td\")\n\n restart_mz(c)\n\n c.run(\n \"testdrive\",\n \"delete-rows-t1.td\",\n \"delete-rows-t2.td\",\n \"alter-table.td\",\n \"alter-mz.td\",\n )", "def test_ignores(self, tmpdir):\n from pytest_flake8 import Ignorer\n ignores = [\"E203\", \"b/?.py E204 W205\", \"z.py ALL\", \"*.py E300\"]\n ign = Ignorer(ignores)\n assert ign(tmpdir.join(\"a/b/x.py\")) == \"E203 E204 W205 E300\".split()\n assert 
ign(tmpdir.join(\"a/y.py\")) == \"E203 E300\".split()\n assert ign(tmpdir.join(\"a/z.py\")) is None", "def test_extract_rot_angle():\n v = np.zeros((4,2))\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Zero velocities not allowed.\"\n \n v[:,1] = 1.\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Failed to get both forward and backward directions.\"\n\n # Forwards-backwards motion.\n v[:,1] = 0.\n v[:2,0] = -1.1\n v[2:,0] = 1.2\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,np.pi)\n\n # Forwards-backwards motion.\n v[:,0] = 0.\n v[:2,1] = -.9\n v[2:,1] = .8\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2)\n\n # Forwards-backwards motion with noise.\n v[:2,1] += (np.random.rand(2)*2-1)/10\n v[2:,1] += (np.random.rand(2)*2-1)/10\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2,atol=.1)", "def testNoHandbrakeOptionError(self):\n\n # Build out custom ini file\n self.sup2Sub = \"/usr/apps/bin/mkvTools/BDSup2Sub.jar\"\n self.handBrake = \"\"\n self.java = \"/usr/apps/Java/jre7/bin/java\"\n self.mkvExtract = \"/usr/apps/bin/mkvTools/mkvextract\"\n self.mkvMerge = \"/usr/apps/bin/mkvTools/mkvMerge\"\n\n # Get our formatted ini file\n self.configFile = _fillConfig(self, bare=True)\n\n # Build our config\n with tempfile.NamedTemporaryFile(mode='r+b') as f:\n f.write(self.configFile)\n # Calling readlines on the temp file. Without this Config fails to\n # read it. I have no idea why.\n f.readlines()\n self.assertRaises(\n ValueError,\n tools.Config,\n f.name\n )", "def test_tap_config_json_validation_retry_with_invalid_state_and_then_fix(self):\n self._assert_retry_validation_of_json_file(config=self.valid_json_file,\n properties=self.valid_json_file,\n state=self.invalid_json_file)", "def test_load_dangling(self):\n with NamedTemporaryFile(suffix=\".yaml\") as config:\n with open(config.name, \"w\") as write_stream:\n write_stream.write(\n \"\"\"\n pipeline:\n - !LinearController\n low_utilisation: 0.9\n high_utilisation: 1.1\n - !MockPool\n random_things:\n foo: bar\n \"\"\"\n )\n with pytest.raises(ConfigurationError):\n with load(config.name):\n assert False", "def test_user_is_none(self):\n self.assertFalse(send_rotate_to_can(None, self.BIN_NUM))", "def rotates(self, maze, game_display):\n if self.lidars[0].get_sense() <= self.lidars[0].radius // 3:\n if uniform(0, 1) > 0.7:\n self.rotate_right(angle=45, maze=maze, game_display=game_display)\n else:\n self.rotate_left(angle=45, maze=maze, game_display=game_display)\n # fix to left.\n if self.lidars[1].get_sense() <= 2 * self.lidars[1].radius // 3:\n self.rotate_left(angle=10, maze=maze, game_display=game_display)\n # fix to right.\n if self.lidars[2].get_sense() <= 2 * self.lidars[0].radius // 3:\n self.rotate_right(angle=10, maze=maze, game_display=game_display)", "def testUploadWrapperCorruption(self):\n # Check that small reads still work.\n encrypted_data = \"\"\n count = 0\n with self.assertRaisesRegexp(IOError, \"HMAC not verified\"):\n while 1:\n small_read = self.encrypt_wrapper.read(2)\n if not small_read:\n break\n encrypted_data += small_read\n count += len(small_read)\n\n # Corrupt the data a little bit.\n if count == 3000:\n small_read = \"XX\"\n\n self.decrypt_wrapper.write(small_read)", "def test_untar(self):", "def testIgnoredError(self):\n cmds = \"\"\"-chown 0 missingFile\npwd\nexit\n\"\"\"\n def _cbCheckResult(res):\n 
self.assertIn(self.testDir.asBytesMode().path, res)\n\n d = self._getBatchOutput(cmds)\n d.addCallback(_cbCheckResult)\n return d", "def test_call_bad_perms(self):\r\n self.assertRaises(ValueError, self.single_dms, -1)", "def test_match_with_ignore():\n config = get_config(\"match_and_ignore.conf\")\n path = get_config_path(config)\n ignored_file = make_test_file(path, \"ignored.txt\")\n matched_file = make_test_file(path, \"matched.txt\")\n\n console.pushbroom(config)\n assert ignored_file.exists()\n assert not matched_file.exists()\n\n ignored_file.unlink()\n path.rmdir()", "def test_ignore_edition(self):\n self.assertFalse(ignore_edition({\"isbn_13\": \"hi\"}))\n self.assertFalse(ignore_edition({\"oclc_numbers\": \"hi\"}))\n self.assertFalse(ignore_edition({\"covers\": \"hi\"}))\n self.assertFalse(ignore_edition({\"languages\": \"languages/fr\"}))\n self.assertTrue(ignore_edition({\"languages\": \"languages/eng\"}))\n self.assertTrue(ignore_edition({\"format\": \"paperback\"}))", "def test_ignore_readonly1(tmpdir):\n os.makedirs('%s/foo/bar/baz' % tmpdir)\n os.chmod('%s/foo/bar/baz' % tmpdir, 0o500)\n os.chmod('%s/foo/bar' % tmpdir, 0o500)\n os.chmod('%s/foo' % tmpdir, 0o500)\n\n with pytest.raises(PermissionError):\n sh.remove('%s/foo' % tmpdir, recursive=True)\n assert os.path.exists('%s/foo/bar/baz' % tmpdir)\n\n sh.remove('%s/foo' % tmpdir, force=False, recursive=True,\n ignore_readonly=True)\n assert not os.path.exists('%s/foo' % tmpdir)", "def test_areas_locked_ok(self):", "async def test_calibrate_gripper_does_not_save_during_error(\n decoy: Decoy, ot3_hardware_api: OT3API\n) -> None:\n subject = CalibrateGripperImplementation(hardware_api=ot3_hardware_api)\n\n params = CalibrateGripperParams(\n jaw=CalibrateGripperParamsJaw.REAR,\n otherJawOffset=Vec3f(x=4.4, y=5.5, z=6.6),\n )\n\n decoy.when(\n await ot3_calibration.calibrate_gripper_jaw(\n ot3_hardware_api, probe=GripperProbe.REAR\n )\n ).then_raise(EarlyCapacitiveSenseTrigger(5.0, 3.0))\n\n with pytest.raises(EarlyCapacitiveSenseTrigger):\n await subject.execute(params)\n\n decoy.verify(\n await ot3_hardware_api.save_instrument_offset(\n mount=OT3Mount.LEFT, delta=Point(x=3, y=4, z=6)\n ),\n times=0,\n )" ]
[ "0.6419533", "0.63577354", "0.59528947", "0.5869784", "0.58633524", "0.5819878", "0.57872254", "0.5771617", "0.5742969", "0.57247704", "0.57068", "0.5701424", "0.5659168", "0.555079", "0.5517062", "0.5510512", "0.54531544", "0.5388795", "0.53871274", "0.5342193", "0.53315157", "0.5328349", "0.53257006", "0.529093", "0.5270186", "0.5264653", "0.5263827", "0.52629167", "0.52581465", "0.52521545", "0.52507734", "0.5238353", "0.5201579", "0.51996696", "0.5176383", "0.5176222", "0.5171326", "0.51566875", "0.51542634", "0.5144172", "0.5143269", "0.5135804", "0.5130116", "0.5110944", "0.5106586", "0.5105034", "0.5088598", "0.5084928", "0.5072968", "0.5050424", "0.5047307", "0.5044044", "0.5042949", "0.5039295", "0.50306225", "0.5028335", "0.50210875", "0.5005039", "0.5002595", "0.50004077", "0.49942714", "0.49921387", "0.4988186", "0.4981287", "0.49792475", "0.49785763", "0.49782565", "0.4975651", "0.4975035", "0.49685228", "0.49519044", "0.49515963", "0.49509043", "0.49480215", "0.49421135", "0.49331594", "0.4927554", "0.49245256", "0.49242902", "0.4922577", "0.49070448", "0.49069664", "0.4900543", "0.4900449", "0.489981", "0.48988903", "0.48986295", "0.4897274", "0.4885346", "0.4883456", "0.4881983", "0.48748717", "0.4873153", "0.4871724", "0.4871547", "0.48690683", "0.4864289", "0.48613042", "0.48601675", "0.48595846", "0.48590082" ]
0.0
-1
Tests of trying rotation with min_size in configuration
def test_process_log_with_min_size_in_configuration(self):
    with tempfile.TemporaryDirectory() as sandbox:
        with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:
            srcfile = Path(sandbox, 'pokus.log')
            srcfile.touch()
            destfile = Path(sandbox, 'backup', 'pokus.log')
            compressors = process_log(
                datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),
                {'target': '{{path}}/backup/{{name}}.{{ext}}',
                 'interval': 'hourly',
                 'min_size': 15},
                'hourly',
                str(srcfile),
                10
            )
            self.assertEqual(compressors, [])
            self.assertTrue(srcfile.exists())
            self.assertFalse(destfile.exists())
            self.assertEqual(fake_stdout.getvalue(),
                             'Checking "{src}"... rotation not needed.\n'.format(src=srcfile))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_small_x_rot(self):\n\n # Create a Matrix representing 90 deg x rot.\n mat = Matrix44.from_rot_x(0.001)\n # Use from_matrix44()\n quat = Quat.from_matrix44(mat)\n\n # Ensure the quat matches the small degree x rotation.\n expected = Quat.from_axis_angle_deg(Vec3(1, 0, 0), 0.001)\n self.assertAlmostEqual(quat.x, expected.x)\n self.assertAlmostEqual(quat.y, expected.y)\n self.assertAlmostEqual(quat.z, expected.z)\n self.assertAlmostEqual(quat.w, expected.w)", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def test_size_too_small(self):\n min_size = min(settings.MISAGO_AVATARS_SIZES)\n too_small = min_size / 2\n\n self.assertEqual(clean_size(too_small), min_size)", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_minvar_rotation(self):\n vrot, v, w = minvar(self.rdata)\n # Determinant of rotation matrix should be = 1\n self.assertTrue((np.linalg.det(v) - 1) < self.tol)", "def test_plate_size_error():\n \n test_object = fa.read_in_envision(data_csv=plate_1, platemap_csv=plate_map_file, data_type='plate', size=100)", "def test_mid_sizes(self):\n for size in settings.MISAGO_AVATARS_SIZES:\n self.assertEqual(clean_size(size - 1), size)", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def checkThumbSize(isz, tsz, desired):\n\n # tolerate 2% error\n try:\n if abs(float(isz[0]) / isz[1] - float(tsz[0]) / tsz[1]) > 0.02:\n return 0 # aspect has changed, or isz rotated\n except:\n return 0\n return abs(desired - tsz[0]) <= 1 or abs(desired - tsz[1]) <= 1", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def 
test_normal_case():\n assert(minDeletionSize(['abc', 'bce', 'cae']) == 1)", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def test_default_parameters(self):\n\n # assert self.test_shape.rotation_angle == 360\n assert self.test_shape.start_angle == 0", "def test_single_resize_ok(self):\n\n to_resize = base_path +'/test_data/rendering_tests/just_resize/original/good.jpg'\n to_output = base_path +'/test_data/rendering_tests/just_resize/results/'\n\n\n rb.resize_and_crop(to_resize, to_output+\"good.jpg\", 300,300 )\n self.assertEqual(1, len(os.listdir(to_output)))", "def test_call_alt_min_aligned_length(self):\r\n # first 12 bases match perfect, and no alignment from there\r\n seqs = [('s1', 'TGCAGCTTGAGCGTTGTTACCGCTTT')]\r\n ref_seqs = [\r\n ('r1', 'TGCAGCTTGAGCCACGCCGAATAGCCGAGTTTGACCGGGCCCAGGAGGAGAGAGAGAGCTTC')]\r\n\r\n fd, seqs_fp = mkstemp(prefix='BlastOtuPickerTest_', suffix='.fasta')\r\n close(fd)\r\n fd, reference_seqs_fp = mkstemp(prefix='BlastOtuPickerTest_',\r\n suffix='.fasta')\r\n close(fd)\r\n\r\n f = open(seqs_fp, 'w')\r\n f.write('\\n'.join(['>%s\\n%s' % s for s in seqs]))\r\n f.close()\r\n\r\n f = open(reference_seqs_fp, 'w')\r\n f.write('\\n'.join(['>%s\\n%s' % s for s in ref_seqs]))\r\n f.close()\r\n\r\n self._files_to_remove.append(seqs_fp)\r\n self._files_to_remove.append(reference_seqs_fp)\r\n\r\n # with low min_aligned_percent s1 matches r1\r\n otu_picker = BlastOtuPicker({'max_e_value': 1e-3,\r\n 'min_aligned_percent': 0.10})\r\n expected = {'r1': ['s1']}\r\n actual = otu_picker(seqs_fp,\r\n refseqs_fp=reference_seqs_fp)\r\n self.assertEqual(actual, expected)\r\n\r\n # with min_aligned_percent s1 doesn't match r1\r\n otu_picker = BlastOtuPicker({'max_e_value': 1e-3,\r\n 'min_aligned_percent': 0.50})\r\n expected = {}\r\n actual = otu_picker(seqs_fp,\r\n refseqs_fp=reference_seqs_fp)\r\n self.assertEqual(actual, expected)", "def test_transform_image_resize_and_crop_landscape(self):\n self.expect_open_image('SomeBlobKey', (1200, 1600))\n self.expect_crop(top_y=0.0, bottom_y=0.75)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def correct_size():\n check50.run(\"./inheritance_test\").stdout(\"size_true.*\").exit(0)", "def test_batch_size_pack_size():", "def minsize(self):# -> int:\r\n return 0", "def test_full_resize(self):\n number_of_pixels = 300\n destination = base_path +'/test_data/rendering_tests/resized_images/'\n source_folder = base_path + '/test_data/rendering_tests/filter_database/'\n\n\n for the_file in os.listdir(destination):\n file_path = os.path.join(destination, the_file)\n if os.path.isfile(file_path):\n os.unlink(file_path)\n\n\n self.assertEqual(0, len(os.listdir(destination)))\n rb.find_all_files(number_of_pixels,source_folder, destination)\n self.assertEqual(6, len(os.listdir(destination)))\n for the_file in os.listdir(destination):\n file_path = os.path.join(destination,the_file)\n with Image.open(file_path) as f:\n self.assertNotEqual(number_of_pixels+5, f.size[0])\n self.assertNotEqual(number_of_pixels+5, f.size[1])\n # the above checks that the size does not vary as needed\n # probably not necessary\n self.assertEqual(number_of_pixels, f.size[0])\n self.assertEqual(number_of_pixels, f.size[1])", 
"def test_too_small_remap(self):\n ethanol = Molecule.from_file(get_data_file_path(\"molecules/ethanol.sdf\"))\n # catch mappings that are the wrong size\n too_small_mapping = {0: 1}\n with pytest.raises(ValueError):\n new_ethanol = ethanol.remap(too_small_mapping, current_to_new=True)", "def compare_fixed_size(algos,max_disc,simulName,samples=30,size=512,tries=3,min_disc=1,verbose=False,lam=20,h=8):\n\n\t#vector of sizes\n\tS=np.linspace(min_disc,max_disc,samples).astype(int)\n\tnum_algos=len(algos)\n\tR=np.zeros((samples,num_algos+1))\n\tfactory=SignalFactory(size,6) # to be changed\n\t#to do remove sigB and sigR\n\tfor (i,dis) in enumerate(S):\n\t\tprint(\"recovering with\",dis,\" discontinuities for size= \",size)\n\t\t#ini,sig=factory.uniformCase(parts=dis);\n\t\tini,sig=factory.normalShapeCase(parts=6)\n\t\tR[i,0]=dis;\n\t\tfor (j,algo) in enumerate(algos):\n\t\t\tR[i,j+1]=timing_recover(algo,sig,lam,h,tries)\n\n\t#saving the txt\n\tnp.savetxt(simulName,R,fmt=\"%.18e\",delimiter=',',header=\",\".join(['S']+algos),comments=\"\")", "def resize_invalid_genes_test(self):\n pass", "def test_invalid_hparams(mode: str, initial_scale: float, finetune_fraction: float, delay_fraction: float,\n size_increment: int):\n with pytest.raises(ValueError):\n ProgressiveResizing(mode, initial_scale, finetune_fraction, delay_fraction, size_increment, False)", "def _validate_shuffle_split_init(test_size, train_size):\n if test_size is None and train_size is None:\n raise ValueError('test_size and train_size can not both be None')\n\n if test_size is not None:\n if np.asarray(test_size).dtype.kind == 'f':\n if test_size >= 1.:\n raise ValueError(\n 'test_size=%f should be smaller '\n 'than 1.0 or be an integer' % test_size)\n elif np.asarray(test_size).dtype.kind != 'i':\n raise ValueError('Invalid value for test_size: %r' % test_size)\n\n if train_size is not None:\n if np.asarray(train_size).dtype.kind == 'f':\n if train_size >= 1.:\n raise ValueError(\n 'train_size=%f should be smaller '\n 'than 1.0 or be an integer' % test_size)\n elif (np.asarray(test_size).dtype.kind == 'f' and\n (train_size + test_size) > 1.):\n raise ValueError('The sum of test_size and train_size = %f'\n 'should be smaller than 1.0. Reduce test_size '\n 'and/or train_size.' 
% (train_size + test_size))\n elif np.asarray(train_size).dtype.kind != 'i':\n raise ValueError('Invalid value for train_size: %r' % train_size)", "def check_random_resize_crop(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [size, scale, ratio, interpolation, max_attempts], _ = parse_user_args(method, *args, **kwargs)\n if interpolation is not None:\n type_check(interpolation, (Inter,), \"interpolation\")\n check_size_scale_ration_max_attempts_paras(size, scale, ratio, max_attempts)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")", "def _test_img_resize(self, backend):\n # Check quality setting unaffected by new parameter\n im_95_qual = backend.resize(\n 225,\n self.IMG_225x225,\n quality=95,\n max_filesize=0,\n )\n # check valid path returned - max_filesize hasn't broken resize command\n self.assertExists(im_95_qual)\n\n # Attempt a lower filesize with same quality\n im_a = backend.resize(\n 225,\n self.IMG_225x225,\n quality=95,\n max_filesize=0.9 * os.stat(syspath(im_95_qual)).st_size,\n )\n self.assertExists(im_a)\n # target size was achieved\n self.assertLess(os.stat(syspath(im_a)).st_size,\n os.stat(syspath(im_95_qual)).st_size)\n\n # Attempt with lower initial quality\n im_75_qual = backend.resize(\n 225,\n self.IMG_225x225,\n quality=75,\n max_filesize=0,\n )\n self.assertExists(im_75_qual)\n\n im_b = backend.resize(\n 225,\n self.IMG_225x225,\n quality=95,\n max_filesize=0.9 * os.stat(syspath(im_75_qual)).st_size,\n )\n self.assertExists(im_b)\n # Check high (initial) quality still gives a smaller filesize\n self.assertLess(os.stat(syspath(im_b)).st_size,\n os.stat(syspath(im_75_qual)).st_size)", "def verify_size_content(self, re_size):\n to_alternate = 0\n if re_size['chunck'] < re_size['size']:\n to_alternate = re_size['chunck']\n re_size['chunck'] = re_size['size']\n re_size['size'] = to_alternate\n return re_size", "def test_valid_sizes(self):\n for size in settings.MISAGO_AVATARS_SIZES:\n self.assertEqual(clean_size(size), size)", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_lengths(self):\n self.assertEqual(size(attempt.Z), 201)\n self.assertEqual(size(attempt.W), 201)", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_FlexCrop1(self):", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def test_constructed_is_small(self):\n self.assertTrue(all(elt<10 for elt in goodwinsheaf.checkradii()))#check all entries have small radii", "def test_modis_resize(self):\n modis_order = {'mod09a1': 
{'inputs': 'mod09a1.a2000072.h02v09.005.2008237032813',\n 'products': ['l1']},\n 'resampling_method': 'cc',\n 'resize': {'pixel_size': 30,\n 'pixel_size_units': 'meters'},\n 'format': 'gtiff'}\n\n exc = 'pixel count value is greater than maximum size of'\n\n try:\n api.validation(modis_order, self.staffuser.username)\n except Exception as e:\n assert(exc in str(e))\n else:\n self.fail('Failed MODIS pixel resize test')", "def _check_rotation(spec_nest, path):\n spec = _get_from_nest(spec_nest, path)\n if spec is not None and not isinstance(spec, primitives_pb2.RotationType):\n raise InvalidSpecError(\n f'{\"/\".join(path)} was expected to be of type Rotation, but is instead '\n f'{type(spec)}')", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360\n assert self.test_shape.extrude_both", "def DEFAULT_MAX_ROTATION(self): # real signature unknown; restored from __doc__\n pass", "def _shrinkCheck(self):\n if self.size > self.INIT_CAPACITY and self.size / self.capacity <= 0.25:\n self._shrink()", "def test_correct_image_size(location):\n chunkloc = resave_to_chunks(root=location[\"dir\"],\n n_imgs=10,\n output_stem=location[\"stem\"])\n\n loaded = np.load(chunkloc)\n assert len(loaded.files) > 0\n\n first = loaded[loaded.files[0]]\n assert first.shape != ()\n assert first.shape == (520, 696)", "def test_AVBCalcMinPartitionSize_LinearFooterSize(self):\n for image_size in self._image_sizes:\n for ratio in 0.95, 0.56, 0.22:\n expected_size = common.RoundUpTo4K(int(math.ceil(image_size / ratio)))\n self.assertEqual(\n expected_size,\n AVBCalcMinPartitionSize(\n image_size, lambda x, ratio=ratio: int(x * ratio)))", "def test_stickers(self):\n rotation, _ = list(self.cube.scramble_cube(15))\n\n unique, counts = np.unique(rotation, return_counts=True)\n dictionary = dict(zip(unique, counts))\n\n self.assertEqual(all(value == 4 for value in dictionary.values()), True)", "def rot_mosaic(source_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Processing/',\r\n output_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Final/',\r\n file_pattern='IID201905*.jpg', sub_dir=False, k=1, replace=False): \r\n \r\n \r\n if sub_dir:\r\n mosaics = []\r\n for root, dirnames, filenames in os.walk(source_dir):\r\n for filename in fnmatch.filter(filenames, file_pattern):\r\n mosaics.append(os.path.join(root, filename))\r\n else:\r\n mosaics = glob.glob(source_dir + file_pattern) \r\n \r\n g = 0\r\n r = 0\r\n s = 0\r\n for m in mosaics:\r\n f = output_dir + os.path.basename(m)\r\n if not os.path.exists(f):\r\n img = improc.imops.imio.imread(m)\r\n img = np.rot90(img, k=k) \r\n improc.imops.imio.imsave(f, img)\r\n print('generated: %s' % f)\r\n print('')\r\n g+=1\r\n elif replace:\r\n img = improc.imops.imio.imread(m)\r\n img = np.rot90(img, k=k)\r\n improc.imops.imio.imsave(f, img)\r\n print('replaced: %s' % f)\r\n print('')\r\n r+=1\r\n else:\r\n print('skipping: %s' % m)\r\n print('')\r\n s+=1\r\n\r\n print('generated total of %i files' % g)\r\n print('replaced total of %i files' % r)\r\n print('skipped total of %i files' % s)", "def test_single_resize_er(self):\n to_resize = base_path + '/test_data/rendering_tests/just_resize/original/faulty.jpg'\n to_output = base_path + '/test_data/rendering_tests/just_resize/results/'\n\n for the_file in os.listdir(to_output):\n file_path = os.path.join(to_output, the_file)\n if os.path.isfile(file_path):\n os.unlink(file_path)\n\n\n capturedOutput = io.StringIO() # Create StringIO object\n sys.stdout = capturedOutput # and 
redirect stdout.\n rb.resize_and_crop(to_resize, to_output+\"faulty.jpg\", 300,300 )\n sys.stdout = sys.__stdout__ # Reset redirect.\n self.assertEqual(\"Image too small to be resized\\n\",capturedOutput.getvalue()) # Now works as before.", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def test_tile_read_valid():\n # Tile 7-43-24 - Full tile\n bounds = [\n -6574807.42497772,\n 12210356.646387195,\n -6261721.357121638,\n 12523442.714243278,\n ]\n with rasterio.open(COG) as src_dst:\n arr, mask = reader.part(\n src_dst, bounds, 16, 16, dst_crs=constants.WEB_MERCATOR_CRS\n )\n assert arr.shape == (1, 16, 16)\n assert mask.shape == (16, 16)\n\n # Read bounds at full resolution\n with rasterio.open(COG) as src_dst:\n arr, mask = reader.part(src_dst, bounds, dst_crs=constants.WEB_MERCATOR_CRS)\n assert arr.shape == (1, 893, 893)\n assert mask.shape == (893, 893)\n\n # set max_size for the returned array\n with rasterio.open(COG) as src_dst:\n arr, mask = reader.part(\n src_dst, bounds, max_size=50, dst_crs=constants.WEB_MERCATOR_CRS\n )\n assert arr.shape == (1, 50, 50)\n assert mask.shape == (50, 50)\n\n # If max_size is bigger than actual size, there is no effect\n with rasterio.open(COG) as src_dst:\n arr, mask = reader.part(\n src_dst, bounds, max_size=1000, dst_crs=constants.WEB_MERCATOR_CRS\n )\n assert arr.shape == (1, 893, 893)\n assert mask.shape == (893, 893)\n\n # Incompatible max_size with height and width\n with pytest.warns(UserWarning):\n with rasterio.open(COG) as src_dst:\n arr, mask = reader.part(\n src_dst,\n bounds,\n max_size=50,\n width=25,\n height=25,\n dst_crs=constants.WEB_MERCATOR_CRS,\n )\n assert arr.shape == (1, 25, 25)\n assert mask.shape == (25, 25)", "def minimum_rotated_rectangle(self): # -> BaseGeometry:\n ...", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def test_signed_assert_min_exceeded(self):\n with pytest.raises(ConversionError):\n 
DPT2ByteSigned.to_knx(-32769)", "def test_AVBCalcMinPartitionSize_SlowerGrowthFooterSize(self):\n\n def _SizeCalculator(partition_size):\n \"\"\"Footer size is the power of 0.95 of partition size.\"\"\"\n # Minus footer size to return max image size.\n return partition_size - int(math.pow(partition_size, 0.95))\n\n for image_size in self._image_sizes:\n min_partition_size = AVBCalcMinPartitionSize(image_size, _SizeCalculator)\n # Checks min_partition_size can accommodate image_size.\n self.assertGreaterEqual(\n _SizeCalculator(min_partition_size),\n image_size)\n # Checks min_partition_size (round to BLOCK_SIZE) is the minimum.\n self.assertLess(\n _SizeCalculator(min_partition_size - BLOCK_SIZE),\n image_size)", "def testrotconsts(self):\r\n assert self.data.rotconsts.shape == (len(self.data.atomcoords), 3)", "def test_init_chunk_size_field_below_range(self):\n test_config = TestConfig(chunk_size=-1)\n with self.assertRaises(ValidationError):\n test_config.clean_fields()", "def test_transform_image_resize_and_crop_portrait(self):\n self.expect_open_image('SomeBlobKey', (148, 215))\n self.expect_crop(top_y=0.0, bottom_y=0.68837209302325575)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def min_size(self):\n raise NotImplementedError()", "def test_archiveextractsize(self):\n # copy file rules\n for testfile in ['6mbzipattachment.eml', '6mbrarattachment.eml']:\n try:\n tmpfile = tempfile.NamedTemporaryFile(\n suffix='virus', prefix='fuglu-unittest', dir='/tmp')\n shutil.copy(\"%s/%s\" % (TESTDATADIR, testfile), tmpfile.name)\n\n user = '[email protected]'\n conffile = self.tempdir + \"/%s-archivefiletypes.conf\" % user\n # the largefile in the test message is just a bunch of zeroes\n open(conffile, 'w').write(\n \"deny application\\/octet\\-stream no data allowed\")\n self.rulescache._loadrules()\n suspect = Suspect(\n '[email protected]', user, tmpfile.name)\n\n # test with high limit first\n oldlimit = self.candidate.config.get(\n 'FiletypePlugin', 'archivecontentmaxsize')\n self.candidate.config.set(\n 'FiletypePlugin', 'archivecontentmaxsize', '7000000')\n result = self.candidate.examine(suspect)\n if type(result) is tuple:\n result, message = result\n self.assertEqual(\n result, DELETE, 'extracted large file should be blocked')\n\n # now set the limit to 5 mb, the file should be skipped now\n self.candidate.config.set(\n 'FiletypePlugin', 'archivecontentmaxsize', '5000000')\n result = self.candidate.examine(suspect)\n if type(result) is tuple:\n result, message = result\n self.assertEqual(result, DUNNO, 'large file should be skipped')\n\n # reset config\n self.candidate.config.set(\n 'FiletypePlugin', 'archivecontentmaxsize', oldlimit)\n finally:\n tmpfile.close()\n os.remove(conffile)", "def min_parts():\n # min_parts = 270\n # you must replace this with your own value\n # return -1\n\n return 270", "def test_size_too_big(self):\n max_size = max(settings.MISAGO_AVATARS_SIZES)\n too_big = max_size * 2\n\n self.assertEqual(clean_size(too_big), max_size)", "def test_g_asignar_rol(self):", "def test_compress_works(self):\n tau = 45.0\n mrate = 60.0\n Mrate = 100.0\n gain = 5\n\n tmax = 50.0\n dt = 
0.2\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n self.rule.gain = gain\n\n self.motor.error_fct = lambda t: (int_r(t/20.0)%3-1)*np.ones(self.Nsrc)\n\n M1 = simulation.StateMonitor(self.rule, 'out')\n\n sim1 = simulation.Simulation(self.source, self.motor, self.rule, M1, dt=dt)\n sim1.run(tmax)\n\n # make sure we normally go outside the range\n self.assertGreater(np.sum(M1.out < mrate), 0)\n self.assertGreater(np.sum(M1.out > Mrate), 0)\n\n self.rule.compress_rates = True\n\n M2 = simulation.StateMonitor(self.rule, 'out')\n\n sim2 = simulation.Simulation(self.source, self.motor, self.rule, M2, dt=dt)\n sim2.run(tmax)\n\n self.assertEqual(np.sum(M2.out < mrate), 0)\n self.assertEqual(np.sum(M2.out > Mrate), 0)", "def test_Z_start(self):\t\t\n self.assertAlmostEqual(attempt.Z[0], 40)", "def test_keysize_change(self):\n # This bundle used to trigger a bug in the RDATA sorting before hashing\n self._test_file(\n \"ksr-root-2016-q3-0.xml\",\n filter_ids=[\"a6b6162e-b299-427e-b11b-1a8c54a08910\"],\n )", "def test_tile_read_invalidResampling():\n bounds = [\n -6574807.42497772,\n 12210356.646387195,\n -6261721.357121638,\n 12523442.714243278,\n ]\n with pytest.raises(KeyError):\n with rasterio.open(COG) as src_dst:\n reader.part(src_dst, bounds, 16, 16, resampling_method=\"jacques\")", "def test_compress_fastq_real_with_integrity_fail(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context, mocker\n):\n # GIVEN the path to a existing two existing fastq files and a non existing spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n mocker.patch.object(compare_cmd, \"compare_elements\")\n compare_cmd.compare_elements.return_value = False\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 1\n # THEN assert that the spring file was deleted\n assert not spring_tmp_path.exists()\n # THEN assert that only the original fastq files are left\n assert nr_files(dir_path) == 2", "def test_rotated(self):\n d = np.random.random((100, 3))\n d_emb = tadasets.embed(d, 10)\n assert np.all(np.var(d_emb, axis=0) > 0)", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n 
w[-1].message)", "def test_from_knx_max_exceeded(self):\n with self.assertRaises(ConversionError):\n DPTTariff().from_knx((0xFF,))", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def testVerySmallClusters(input_img, input_mode, min_rad=7.5):\n\n if input_mode == 'fp':\n np_img = loadImage(input_img)\n elif input_mode == 'np':\n np_img = input_img\n else:\n return (input_mode, \" is not a supported mode. Supported modes are 'np' or 'fp'.\")\n dim = np_img.shape\n if dim[0] <= dim[1]:\n length = dim[0]\n else:\n length = dim[1]\n if length/2. < min_rad:\n return True\n else:\n return False", "def _check_shrink(self):\n # As an example, if length is 1/4 of capacity and growth factor is 2,\n # then the capacity should shrink in half to keep length proportional\n # to capacity\n if self._length < int(self._capacity / (self._growth_factor ** 2)):\n self._shrink_arr()", "def test_generate_video_dont_enlarge(tmpdir, fmt):\n\n base, ext = os.path.splitext(TEST_VIDEO)\n dstfile = str(tmpdir.join(base + '.' + fmt))\n settings = create_settings(video_size=(1000, 1000), video_format=fmt)\n generate_video(SRCFILE, dstfile, settings,\n options=settings.get(fmt + '_options'))\n size_src = video_size(SRCFILE)\n size_dst = video_size(dstfile)\n\n assert size_src == size_dst", "def __init__(self, min_roi_length_for_fragmentation):\n self.min_roi_length_for_fragmentation = \\\n min_roi_length_for_fragmentation", "def test_tte5_short_write_tile_signature(self):\n filename = str(self.temp_j2k_filename)\n xtx5_setup(filename, short_sig=True)\n self.assertTrue(True)", "def augmentImageByRotation(imagePath, numRotations, originalBin, data_path):\n angles = np.linspace(0, 360, numRotations + 1, endpoint=False)[1:]\n augmentedImages = []\n rgb = cv2.imread(os.path.join(data_path, imagePath))\n dt = exrToNumpy(os.path.join(os.path.dirname(os.path.join(data_path, imagePath)), 'liver_0_dt.exr'))\n dl = exrToNumpy(os.path.join(os.path.dirname(os.path.join(data_path, imagePath)), 'liver_0_dl.exr'))\n newRatings = open(new_ratings_file_path, 'a')\n generated_images = 0\n for i, angle in enumerate(angles):\n # try different offsets if exact rotation does not give the same bin as the original image\n offsets = np.linspace(0, 10, 100, endpoint=False)\n newBin = None\n save_version = False\n for offset in offsets:\n rgb_r, dt_r, dl_r = rotate_image(rgb, dt, dl, angle + offset)\n # rate image\n rating, _ = rate_tumordistance_depth.rateImage(None, None, None, num_tumors, images=[rgb_r, dt_r, dl_r])\n newBin = getBinFromRating(rating, num_bins)\n # if bins match, save image\n if originalBin == newBin:\n save_version = True\n break\n if save_version:\n rotDir = os.path.join(augmentedDataPath, os.path.dirname(imagePath) + \"_rot\" + str(i))\n os.makedirs(rotDir)\n # save images to rotDir\n rgb_path = os.path.join(rotDir, 'liver_0.png')\n dt_path = os.path.join(rotDir, 'liver_0_dt.exr')\n dl_path = os.path.join(rotDir, 'liver_0_dl.exr')\n cv2.imwrite(rgb_path, rgb_r)\n save_exr_from_numpy(dt_path, dt_r)\n save_exr_from_numpy(dl_path, dl_r)\n # make entry in new ratings file\n save_path = os.path.relpath(rgb_path, data_path)\n newRatings.write(getRatingsLine(save_path, rating))\n generated_images += 1\n newRatings.close()\n if generated_images == 0:\n 
print \"Could not match bins. (\" + imagePath + \")\"\n return generated_images", "def test_AVBCalcMinPartitionSize_FasterGrowthFooterSize(self):\n\n def _SizeCalculator(partition_size):\n \"\"\"Max image size is the power of 0.95 of partition size.\"\"\"\n # Max image size grows less than partition size, which means\n # footer size grows faster than partition size.\n return int(math.pow(partition_size, 0.95))\n\n for image_size in self._image_sizes:\n min_partition_size = AVBCalcMinPartitionSize(image_size, _SizeCalculator)\n # Checks min_partition_size can accommodate image_size.\n self.assertGreaterEqual(\n _SizeCalculator(min_partition_size),\n image_size)\n # Checks min_partition_size (round to BLOCK_SIZE) is the minimum.\n self.assertLess(\n _SizeCalculator(min_partition_size - BLOCK_SIZE),\n image_size)", "def test_minrar_code_coverage(self):\n data = np.array([[0, 0, 1], [0, 0, 1]])\n vrot, v, w = minvar(data)\n # Case of np.sum(w) == 0.0\n self.assertTrue(w.sum() < self.tol)\n\n # This should be not Right Handed (?...)\n data = np.array([[0, -1, 1], [-1, -1, 1]])\n # case if YcrossZdotX < 0\n vrot, v, w = minvar(data)\n YcrossZdotX = v[0, 0] * (v[1, 1] * v[2, 2] - v[2, 1] * v[1, 2])\n # YcrossZdotX Should be positive after that\n self.assertTrue(YcrossZdotX > 0)\n\n # should tigger case if v[2, 2] < 0: (?...)\n data = np.array([[-0.1, -0.9, 0.5], [-1, 1, -0.9]])\n vrot, v, w = minvar(data)\n # v[2,2] Should be positive after that\n self.assertTrue(v[2, 2] > 0)", "def test_super_chunk(self):\n chunksize = MAX_SINGLE_UPLOAD_SIZE + 1\n size = MAX_SINGLE_UPLOAD_SIZE * 2\n self.assertEqual(find_chunksize(size, chunksize),\n MAX_SINGLE_UPLOAD_SIZE)", "def get_opt_rotate(obj_img, back_img,\n back_center_x, back_center_y,\n obj_center_x, obj_center_y,\n prev_rot_angle=0.,\n is_erosion=False):\n width = obj_img.shape[0]\n rot_img = ndimage.rotate(obj_img, prev_rot_angle, reshape=False)\n induce_x, induce_y = int(back_center_x - obj_center_x), int(back_center_y - obj_center_y)\n combine_img = back_img.copy()\n combine_img[induce_y:induce_y + width, induce_x:induce_x + width] -= rot_img\n neg_count = len(np.argwhere(combine_img < 0))\n if is_erosion:\n angle_amount = 4.\n else:\n angle_amount = 16.\n # check combine_img.dtype; rot_img.dtype; back_img\n curr_angle = prev_rot_angle\n while angle_amount > 0.5:\n angle_amount /= 2.\n\n rotate_1 = ndimage.rotate(obj_img, curr_angle + angle_amount, reshape=False)\n combine_img = back_img.copy()\n combine_img[induce_y:induce_y+width, induce_x:induce_x+width] -= rotate_1\n neg_count_1 = len(np.argwhere(combine_img < 0))\n\n rotate_2 = ndimage.rotate(obj_img, curr_angle - angle_amount, reshape=False)\n combine_img = back_img.copy()\n combine_img[induce_y:induce_y + width, induce_x:induce_x + width] -= rotate_2\n neg_count_2 = len(np.argwhere(combine_img < 0))\n\n if neg_count_1 < neg_count_2:\n if neg_count_1 < neg_count:\n neg_count = neg_count_1\n curr_angle = curr_angle + angle_amount\n else:\n if neg_count_2 < neg_count:\n neg_count = neg_count_2\n curr_angle = curr_angle - angle_amount\n # print(curr_angle)\n # print(neg_count, neg_count_1, neg_count_2)\n # print('Negative Pix Count Rotation: %d.' 
% neg_count)\n # print('Optimal Rotation: ', curr_angle)\n return curr_angle, neg_count", "def test_align_sanity(self):\n # QWERTY resemblance matrix:\n R = qwerty_distance()\n diff, u, r = min_difference_align(\"polynomial\", \"exponential\", R)\n # Warning: we may (read: 'will') use another matrix!\n self.assertEqual(diff, 15)\n # Warning: there may be other optimal matchings!\n self.assertEqual(u, '--polyn-om-ial')\n self.assertEqual(r, 'exp-o-ne-ntial')", "def testUploadWrapperCorruption(self):\n # Check that small reads still work.\n encrypted_data = \"\"\n count = 0\n with self.assertRaisesRegexp(IOError, \"HMAC not verified\"):\n while 1:\n small_read = self.encrypt_wrapper.read(2)\n if not small_read:\n break\n encrypted_data += small_read\n count += len(small_read)\n\n # Corrupt the data a little bit.\n if count == 3000:\n small_read = \"XX\"\n\n self.decrypt_wrapper.write(small_read)", "def test_far_out_coordinates(rotationangle):\n\n eps = 1e-7\n\n # Get the limits\n lim = rotationangle[\"cs\"].limits()[2:]\n lim0 = max(lim)\n lim1 = min(lim)\n\n # Setting c2 and c3 to zero\n c3 = lim0 - eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # Setting c2 and c3 to zero\n c3 = lim1 + eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # A large value which is raises an exception\n with pytest.raises(RuntimeError):\n c3 = lim0 + eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n print(phi_dash)\n\n with pytest.raises(RuntimeError):\n c3 = lim1 - eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)", "def rotate(self):\n pass", "def test_snapshots_size(self):\n snapshots_dir = REPO_ROOT / \"tests\" / \"snapshots\"\n small_snapshots = []\n for f in snapshots_dir.glob(\"*.txt\"):\n if f.stat().st_size <= 150:\n small_snapshots.append(f\"- {f.name}\")\n if small_snapshots:\n offenders = \"\\n\".join(small_snapshots)\n assert False, (\n \"Very small snapshot files are problematic.\\n\"\n \"Offending snapshot files:\\n\"\n f\"{offenders}\\n\"\n \"Consider refacotring them to avoid using snapshots. Tests that use \"\n \"snapshots are harder to reason about when they fail. 
Whenever possilbe \"\n \"a test with inline data is much easier to reason about and refactor.\"\n )", "def check_size(prev, current, delta):\n before = prev.pools[0].used\n after = current.pools[0].used\n assert delta == (before - after) >> 20", "def checkSize(self):\n if self.format.maxSize and self.size > self.format.maxSize:\n return 1\n elif (self.format.minSize and\n (not self.req_grow and\n self.size < self.format.minSize) or\n (self.req_grow and self.req_max_size and\n self.req_max_size < self.format.minSize)):\n return -1\n return 0", "def test_volume(self):\n\n self.test_shape.workplane = \"XY\"\n self.test_shape.rotation_axis = \"Z\"\n\n assert self.test_shape.volume() == pytest.approx(math.pi * (10**2) * 100 * 8)", "def test_to_knx_min_exceeded(self):\n with pytest.raises(ConversionError):\n DPTSceneNumber.to_knx(DPTSceneNumber.value_min - 1)", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "async def test_unpacker_do_work_bundle_mismatch_size(config, mocker, path_map_mock):\n logger_mock = mocker.MagicMock()\n lta_rc_mock = mocker.patch(\"rest_tools.client.RestClient.request\", new_callable=AsyncMock)\n mock_zipfile_init = mocker.patch(\"zipfile.ZipFile.__init__\")\n mock_zipfile_init.return_value = None\n mock_zipfile_write = mocker.patch(\"zipfile.ZipFile.extractall\")\n mock_zipfile_write.return_value = None\n mock_json_load = mocker.patch(\"json.load\")\n mock_json_load.return_value = {\n \"files\": [\n {\n \"logical_name\": \"/full/path/to/file/in/data/warehouse.tar.bz2\",\n \"file_size\": 1234567890,\n \"checksum\": {\n \"adler32\": \"89d5efeb\",\n \"sha512\": \"c919210281b72327c179e26be799b06cdaf48bf6efce56fb9d53f758c1b997099831ad05453fdb1ba65be7b35d0b4c5cebfc439efbdf83317ba0e38bf6f42570\",\n },\n }\n ]\n }\n mock_shutil_move = mocker.patch(\"shutil.move\")\n mock_shutil_move.return_value = None\n mock_lta_checksums = mocker.patch(\"lta.unpacker.lta_checksums\")\n mock_lta_checksums.return_value = {\n \"adler32\": \"89d5efeb\",\n \"sha512\": \"c919210281b72327c179e26be799b06cdaf48bf6efce56fb9d53f758c1b997099831ad05453fdb1ba65be7b35d0b4c5cebfc439efbdf83317ba0e38bf6f42570\",\n }\n mock_os_path_getsize = mocker.patch(\"os.path.getsize\")\n mock_os_path_getsize.return_value = 234567890\n mock_os_remove = mocker.patch(\"os.remove\")\n mock_os_remove.return_value = None\n altfc_mock = mocker.patch(\"lta.unpacker.Unpacker._add_location_to_file_catalog\", new_callable=AsyncMock)\n altfc_mock.return_value = False\n p = Unpacker(config, logger_mock)\n BUNDLE_OBJ = {\n \"bundle_path\": \"/mnt/lfss/jade-lta/bundler_out/9a1cab0a395211eab1cbce3a3da73f88.zip\",\n \"uuid\": \"f74db80e-9661-40cc-9f01-8d087af23f56\",\n \"source\": \"NERSC\",\n \"dest\": \"WIPAC\",\n \"path\": \"/full/path/to/file\",\n \"files\": [{\"logical_name\": 
\"/full/path/to/file/in/data/warehouse.tar.bz2\", }],\n }\n with patch(\"builtins.open\", mock_open(read_data=\"data\")) as metadata_mock:\n with pytest.raises(Exception):\n await p._do_work_bundle(lta_rc_mock, BUNDLE_OBJ)\n metadata_mock.assert_called_with(mocker.ANY)", "def check_soft_dvpp_decode_random_crop_resize_jpeg(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [size, scale, ratio, max_attempts], _ = parse_user_args(method, *args, **kwargs)\n check_size_scale_ration_max_attempts_paras(size, scale, ratio, max_attempts)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_len(self):\n arm = self.ar[2009][11]\n self.assertEqual(len(arm.keys()), len(self.__class__.wanted_sections))", "def test_for_size(self):\n\n inc = REFRESH_COMMANDS.calculate_refresh_commands(\"Rainmeter.exe\", \"test-config\", \"file.inc\", True, False)\n ini = REFRESH_COMMANDS.calculate_refresh_commands(\"Rainmeter.exe\", \"test-config\", \"file.ini\", True, True)\n\n self.assertEqual(len(inc) + 1, len(ini))", "def testCheckBlocksFitLength_Pass(self):\n self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(\n 64, 4, 16, 'foo'))\n self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(\n 60, 4, 16, 'foo'))\n self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(\n 49, 4, 16, 'foo'))\n self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(\n 48, 3, 16, 'foo'))", "def kuzmin_rotation(R,c,M,G=astronomicalG):\n return np.sqrt(2*G*np.power(10.,M)*R*R*np.power(c*c+R*R,-1.5))" ]
[ "0.5948658", "0.5849822", "0.5711593", "0.5711048", "0.5682156", "0.565054", "0.563231", "0.56153196", "0.5595372", "0.55438685", "0.55225056", "0.54687154", "0.5414168", "0.5409983", "0.5369747", "0.5347305", "0.5310389", "0.52922046", "0.5289117", "0.52687037", "0.52551305", "0.5245736", "0.5240485", "0.5220679", "0.5213655", "0.52060163", "0.5175585", "0.516036", "0.51336324", "0.5133239", "0.5128945", "0.51113915", "0.51097536", "0.5079732", "0.5076736", "0.5065708", "0.5065319", "0.5058132", "0.5054314", "0.5044122", "0.50356174", "0.5033223", "0.502913", "0.5027861", "0.5021064", "0.50146693", "0.5012496", "0.5009351", "0.50021714", "0.49991962", "0.49979654", "0.49919075", "0.49839133", "0.49802113", "0.49756676", "0.49755958", "0.49670178", "0.49656466", "0.4950259", "0.49466723", "0.49453977", "0.49390614", "0.49176356", "0.49167272", "0.48957813", "0.48870295", "0.48845044", "0.48832083", "0.48770514", "0.4871228", "0.48664448", "0.48622346", "0.48605925", "0.4858645", "0.48469618", "0.4846145", "0.4845149", "0.4837233", "0.48346597", "0.4834472", "0.48294595", "0.4827836", "0.48234123", "0.48221764", "0.4816859", "0.48144817", "0.48142886", "0.48137558", "0.4813369", "0.4804828", "0.48037353", "0.4782132", "0.4781823", "0.477784", "0.47777057", "0.47742468", "0.47686815", "0.47657412", "0.47640938", "0.47636455" ]
0.54828143
11
Tests of try rotation with target and interval in configuration
def test_process_log_with_target_in_configuration(self): with tempfile.TemporaryDirectory() as sandbox: with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout: srcfile = Path(sandbox, 'pokus.log') srcfile.touch() destfile = Path(sandbox, 'backup', 'pokus-20190110-2130.log') compressors = process_log( datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30), {'target': '{{path}}/backup/{{name}}-%Y%m%d-%H%M.{{ext}}', 'interval': 'hourly'}, 'hourly', str(srcfile), 10 ) self.assertEqual(compressors, []) self.assertFalse(srcfile.exists()) self.assertTrue(destfile.exists()) self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... rotating... "{src}" -> "{dest}" done.\n'.format(src=srcfile, dest=destfile))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_g_asignar_rol(self):", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n 
cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_rot(self):\n\n print(\"rot()\")\n obs = self.fixture\n\n # rotation(0) = identity\n for axis in [1, 2, 3]:\n # theta = 0.0\n rotation = obs.rot(0.0, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n # theta = 2*pi\n rotation = obs.rot(2.0 * np.pi, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n\n # perform many randomized tests\n num_tests = 100\n num_products = 10\n for _test_counter in range(num_tests):\n thetas = []\n axes = []\n base = np.eye(3)\n # we will multiply a series of rotations into \"base\"\n rot_all = base\n for _rot_counter in range(num_products):\n theta = np.random.uniform(2 * np.pi) # in [0,2 pi]\n axis = np.random.randint(3) + 1 # in {1,2,3}\n axes.append(axis)\n thetas.append(theta)\n rotation = obs.rot(theta, axis)\n # multiply rot1 into the cumulative rotation\n rot_all = np.dot(rot_all, rotation)\n # now, back all the rotations out\n for _rot_counter in range(num_products):\n theta = thetas.pop()\n axis = axes.pop()\n # apply the inverse rotation\n rotation = obs.rot(-theta, axis)\n rot_all = np.dot(rot_all, rotation)\n # find || base - rot1 * rot2 ||\n diff = np.linalg.norm(base - rot_all)\n self.assertAlmostEqual(diff, 0.0, delta=1e-10 * num_products)", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro 
iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_custom_time(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, interval)))", "def test_accurate_interval(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n v_expected = np.array([i*(M.t + self.dt) for i in xrange(self.N)])\n self.assertTrue(np.allclose(M.v, v_expected))", "def test_extract_rot_angle():\n v = np.zeros((4,2))\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Zero velocities not allowed.\"\n \n v[:,1] = 1.\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Failed to get both forward and backward directions.\"\n\n # Forwards-backwards motion.\n v[:,1] = 0.\n v[:2,0] = -1.1\n v[2:,0] = 1.2\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,np.pi)\n\n # Forwards-backwards motion.\n v[:,0] = 0.\n v[:2,1] = -.9\n v[2:,1] = .8\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2)\n\n # Forwards-backwards motion with noise.\n v[:2,1] += (np.random.rand(2)*2-1)/10\n v[2:,1] += (np.random.rand(2)*2-1)/10\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2,atol=.1)", "def test_controlled_rotation_gradient(self, G, tol):\n dev = qml.device(\"default.qubit\", wires=2)\n b = 0.123\n\n @qml.qnode(dev, diff_method=\"parameter-shift\")\n def circuit(b):\n qml.QubitStateVector(np.array([1.0, -1.0]) / np.sqrt(2), wires=0)\n G(b, wires=[0, 1])\n return qml.expval(qml.PauliX(0))\n\n res = circuit(b)\n assert np.allclose(res, -np.cos(b / 2), atol=tol, rtol=0)\n\n grad = qml.grad(circuit)(b)\n expected = np.sin(b / 2) / 2\n assert np.allclose(grad, expected, atol=tol, rtol=0)", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_direction_no_int(self):\n tmax = 40.0\n dt = 0.1\n\n tutor = SimpleNeurons(2, out_fct=lambda _: [100.0, 60.0])\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=80.0, learning_rate=0.1,\n use_tutor_baseline=False)\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n # tutor_rule's output should be increasing for t < tmax/2, decreasing after\n mask = (np.arange(0, tmax, dt) < tmax/2)\n\n self.assertGreater(np.min(tutor_rule.rates[mask, 0]), 80.0)\n self.assertLess(np.max(tutor_rule.rates[mask, 1]), 80.0)\n\n self.assertGreater(np.min(tutor_rule.rates[~mask, 1]), 80.0)\n self.assertLess(np.max(tutor_rule.rates[~mask, 0]), 80.0)", "async def rotate(self, angle: float, duration: float) -> None:\n angle *= self._ratio\n if duration < 0:\n raise ValueError\n if angle == 0:\n if duration > 0:\n await asyncio.sleep(duration)\n return\n if duration == 0 or angle / duration > self._max_speed:\n duration = abs(angle / self._max_speed)\n start = time.perf_counter()\n sequence_count = 0\n if angle > 0:\n plus_minus = 1\n else:\n plus_minus = -1\n # Times 2 because half-step\n steps = 2 * abs(int(float(angle) / 360 * self.STEPS_PER_REV))\n 
for i in range(steps):\n for pin in range(4):\n current_pin = self._pins[pin]\n if self.SEQUENCE[sequence_count][pin] != 0:\n GPIO.output(current_pin, True)\n else:\n GPIO.output(current_pin, False)\n sequence_count += plus_minus\n # If we reach the end of the sequence start again\n if sequence_count == self.rotation_seq_count:\n sequence_count = 0\n if sequence_count < 0:\n sequence_count = self.rotation_seq_count - 1\n # Wait to match entered duration\n wait = (float(i) / steps * duration) - (time.perf_counter() - start)\n if wait > 0:\n await asyncio.sleep(wait)\n for pin in self._pins:\n GPIO.output(pin, False)", "def test_simulationRun(self):\n self.opt = { 'temperature' : 300.0, 'friction' : 1, 'dt' : 0.00002,\n 'nIter' : 2, 'nstepsNC' : 2, 'nstepsMD' : 1, 'nprop' : 1,\n 'nonbondedMethod' : 'NoCutoff', 'constraints': 'HBonds',\n 'trajectory_interval' : 1, 'reporter_interval' : 1,\n 'outfname' : 'mc-test',\n 'platform' : None,\n 'constraints' : 'HBonds',\n 'mc_per_iter' : 2 }\n\n structure = self.full_struct\n class SetRotationMove(RandomLigandRotationMove):\n def __init__(self, structure, resname='LIG'):\n super(SetRotationMove, self).__init__(structure, resname)\n\n def move(self, context):\n \"\"\"Function that performs a random rotation about the\n center of mass of the ligand.\n \"\"\"\n #TODO: check if we need to deepcopy\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n\n self.positions = positions[self.atom_indices]\n self.center_of_mass = self.getCenterOfMass(self.positions, self.masses)\n reduced_pos = self.positions - self.center_of_mass\n\n # Define random rotational move on the ligand\n #set rotation so that test is reproducible\n set_rotation_matrix = np.array([[-0.62297988, -0.17349253, 0.7627558 ],\n [ 0.55082352, -0.78964857, 0.27027502],\n [ 0.55541834, 0.58851973, 0.58749893]])\n\n\n #set_rotation_matrix = np.array([[1, 0, 0],\n # [0, 1, 0],\n # [0, 0, 1]])\n\n #multiply lig coordinates by rot matrix and add back COM translation from origin\n rot_move = np.dot(reduced_pos, set_rotation_matrix) * positions.unit + self.center_of_mass\n\n # Update ligand positions in nc_sim\n for index, atomidx in enumerate(self.atom_indices):\n positions[atomidx] = rot_move[index]\n context.setPositions(positions)\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n self.positions = positions[self.atom_indices]\n return context\n\n\n self.model = SetRotationMove(structure, resname='ALA')\n #self.model = RandomLigandRotationMove(structure, resname='ALA')\n\n self.model.atom_indices = range(22)\n self.model.topology = structure[self.model.atom_indices].topology\n self.model.positions = structure[self.model.atom_indices].positions\n self.model.calculateProperties()\n\n self.mover = MoveEngine(self.model)\n #Initialize the SimulationFactory object\n sims = SimulationFactory(structure, self.mover, **self.opt)\n #print(sims)\n system = sims.generateSystem(structure, **self.opt)\n simdict = sims.createSimulationSet()\n alch_system = sims.generateAlchSystem(system, self.model.atom_indices)\n self.nc_sim = sims.generateSimFromStruct(structure, self.mover, alch_system, ncmc=True, **self.opt)\n self.model.calculateProperties()\n self.initial_positions = self.nc_sim.context.getState(getPositions=True).getPositions(asNumpy=True)\n mc_sim = Simulation(sims, self.mover, **self.opt)\n #monkeypatch to access acceptance value\n def nacceptRejectMC(self, temperature=300, **opt):\n \"\"\"Function that chooses to accept or reject the proposed move.\n 
\"\"\"\n md_state0 = self.current_state['md']['state0']\n md_state1 = self.current_state['md']['state1']\n log_mc = (md_state1['potential_energy'] - md_state0['potential_energy']) * (-1.0/self.nc_sim.context._integrator.kT)\n randnum = math.log(np.random.random())\n\n if log_mc > randnum:\n self.accept += 1\n print('MC MOVE ACCEPTED: log_mc {} > randnum {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state1['positions'])\n else:\n self.reject += 1\n print('MC MOVE REJECTED: log_mc {} < {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state0['positions'])\n self.log_mc = log_mc\n self.md_sim.context.setVelocitiesToTemperature(self.opt['temperature'])\n mc_sim.acceptRejectMC = nacceptRejectMC\n nacceptRejectMC.__get__(mc_sim)\n mc_sim.acceptRejectMC = types.MethodType(nacceptRejectMC, mc_sim)\n mc_sim.runMC(self.opt['nIter'])\n #get log acceptance\n print(mc_sim.log_mc)\n #if mc is working, should be around -24.1\n assert mc_sim.log_mc <= -23.8 and mc_sim.log_mc >= -24.3", "def test_t(self):\n assert np.isclose(self.stepper.t, self.final_t)", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def rotationDetermination(self):\n \n for index, row in enumerate(self.magdata):\n if index > 11 and index < (len(self.magdata) - 12):\n br1 = [row[0] for row in self.magdata[(index-12):(index-2)]]\n bt1 = [row[1] for row in self.magdata[(index-12):(index-2)]]\n bn1 = [row[2] for row in self.magdata[(index-12):(index-2)]]\n b1 = np.matrix((np.mean(br1), np.mean(bt1), np.mean(bn1)))\n\n br2 = [row[0] for row in self.magdata[(index+2):(index+12)]]\n bt2 = [row[1] for row in self.magdata[(index+2):(index+12)]]\n bn2 = [row[2] for row in self.magdata[(index+2):(index+12)]]\n b2 = np.matrix((np.mean(br2), np.mean(bt2), np.mean(bn2)))\n\n theta = np.arccos(np.dot(b1,b2.T)/(np.linalg.norm(b1)*np.linalg.norm(b2)))*180/np.pi\n\n self.detections.rotations.append(theta[0,0])\n self.detections.rotationTimeTags.append(self.timestamps[index])\n \n\n## self.b1 = b1\n## self.b2 = b2\n self.detections.rotationBoundary=[]\n if len(self.detections.rotations) != 0:\n \n for index, theta in enumerate(self.detections.rotations):\n if index > 0:\n if theta > 30 and self.detections.rotations[index-1] < 30:\n self.detections.rotationBoundary.append(self.detections.rotationTimeTags[index])\n if index < len(self.detections.rotations)-1:\n if theta > 30 and self.detections.rotations[index+1] < 30:\n self.detections.rotationBoundary.append(self.detections.rotationTimeTags[index])", "def test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", 
pr.check_matrix, R)", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_shape_interval(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, ['a', 'v', 'b'], interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n nsteps = int_r(self.t_max/interval)\n self.assertEqual(M.v.shape, (self.N, nsteps))\n self.assertEqual(M.a.shape, (2, nsteps))\n self.assertEqual(M.b.shape, (self.N, nsteps))", "def test_calculate_tutor_baseline(self):\n tmax = 40.0\n dt = 1.0\n ini_rate = 80.0\n baseline_n = 5\n\n rate1 = ini_rate + 20.0\n rate2 = ini_rate - 10.0\n\n nruns = 10\n nsteps = int_r(tmax/dt)\n\n tutor = SimpleNeurons(2, out_fct=lambda i:\n [rate1, rate2] if i < nsteps/2 else [rate2, rate1])\n reward = MockReward(lambda _: 0.0)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=0.1,\n use_tutor_baseline=True, baseline_n=baseline_n)\n\n factor = 1 - 1.0/baseline_n\n\n for i in xrange(nruns):\n tutor_rule.reset_rates()\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n crt_baseline = tutor_rule.baseline\n\n self.assertEqual(np.ndim(crt_baseline), 2)\n self.assertEqual(np.shape(crt_baseline)[0], nsteps)\n self.assertEqual(np.shape(crt_baseline)[1], 2)\n\n expected1 = rate1 + (ini_rate - rate1)*factor**(i+1)\n expected2 = rate2 + (ini_rate - rate2)*factor**(i+1)\n\n self.assertLess(np.max(np.abs(crt_baseline[:nsteps/2, 0] - expected1)),\n 1e-6)\n self.assertLess(np.max(np.abs(crt_baseline[nsteps/2:, 0] - expected2)),\n 1e-6)\n\n self.assertLess(np.max(np.abs(crt_baseline[:nsteps/2, 1] - expected2)),\n 1e-6)\n self.assertLess(np.max(np.abs(crt_baseline[nsteps/2:, 1] - expected1)),\n 1e-6)", "def manual_control_once(\n self, rotation: int, velocity: float, duration: int=1500):\n number_of_tries = 3\n self.manual_start()\n while number_of_tries > 0:\n if self.status().state_code == 7:\n time.sleep(5)\n self.manual_control(rotation, velocity, duration)\n time.sleep(5)\n return self.manual_stop()\n\n time.sleep(2)\n number_of_tries -= 1", "def _case3_test_failures_actual_rate_lags_target_rate(mock_get_next_ripe_time):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_lags(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, [threading.currentThread()]\n )\n advancer.start()\n\n counter = 0\n while True:\n token = rl.acquire_token()\n\n # Actual rate always lags target rate\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n if old_actual_rate is not None:\n self.assertLess(old_actual_rate, old_target_rate)\n\n # Token grant a 100 initial successes followed by all failures\n counter += 1\n if counter <= required_successes:\n rl.release_token(token, True)\n continue\n else:\n rl.release_token(token, False)\n\n # assert that the new rate is calculated based on the (higher/more conservative) target rate\n if backoff_factor * old_target_rate > min_cluster_rate:\n 
self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(backoff_factor * old_target_rate, 2),\n )\n else:\n # assert that new rate never goes lower than min rate\n self.assertEqual(\n round(rl._current_cluster_rate, 2), round(min_cluster_rate, 2)\n )\n break\n advancer.stop(wait_on_join=False)", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def testRolledSpectra():\n tau = np.zeros((2,50))\n tau[0,0] = 1\n tau[1,0] = 1\n tau[1,-1] = 2\n (roll, tau_new) = spec_utils.get_rolled_spectra(tau)\n assert np.all(roll == np.array([25,-24]))\n assert tau_new[0,25] == 1\n assert tau_new[1,25] == 2\n assert tau_new[1,26] == 1\n assert np.sum(np.abs(tau_new)) == 4", "def doRotation(self, delta):\n self.correctPending()\n self.rotation = (self.rotation + delta) % self.possibleRotations", "def compute_rotation(self):\n if self.predictions[self.iteration][0] == 90.0 or self.predictions[self.iteration][0] == 270.0:\n self.rotation = 20\n self.initial_adjust = True\n return\n\n if self.iteration == 0 or (self.iteration == 1 and self.initial_adjust):\n self.rotation = rotate.get_90_deg_rotation(self.predictions[self.iteration])\n elif self.iteration == 1 or (self.iteration == 2 and self.initial_adjust):\n self.rotation = rotate.get_45_deg_rotation(self.predictions, self.current_position)\n elif self.iteration >= 2 or (self.iteration > 2 and self.initial_adjust):\n self.rotation = rotate.get_fine_rotation(self.iteration)", "def test_rotation_isometry(self):\n import numpy\n\n # test for all kinds of curvatures K\n for k in (0, 1, -1, 1/11, -1/11, 11, -2):\n \n s = space(curvature=k)\n\n # use a small enough magnitude to not break math for very negative K\n magic = 0.33377777373737737777\n # 1/sqrt(2)\n s2_ref = 0.707106781186547524400844362104785\n\n o = s.make_origin(2)\n p = s.make_point((1, 0), magic)\n q = s.make_point((s2_ref, s2_ref), magic)\n\n rot = space_point_transform(\n 
numpy.array([[1,0,0],[0,s2_ref,-s2_ref],[0,s2_ref,s2_ref]]),\n curvature=k,\n math = common_math\n )\n\n f, g, i = map(space_point_transform, (p, q, o))\n\n def check_transform_eq(t1, t2, invert=False):\n for ref in (\n s.make_point((5/13, 12/13), magic),\n s.make_point((-3/5, 4/5), magic)\n ):\n self.assertTrue(invert ^ point_isclose(\n t1(ref),\n t2(ref),\n abs_tol = 1e-12\n ))\n\n # 1/8 turn, times 8\n check_transform_eq(rot*8, i)\n\n # rotate, shift, rotate\n check_transform_eq(g, rot + f + rot * -1)\n\n # the other way\n check_transform_eq(f, rot * -1 + g + rot)", "def test_motor_error(self):\n # reproducible arbitrariness\n np.random.seed(12325)\n\n nsteps = 10\n nchan = 3\n tmax = nsteps*self.dt\n sequence = np.random.randn(nsteps, self.N)\n\n target = np.random.randn(nchan, nsteps)\n controller = LinearController(self.G, target, tau=None)\n controller.W = np.random.randn(*controller.W.shape)\n\n self.G.out_fct = lambda i: sequence[i]\n\n class MotorErrorGrabber(object):\n def __init__(self, target):\n self.target = target\n self.order = 10\n \n def prepare(self, tmax, dt):\n nsteps = int_r(tmax/dt)\n self.motor_error = np.zeros((nsteps, self.target.N))\n\n def evolve(self, t, dt):\n i = int_r(t/dt)\n self.motor_error[i, :] = self.target.get_motor_error()\n\n M = MotorErrorGrabber(controller)\n M1 = simulation.StateMonitor(controller, 'out')\n\n sim = simulation.Simulation(self.G, controller, M, M1, dt=self.dt)\n sim.run(tmax)\n\n for i in xrange(int_r(tmax/self.dt)):\n diff = M1.out[:, i] - target[:, i]\n self.assertTrue(np.allclose(M.motor_error[i], diff))", "def testCalculateRotationDiff(self):\n # Test identity\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertEqual(result, 0.0)\n # Test arbitrary rotation\n rot1 = numpy.array(\n [[0.0, 1.0, 0.0], [-1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])\n rot2 = numpy.array(\n [[0.0, 0.0, -1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])\n transform1[0:3, 0:3] = numpy.matmul(transform1[0:3, 0:3], rot1)\n transform2[0:3, 0:3] = numpy.matmul(transform2[0:3, 0:3], rot2)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Order shouldn't matter\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Test when the angle is pi\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n transform2[0, 0] = -1.0\n transform2[1, 1] = -1.0\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n # It might wrap to -pi, so check the absolute value\n self.assertAlmostEqual(abs(result), numpy.pi, 8)\n # Test an extreme value\n transform2 = -1.0 * numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(abs(result), numpy.pi)", "def test_memory(self):\n tau = 53.0\n tau0 = 22.0\n mrate = 50.0\n Mrate = 100.0\n\n tmax = 100.0\n dt = 0.01\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n\n ndiv3 = self.Nsrc/3\n\n self.motor.error_fct = lambda t: np.hstack((\n np.cos(t/tau0)*np.ones(ndiv3), np.sin(t/tau0)*np.ones(ndiv3),\n np.ones(ndiv3)))\n\n M = simulation.StateMonitor(self.rule, 'out')\n\n sim = simulation.Simulation(self.source, self.motor, self.rule, M, dt=dt)\n sim.run(tmax)\n\n # tutor output points *opposite* the motor error!\n prefactor = 
-self.rule.gain*tau0/(tau*tau + tau0*tau0)\n integral_part1 = np.cos(M.t/tau0)*np.exp(-M.t/tau)\n integral_part2 = np.sin(M.t/tau0)*np.exp(-M.t/tau)\n\n expected_cos = prefactor*(tau0 - tau0*integral_part1 + tau*integral_part2)\n expected_sin = prefactor*(tau - tau*integral_part1 - tau0*integral_part2)\n expected_const = -(1 - np.exp(-M.t/tau))\n\n mavg = (mrate + Mrate)*0.5\n mdiff = (Mrate - mrate)*0.5\n expected = np.vstack((\n np.tile(mavg + mdiff*expected_cos, (ndiv3, 1)),\n np.tile(mavg + mdiff*expected_sin, (ndiv3, 1)),\n np.tile(mavg + mdiff*expected_const, (ndiv3, 1))\n ))\n\n # mismatch is relatively large since we're using Euler's method\n # we can't do much better, however, since the motor controller cannot give\n # us motor error information at sub-step resolution\n mismatch = np.mean(np.abs(expected - M.out)/expected)\n self.assertLess(mismatch, 0.05)", "def test_d_3():\n rs = 20\n d = 3\n np.random.seed(rs)\n number_rotations = 3\n\n theta_1 = np.random.uniform(0, 2 * math.pi)\n rotation_1 = np.identity(d)\n pos_1 = np.random.randint(0, d - 1)\n pos_2 = np.random.randint(pos_1 + 1, d)\n rotation_1[pos_1, pos_1] = math.cos(theta_1)\n rotation_1[pos_1, pos_2] = - math.sin(theta_1)\n rotation_1[pos_2, pos_1] = math.sin(theta_1)\n rotation_1[pos_2, pos_2] = math.cos(theta_1)\n\n theta_2 = np.random.uniform(0, 2 * math.pi)\n rotation_2 = np.identity(d)\n pos_3 = np.random.randint(0, d - 1)\n pos_4 = np.random.randint(pos_3 + 1, d)\n rotation_2[pos_3, pos_3] = math.cos(theta_2)\n rotation_2[pos_3, pos_4] = - math.sin(theta_2)\n rotation_2[pos_4, pos_3] = math.sin(theta_2)\n rotation_2[pos_4, pos_4] = math.cos(theta_2)\n\n theta_3 = np.random.uniform(0, 2 * math.pi)\n rotation_3 = np.identity(d)\n pos_5 = np.random.randint(0, d - 1)\n pos_6 = np.random.randint(pos_5 + 1, d)\n rotation_3[pos_5, pos_5] = math.cos(theta_3)\n rotation_3[pos_5, pos_6] = - math.sin(theta_3)\n rotation_3[pos_6, pos_5] = math.sin(theta_3)\n rotation_3[pos_6, pos_6] = math.cos(theta_3)\n\n final_rotation = rotation_1 @ rotation_2 @ rotation_3\n np.random.seed(rs)\n rotation_function = (mt_obj.calculate_rotation_matrix\n (d, number_rotations))\n assert(np.all(final_rotation == rotation_function))", "def test_use_tutor_baseline(self):\n tmax = 40.0\n dt = 1.0\n ini_rate = 80.0\n\n nruns = 11\n\n tutor = SimpleNeurons(2, out_fct=lambda _: [100.0, 60.0])\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=0.1)\n\n tutor_rule.use_tutor_baseline = True\n tutor_rule.baseline_n = 5\n\n for i in xrange(nruns):\n # we first set the baselines for the two neurons to some values different\n # from tutor_rule's ini_rate, and then in the last round, we test how the\n # rates change\n if i == nruns-1:\n tutor.out_fct = lambda _: [80.0, 80.0]\n\n tutor_rule.reset_rates()\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n drates = tutor_rule.rates - ini_rate\n\n # for the first neuron, for t < tmax/2, the current firing rate is below the\n # baseline and the reward is positive, so the rates should *decrease*\n # for t >= tmax/2, the rates should *increase*\n # the opposite should happen for the second neuron\n mask = (np.arange(0, tmax, dt) < tmax/2)\n\n self.assertGreater(np.min(drates[mask, 1]), 0)\n self.assertLess(np.max(drates[mask, 0]), 0)\n\n self.assertGreater(np.min(drates[~mask, 0]), 0)\n self.assertLess(np.max(drates[~mask, 1]), 0)", "def 
test_pol_rotator(time_location, spectral_type, unpolarized, below_horizon):\n time, telescope_location = time_location\n\n Nsrcs = 50\n ras = Longitude(np.linspace(0, 24, Nsrcs) * units.hr)\n decs = Latitude(np.linspace(-90, 90, Nsrcs) * units.deg)\n names = np.arange(Nsrcs).astype(\"str\")\n if unpolarized:\n fluxes = np.array([[[1.0, 0.0, 0.0, 0.0]]] * Nsrcs).T * units.Jy\n else:\n fluxes = np.array([[[5.5, 0.7, 0.3, 0.0]]] * Nsrcs).T * units.Jy\n\n # Make the last source non-polarized\n fluxes[..., -1] = [[1.0], [0], [0], [0]] * units.Jy\n\n extra = {}\n # Add frequencies if \"full\" freq:\n if spectral_type == \"full\":\n Nfreqs = 10\n freq_array = np.linspace(100e6, 110e6, Nfreqs) * units.Hz\n fluxes = fluxes.repeat(Nfreqs, axis=1)\n extra = {\"freq_array\": freq_array}\n\n assert isinstance(fluxes, Quantity)\n source = SkyModel(\n name=names,\n ra=ras,\n dec=decs,\n frame=\"icrs\",\n stokes=fluxes,\n spectral_type=spectral_type,\n **extra,\n )\n\n if unpolarized:\n assert source._n_polarized == 0\n else:\n assert source._n_polarized == Nsrcs - 1\n\n source.update_positions(time, telescope_location)\n\n # Check the default of inds for _calc_rotation_matrix()\n rots1 = source._calc_rotation_matrix()\n inds = np.array([25, 45, 16])\n rots2 = source._calc_rotation_matrix(inds)\n assert np.allclose(rots1[..., inds], rots2)\n\n # Unset the horizon mask and confirm that all rotation matrices are calculated.\n if below_horizon:\n source.above_horizon = np.full(source.Ncomponents, False, dtype=bool)\n warn_msg = \"\"\n warn_type = None\n else:\n source.above_horizon = None\n warn_msg = \"Horizon cutoff undefined\"\n warn_type = UserWarning\n\n with uvtest.check_warnings(warn_type, match=warn_msg):\n local_coherency = source.coherency_calc()\n\n if below_horizon:\n assert local_coherency.size == 0\n else:\n assert local_coherency.unit == units.Jy\n # Check that all polarized sources are rotated.\n if unpolarized:\n assert units.quantity.allclose(local_coherency, source.frame_coherency)\n else:\n assert not np.all(\n units.quantity.isclose(\n local_coherency[..., :-1], source.frame_coherency[..., :-1]\n )\n )\n assert units.quantity.allclose(\n local_coherency[..., -1], source.frame_coherency[..., -1]\n )", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def _case1_test_successes_actual_rate_leads_target_rate(\n mock_get_next_ripe_time,\n ):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_leads(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, [threading.currentThread()]\n )\n advancer.start()\n\n while True:\n token = rl.acquire_token()\n for x in range(required_successes - 1):\n rl.release_token(token, True)\n rl.acquire_token()\n\n # Actual rate always leads target rate\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n self.assertGreater(old_actual_rate, old_target_rate)\n\n # Token grant causes new rate to be calculated\n rl.release_token(token, True)\n\n # assert that the new rate is calculated based on the (lower/more conservative) target rate\n if increase_factor * old_target_rate < max_cluster_rate:\n self.assertEqual(\n 
rl._current_cluster_rate, increase_factor * old_target_rate\n )\n else:\n # assert that new rate never exceeds max rate\n self.assertEqual(rl._current_cluster_rate, max_cluster_rate)\n break\n advancer.stop(wait_on_join=False)", "def test_SetMultipleMovingLoadsWithVelocityFunctionConfigurationNegative(self):\n\n # create nodes\n second_coord = [1, 0, 0.0]\n self.mp.CreateNewNode(1,0.0,0.0,0.0)\n self.mp.CreateNewNode(2,second_coord[0],second_coord[1],0.0)\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [1,2], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n parameters[\"velocity\"]=KratosMultiphysics.Parameters(self.time_dependent_velocity)\n parameters.AddVector(\"configuration\", [-0.25])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.25)\n\n process = GMA.SetMultipleMovingLoadsProcess(self.mp, parameters)\n cond = self.cmp.GetCondition(2)\n\n # initialise and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0, 0)\n rhs = KratosMultiphysics.Vector(0)\n\n # set load on node\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, 0.0, 0.0, 0.0])\n\n # change time and recalculate load\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, 0.0, 0.0, 0.0])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0.5)\n\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -2.0, 0.0, 0.0])", "def rotate_orbit(self):\n try:\n ang = self.orbit_speed * self.time_scale / self.refresh_rate\n self.obj.rotate(angle=ang, axis=vector(0, 1, 0), origin=self.star.obj.pos)\n self.sum_ang += ang\n except ZeroDivisionError:\n print(\"ERROR: REFRESH_RATE is 0\")\n except (AttributeError, TypeError):\n print(\"ERROR: wrong arguments type while initializing!!\")", "def test_revolute(self):\n # Rotate around the z axis\n r = Joint.revolute(np.array([0, 0, 1]))\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat, np.array([1, 0, 0, 1]))[:3]\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))", "def test_out_follows_rates(self):\n tmax = 40.0\n dt = 0.1\n\n tutor = SimpleNeurons(2, out_fct=lambda _: [100.0, 60.0])\n reward = MockReward(lambda _: 0.0)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=80.0, learning_rate=0.1,\n use_tutor_baseline=False)\n\n nsteps = int_r(tmax/dt)\n tutor_rule.rates = np.zeros((nsteps, 2))\n\n tutor_rule.rates[:, 0] = np.linspace(0, 1, nsteps)\n tutor_rule.rates[:, 1] = np.linspace(1, 0, nsteps)\n\n M = simulation.StateMonitor(tutor_rule, 'out')\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, M, dt=dt)\n sim.run(tmax)\n\n self.assertLess(np.max(np.abs(M.out[0] - np.linspace(0, 1, nsteps))), 1e-6)\n self.assertLess(np.max(np.abs(M.out[1] - np.linspace(1, 0, nsteps))), 1e-6)", "def _case2_test_successes_actual_rate_lags_target_rate(mock_get_next_ripe_time):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_lags(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, 
[threading.currentThread()]\n )\n advancer.start()\n\n while True:\n token = rl.acquire_token()\n for x in range(required_successes - 1):\n rl.release_token(token, True)\n rl.acquire_token()\n\n # Actual rate always lags target rate\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n self.assertLess(old_actual_rate, old_target_rate)\n\n # Token grant causes new rate to be calculated\n rl.release_token(token, True)\n\n # assert that the new rate is calculated based on the (lower/more conservative) actual rate\n if increase_factor * old_actual_rate < max_cluster_rate:\n if increase_factor * old_actual_rate < old_target_rate:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(old_target_rate, 2),\n )\n else:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(increase_factor * old_actual_rate, 2),\n )\n else:\n # assert that new rate never exceeds max rate\n self.assertEqual(rl._current_cluster_rate, max_cluster_rate)\n break\n advancer.stop(wait_on_join=False)", "def test_accurate(self):\n M = simulation.EventMonitor(self.G)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n times = self.G.pattern.nonzero()[1]*self.dt\n self.assertTrue(np.allclose(sorted(times), M.t))\n for (i, t) in zip(M.i, M.t):\n self.assertTrue(self.G.pattern[i, int_r(t/self.dt)])", "def test_relaxation_end(self):\n tau = 50.0\n mrate = 40.0\n Mrate = 120.0\n\n tmax = 50.0\n dt = 0.1\n relaxation = 20.0\n\n tutor = SimpleNeurons(2, out_fct=lambda _: Mrate*np.random.rand())\n reward = MockReward(lambda t: np.sin(10*t/tmax))\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=tau,\n constrain_rates=True, min_rate=mrate, max_rate=Mrate,\n learning_rate=0.1, relaxation=relaxation, use_tutor_baseline=False)\n\n # reproducible arbitrariness\n np.random.seed(1)\n\n M = simulation.StateMonitor(tutor_rule, 'out')\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, M, dt=dt)\n sim.run(tmax)\n\n mask = (M.t > tmax - relaxation/2)\n mavg = 0.5*(mrate + Mrate)\n\n self.assertAlmostEqual(np.mean(np.abs(M.out[:, mask] - mavg)), 0.0)", "def motor_rotate_deg(power,deg,port,sampling_time=.01,delay_when_stopping=.05): \n debug = False\n num_motor=len(power) #Number of motors being used\n #print num_motor\n init_val=[0]*num_motor\n curr_val=[0]*num_motor\n final_val=[0]*num_motor\n last_encod=[0]*num_motor\n \n delta=0\n gain=0.005\n idelta=0.0\n alpha=10\n smulti=0\n BrickPiUpdateValues()\n for i in range(num_motor):\n BrickPi.MotorEnable[port[i]] = 1 #Enable the Motors\n power[i]=abs(power[i])\n \n init_val[i]=BrickPi.Encoder[port[i]] #Initial reading of the encoder \n \n final_val[i]=init_val[i]+(deg[i]*2) #Final value when the motor has to be stopped;One encoder value counts for 0.5 degrees\n \n #For running clockwise and anticlockwise\n if deg[i]>0:\n BrickPi.MotorSpeed[port[i]] = power[i]\n elif deg[i]<0:\n BrickPi.MotorSpeed[port[i]] = -power[i]\n else:\n BrickPi.MotorSpeed[port[i]] = 0\n \n \n run_stat=[0]*num_motor\n\n time_start = time.time()\n time_end = time.time()\n time_total = time_end - time_start\n \n while True:\n time_end = time.time()\n time_total = time_end - time_start\n if time_total >= ROTATE_DEG_TIMEOUT:\n break\n \n result = BrickPiUpdateValues() #Ask BrickPi to update values for sensors/motors\n time.sleep(sampling_time) #sleep for the sampling time given (default:10 ms)\n i = 0\n #if debug:\n #print \"Result of Update Values: \" + `result`\n if not result :\n for i in range(num_motor): #Do for each 
of the motors\n #The FIRST thing we should do is check our encoders!\n curr_val[i]=BrickPi.Encoder[port[i]]\n if debug :\n print \"Motor \" + `i` + \" encoder: \" + `curr_val[i]`\n \n if run_stat[i]==1:\n continue\n # Check if final value reached for each of the motors\n if(deg[i]>0 and final_val[i]<=curr_val[i]) or (deg[i]<0 and final_val[i]>=curr_val[i]) :\n #This motor has reached its goal\n run_stat[i]=1\n \n #Now let's hit the breaks by going in reverse for a VERY quick amount of time.\n if deg[i]>0:\n BrickPi.MotorSpeed[port[i]] = -power[i]\n elif deg[i]<0:\n BrickPi.MotorSpeed[port[i]] = power[i]\n else:\n BrickPi.MotorSpeed[port[i]] = 0 \n BrickPiUpdateValues()\n time.sleep(delay_when_stopping)\n #Now let's turn the motor off all together\n BrickPi.MotorEnable[port[i]] = 0\n BrickPiUpdateValues()\n \n if(all(e==1 for e in run_stat)): #If all the motors have already completed their rotation, then stop\n break\n \n #Let's use Proportional Integral Control on the Motors to keep them in Sync\n if i == 1 :\n if curr_val[0] <> 0 and curr_val[1] <>0 : \n if last_encod[0]<>0 and last_encod[1] <>1 :\n if abs(last_encod[0] - init_val[0]) < abs(last_encod[1] - init_val[1]) :\n #Motor 1 is going faster\n delta = abs(curr_val[1]-last_encod[1]) - abs(curr_val[0]-last_encod[0])\n idelta = (abs(curr_val[1]-init_val[1]) - abs(curr_val[0]-init_val[0]))/alpha\n if debug:\n print \"Motor 1 is faster by \" + `delta`\n print \"last_encod = \" + `last_encod[0]` + \" , \" + `last_encod[1]`\n print \"idelta = \" + `idelta`\n print \"Current Encode = \" + `curr_val[0]` + \" , \" + `curr_val[1]`\n\n if int(abs(BrickPi.MotorSpeed[port[0]])) == 255 :\n #Motor 0 CANNOT be sped up\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to slow down Motor 1\n if int(abs(BrickPi.MotorSpeed[port[1]]-smulti)) <= 255 : \n #Target speed is inside the bounds of Motor speed\n BrickPi.MotorSpeed[port[1]] = int (BrickPi.MotorSpeed[port[1]]-smulti)\n elif int (BrickPi.MotorSpeed[port[1]]-smulti) < 0 :\n #Target speed is outside the bounds of -255 to 255\n BrickPi.MotorSpeed[port[1]] = -255\n else :\n BrickPi.MotorSpeed[port[1]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 1 speed : \" + `BrickPi.MotorSpeed[port[1]]`\n print \"Speed Multiplier : \" + `smulti`\n\n else :\n #Motor 0 CAN be sped up\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to speed up Motor 0\n if int(abs(BrickPi.MotorSpeed[port[0]]+smulti)) <= 255 : \n #Target speed is inside the bounds of Motor speed\n BrickPi.MotorSpeed[port[0]] = int (BrickPi.MotorSpeed[port[0]]+smulti)\n elif int (BrickPi.MotorSpeed[port[0]]+smulti) < 0 :\n #Target speed is outside the bounds of -255 to 255\n BrickPi.MotorSpeed[port[0]] = -255 \n else :\n BrickPi.MotorSpeed[port[0]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 0 speed : \" + `BrickPi.MotorSpeed[port[0]]`\n print \"Speed Multiplier : \" + `smulti`\n\n\n elif (last_encod[0] - curr_val[0]) > abs(last_encod[1] - curr_val[1]) :\n #Motor 0 is going faster\n delta= abs(curr_val[0]-last_encod[0])- abs(curr_val[1]-last_encod[1]) \n idelta = (abs(curr_val[0]-init_val[0]) - abs(curr_val[1]-init_val[1]))/alpha\n if debug :\n print \"Motor 0 is faster by \" + `delta`\n print \"last_encod = \" + `last_encod[0]` + \" , \" + `last_encod[1]`\n print \"idelta = \" + `idelta`\n print \"Current Encode = \" + `curr_val[0]` + \" , \" + `curr_val[1]`\n\n if abs(BrickPi.MotorSpeed[port[1]]) == 255 :\n #Motor 1 
CANNOT be sped up, SLOW DOWN Motor 0\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to slow down Motor 0\n if int(abs(BrickPi.MotorSpeed[port[0]]-smulti)) <= 255 :\n #Target speed is inside the bounds of Motor\n BrickPi.MotorSpeed[port[0]] = int (BrickPi.MotorSpeed[port[0]]-smulti)\n elif int (BrickPi.MotorSpeed[port[0]]-smulti) < 0 :\n #Target speed is outside the -255 to 255 bounds\n BrickPi.MotorSpeed[port[0]] = -255\n else : \n BrickPi.MotorSpeed[port[0]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 0 speed : \" + `BrickPi.MotorSpeed[port[0]]`\n print \"Speed Multiplier : \" + `smulti`\n\n else :\n #Motor 1 CAN be sped up SPEED UP Motor 1\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to speed up Motor 1\n if int(abs (BrickPi.MotorSpeed[port[1]]+smulti)) <= 255 :\n #Target speed is inside the bounds of Motor\n BrickPi.MotorSpeed[port[1]] = int (BrickPi.MotorSpeed[port[1]]+smulti)\n elif int (BrickPi.MotorSpeed[port[1]]+smulti) < 0 :\n #Target speed is outside the -255 to 255 bounds\n BrickPi.MotorSpeed[port[1]] = -255\n else :\n BrickPi.MotorSpeed[port[1]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 1 speed : \" + `BrickPi.MotorSpeed[port[1]]`\n print \"Speed Multiplier : \" + `smulti`\n \n last_encod[0] = curr_val[0]\n last_encod[1] = curr_val[1]\n BrickPi.MotorEnable[MOTOR1] = 1\n BrickPi.MotorEnable[MOTOR2] = 1\n return 0", "def __init__(self, t=0.01):\r\n\r\n\t\tself.t = float(t)\r\n\t\t\r\n\t\t#Control constants\r\n\t\t#Altitude\r\n\t\t#P\r\n\t\tkp1=3\r\n\t\t#PID\r\n\t\tkp2=6\r\n\t\tki2=0\r\n\t\tkd2=2.5\r\n\t\t#PID\r\n\t\tkp3=0.75\r\n\t\tki3=1.5\r\n\t\tkd3=0\r\n\t\t\r\n\t\t#Pitch\r\n\t\t#P\r\n\t\tkp1_theta=4.5\r\n\t\t#PID\r\n\t\tkp2_theta=0.15\r\n\t\tki2_theta=0.1\r\n\t\tkd2_theta=0.004\r\n\t\t\r\n\t\t#Roll\r\n\t\t#P\r\n\t\tkp1_phi=4.5\r\n\t\t#PID\r\n\t\tkp2_phi=0.15\r\n\t\tki2_phi=0.1\r\n\t\tkd2_phi=0.004\r\n\t\t\r\n\t\t#Yaw\r\n\t\t#P\r\n\t\tkp1_psi=10\r\n\t\t#PID\r\n\t\tkp2_psi=0.2\r\n\t\tki2_psi=0.02\r\n\t\tkd2_psi=0\r\n\t\t\r\n\t\t#Control\r\n\t\timax1=10\r\n\t\tt_max1=200\r\n\t\timax2=10\r\n\t\tt_max2=120\r\n\t\t\r\n\t\t#Constrains\r\n\t\trate_imax = 50\r\n\t\tac_imax = 40\r\n\t\tt_min = 800\r\n\t\tt_max = 1500\r\n\t\t\r\n\t\t#Goals\r\n\t\tself.target_z=3\r\n\t\tself.target_theta=0\r\n\t\tself.target_phi=0\r\n\t\tself.target_psi=0\r\n\r\n\t\t#Initial Conditions\r\n\t\tself.w=[1000, 1000, 1000, 1000]\r\n\t\t#self.ev = [0, 0, 1, 0, 0, 0, 0, 0, -9.81, 0.4, -0.2, 1, 0.1, -0.1, 0.2, 0.01, -0.01, 0.02]\r\n\t\tself.ev = [0, 0, 0, 0, 0, 0, 0, 0, -9.81, 0, 0, 0, 0, 0, 0, 0, 0, 0]\r\n\r\n\t\t#Create controllers\r\n\t\tself.altitudeController = control.controlAlt (self.t, kp1, kp2, ki2, kd2, kp3, ki3, kd3, rate_imax, ac_imax,t_min,t_max)\r\n\t\tself.thetaController = control.controlAngle (self.t, kp1_theta, kp2_theta, ki2_theta, kd2_theta, imax1, t_max1)\r\n\t\tself.phiController = control.controlAngle (self.t, kp1_phi, kp2_phi, ki2_phi, kd2_phi, imax1, t_max1)\r\n\t\tself.psiController = control.controlAngle (self.t, kp1_psi, kp2_psi, ki2_psi, kd2_psi, imax2, t_max2)", "def test_calcAngles_angles_or_axis(self, kargs, expected_len_result, expected_truncated_results):\n kargs['vsk'] = self.cal_SM\n result = pycgmCalc.calcAngles(self.motion_data, **kargs)\n np.testing.assert_equal(len(result), expected_len_result)\n np.testing.assert_almost_equal(result[0:5], expected_truncated_results)", "def 
test_from_rotation_angle_coordinate_of_phi(rotationangle):\n\n # Get the coordinate at phi\n phi_dash = rotationangle[\"phi\"]\n c3 = rotationangle[\"cs\"].from_rotation_angle(phi_dash)\n\n # Ensure that it is at the origin\n assert c3 == pytest.approx(0.0)", "def rotating_bar(length=10, input_size_x=240, input_size_y=180, offx=0,\n offy=0, angle_step=10, ts_offset=10*ms):\n x_coord = []\n y_coord = []\n pol = []\n ts = []\n center = (input_size_x/2 + offx, input_size_y/2 + offy)\n angles = np.linspace(0, np.pi*2, angle_step)\n for i, cAngle in enumerate(angles):\n endy_1 = center[1] + ((length / 2) * np.sin((cAngle)))\n endx_1 = center[0] + ((length / 2) * np.cos((cAngle)))\n endy_2 = center[1] - ((length / 2) * np.sin((cAngle)))\n endx_2 = center[0] - ((length / 2) * np.cos((cAngle)))\n start = np.asarray((endx_1, endy_1))\n end = np.asarray((endx_2, endy_2))\n max_direction, max_length = max(enumerate(abs(end - start)),\n key=operator.itemgetter(1))\n dv = (end - start) / max_length\n line = [dda_round(start)]\n for step in range(int(max_length)):\n line.append(dda_round((step + 1) * dv + start))\n for coord in line:\n x_coord.append(coord[0])\n y_coord.append(coord[1])\n ts.append(i)\n pol.append(1)\n events = np.zeros((4, len(x_coord)))\n events[0, :] = np.asarray(x_coord)\n events[1, :] = np.asarray(y_coord)\n events[2, :] = np.asarray(ts) \n events[3, :] = np.asarray(pol)\n\n ind = xy2ind(events[0, :], events[1, :], input_size_x)\n stimuli_generator = SpikeGeneratorGroup(input_size_x*input_size_y, indices=ind, \n times=ts*ts_offset, name='rotating_bar')\n return stimuli_generator, events", "def test_SetMultipleMovingLoadsWithVelocityFunctionConfigurationPositive(self):\n\n # create nodes\n second_coord = [1, 0, 0.0]\n self.mp.CreateNewNode(1,0.0,0.0,0.0)\n self.mp.CreateNewNode(2,second_coord[0],second_coord[1],0.0)\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [1,2], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n parameters[\"velocity\"]=KratosMultiphysics.Parameters(self.time_dependent_velocity)\n parameters.AddVector(\"configuration\", [0.5])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.25)\n\n process = GMA.SetMultipleMovingLoadsProcess(self.mp, parameters)\n cond = self.cmp.GetCondition(2)\n\n # initialise and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0, 0)\n rhs = KratosMultiphysics.Vector(0)\n\n # set load on node\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -1.0, 0.0, -1.0])\n\n # change time and recalculate load\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -1.0, 0.0, -1.0])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0.5)\n\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -0.5, 0.0, -1.5])", "def _case4_test_failures_actual_rate_leads_target_rate(mock_get_next_ripe_time):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_leads(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, 
[threading.currentThread()]\n )\n advancer.start()\n\n counter = 0\n while True:\n token = rl.acquire_token()\n\n # Actual rate is always None because\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n if old_actual_rate is not None:\n self.assertGreater(old_actual_rate, old_target_rate)\n\n # Token grant a 100 initial successes followed by all failures\n counter += 1\n if counter <= required_successes:\n rl.release_token(token, True)\n continue\n else:\n rl.release_token(token, False)\n\n # assert that the new rate is calculated based on the (higher/more conservative) actual rate\n if backoff_factor * old_target_rate > min_cluster_rate:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(backoff_factor * old_target_rate, 2),\n )\n else:\n # assert that new rate never goes lower than min rate\n self.assertEqual(rl._current_cluster_rate, min_cluster_rate)\n break\n advancer.stop(wait_on_join=False)", "def test_calculate_angle():\n r1 = np.array([0, 0, -1])\n r2 = np.array([0, 0, 0])\n r3 = np.array([1, 0, 0])\n\n expected_angle = 90\n calculated_angle = molecool.calculate_angle(r1, r2, r3, degrees = True)\n\n assert expected_angle == calculated_angle", "def test_ajuste(self):\n\n def test(clk, nrst, tick, ajuste, ajuste_hora, ajuste_min, ajuste_seg, hora, min, seg):\n\n yield delay(tick_period * randint(60, 180))\n ajuste.next = 1\n ajuste_hora.next = 5\n ajuste_min.next = 10\n ajuste_seg.next = 0\n\n yield delay(tick_period*2)\n self.assertEqual(5, hora)\n self.assertEqual(10, min)\n self.assertEqual(0, seg)\n\n ajuste.next = 0\n yield delay(tick_period)\n self.assertEqual(5, hora)\n self.assertEqual(10, min)\n self.assertEqual(1, seg)\n\n runSim(test, 60*60*3*tick_period)", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def test_retry_run(self):\n pass", "def test_dry_lapse_2_levels():\n temps = dry_lapse(np.array([1000., 500.]) * units.mbar, 293. 
* units.kelvin)\n assert_array_almost_equal(temps, [293., 240.3583] * units.kelvin, 4)", "def rotate(self,angle):\n origin = copy.deepcopy(self._current)\n\n q = [origin.orientation.x,\n origin.orientation.y,\n origin.orientation.z,\n origin.orientation.w] # quaternion nonsense\n\n (roll, pitch, yaw) = euler_from_quaternion(q)\n\n atTarget=False\n\n currentAngle=yaw\n angle=angle+currentAngle\n\n if(angle==currentAngle):\n w=0\n elif(angle>currentAngle):\n w=1\n elif(angle<currentAngle):\n w=-1\n\n move_msg=Twist()\n move_msg.linear.x=0\n move_msg.angular.z=w\n\n\n stop_msg =Twist()\n stop_msg.linear.x=0\n stop_msg.angular.z=0\n\n while(not atTarget and not rospy.is_shutdown()):\n if(currentAngle>=angle):\n atTarget=True\n self._vel_pub.publish(stop_msg)\n print('rotate: stoped')\n else:\n origin = copy.deepcopy(self._current)\n\n q = [origin.orientation.x,\n origin.orientation.y,\n origin.orientation.z,\n origin.orientation.w] # quaternion nonsense\n\n (roll, pitch, yaw) = euler_from_quaternion(q)\n\n currentAngle=yaw\n self._vel_pub.publish(move_msg)\n rospy.sleep(.15)\n print('rotate: moving')\n print('angle: '+str(angle)+'currentAngle: '+str(currentAngle))", "def calibrate_rotation_rate(self, direction, angle):\n print(location_string[direction], \" calibration\")\n\n for speed in range(self.MIN_SPEED, 100, self.SPEED_TABLE_INTERVAL):\n sleep(1)\n if direction == DIR_LEFT: # rotate left\n self.kit.motor3.throttle = -speed/100\n self.kit.motor4.throttle = speed/100\n\n elif direction == DIR_RIGHT: # rotate right\n self.kit.motor3.throttle = speed/100\n self.kit.motor4.throttle = -speed/100\n\n else:\n print(\"Invalid direction\")\n\n time = self.rotation_angle_to_time(angle, speed)\n\n print(location_string[direction], \": rotate\", angle, \" degrees at speed \",\n speed, \" for \", time, \" ms\")\n sleep(time*1e-3)\n self.kit.motor3.throttle = 0\n self.kit.motor4.throttle = 0\n sleep(2) # two second delay between speeds", "def test_palm_rejection_timeout(get_touchmat):\n touchmat = get_touchmat\n touchmat_model = check_device_types.get_device_model(touchmat)\n\n if touchmat_model == Devices.touchmat_g1:\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout()\n assert 'Functionality not available' in str(execinfo.value)\n\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(150)\n assert 'Functionality not available' in str(execinfo.value)\n return\n\n original_timeout = touchmat.palm_rejection_timeout()\n\n new_timeout = random.randint(150, 2000)\n timeout = touchmat.palm_rejection_timeout(new_timeout)\n assert timeout == new_timeout\n assert touchmat.palm_rejection_timeout() == new_timeout\n\n # Test the edge cases\n timeout = touchmat.palm_rejection_timeout(2000)\n assert timeout == 2000\n assert touchmat.palm_rejection_timeout() == 2000\n timeout = touchmat.palm_rejection_timeout(150)\n assert timeout == 150\n assert touchmat.palm_rejection_timeout() == 150\n\n # Test out of range values\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(149)\n assert 'Parameter out of range' in execinfo.value.message\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(2001)\n assert 'Parameter out of range' in execinfo.value.message\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(3000)\n assert 'Parameter out of range' in execinfo.value.message\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(15)\n assert 
'Parameter out of range' in execinfo.value.message\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(-150)\n assert 'Parameter out of range' in execinfo.value.message\n\n # Test invalid parameters\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout(\"abc\")\n assert 'Invalid parameter' in execinfo.value.message\n with pytest.raises(PySproutError) as execinfo:\n touchmat.palm_rejection_timeout({})\n assert 'Invalid parameter' in execinfo.value.message\n\n # Set the original value back\n timeout = touchmat.palm_rejection_timeout(original_timeout)\n assert timeout == original_timeout\n assert touchmat.palm_rejection_timeout() == original_timeout", "def test_scenario(timestep_per_pi, int_method):\n\n #determine BC and IC\n x0 = 0.0 #init pos\n v0 = 1.0 #init vel\n t0 = 0.0 #start-time\n tn = 4.0*np.pi #end-time\n tau = timestep_per_pi*np.pi #timesteps\n n = (tn-t0)/tau + 1 #number of timesteps\n \n time = np.linspace(t0, tn, n) #time-array\n\n #acceleration of point particle with k=m=1\n acc1 = lambda x,v,t: -1.0*x #function must take three arguments!\n\n pos, vel, time = integrate_time(func=acc1,\n init=(x0,v0),\n timearray=time,\n method=int_method)\n\n #analytical solutions\n pos_an = np.sin(time)\n vel_an = np.cos(time)\n\n return time, pos, pos_an, vel, vel_an", "def test_prop_learning_rate(self):\n tmax = 10.0\n dt = 1.0\n\n learning_rate1 = 0.1\n learning_rate2 = 0.5\n\n ini_rate = 80.0\n\n tutor = SimpleNeurons(1, out_fct=lambda _: ini_rate+20.0)\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=learning_rate1,\n use_tutor_baseline=False)\n\n sim1 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim1.run(tmax)\n\n drates1 = tutor_rule.rates - ini_rate\n\n tutor_rule.reset_rates()\n tutor_rule.learning_rate = learning_rate2\n\n sim2 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim2.run(tmax)\n\n drates2 = tutor_rule.rates - ini_rate\n\n self.assertLess(np.max(np.abs(learning_rate2*drates1 -\n learning_rate1*drates2)), 1e-6)", "def test_revolute_from_dh(self):\n x_offset = 1\n z_offset = 2\n # Rotate around the z axis\n r = Joint.revolute_from_dh(0, 0, x_offset, z_offset)\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat[:3, :3], np.array([1, 0, 0]))\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))\n self.assertTrue(np.allclose(t_mat[2, 3], z_offset))\n # x was rotated 90 degrees, and is now y\n self.assertTrue(np.allclose(t_mat[1, 3], x_offset))", "def attempt(self, timer, context, phases):", "def spinAround(self):", "def reached_angle(self, angle, tol):\n if self.ros_node.get_data(\"/auto/turret/current/angle\") is None:\n rospy.logerr(\"The topic /auto/turret/current/angle has not been published yet\")\n else:\n neg_angle_diff = self.wrap_angle(self.ros_node.get_data(\"/auto/turret/current/angle\") - angle)\n pos_angle_diff = self.wrap_angle(angle - self.ros_node.get_data(\"/auto/turret/current/angle\"))\n\n if pos_angle_diff <= tol or neg_angle_diff <= tol:\n return True\n return False", "def angle(self, angle: int, time: int = 0, /) -> None:", "def test_option_repeat_interval(self):\n # run with --retry, see 2 lines, then kill -INT\n cmd, output = runCmdOutput(['-p', '7788', '-r'],\n wait=False, limit=2)\n cmd.send_signal(signal.SIGINT)\n self.assertEqual(cmd.wait(), 1)\n cmd.stdout.close()\n # run with --retry, see 4 
lines, then kill -INT\n cmd, output = runCmdOutput(['-p', '7788', '-r', '-i', '1'],\n wait=False, limit=4)\n cmd.send_signal(signal.SIGINT)\n self.assertEqual(cmd.wait(), 1)\n cmd.stdout.close()\n # invalid --interval option argument (int > 0)\n cmd, output = runCmdOutput(['-p', '7788', '-i', '0'])\n self.assertEqual(cmd.returncode, os.EX_USAGE)\n # --interval option argument ignored if no --retry\n cmd, output = runCmdOutput(['-p', '7788', '-i', '1000'])\n self.assertEqual(cmd.returncode, os.EX_OK)", "def test_relaxation_no_change_beginning(self):\n tau = 25.0\n mrate = 40.0\n Mrate = 120.0\n\n tmax = 50.0\n dt = 0.1\n relaxation = 20.0\n\n tutor = SimpleNeurons(2, out_fct=lambda _: Mrate*np.random.rand())\n reward = MockReward(lambda t: np.sin(8*t/tmax))\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=tau,\n constrain_rates=True, min_rate=mrate, max_rate=Mrate,\n learning_rate=0.1, relaxation=None, use_tutor_baseline=False)\n\n # reproducible arbitrariness\n np.random.seed(12)\n\n M1 = simulation.StateMonitor(tutor_rule, 'out')\n\n sim1 = simulation.Simulation(tutor, reward, tutor_rule, M1, dt=dt)\n sim1.run(tmax)\n\n # now run again with relaxation enabled\n tutor_rule.relaxation = relaxation\n tutor_rule.reset_rates()\n np.random.seed(12)\n\n M2 = simulation.StateMonitor(tutor_rule, 'out')\n\n sim2 = simulation.Simulation(tutor, reward, tutor_rule, M2, dt=dt)\n sim2.run(tmax)\n\n mask = (M1.t < tmax - relaxation)\n self.assertAlmostEqual(np.mean(np.abs(M1.out[:, mask] - M2.out[:, mask])),\n 0.0)\n\n self.assertNotAlmostEqual(np.mean(np.abs(M1.out[:, ~mask] -\n M2.out[:, ~mask])), 0.0)", "def randomize_trajectory(self):\n self.angle = randint(-360, 360)\n self.speed = randint(1, 5)/2.5", "def do_polar_alignment_test(self, *arg):\n if self.ready is False:\n return\n\n start_time = current_time(flatten=True)\n\n base_dir = '{}/images/drift_align/{}'.format(\n os.getenv('PANDIR'), start_time)\n plot_fn = '{}/{}_center_overlay.jpg'.format(base_dir, start_time)\n\n mount = self.pocs.observatory.mount\n\n print_info(\"Moving to home position\")\n self.pocs.say(\"Moving to home position\")\n mount.slew_to_home()\n\n # Polar Rotation\n pole_fn = polar_rotation(self.pocs, base_dir=base_dir)\n pole_fn = pole_fn.replace('.cr2', '.fits')\n\n # Mount Rotation\n rotate_fn = mount_rotation(self.pocs, base_dir=base_dir)\n rotate_fn = rotate_fn.replace('.cr2', '.fits')\n\n print_info(\"Moving back to home\")\n self.pocs.say(\"Moving back to home\")\n mount.slew_to_home()\n\n print_info(\"Solving celestial pole image\")\n self.pocs.say(\"Solving celestial pole image\")\n try:\n pole_center = polar_alignment_utils.analyze_polar_rotation(pole_fn)\n except error.SolveError:\n print_warning(\"Unable to solve pole image.\")\n print_warning(\"Will proceeed with rotation image but analysis not possible\")\n pole_center = None\n else:\n pole_center = (float(pole_center[0]), float(pole_center[1]))\n\n print_info(\"Starting analysis of rotation image\")\n self.pocs.say(\"Starting analysis of rotation image\")\n try:\n rotate_center = polar_alignment_utils.analyze_ra_rotation(rotate_fn)\n except Exception:\n print_warning(\"Unable to process rotation image\")\n rotate_center = None\n\n if pole_center is not None and rotate_center is not None:\n print_info(\"Plotting centers\")\n self.pocs.say(\"Plotting centers\")\n\n print_info(\"Pole: {} {}\".format(pole_center, pole_fn))\n self.pocs.say(\"Pole : {:0.2f} x {:0.2f}\".format(\n pole_center[0], pole_center[1]))\n\n print_info(\"Rotate: {} 
{}\".format(rotate_center, rotate_fn))\n self.pocs.say(\"Rotate: {:0.2f} x {:0.2f}\".format(\n rotate_center[0], rotate_center[1]))\n\n d_x = pole_center[0] - rotate_center[0]\n d_y = pole_center[1] - rotate_center[1]\n\n self.pocs.say(\"d_x: {:0.2f}\".format(d_x))\n self.pocs.say(\"d_y: {:0.2f}\".format(d_y))\n\n fig = polar_alignment_utils.plot_center(\n pole_fn, rotate_fn, pole_center, rotate_center)\n\n print_info(\"Plot image: {}\".format(plot_fn))\n fig.tight_layout()\n fig.savefig(plot_fn)\n\n try:\n os.unlink('/var/panoptes/images/latest.jpg')\n except Exception:\n pass\n try:\n os.symlink(plot_fn, '/var/panoptes/images/latest.jpg')\n except Exception:\n print_warning(\"Can't link latest image\")\n\n with open('/var/panoptes/images/drift_align/center.txt'.format(base_dir), 'a') as f:\n f.write('{}.{},{},{},{},{},{}\\n'.format(start_time, pole_center[0], pole_center[\n 1], rotate_center[0], rotate_center[1], d_x, d_y))\n\n print_info(\"Done with polar alignment test\")\n self.pocs.say(\"Done with polar alignment test\")", "def test_str_rotation_angle(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"rotation_angle\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x2D,\n 0xDC,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 11740)\n self.assertEqual(sensor.unit_of_measurement(), \"°\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_moist_lapse_uniform():\n temp = moist_lapse(np.array([900., 900., 900.]) * units.hPa, 20. * units.degC)\n assert_almost_equal(temp, np.array([20., 20., 20.]) * units.degC, 7)", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def __init__(self, init_pose=None, init_velocities=None, \n init_angle_velocities=None, runtime=5., target_pos=None, target_vel=None):\n # Simulation\n self.sim = PhysicsSim(init_pose, init_velocities, init_angle_velocities, runtime) \n self.action_repeat = 3\n\n self.state_size = self.action_repeat * self.sim.pose.size\n \n # only one main throttle, all rotors get this.\n self.action_size = 1\n \n self.action_low = 300\n self.action_high = 600\n\n # Goal\n self.target_pos = target_pos if target_pos is not None else np.array([0., 0., 10.]) \n self.target_vel = target_vel if target_vel is not None else np.array([0., 0., 0.]) \n \n self.rotor_speeds = np.ones(4) # avoids div by 0 in sim", "def target_position(self, time):\n \"\"\"\n start_pos = self.points[self.cur_start]\n seg_time = time - self.last_checkpoint_time\n\n #The arguement of target-velocity dosent matter\n cur_pos = self.target_velocity(time)*seg_time + start_pos\n\n \n # or time > (self.total_time / 4)*(self.cur_start + 1)\n cur_pos_norm = length(cur_pos - start_pos)\n\n next_corner = self.points[(self.cur_start + 1)%4]\n \n seg_norm = length(next_corner - start_pos)\n print(\"cur_pos : \", cur_pos, \"segment: \", self.cur_start, seg_norm - cur_pos_norm)\n\n if cur_pos_norm >= 
seg_norm:\n self.cur_start = (self.cur_start + 1) % 4\n self.last_checkpoint_time = time\n return cur_pos\n \"\"\"\n\n #Possibly use rospy.sleep()\n total_time = self.total_time\n\n\n if time < total_time/4:\n return self.path1.target_position(time)\n\n elif time - total_time/4 == 0:\n rospy.sleep(0.5)\n\n elif time < total_time/2:\n return self.path2.target_position(time - (total_time/4 + 0.5))\n # return self.path2.target_position(time - (total_time/4 ))\n\n\n elif time - total_time/2 == 0:\n rospy.sleep(0.5)\n\n elif time <= total_time/4*3:\n return self.path3.target_position(time - (total_time/2 + 1))\n # return self.path3.target_position(time - (total_time/2))\n\n\n elif time - total_time/4*3 == 0:\n rospy.sleep(0.5)\n\n else:\n return self.path4.target_position(time - (total_time/4*3 + 1.5))\n # return self.path4.target_position(time - (total_time/4*3))", "def test_euler_angles_1q_hard_thetas(self, gate):\n self.check_one_qubit_euler_angles(Operator(gate))", "def test_deconvolve_to_motor_error(self):\n tau = 50.0\n mrate = 50.0\n Mrate = 100.0\n\n tmax = 50.0\n dt = 0.1\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n self.rule.gain = 1\n self.rule.tau_deconv1 = tau\n\n self.motor.error_fct = lambda _: np.ones(self.Nsrc)\n\n M = simulation.StateMonitor(self.rule, 'out')\n\n sim = simulation.Simulation(self.source, self.motor, self.rule, M, dt=dt)\n sim.run(tmax)\n \n # the output should be almost constant\n self.assertAlmostEqual(np.std(M.out)/np.mean(M.out), 0)", "def test_calculate_tilt(tilt_reference):\n tilt_rad = np.radians(tilt_reference)\n # get the rotation axis\n # NOTE:\n # we need to tilt the image -tilt in order to get the tilt angle as + value.\n rot_aixs_tilted = get_tilted_rot_axis(tilt_inplane=-tilt_rad, tilt_outplane=0.0)\n # radiograph at 0 deg\n img0 = virtual_cam(two_sphere_system(0, rot_aixs_tilted, size=200))\n # radiograph at 180 deg\n img180 = virtual_cam(two_sphere_system(np.pi, rot_aixs_tilted, size=200))\n # calculate the tilt angle\n tilt_angle = calculate_tilt(img0, img180).x\n # verify\n # NOTE: tolerance is set to half a pixel at the edge of the FOV\n np.testing.assert_allclose(tilt_angle, tilt_reference, atol=np.degrees(0.5 / 100))", "def rotate(self):\n pass", "def test_get_speed_limit():\n center = Coordinates(1 , 1)\n radius = 10\n speed_limit = 20\n\n assert get_speed_limit(center, radius, speed_limit) != center\n assert get_speed_limit(center, radius, speed_limit) != radius\n assert get_speed_limit(center, radius, speed_limit) == speed_limit", "def test_dynamics_with_tau_ref(self):\n n = 10\n t_max = 100.0\n dt = 0.1\n\n G = StudentLayer(n)\n\n i_values = np.linspace(0.02, 0.4, 28)\n\n different = 0\n for i_ext in i_values:\n # start with different initial voltages to take advantage of averaging\n # effects\n G.v_init = np.linspace(G.vR, G.v_th, n, endpoint=False)\n G.i_ext_init = i_ext\n\n M = simulation.EventMonitor(G)\n\n sim = simulation.Simulation(G, M, dt=dt)\n sim.run(t_max)\n\n rate = float(len(M.t))/n/t_max*1000.0\n # first source of uncertainty: a spike might not fit before the end of a\n # simulation\n uncertainty1 = 1.0/np.sqrt(n)/t_max*1000.0\n \n expected0 = 0.0\n expected = 0.0\n if G.R*i_ext > G.v_th - G.vR:\n expected0 = 1000.0/(G.tau_m*np.log(G.R*i_ext/(G.vR-G.v_th+G.R*i_ext)))\n expected = expected0/(1 + expected0*G.tau_ref/1000.0)\n\n # second source of uncertainty: spikes might move due to the granularity\n # of the simulation\n uncertainty2 = 
dt*expected*rate/1000.0\n uncertainty = uncertainty1 + uncertainty2\n\n self.assertLess(np.abs(rate - expected), uncertainty)\n\n if np.abs(expected - expected0) >= uncertainty:\n different += 1\n else:\n self.assertAlmostEqual(rate, 0.0)\n \n # make sure that in most cases the firing rate using the refractory period\n # was significantly different from the case without refractory period\n self.assertGreater(different, len(i_values)*2/3)", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_REFRESH_INTERVAL(self):\n self.assertIsInstance(constants.REFRESH_INTERVAL, int,\n \"constants.REFRESH_INTERVAL must be an integer.\")", "def test_timescales(self):\n np.random.seed(2312321)\n param_pairs = [(1, 0, self.rule.tau1), (0, 1, self.rule.tau2)]\n\n nsteps = 10\n self.conductor.out_fct = lambda i: 10*np.ones(self.Nc) if i == 0 \\\n else np.zeros(self.Nc)\n\n sim = simulation.Simulation(self.conductor, self.student, self.tutor,\n self.syns, self.rule, dt=self.dt)\n W0 = np.copy(self.syns.W)\n\n for params in param_pairs:\n self.rule.alpha = params[0]\n self.rule.beta = params[1]\n tau = params[2]\n\n self.tutor.out_fct = lambda i: (self.rule.theta + (10 if i == 0 else 0))*\\\n np.ones(self.Ns)\n\n self.syns.W = np.copy(W0)\n sim.run(self.dt)\n\n change0 = self.syns.W - W0\n\n self.assertGreater(np.linalg.norm(change0), 1e-10)\n \n self.tutor.out_fct = lambda i: (self.rule.theta + (10\n if i == nsteps-1 else 0))*np.ones(self.Ns)\n\n self.syns.W = np.copy(W0)\n sim.run(nsteps*self.dt)\n\n change1 = self.syns.W - W0\n\n change1_exp = change0*(1 - float(self.dt)/tau)**(nsteps-1)\n\n self.assertTrue(np.allclose(change1, change1_exp),\n msg=\"Timescale not verified, alpha={}, beta={}.\".format(*params[:2]))", "def test_range_no_compress(self):\n tau = 40.0\n mrate1 = 50.0\n Mrate1 = 100.0\n\n mrate2 = 30.0\n Mrate2 = 130.0\n\n tmax = 50.0\n dt = 0.1\n\n self.rule.tau = tau\n self.rule.min_rate = mrate1\n self.rule.max_rate = Mrate1\n self.rule.compress_rates = False\n\n self.motor.error_fct = lambda t: (int_r(t)%2)*np.ones(self.Nsrc)\n\n M1 = simulation.StateMonitor(self.rule, 'out')\n\n sim1 = simulation.Simulation(self.source, self.motor, self.rule, M1, dt=dt)\n sim1.run(tmax)\n\n self.rule.min_rate = mrate2\n self.rule.max_rate = Mrate2\n\n M2 = simulation.StateMonitor(self.rule, 'out')\n\n sim2 = simulation.Simulation(self.source, self.motor, self.rule, M2, dt=dt)\n sim2.run(tmax)\n\n expected2 = mrate2 + (M1.out - mrate1)*(Mrate2 - mrate2)/(Mrate1 - mrate1)\n\n self.assertTrue(np.allclose(M2.out, expected2), msg=\n \"mean(abs(out2 - expected2))={}\".format(\n np.mean(np.abs(M2.out - expected2))))", "def test_constrain_rates(self):\n tmax = 10.0\n dt = 1.0\n\n ini_rate = 80.0\n min_rate = ini_rate - 5.0\n max_rate = ini_rate + 5.0\n\n nsteps = int_r(tmax/dt)\n\n tutor = SimpleNeurons(1, out_fct=lambda i: ini_rate + i*20.0/nsteps - 10.0)\n reward = MockReward(lambda _: 1.0)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=1.0,\n min_rate=min_rate, max_rate=max_rate,\n use_tutor_baseline=False)\n\n sim1 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim1.run(tmax)\n\n # rates should exceed limits\n self.assertGreater(np.max(tutor_rule.rates), max_rate)\n self.assertLess(np.min(tutor_rule.rates), min_rate)\n\n tutor_rule.constrain_rates = True\n 
tutor_rule.reset_rates()\n\n sim2 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim2.run(tmax)\n \n # rates should no longer exceed limits\n self.assertLessEqual(np.max(tutor_rule.rates), max_rate)\n self.assertGreaterEqual(np.min(tutor_rule.rates), min_rate)", "def test_relaxation_end(self):\n tau = 50.0\n mrate = 40.0\n Mrate = 120.0\n\n tmax = 50.0\n dt = 0.1\n relaxation = 20.0\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n self.rule.relaxation = relaxation\n\n self.motor.error_fct = lambda _: np.ones(self.Nsrc)\n\n M = simulation.StateMonitor(self.rule, 'out')\n\n sim = simulation.Simulation(self.source, self.motor, self.rule, M, dt=dt)\n sim.run(tmax)\n\n mask = (M.t > tmax - relaxation/2)\n mavg = 0.5*(mrate + Mrate)\n\n self.assertAlmostEqual(np.mean(np.abs(M.out[:, mask] - mavg)), 0.0)", "def check_angle(self):\n self.find_pixels()\n alpha_theta=np.deg2rad(70)\n alpha_phi=np.deg2rad(70)\n extreme_values=self.compute_extreme_values(alpha_phi, alpha_theta)\n x=np.linspace(extreme_values[0], extreme_values[1], self.number_of_pix[1])\n y=np.linspace(extreme_values[2], extreme_values[3], self.number_of_pix[0])\n phi_0=20\n phi_0=np.deg2rad(phi_0)\n j, diff=self.compute_phi(\"find_orient.png\")\n print \"j=\", j\n print \"diff=\", diff", "async def test_camera_bad_interval_update(\n hass: HomeAssistant,\n mock_entry: MockEntityFixture,\n camera: tuple[Camera, str],\n):\n\n state = hass.states.get(camera[1])\n assert state and state.state == \"idle\"\n\n # update fails\n mock_entry.api.update = AsyncMock(side_effect=NvrError)\n await time_changed(hass, DEFAULT_SCAN_INTERVAL)\n\n state = hass.states.get(camera[1])\n assert state and state.state == \"unavailable\"\n\n # next update succeeds\n mock_entry.api.update = AsyncMock(return_value=mock_entry.api.bootstrap)\n await time_changed(hass, DEFAULT_SCAN_INTERVAL)\n\n state = hass.states.get(camera[1])\n assert state and state.state == \"idle\"", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def __init__(self, init_pose=None, init_velocities=None, \n init_angle_velocities=None, runtime=5., target_pos=None):\n # Simulation\n self.success=0\n self.success_rate=deque(maxlen=10) # to record agent's success/failure for 10 consecutive episodes \n self.sim = PhysicsSim(init_pose, init_velocities, init_angle_velocities, runtime) \n self.action_repeat = 3\n\n self.state_size = self.action_repeat * 6\n self.action_low = 0\n self.action_high = 900\n self.action_size = 4\n self.runtime=runtime\n self.x_lower_bound=self.sim.lower_bounds[0]\n self.y_lower_bound=self.sim.lower_bounds[1]\n self.z_lower_bound=self.sim.lower_bounds[2]\n self.x_upper_bound=self.sim.upper_bounds[0]\n self.y_upper_bound=self.sim.upper_bounds[1]\n self.z_upper_bound=self.sim.upper_bounds[2]\n \n #Initial pos\n self.start_pos=init_pose[:3]\n \n # Goal\n self.target_pos = target_pos if target_pos is not None else np.array([0., 0., 10.]) \n \n # distance between initial position and target position \n self.total_distance= (np.dot(self.target_pos-self.start_pos, self.target_pos-self.start_pos))**(0.5)\n # target_margin : if the quad is within a distance of target_margin from the target, then it is declared successful \n self.target_margin=self.total_distance/50" ]
[ "0.6687461", "0.6452325", "0.6416947", "0.62044525", "0.6084357", "0.5957329", "0.5694046", "0.56862086", "0.56001717", "0.556808", "0.5566651", "0.5540372", "0.5497633", "0.54841346", "0.54720324", "0.54684687", "0.5468463", "0.5463228", "0.53882957", "0.53821856", "0.5356837", "0.5354365", "0.5354101", "0.53317326", "0.53135127", "0.52926946", "0.52573335", "0.5229836", "0.5228673", "0.5209369", "0.5203216", "0.5185583", "0.517988", "0.517107", "0.5149802", "0.5149452", "0.5138715", "0.5135581", "0.51312184", "0.5122282", "0.5118334", "0.51182014", "0.5117069", "0.5107735", "0.5104502", "0.51039135", "0.5100434", "0.50996614", "0.5095111", "0.50867337", "0.5084349", "0.507744", "0.5077094", "0.50666", "0.50655866", "0.5057907", "0.5056075", "0.50553364", "0.5047932", "0.50422955", "0.5036175", "0.5035824", "0.50355756", "0.5034225", "0.5031126", "0.50291204", "0.50282484", "0.50193024", "0.50105476", "0.5009192", "0.5008615", "0.5000174", "0.4980089", "0.4977315", "0.49755207", "0.49689627", "0.49563295", "0.49472117", "0.4942875", "0.4937875", "0.4927515", "0.49254787", "0.49224302", "0.4919232", "0.49185246", "0.49164143", "0.49142957", "0.49092323", "0.49073124", "0.4900961", "0.4899538", "0.48972747", "0.489339", "0.48932582", "0.48910913", "0.48870626", "0.48855132", "0.48844248", "0.48742872", "0.48712513", "0.48671323" ]
0.0
-1
Tests of log rotation with a relative target and interval in the configuration.
def test_process_log_with_relative_target_in_configuration(self):
    with tempfile.TemporaryDirectory() as sandbox:
        with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:
            srcfile = Path(sandbox, 'pokus.log')
            srcfile.touch()
            destfile = Path(sandbox, 'backup', 'pokus.log')
            compressors = process_log(
                datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),
                {'target': 'backup/{{name}}.{{ext}}', 'interval': 'hourly'},
                'hourly',
                str(srcfile),
                10
            )
            self.assertEqual(compressors, [])
            self.assertFalse(srcfile.exists())
            self.assertTrue(destfile.exists())
            self.assertEqual(
                fake_stdout.getvalue(),
                'Checking "{src}"... rotating... "{src}" -> "{dest}" done.\n'.format(
                    src=srcfile, dest=Path('backup', 'pokus.log')))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def test_g_asignar_rol(self):", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = 
{self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_extract_rot_angle():\n v = np.zeros((4,2))\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Zero velocities not allowed.\"\n \n v[:,1] = 1.\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Failed to get both forward and backward directions.\"\n\n # Forwards-backwards motion.\n v[:,1] = 0.\n v[:2,0] = -1.1\n v[2:,0] = 1.2\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,np.pi)\n\n # Forwards-backwards motion.\n v[:,0] = 0.\n v[:2,1] = -.9\n v[2:,1] = .8\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2)\n\n # Forwards-backwards motion with noise.\n v[:2,1] += (np.random.rand(2)*2-1)/10\n v[2:,1] += (np.random.rand(2)*2-1)/10\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2,atol=.1)", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_controlled_rotation_gradient(self, G, tol):\n dev = qml.device(\"default.qubit\", wires=2)\n b = 0.123\n\n @qml.qnode(dev, diff_method=\"parameter-shift\")\n def circuit(b):\n qml.QubitStateVector(np.array([1.0, -1.0]) / np.sqrt(2), wires=0)\n G(b, wires=[0, 1])\n return qml.expval(qml.PauliX(0))\n\n res = circuit(b)\n assert np.allclose(res, -np.cos(b / 2), atol=tol, rtol=0)\n\n grad = qml.grad(circuit)(b)\n expected = np.sin(b / 2) / 2\n assert np.allclose(grad, expected, atol=tol, rtol=0)", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def test_calculate_angle():\n r1 = np.array([0, 0, -1])\n r2 = np.array([0, 0, 0])\n r3 = np.array([1, 0, 0])\n\n expected_angle = 90\n calculated_angle = molecool.calculate_angle(r1, r2, r3, degrees = True)\n\n assert expected_angle == calculated_angle", "def test_rot(self):\n\n print(\"rot()\")\n obs = self.fixture\n\n # rotation(0) = identity\n for axis in [1, 2, 3]:\n # theta = 0.0\n rotation = obs.rot(0.0, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n # theta = 2*pi\n rotation = obs.rot(2.0 * np.pi, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n\n # perform many randomized tests\n num_tests = 100\n num_products = 10\n for 
_test_counter in range(num_tests):\n thetas = []\n axes = []\n base = np.eye(3)\n # we will multiply a series of rotations into \"base\"\n rot_all = base\n for _rot_counter in range(num_products):\n theta = np.random.uniform(2 * np.pi) # in [0,2 pi]\n axis = np.random.randint(3) + 1 # in {1,2,3}\n axes.append(axis)\n thetas.append(theta)\n rotation = obs.rot(theta, axis)\n # multiply rot1 into the cumulative rotation\n rot_all = np.dot(rot_all, rotation)\n # now, back all the rotations out\n for _rot_counter in range(num_products):\n theta = thetas.pop()\n axis = axes.pop()\n # apply the inverse rotation\n rotation = obs.rot(-theta, axis)\n rot_all = np.dot(rot_all, rotation)\n # find || base - rot1 * rot2 ||\n diff = np.linalg.norm(base - rot_all)\n self.assertAlmostEqual(diff, 0.0, delta=1e-10 * num_products)", "def test_direction_no_int(self):\n tmax = 40.0\n dt = 0.1\n\n tutor = SimpleNeurons(2, out_fct=lambda _: [100.0, 60.0])\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=80.0, learning_rate=0.1,\n use_tutor_baseline=False)\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n # tutor_rule's output should be increasing for t < tmax/2, decreasing after\n mask = (np.arange(0, tmax, dt) < tmax/2)\n\n self.assertGreater(np.min(tutor_rule.rates[mask, 0]), 80.0)\n self.assertLess(np.max(tutor_rule.rates[mask, 1]), 80.0)\n\n self.assertGreater(np.min(tutor_rule.rates[~mask, 1]), 80.0)\n self.assertLess(np.max(tutor_rule.rates[~mask, 0]), 80.0)", "def test_t(self):\n assert np.isclose(self.stepper.t, self.final_t)", "async def rotate(self, angle: float, duration: float) -> None:\n angle *= self._ratio\n if duration < 0:\n raise ValueError\n if angle == 0:\n if duration > 0:\n await asyncio.sleep(duration)\n return\n if duration == 0 or angle / duration > self._max_speed:\n duration = abs(angle / self._max_speed)\n start = time.perf_counter()\n sequence_count = 0\n if angle > 0:\n plus_minus = 1\n else:\n plus_minus = -1\n # Times 2 because half-step\n steps = 2 * abs(int(float(angle) / 360 * self.STEPS_PER_REV))\n for i in range(steps):\n for pin in range(4):\n current_pin = self._pins[pin]\n if self.SEQUENCE[sequence_count][pin] != 0:\n GPIO.output(current_pin, True)\n else:\n GPIO.output(current_pin, False)\n sequence_count += plus_minus\n # If we reach the end of the sequence start again\n if sequence_count == self.rotation_seq_count:\n sequence_count = 0\n if sequence_count < 0:\n sequence_count = self.rotation_seq_count - 1\n # Wait to match entered duration\n wait = (float(i) / steps * duration) - (time.perf_counter() - start)\n if wait > 0:\n await asyncio.sleep(wait)\n for pin in self._pins:\n GPIO.output(pin, False)", "def compute_rotation(self):\n if self.predictions[self.iteration][0] == 90.0 or self.predictions[self.iteration][0] == 270.0:\n self.rotation = 20\n self.initial_adjust = True\n return\n\n if self.iteration == 0 or (self.iteration == 1 and self.initial_adjust):\n self.rotation = rotate.get_90_deg_rotation(self.predictions[self.iteration])\n elif self.iteration == 1 or (self.iteration == 2 and self.initial_adjust):\n self.rotation = rotate.get_45_deg_rotation(self.predictions, self.current_position)\n elif self.iteration >= 2 or (self.iteration > 2 and self.initial_adjust):\n self.rotation = rotate.get_fine_rotation(self.iteration)", "def test_from_rotation_angle_coordinate_of_phi(rotationangle):\n\n # Get the coordinate at phi\n 
phi_dash = rotationangle[\"phi\"]\n c3 = rotationangle[\"cs\"].from_rotation_angle(phi_dash)\n\n # Ensure that it is at the origin\n assert c3 == pytest.approx(0.0)", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def testCalculateRotationDiff(self):\n # Test identity\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertEqual(result, 0.0)\n # Test arbitrary rotation\n rot1 = numpy.array(\n [[0.0, 1.0, 0.0], [-1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])\n rot2 = numpy.array(\n [[0.0, 0.0, -1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])\n transform1[0:3, 0:3] = numpy.matmul(transform1[0:3, 0:3], rot1)\n transform2[0:3, 0:3] = numpy.matmul(transform2[0:3, 0:3], rot2)\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Order shouldn't matter\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(result, 120.0 * numpy.pi / 180.0, 8)\n # Test when the angle is pi\n transform1 = numpy.eye(4)\n transform2 = numpy.eye(4)\n transform2[0, 0] = -1.0\n transform2[1, 1] = -1.0\n (_, result) = self.evaluator._calculateDifference(transform1, transform2)\n # It might wrap to -pi, so check the absolute value\n self.assertAlmostEqual(abs(result), numpy.pi, 8)\n # Test an extreme value\n transform2 = -1.0 * numpy.eye(4)\n (_, result) = self.evaluator._calculateDifference(transform2, transform1)\n self.assertAlmostEqual(abs(result), numpy.pi)", "def rotationDetermination(self):\n \n for index, row in enumerate(self.magdata):\n if index > 11 and index < (len(self.magdata) - 12):\n br1 = [row[0] for row in self.magdata[(index-12):(index-2)]]\n bt1 = [row[1] for row in self.magdata[(index-12):(index-2)]]\n bn1 = [row[2] for row in self.magdata[(index-12):(index-2)]]\n b1 = np.matrix((np.mean(br1), np.mean(bt1), np.mean(bn1)))\n\n br2 = [row[0] for row in self.magdata[(index+2):(index+12)]]\n bt2 = [row[1] for row in self.magdata[(index+2):(index+12)]]\n bn2 = [row[2] for row in self.magdata[(index+2):(index+12)]]\n b2 = np.matrix((np.mean(br2), np.mean(bt2), np.mean(bn2)))\n\n theta = np.arccos(np.dot(b1,b2.T)/(np.linalg.norm(b1)*np.linalg.norm(b2)))*180/np.pi\n\n self.detections.rotations.append(theta[0,0])\n self.detections.rotationTimeTags.append(self.timestamps[index])\n \n\n## self.b1 = b1\n## self.b2 = b2\n self.detections.rotationBoundary=[]\n if len(self.detections.rotations) != 0:\n \n for index, theta in enumerate(self.detections.rotations):\n if index > 0:\n if theta > 30 and self.detections.rotations[index-1] < 30:\n self.detections.rotationBoundary.append(self.detections.rotationTimeTags[index])\n if index < len(self.detections.rotations)-1:\n if theta > 30 
and self.detections.rotations[index+1] < 30:\n self.detections.rotationBoundary.append(self.detections.rotationTimeTags[index])", "def test_rotation_isometry(self):\n import numpy\n\n # test for all kinds of curvatures K\n for k in (0, 1, -1, 1/11, -1/11, 11, -2):\n \n s = space(curvature=k)\n\n # use a small enough magnitude to not break math for very negative K\n magic = 0.33377777373737737777\n # 1/sqrt(2)\n s2_ref = 0.707106781186547524400844362104785\n\n o = s.make_origin(2)\n p = s.make_point((1, 0), magic)\n q = s.make_point((s2_ref, s2_ref), magic)\n\n rot = space_point_transform(\n numpy.array([[1,0,0],[0,s2_ref,-s2_ref],[0,s2_ref,s2_ref]]),\n curvature=k,\n math = common_math\n )\n\n f, g, i = map(space_point_transform, (p, q, o))\n\n def check_transform_eq(t1, t2, invert=False):\n for ref in (\n s.make_point((5/13, 12/13), magic),\n s.make_point((-3/5, 4/5), magic)\n ):\n self.assertTrue(invert ^ point_isclose(\n t1(ref),\n t2(ref),\n abs_tol = 1e-12\n ))\n\n # 1/8 turn, times 8\n check_transform_eq(rot*8, i)\n\n # rotate, shift, rotate\n check_transform_eq(g, rot + f + rot * -1)\n\n # the other way\n check_transform_eq(f, rot * -1 + g + rot)", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def check_angle(self):\n self.find_pixels()\n alpha_theta=np.deg2rad(70)\n alpha_phi=np.deg2rad(70)\n extreme_values=self.compute_extreme_values(alpha_phi, alpha_theta)\n x=np.linspace(extreme_values[0], extreme_values[1], self.number_of_pix[1])\n y=np.linspace(extreme_values[2], extreme_values[3], self.number_of_pix[0])\n phi_0=20\n phi_0=np.deg2rad(phi_0)\n j, diff=self.compute_phi(\"find_orient.png\")\n print \"j=\", j\n print \"diff=\", diff", "def doRotation(self, delta):\n self.correctPending()\n self.rotation = (self.rotation + delta) % self.possibleRotations", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def test_accurate_interval(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n v_expected = np.array([i*(M.t + self.dt) for i in xrange(self.N)])\n self.assertTrue(np.allclose(M.v, v_expected))", "def spinAround(self):", "def reached_angle(self, 
angle, tol):\n if self.ros_node.get_data(\"/auto/turret/current/angle\") is None:\n rospy.logerr(\"The topic /auto/turret/current/angle has not been published yet\")\n else:\n neg_angle_diff = self.wrap_angle(self.ros_node.get_data(\"/auto/turret/current/angle\") - angle)\n pos_angle_diff = self.wrap_angle(angle - self.ros_node.get_data(\"/auto/turret/current/angle\"))\n\n if pos_angle_diff <= tol or neg_angle_diff <= tol:\n return True\n return False", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def test_d_3():\n rs = 20\n d = 3\n np.random.seed(rs)\n number_rotations = 3\n\n theta_1 = np.random.uniform(0, 2 * math.pi)\n rotation_1 = np.identity(d)\n pos_1 = np.random.randint(0, d - 1)\n pos_2 = np.random.randint(pos_1 + 1, d)\n rotation_1[pos_1, pos_1] = math.cos(theta_1)\n rotation_1[pos_1, pos_2] = - math.sin(theta_1)\n rotation_1[pos_2, pos_1] = math.sin(theta_1)\n rotation_1[pos_2, pos_2] = math.cos(theta_1)\n\n theta_2 = np.random.uniform(0, 2 * math.pi)\n rotation_2 = np.identity(d)\n pos_3 = np.random.randint(0, d - 1)\n pos_4 = np.random.randint(pos_3 + 1, d)\n rotation_2[pos_3, pos_3] = math.cos(theta_2)\n rotation_2[pos_3, pos_4] = - math.sin(theta_2)\n rotation_2[pos_4, pos_3] = math.sin(theta_2)\n rotation_2[pos_4, pos_4] = math.cos(theta_2)\n\n theta_3 = np.random.uniform(0, 2 * math.pi)\n rotation_3 = np.identity(d)\n pos_5 = np.random.randint(0, d - 1)\n pos_6 = np.random.randint(pos_5 + 1, d)\n rotation_3[pos_5, pos_5] = math.cos(theta_3)\n rotation_3[pos_5, pos_6] = - math.sin(theta_3)\n rotation_3[pos_6, pos_5] = math.sin(theta_3)\n rotation_3[pos_6, pos_6] = math.cos(theta_3)\n\n final_rotation = rotation_1 @ rotation_2 @ rotation_3\n np.random.seed(rs)\n rotation_function = (mt_obj.calculate_rotation_matrix\n (d, number_rotations))\n assert(np.all(final_rotation == rotation_function))", "def test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def test_pol_rotator(time_location, spectral_type, unpolarized, below_horizon):\n time, telescope_location = time_location\n\n Nsrcs = 50\n ras = Longitude(np.linspace(0, 24, Nsrcs) * units.hr)\n decs = Latitude(np.linspace(-90, 90, Nsrcs) * units.deg)\n names = np.arange(Nsrcs).astype(\"str\")\n if unpolarized:\n fluxes = np.array([[[1.0, 0.0, 0.0, 0.0]]] * Nsrcs).T * units.Jy\n else:\n fluxes = np.array([[[5.5, 0.7, 0.3, 0.0]]] * Nsrcs).T * units.Jy\n\n # Make the last source non-polarized\n fluxes[..., -1] = [[1.0], [0], [0], [0]] * units.Jy\n\n extra = {}\n # Add frequencies if \"full\" freq:\n if spectral_type == \"full\":\n Nfreqs = 10\n freq_array = np.linspace(100e6, 110e6, Nfreqs) * units.Hz\n fluxes = fluxes.repeat(Nfreqs, axis=1)\n extra = {\"freq_array\": freq_array}\n\n assert isinstance(fluxes, Quantity)\n source = SkyModel(\n name=names,\n ra=ras,\n dec=decs,\n frame=\"icrs\",\n stokes=fluxes,\n spectral_type=spectral_type,\n **extra,\n )\n\n if unpolarized:\n assert source._n_polarized == 0\n else:\n assert source._n_polarized == Nsrcs - 1\n\n source.update_positions(time, telescope_location)\n\n # Check the default of inds for 
_calc_rotation_matrix()\n rots1 = source._calc_rotation_matrix()\n inds = np.array([25, 45, 16])\n rots2 = source._calc_rotation_matrix(inds)\n assert np.allclose(rots1[..., inds], rots2)\n\n # Unset the horizon mask and confirm that all rotation matrices are calculated.\n if below_horizon:\n source.above_horizon = np.full(source.Ncomponents, False, dtype=bool)\n warn_msg = \"\"\n warn_type = None\n else:\n source.above_horizon = None\n warn_msg = \"Horizon cutoff undefined\"\n warn_type = UserWarning\n\n with uvtest.check_warnings(warn_type, match=warn_msg):\n local_coherency = source.coherency_calc()\n\n if below_horizon:\n assert local_coherency.size == 0\n else:\n assert local_coherency.unit == units.Jy\n # Check that all polarized sources are rotated.\n if unpolarized:\n assert units.quantity.allclose(local_coherency, source.frame_coherency)\n else:\n assert not np.all(\n units.quantity.isclose(\n local_coherency[..., :-1], source.frame_coherency[..., :-1]\n )\n )\n assert units.quantity.allclose(\n local_coherency[..., -1], source.frame_coherency[..., -1]\n )", "def test_comp_angle_opening(self, test_dict):\n test_obj = test_dict[\"test_obj\"]\n a = test_obj.slot.comp_angle_opening()\n self.assertEqual(a, 2 * pi / test_obj.slot.Zs)\n\n b = comp_angle_opening(test_obj.slot)\n msg = \"Return \" + str(a) + \" expected \" + str(b)\n self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg)", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def angle(self, angle: int, time: int = 0, /) -> None:", "def test_calculate_tutor_baseline(self):\n tmax = 40.0\n dt = 1.0\n ini_rate = 80.0\n baseline_n = 5\n\n rate1 = ini_rate + 20.0\n rate2 = ini_rate - 10.0\n\n nruns = 10\n nsteps = int_r(tmax/dt)\n\n tutor = SimpleNeurons(2, out_fct=lambda i:\n [rate1, rate2] if i < nsteps/2 else [rate2, rate1])\n reward = MockReward(lambda _: 0.0)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=0.1,\n use_tutor_baseline=True, baseline_n=baseline_n)\n\n factor 
= 1 - 1.0/baseline_n\n\n for i in xrange(nruns):\n tutor_rule.reset_rates()\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n crt_baseline = tutor_rule.baseline\n\n self.assertEqual(np.ndim(crt_baseline), 2)\n self.assertEqual(np.shape(crt_baseline)[0], nsteps)\n self.assertEqual(np.shape(crt_baseline)[1], 2)\n\n expected1 = rate1 + (ini_rate - rate1)*factor**(i+1)\n expected2 = rate2 + (ini_rate - rate2)*factor**(i+1)\n\n self.assertLess(np.max(np.abs(crt_baseline[:nsteps/2, 0] - expected1)),\n 1e-6)\n self.assertLess(np.max(np.abs(crt_baseline[nsteps/2:, 0] - expected2)),\n 1e-6)\n\n self.assertLess(np.max(np.abs(crt_baseline[:nsteps/2, 1] - expected2)),\n 1e-6)\n self.assertLess(np.max(np.abs(crt_baseline[nsteps/2:, 1] - expected1)),\n 1e-6)", "def test_simulationRun(self):\n self.opt = { 'temperature' : 300.0, 'friction' : 1, 'dt' : 0.00002,\n 'nIter' : 2, 'nstepsNC' : 2, 'nstepsMD' : 1, 'nprop' : 1,\n 'nonbondedMethod' : 'NoCutoff', 'constraints': 'HBonds',\n 'trajectory_interval' : 1, 'reporter_interval' : 1,\n 'outfname' : 'mc-test',\n 'platform' : None,\n 'constraints' : 'HBonds',\n 'mc_per_iter' : 2 }\n\n structure = self.full_struct\n class SetRotationMove(RandomLigandRotationMove):\n def __init__(self, structure, resname='LIG'):\n super(SetRotationMove, self).__init__(structure, resname)\n\n def move(self, context):\n \"\"\"Function that performs a random rotation about the\n center of mass of the ligand.\n \"\"\"\n #TODO: check if we need to deepcopy\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n\n self.positions = positions[self.atom_indices]\n self.center_of_mass = self.getCenterOfMass(self.positions, self.masses)\n reduced_pos = self.positions - self.center_of_mass\n\n # Define random rotational move on the ligand\n #set rotation so that test is reproducible\n set_rotation_matrix = np.array([[-0.62297988, -0.17349253, 0.7627558 ],\n [ 0.55082352, -0.78964857, 0.27027502],\n [ 0.55541834, 0.58851973, 0.58749893]])\n\n\n #set_rotation_matrix = np.array([[1, 0, 0],\n # [0, 1, 0],\n # [0, 0, 1]])\n\n #multiply lig coordinates by rot matrix and add back COM translation from origin\n rot_move = np.dot(reduced_pos, set_rotation_matrix) * positions.unit + self.center_of_mass\n\n # Update ligand positions in nc_sim\n for index, atomidx in enumerate(self.atom_indices):\n positions[atomidx] = rot_move[index]\n context.setPositions(positions)\n positions = context.getState(getPositions=True).getPositions(asNumpy=True)\n self.positions = positions[self.atom_indices]\n return context\n\n\n self.model = SetRotationMove(structure, resname='ALA')\n #self.model = RandomLigandRotationMove(structure, resname='ALA')\n\n self.model.atom_indices = range(22)\n self.model.topology = structure[self.model.atom_indices].topology\n self.model.positions = structure[self.model.atom_indices].positions\n self.model.calculateProperties()\n\n self.mover = MoveEngine(self.model)\n #Initialize the SimulationFactory object\n sims = SimulationFactory(structure, self.mover, **self.opt)\n #print(sims)\n system = sims.generateSystem(structure, **self.opt)\n simdict = sims.createSimulationSet()\n alch_system = sims.generateAlchSystem(system, self.model.atom_indices)\n self.nc_sim = sims.generateSimFromStruct(structure, self.mover, alch_system, ncmc=True, **self.opt)\n self.model.calculateProperties()\n self.initial_positions = self.nc_sim.context.getState(getPositions=True).getPositions(asNumpy=True)\n mc_sim = Simulation(sims, self.mover, 
**self.opt)\n #monkeypatch to access acceptance value\n def nacceptRejectMC(self, temperature=300, **opt):\n \"\"\"Function that chooses to accept or reject the proposed move.\n \"\"\"\n md_state0 = self.current_state['md']['state0']\n md_state1 = self.current_state['md']['state1']\n log_mc = (md_state1['potential_energy'] - md_state0['potential_energy']) * (-1.0/self.nc_sim.context._integrator.kT)\n randnum = math.log(np.random.random())\n\n if log_mc > randnum:\n self.accept += 1\n print('MC MOVE ACCEPTED: log_mc {} > randnum {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state1['positions'])\n else:\n self.reject += 1\n print('MC MOVE REJECTED: log_mc {} < {}'.format(log_mc, randnum) )\n self.md_sim.context.setPositions(md_state0['positions'])\n self.log_mc = log_mc\n self.md_sim.context.setVelocitiesToTemperature(self.opt['temperature'])\n mc_sim.acceptRejectMC = nacceptRejectMC\n nacceptRejectMC.__get__(mc_sim)\n mc_sim.acceptRejectMC = types.MethodType(nacceptRejectMC, mc_sim)\n mc_sim.runMC(self.opt['nIter'])\n #get log acceptance\n print(mc_sim.log_mc)\n #if mc is working, should be around -24.1\n assert mc_sim.log_mc <= -23.8 and mc_sim.log_mc >= -24.3", "def testRolledSpectra():\n tau = np.zeros((2,50))\n tau[0,0] = 1\n tau[1,0] = 1\n tau[1,-1] = 2\n (roll, tau_new) = spec_utils.get_rolled_spectra(tau)\n assert np.all(roll == np.array([25,-24]))\n assert tau_new[0,25] == 1\n assert tau_new[1,25] == 2\n assert tau_new[1,26] == 1\n assert np.sum(np.abs(tau_new)) == 4", "def rotate_orbit(self):\n try:\n ang = self.orbit_speed * self.time_scale / self.refresh_rate\n self.obj.rotate(angle=ang, axis=vector(0, 1, 0), origin=self.star.obj.pos)\n self.sum_ang += ang\n except ZeroDivisionError:\n print(\"ERROR: REFRESH_RATE is 0\")\n except (AttributeError, TypeError):\n print(\"ERROR: wrong arguments type while initializing!!\")", "def test_calculate_tilt(tilt_reference):\n tilt_rad = np.radians(tilt_reference)\n # get the rotation axis\n # NOTE:\n # we need to tilt the image -tilt in order to get the tilt angle as + value.\n rot_aixs_tilted = get_tilted_rot_axis(tilt_inplane=-tilt_rad, tilt_outplane=0.0)\n # radiograph at 0 deg\n img0 = virtual_cam(two_sphere_system(0, rot_aixs_tilted, size=200))\n # radiograph at 180 deg\n img180 = virtual_cam(two_sphere_system(np.pi, rot_aixs_tilted, size=200))\n # calculate the tilt angle\n tilt_angle = calculate_tilt(img0, img180).x\n # verify\n # NOTE: tolerance is set to half a pixel at the edge of the FOV\n np.testing.assert_allclose(tilt_angle, tilt_reference, atol=np.degrees(0.5 / 100))", "def check_rotation_fault(self, current_pos, target_pos):\n \n fault_pos = 340.\n \n def cw_dist(A, B):\n return (B-A)%360.\n def ccw_dist(A,B):\n return (A-B)%360.\n def fast_dist(A,B):\n return min(ccw_dist(A,B), cw_dist(A,B))\n def fast_dir(A,B):\n if ccw_dist(A,B) > cw_dist(A,B): return +1\n else: return -1\n \n def dist(A,B, direction):\n if direction > 0: return cw_dist(A,B)\n if direction < 0: return ccw_dist(A,B)\n \n print(\"A->B Fast dir {}\".format(fast_dir(current_pos,target_pos)))\n \n print(\"A->F fast\", fast_dist(current_pos, fault_pos), fast_dir(current_pos, fault_pos))\n print(\"F->B fast\", fast_dist(fault_pos,target_pos), fast_dir(fault_pos, current_pos))\n d = fast_dir(current_pos,target_pos)\n print(\"A->F\", dist(current_pos, fault_pos, d), dist(current_pos, fault_pos, -d))\n print(\"F->B\", dist(fault_pos, target_pos, d) , dist(fault_pos, target_pos, -d))\n \n if dist(current_pos, fault_pos, d)+ dist(fault_pos, 
target_pos,d) >= 180.:\n return [target_pos]\n else:\n middle_target = current_pos + (360 - fast_dist(current_pos, target_pos))/2\n middle_target %=360\n print(\"A->M->B\", fast_dist(current_pos, middle_target), fast_dist(middle_target, target_pos))\n return [middle_target, target_pos]", "def test_calcAngles_angles_or_axis(self, kargs, expected_len_result, expected_truncated_results):\n kargs['vsk'] = self.cal_SM\n result = pycgmCalc.calcAngles(self.motion_data, **kargs)\n np.testing.assert_equal(len(result), expected_len_result)\n np.testing.assert_almost_equal(result[0:5], expected_truncated_results)", "def target_position(self, time):\n \"\"\"\n start_pos = self.points[self.cur_start]\n seg_time = time - self.last_checkpoint_time\n\n #The arguement of target-velocity dosent matter\n cur_pos = self.target_velocity(time)*seg_time + start_pos\n\n \n # or time > (self.total_time / 4)*(self.cur_start + 1)\n cur_pos_norm = length(cur_pos - start_pos)\n\n next_corner = self.points[(self.cur_start + 1)%4]\n \n seg_norm = length(next_corner - start_pos)\n print(\"cur_pos : \", cur_pos, \"segment: \", self.cur_start, seg_norm - cur_pos_norm)\n\n if cur_pos_norm >= seg_norm:\n self.cur_start = (self.cur_start + 1) % 4\n self.last_checkpoint_time = time\n return cur_pos\n \"\"\"\n\n #Possibly use rospy.sleep()\n total_time = self.total_time\n\n\n if time < total_time/4:\n return self.path1.target_position(time)\n\n elif time - total_time/4 == 0:\n rospy.sleep(0.5)\n\n elif time < total_time/2:\n return self.path2.target_position(time - (total_time/4 + 0.5))\n # return self.path2.target_position(time - (total_time/4 ))\n\n\n elif time - total_time/2 == 0:\n rospy.sleep(0.5)\n\n elif time <= total_time/4*3:\n return self.path3.target_position(time - (total_time/2 + 1))\n # return self.path3.target_position(time - (total_time/2))\n\n\n elif time - total_time/4*3 == 0:\n rospy.sleep(0.5)\n\n else:\n return self.path4.target_position(time - (total_time/4*3 + 1.5))\n # return self.path4.target_position(time - (total_time/4*3))", "def rotate(self,angle):\n origin = copy.deepcopy(self._current)\n\n q = [origin.orientation.x,\n origin.orientation.y,\n origin.orientation.z,\n origin.orientation.w] # quaternion nonsense\n\n (roll, pitch, yaw) = euler_from_quaternion(q)\n\n atTarget=False\n\n currentAngle=yaw\n angle=angle+currentAngle\n\n if(angle==currentAngle):\n w=0\n elif(angle>currentAngle):\n w=1\n elif(angle<currentAngle):\n w=-1\n\n move_msg=Twist()\n move_msg.linear.x=0\n move_msg.angular.z=w\n\n\n stop_msg =Twist()\n stop_msg.linear.x=0\n stop_msg.angular.z=0\n\n while(not atTarget and not rospy.is_shutdown()):\n if(currentAngle>=angle):\n atTarget=True\n self._vel_pub.publish(stop_msg)\n print('rotate: stoped')\n else:\n origin = copy.deepcopy(self._current)\n\n q = [origin.orientation.x,\n origin.orientation.y,\n origin.orientation.z,\n origin.orientation.w] # quaternion nonsense\n\n (roll, pitch, yaw) = euler_from_quaternion(q)\n\n currentAngle=yaw\n self._vel_pub.publish(move_msg)\n rospy.sleep(.15)\n print('rotate: moving')\n print('angle: '+str(angle)+'currentAngle: '+str(currentAngle))", "def test_far_out_coordinates(rotationangle):\n\n eps = 1e-7\n\n # Get the limits\n lim = rotationangle[\"cs\"].limits()[2:]\n lim0 = max(lim)\n lim1 = min(lim)\n\n # Setting c2 and c3 to zero\n c3 = lim0 - eps\n\n # A large value which is still valid\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # Setting c2 and c3 to zero\n c3 = lim1 + eps\n\n # A large value which is still valid\n phi_dash = 
rotationangle[\"cs\"].to_rotation_angle(c3)\n\n # A large value which is raises an exception\n with pytest.raises(RuntimeError):\n c3 = lim0 + eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)\n print(phi_dash)\n\n with pytest.raises(RuntimeError):\n c3 = lim1 - eps\n phi_dash = rotationangle[\"cs\"].to_rotation_angle(c3)", "def calibrate_rotation_rate(self, direction, angle):\n print(location_string[direction], \" calibration\")\n\n for speed in range(self.MIN_SPEED, 100, self.SPEED_TABLE_INTERVAL):\n sleep(1)\n if direction == DIR_LEFT: # rotate left\n self.kit.motor3.throttle = -speed/100\n self.kit.motor4.throttle = speed/100\n\n elif direction == DIR_RIGHT: # rotate right\n self.kit.motor3.throttle = speed/100\n self.kit.motor4.throttle = -speed/100\n\n else:\n print(\"Invalid direction\")\n\n time = self.rotation_angle_to_time(angle, speed)\n\n print(location_string[direction], \": rotate\", angle, \" degrees at speed \",\n speed, \" for \", time, \" ms\")\n sleep(time*1e-3)\n self.kit.motor3.throttle = 0\n self.kit.motor4.throttle = 0\n sleep(2) # two second delay between speeds", "def rotating_bar(length=10, input_size_x=240, input_size_y=180, offx=0,\n offy=0, angle_step=10, ts_offset=10*ms):\n x_coord = []\n y_coord = []\n pol = []\n ts = []\n center = (input_size_x/2 + offx, input_size_y/2 + offy)\n angles = np.linspace(0, np.pi*2, angle_step)\n for i, cAngle in enumerate(angles):\n endy_1 = center[1] + ((length / 2) * np.sin((cAngle)))\n endx_1 = center[0] + ((length / 2) * np.cos((cAngle)))\n endy_2 = center[1] - ((length / 2) * np.sin((cAngle)))\n endx_2 = center[0] - ((length / 2) * np.cos((cAngle)))\n start = np.asarray((endx_1, endy_1))\n end = np.asarray((endx_2, endy_2))\n max_direction, max_length = max(enumerate(abs(end - start)),\n key=operator.itemgetter(1))\n dv = (end - start) / max_length\n line = [dda_round(start)]\n for step in range(int(max_length)):\n line.append(dda_round((step + 1) * dv + start))\n for coord in line:\n x_coord.append(coord[0])\n y_coord.append(coord[1])\n ts.append(i)\n pol.append(1)\n events = np.zeros((4, len(x_coord)))\n events[0, :] = np.asarray(x_coord)\n events[1, :] = np.asarray(y_coord)\n events[2, :] = np.asarray(ts) \n events[3, :] = np.asarray(pol)\n\n ind = xy2ind(events[0, :], events[1, :], input_size_x)\n stimuli_generator = SpikeGeneratorGroup(input_size_x*input_size_y, indices=ind, \n times=ts*ts_offset, name='rotating_bar')\n return stimuli_generator, events", "def do_polar_alignment_test(self, *arg):\n if self.ready is False:\n return\n\n start_time = current_time(flatten=True)\n\n base_dir = '{}/images/drift_align/{}'.format(\n os.getenv('PANDIR'), start_time)\n plot_fn = '{}/{}_center_overlay.jpg'.format(base_dir, start_time)\n\n mount = self.pocs.observatory.mount\n\n print_info(\"Moving to home position\")\n self.pocs.say(\"Moving to home position\")\n mount.slew_to_home()\n\n # Polar Rotation\n pole_fn = polar_rotation(self.pocs, base_dir=base_dir)\n pole_fn = pole_fn.replace('.cr2', '.fits')\n\n # Mount Rotation\n rotate_fn = mount_rotation(self.pocs, base_dir=base_dir)\n rotate_fn = rotate_fn.replace('.cr2', '.fits')\n\n print_info(\"Moving back to home\")\n self.pocs.say(\"Moving back to home\")\n mount.slew_to_home()\n\n print_info(\"Solving celestial pole image\")\n self.pocs.say(\"Solving celestial pole image\")\n try:\n pole_center = polar_alignment_utils.analyze_polar_rotation(pole_fn)\n except error.SolveError:\n print_warning(\"Unable to solve pole image.\")\n print_warning(\"Will proceeed with 
rotation image but analysis not possible\")\n pole_center = None\n else:\n pole_center = (float(pole_center[0]), float(pole_center[1]))\n\n print_info(\"Starting analysis of rotation image\")\n self.pocs.say(\"Starting analysis of rotation image\")\n try:\n rotate_center = polar_alignment_utils.analyze_ra_rotation(rotate_fn)\n except Exception:\n print_warning(\"Unable to process rotation image\")\n rotate_center = None\n\n if pole_center is not None and rotate_center is not None:\n print_info(\"Plotting centers\")\n self.pocs.say(\"Plotting centers\")\n\n print_info(\"Pole: {} {}\".format(pole_center, pole_fn))\n self.pocs.say(\"Pole : {:0.2f} x {:0.2f}\".format(\n pole_center[0], pole_center[1]))\n\n print_info(\"Rotate: {} {}\".format(rotate_center, rotate_fn))\n self.pocs.say(\"Rotate: {:0.2f} x {:0.2f}\".format(\n rotate_center[0], rotate_center[1]))\n\n d_x = pole_center[0] - rotate_center[0]\n d_y = pole_center[1] - rotate_center[1]\n\n self.pocs.say(\"d_x: {:0.2f}\".format(d_x))\n self.pocs.say(\"d_y: {:0.2f}\".format(d_y))\n\n fig = polar_alignment_utils.plot_center(\n pole_fn, rotate_fn, pole_center, rotate_center)\n\n print_info(\"Plot image: {}\".format(plot_fn))\n fig.tight_layout()\n fig.savefig(plot_fn)\n\n try:\n os.unlink('/var/panoptes/images/latest.jpg')\n except Exception:\n pass\n try:\n os.symlink(plot_fn, '/var/panoptes/images/latest.jpg')\n except Exception:\n print_warning(\"Can't link latest image\")\n\n with open('/var/panoptes/images/drift_align/center.txt'.format(base_dir), 'a') as f:\n f.write('{}.{},{},{},{},{},{}\\n'.format(start_time, pole_center[0], pole_center[\n 1], rotate_center[0], rotate_center[1], d_x, d_y))\n\n print_info(\"Done with polar alignment test\")\n self.pocs.say(\"Done with polar alignment test\")", "def test_euler_angles_1q_hard_thetas(self, gate):\n self.check_one_qubit_euler_angles(Operator(gate))", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def _case3_test_failures_actual_rate_lags_target_rate(mock_get_next_ripe_time):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_lags(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, [threading.currentThread()]\n )\n advancer.start()\n\n counter = 0\n while True:\n token = rl.acquire_token()\n\n # Actual rate always lags target rate\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n if old_actual_rate is not None:\n self.assertLess(old_actual_rate, old_target_rate)\n\n # Token grant a 100 initial successes followed by all failures\n counter += 1\n if counter <= required_successes:\n rl.release_token(token, True)\n continue\n else:\n rl.release_token(token, False)\n\n # assert that the new rate is calculated based on the (higher/more conservative) target rate\n if backoff_factor * old_target_rate > min_cluster_rate:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(backoff_factor * old_target_rate, 2),\n )\n else:\n # assert that new rate never goes lower than min rate\n self.assertEqual(\n round(rl._current_cluster_rate, 2), round(min_cluster_rate, 2)\n )\n break\n advancer.stop(wait_on_join=False)", "def randomize_trajectory(self):\n self.angle = randint(-360, 360)\n self.speed = randint(1, 5)/2.5", "def test_roll_angle():\n ra_ref = 165 # in deg\n dec_ref = 54 # in deg\n v2_ref = 0\n v3_ref = 0\n r0 = 37 # in deg\n\n 
v2 = .01 # in arcsec\n v3 = .01 # in arcsec\n roll_angle = pointing.compute_roll_ref(v2_ref, v3_ref, r0, ra_ref, dec_ref, v2, v3)\n assert_allclose(roll_angle, r0, atol=1e-3)", "def test_revolute(self):\n # Rotate around the z axis\n r = Joint.revolute(np.array([0, 0, 1]))\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat, np.array([1, 0, 0, 1]))[:3]\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))", "def test_accurate(self):\n M = simulation.EventMonitor(self.G)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n times = self.G.pattern.nonzero()[1]*self.dt\n self.assertTrue(np.allclose(sorted(times), M.t))\n for (i, t) in zip(M.i, M.t):\n self.assertTrue(self.G.pattern[i, int_r(t/self.dt)])", "def test_custom_time(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, interval)))", "def test_start_angle(self):\n\n assert self.test_shape.azimuth_placement_angle == [\n 0,\n 45,\n 90,\n 135,\n 180,\n 225,\n 270,\n 315,\n ]\n self.test_shape.start_angle = 10\n assert self.test_shape.azimuth_placement_angle == [\n 10,\n 55,\n 100,\n 145,\n 190,\n 235,\n 280,\n 325,\n ]", "def test_revolute_from_dh(self):\n x_offset = 1\n z_offset = 2\n # Rotate around the z axis\n r = Joint.revolute_from_dh(0, 0, x_offset, z_offset)\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat[:3, :3], np.array([1, 0, 0]))\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))\n self.assertTrue(np.allclose(t_mat[2, 3], z_offset))\n # x was rotated 90 degrees, and is now y\n self.assertTrue(np.allclose(t_mat[1, 3], x_offset))", "def correct_angle(self, tank_angle, target_angle):\n angle_diff = periodic_difference_of_angles(target_angle, tank_angle)\n if abs(angle_diff) <= MIN_ANGLE_DIF:\n self.tank.stop_turning()\n return True\n else:\n return False", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def scenario2(angle, speed):\n speedHorizontal = speed * math.cos(angle)\n speedVertical = speed * math.sin(angle)\n timePeak = speedVertical / 9.81\n time = timePeak * 2\n result = speedHorizontal * time\n return result", "def test_str_rotation_angle(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"rotation_angle\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x2D,\n 0xDC,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 11740)\n self.assertEqual(sensor.unit_of_measurement(), \"°\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_use_tutor_baseline(self):\n tmax = 40.0\n dt = 1.0\n ini_rate = 80.0\n\n nruns = 11\n\n tutor = SimpleNeurons(2, out_fct=lambda _: [100.0, 60.0])\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=0.1)\n\n tutor_rule.use_tutor_baseline = True\n tutor_rule.baseline_n = 5\n\n for i in xrange(nruns):\n # we first set the baselines for the two neurons to some values different\n # from tutor_rule's ini_rate, and then in the last round, we test how the\n # rates change\n if i == nruns-1:\n tutor.out_fct = lambda _: [80.0, 80.0]\n\n 
tutor_rule.reset_rates()\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n drates = tutor_rule.rates - ini_rate\n\n # for the first neuron, for t < tmax/2, the current firing rate is below the\n # baseline and the reward is positive, so the rates should *decrease*\n # for t >= tmax/2, the rates should *increase*\n # the opposite should happen for the second neuron\n mask = (np.arange(0, tmax, dt) < tmax/2)\n\n self.assertGreater(np.min(drates[mask, 1]), 0)\n self.assertLess(np.max(drates[mask, 0]), 0)\n\n self.assertGreater(np.min(drates[~mask, 0]), 0)\n self.assertLess(np.max(drates[~mask, 1]), 0)", "def test_memory(self):\n tau = 53.0\n tau0 = 22.0\n mrate = 50.0\n Mrate = 100.0\n\n tmax = 100.0\n dt = 0.01\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n\n ndiv3 = self.Nsrc/3\n\n self.motor.error_fct = lambda t: np.hstack((\n np.cos(t/tau0)*np.ones(ndiv3), np.sin(t/tau0)*np.ones(ndiv3),\n np.ones(ndiv3)))\n\n M = simulation.StateMonitor(self.rule, 'out')\n\n sim = simulation.Simulation(self.source, self.motor, self.rule, M, dt=dt)\n sim.run(tmax)\n\n # tutor output points *opposite* the motor error!\n prefactor = -self.rule.gain*tau0/(tau*tau + tau0*tau0)\n integral_part1 = np.cos(M.t/tau0)*np.exp(-M.t/tau)\n integral_part2 = np.sin(M.t/tau0)*np.exp(-M.t/tau)\n\n expected_cos = prefactor*(tau0 - tau0*integral_part1 + tau*integral_part2)\n expected_sin = prefactor*(tau - tau*integral_part1 - tau0*integral_part2)\n expected_const = -(1 - np.exp(-M.t/tau))\n\n mavg = (mrate + Mrate)*0.5\n mdiff = (Mrate - mrate)*0.5\n expected = np.vstack((\n np.tile(mavg + mdiff*expected_cos, (ndiv3, 1)),\n np.tile(mavg + mdiff*expected_sin, (ndiv3, 1)),\n np.tile(mavg + mdiff*expected_const, (ndiv3, 1))\n ))\n\n # mismatch is relatively large since we're using Euler's method\n # we can't do much better, however, since the motor controller cannot give\n # us motor error information at sub-step resolution\n mismatch = np.mean(np.abs(expected - M.out)/expected)\n self.assertLess(mismatch, 0.05)", "def test_shape_interval(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, ['a', 'v', 'b'], interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n nsteps = int_r(self.t_max/interval)\n self.assertEqual(M.v.shape, (self.N, nsteps))\n self.assertEqual(M.a.shape, (2, nsteps))\n self.assertEqual(M.b.shape, (self.N, nsteps))", "def rotate(self, angle, aspeed):\n current_pose = [self.px, self.py, self.pth]\n initial_pose = current_pose\n # final pose is the final angle that the robot moves to about z\n final_angle = self.pth+angle\n if final_angle < self.pth:\n aspeed=aspeed*(-1)\n\n final_pose = [self.px, self.py, final_angle]\n \ttolerance = 0.01\n\n self.send_speed(0.0, aspeed)\n while abs(final_pose[2]-current_pose[2]) > tolerance:\n current_pose = [self.px, self.py, self.pth]\n self.send_speed(0.0, 0.0)", "def _case1_test_successes_actual_rate_leads_target_rate(\n mock_get_next_ripe_time,\n ):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_leads(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, [threading.currentThread()]\n )\n advancer.start()\n\n while True:\n token = rl.acquire_token()\n for x in range(required_successes - 1):\n rl.release_token(token, True)\n rl.acquire_token()\n\n # Actual rate always leads target rate\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = 
rl._get_actual_cluster_rate()\n self.assertGreater(old_actual_rate, old_target_rate)\n\n # Token grant causes new rate to be calculated\n rl.release_token(token, True)\n\n # assert that the new rate is calculated based on the (lower/more conservative) target rate\n if increase_factor * old_target_rate < max_cluster_rate:\n self.assertEqual(\n rl._current_cluster_rate, increase_factor * old_target_rate\n )\n else:\n # assert that new rate never exceeds max rate\n self.assertEqual(rl._current_cluster_rate, max_cluster_rate)\n break\n advancer.stop(wait_on_join=False)", "def test_angle():\n # radians\n theta_coord = 45. * coord.degrees\n theta_astro = astropy.coordinates.Angle(pi/4., units.radian)\n\n # degrees\n np.testing.assert_almost_equal(theta_coord.rad, theta_astro.rad, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.degrees, theta_astro.degree, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.hours, theta_astro.hour, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.arcmin, theta_astro.arcminute, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.arcsec, theta_astro.arcsec, decimal=12)\n\n # Other constructors\n theta_astro2 = astropy.coordinates.Angle(23.09, units.arcsec)\n theta_coord2 = coord.Angle(23.09, coord.arcsec)\n np.testing.assert_almost_equal(theta_coord2.rad, theta_astro2.rad, decimal=12)\n\n theta_astro3 = astropy.coordinates.Angle(-0.17, unit='rad')\n theta_coord3 = coord._Angle(-0.17)\n np.testing.assert_almost_equal(theta_coord3.rad, theta_astro3.rad, decimal=12)\n\n # astropy wrapping uses a different convention than we do. Their argument is\n # the upper end of the target range, not the center.\n theta_astro4 = theta_astro3.wrap_at(360 * units.deg)\n theta_coord4 = theta_coord3.wrap(180 * coord.degrees)\n np.testing.assert_almost_equal(theta_coord4.rad, theta_astro4.rad, decimal=12)\n\n theta_astro5 = theta_astro3.wrap_at(-100 * units.deg)\n theta_coord5 = theta_coord3.wrap(-280 * coord.degrees)\n np.testing.assert_almost_equal(theta_coord5.rad, theta_astro5.rad, decimal=12)\n\n theta_astro6 = astropy.coordinates.Angle('03:34:12', unit='hourangle')\n theta_coord6 = coord.Angle.from_hms('03:34:12')\n np.testing.assert_almost_equal(theta_coord6.rad, theta_astro6.rad, decimal=12)\n\n theta_astro7 = astropy.coordinates.Angle('03:34:12', unit='deg')\n theta_coord7 = coord.Angle.from_dms('03:34:12')\n np.testing.assert_almost_equal(theta_coord7.rad, theta_astro7.rad, decimal=12)\n\n # Their default arguments to to_string are different from ours, but can make them compatible.\n print('theta_astro6.hms = ',theta_astro6.to_string(sep=':', pad=True))\n print('theta_coord6.hms = ',theta_coord6.hms())\n assert theta_coord6.hms() == theta_astro6.to_string(sep=':', pad=True)\n\n print('theta_astro7.dms = ',theta_astro7.to_string(sep=':', pad=True))\n print('theta_coord7.dms = ',theta_coord7.dms())\n assert theta_coord7.dms() == theta_astro7.to_string(sep=':', pad=True)\n\n print('theta_astro6.hms = ',theta_astro6.to_string())\n print('theta_coord6.hms = ',theta_coord6.hms(sep='hms', pad=False))\n assert theta_coord6.hms(sep='hms', pad=False) == theta_astro6.to_string()\n\n print('theta_astro7.hms = ',theta_astro7.to_string())\n print('theta_coord7.hms = ',theta_coord7.dms(sep='dms', pad=False))\n assert theta_coord7.dms(sep='dms', pad=False) == theta_astro7.to_string()", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def _case2_test_successes_actual_rate_lags_target_rate(mock_get_next_ripe_time):\n 
mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_lags(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, [threading.currentThread()]\n )\n advancer.start()\n\n while True:\n token = rl.acquire_token()\n for x in range(required_successes - 1):\n rl.release_token(token, True)\n rl.acquire_token()\n\n # Actual rate always lags target rate\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n self.assertLess(old_actual_rate, old_target_rate)\n\n # Token grant causes new rate to be calculated\n rl.release_token(token, True)\n\n # assert that the new rate is calculated based on the (lower/more conservative) actual rate\n if increase_factor * old_actual_rate < max_cluster_rate:\n if increase_factor * old_actual_rate < old_target_rate:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(old_target_rate, 2),\n )\n else:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(increase_factor * old_actual_rate, 2),\n )\n else:\n # assert that new rate never exceeds max rate\n self.assertEqual(rl._current_cluster_rate, max_cluster_rate)\n break\n advancer.stop(wait_on_join=False)", "def calculate_translation(reference_im:np.ndarray, \n target_im:np.ndarray,\n ref_to_tar_rotation:np.ndarray=None,\n use_autocorr:bool=True,\n alignment_kwargs:dict={},\n verbose:bool=True,\n ):\n from math import pi\n import cv2\n ## quality check\n # images\n if np.shape(reference_im) != np.shape(target_im):\n raise IndexError(f\"two images should be of the same shape\")\n # rotation matrix\n if ref_to_tar_rotation is None:\n ref_to_tar_rotation = np.diag([1,1])\n elif np.shape(ref_to_tar_rotation) != tuple([2,2]):\n raise IndexError(f\"wrong shape for rotation matrix, should be 2x2. 
\")\n # get dimensions\n _dz,_dx,_dy = np.shape(reference_im)\n # calculate angle\n if verbose:\n print(f\"-- start calculating drift with rotation between images\")\n _rotation_angle = np.arcsin(ref_to_tar_rotation[0,1])/pi*180\n _temp_new_rotation_M = cv2.getRotationMatrix2D((_dx/2, _dy/2), _rotation_angle, 1) # temporary rotation angle\n # rotate image\n if _rotation_angle != 0:\n _rot_target_im = np.array([cv2.warpAffine(_lyr, _temp_new_rotation_M, \n _lyr.shape, borderMode=cv2.BORDER_DEFAULT) \n for _lyr in target_im], dtype=reference_im.dtype)\n else:\n _rot_target_im = target_im\n # calculate drift \n _drift, _drift_flag = align_image(\n _rot_target_im,\n reference_im,\n precision_fold=10,\n use_autocorr=use_autocorr,\n verbose=verbose,\n #detailed_verbose=verbose,\n **alignment_kwargs,)\n\n if verbose:\n print(f\"--- drift: {np.round(_drift,2)} pixels\")\n \n return _rot_target_im, ref_to_tar_rotation, _drift", "def __init__(self, t=0.01):\r\n\r\n\t\tself.t = float(t)\r\n\t\t\r\n\t\t#Control constants\r\n\t\t#Altitude\r\n\t\t#P\r\n\t\tkp1=3\r\n\t\t#PID\r\n\t\tkp2=6\r\n\t\tki2=0\r\n\t\tkd2=2.5\r\n\t\t#PID\r\n\t\tkp3=0.75\r\n\t\tki3=1.5\r\n\t\tkd3=0\r\n\t\t\r\n\t\t#Pitch\r\n\t\t#P\r\n\t\tkp1_theta=4.5\r\n\t\t#PID\r\n\t\tkp2_theta=0.15\r\n\t\tki2_theta=0.1\r\n\t\tkd2_theta=0.004\r\n\t\t\r\n\t\t#Roll\r\n\t\t#P\r\n\t\tkp1_phi=4.5\r\n\t\t#PID\r\n\t\tkp2_phi=0.15\r\n\t\tki2_phi=0.1\r\n\t\tkd2_phi=0.004\r\n\t\t\r\n\t\t#Yaw\r\n\t\t#P\r\n\t\tkp1_psi=10\r\n\t\t#PID\r\n\t\tkp2_psi=0.2\r\n\t\tki2_psi=0.02\r\n\t\tkd2_psi=0\r\n\t\t\r\n\t\t#Control\r\n\t\timax1=10\r\n\t\tt_max1=200\r\n\t\timax2=10\r\n\t\tt_max2=120\r\n\t\t\r\n\t\t#Constrains\r\n\t\trate_imax = 50\r\n\t\tac_imax = 40\r\n\t\tt_min = 800\r\n\t\tt_max = 1500\r\n\t\t\r\n\t\t#Goals\r\n\t\tself.target_z=3\r\n\t\tself.target_theta=0\r\n\t\tself.target_phi=0\r\n\t\tself.target_psi=0\r\n\r\n\t\t#Initial Conditions\r\n\t\tself.w=[1000, 1000, 1000, 1000]\r\n\t\t#self.ev = [0, 0, 1, 0, 0, 0, 0, 0, -9.81, 0.4, -0.2, 1, 0.1, -0.1, 0.2, 0.01, -0.01, 0.02]\r\n\t\tself.ev = [0, 0, 0, 0, 0, 0, 0, 0, -9.81, 0, 0, 0, 0, 0, 0, 0, 0, 0]\r\n\r\n\t\t#Create controllers\r\n\t\tself.altitudeController = control.controlAlt (self.t, kp1, kp2, ki2, kd2, kp3, ki3, kd3, rate_imax, ac_imax,t_min,t_max)\r\n\t\tself.thetaController = control.controlAngle (self.t, kp1_theta, kp2_theta, ki2_theta, kd2_theta, imax1, t_max1)\r\n\t\tself.phiController = control.controlAngle (self.t, kp1_phi, kp2_phi, ki2_phi, kd2_phi, imax1, t_max1)\r\n\t\tself.psiController = control.controlAngle (self.t, kp1_psi, kp2_psi, ki2_psi, kd2_psi, imax2, t_max2)", "def find_rotation_angle(img, coarse_bound=4, fine_bound=0.1, rescale_amt=0.5):\n\n num_trials = int(coarse_bound / fine_bound)\n img_resized = rescale(img, rescale_amt, order=0, multichannel=False)\n\n def project_angles(img_to_project, angles_to_try):\n best_angle = 0\n highest_variation = 0\n for a in angles_to_try:\n rot_img = rotate(img_to_project, a, mode=\"edge\")\n proj = np.sum(rot_img, 1).astype(\"int64\")\n variation = np.sum(np.diff(proj) ** 2)\n if variation > highest_variation:\n highest_variation = variation\n best_angle = a\n return best_angle\n\n angles_to_try = np.linspace(-coarse_bound, coarse_bound, num_trials)\n coarse_angle = project_angles(img_resized, angles_to_try)\n\n angles_to_try = np.linspace(\n -fine_bound + coarse_angle, fine_bound + coarse_angle, num_trials\n )\n fine_angle = project_angles(img_resized, angles_to_try)\n\n return fine_angle", "def 
motor_rotate_deg(power,deg,port,sampling_time=.01,delay_when_stopping=.05): \n debug = False\n num_motor=len(power) #Number of motors being used\n #print num_motor\n init_val=[0]*num_motor\n curr_val=[0]*num_motor\n final_val=[0]*num_motor\n last_encod=[0]*num_motor\n \n delta=0\n gain=0.005\n idelta=0.0\n alpha=10\n smulti=0\n BrickPiUpdateValues()\n for i in range(num_motor):\n BrickPi.MotorEnable[port[i]] = 1 #Enable the Motors\n power[i]=abs(power[i])\n \n init_val[i]=BrickPi.Encoder[port[i]] #Initial reading of the encoder \n \n final_val[i]=init_val[i]+(deg[i]*2) #Final value when the motor has to be stopped;One encoder value counts for 0.5 degrees\n \n #For running clockwise and anticlockwise\n if deg[i]>0:\n BrickPi.MotorSpeed[port[i]] = power[i]\n elif deg[i]<0:\n BrickPi.MotorSpeed[port[i]] = -power[i]\n else:\n BrickPi.MotorSpeed[port[i]] = 0\n \n \n run_stat=[0]*num_motor\n\n time_start = time.time()\n time_end = time.time()\n time_total = time_end - time_start\n \n while True:\n time_end = time.time()\n time_total = time_end - time_start\n if time_total >= ROTATE_DEG_TIMEOUT:\n break\n \n result = BrickPiUpdateValues() #Ask BrickPi to update values for sensors/motors\n time.sleep(sampling_time) #sleep for the sampling time given (default:10 ms)\n i = 0\n #if debug:\n #print \"Result of Update Values: \" + `result`\n if not result :\n for i in range(num_motor): #Do for each of the motors\n #The FIRST thing we should do is check our encoders!\n curr_val[i]=BrickPi.Encoder[port[i]]\n if debug :\n print \"Motor \" + `i` + \" encoder: \" + `curr_val[i]`\n \n if run_stat[i]==1:\n continue\n # Check if final value reached for each of the motors\n if(deg[i]>0 and final_val[i]<=curr_val[i]) or (deg[i]<0 and final_val[i]>=curr_val[i]) :\n #This motor has reached its goal\n run_stat[i]=1\n \n #Now let's hit the breaks by going in reverse for a VERY quick amount of time.\n if deg[i]>0:\n BrickPi.MotorSpeed[port[i]] = -power[i]\n elif deg[i]<0:\n BrickPi.MotorSpeed[port[i]] = power[i]\n else:\n BrickPi.MotorSpeed[port[i]] = 0 \n BrickPiUpdateValues()\n time.sleep(delay_when_stopping)\n #Now let's turn the motor off all together\n BrickPi.MotorEnable[port[i]] = 0\n BrickPiUpdateValues()\n \n if(all(e==1 for e in run_stat)): #If all the motors have already completed their rotation, then stop\n break\n \n #Let's use Proportional Integral Control on the Motors to keep them in Sync\n if i == 1 :\n if curr_val[0] <> 0 and curr_val[1] <>0 : \n if last_encod[0]<>0 and last_encod[1] <>1 :\n if abs(last_encod[0] - init_val[0]) < abs(last_encod[1] - init_val[1]) :\n #Motor 1 is going faster\n delta = abs(curr_val[1]-last_encod[1]) - abs(curr_val[0]-last_encod[0])\n idelta = (abs(curr_val[1]-init_val[1]) - abs(curr_val[0]-init_val[0]))/alpha\n if debug:\n print \"Motor 1 is faster by \" + `delta`\n print \"last_encod = \" + `last_encod[0]` + \" , \" + `last_encod[1]`\n print \"idelta = \" + `idelta`\n print \"Current Encode = \" + `curr_val[0]` + \" , \" + `curr_val[1]`\n\n if int(abs(BrickPi.MotorSpeed[port[0]])) == 255 :\n #Motor 0 CANNOT be sped up\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to slow down Motor 1\n if int(abs(BrickPi.MotorSpeed[port[1]]-smulti)) <= 255 : \n #Target speed is inside the bounds of Motor speed\n BrickPi.MotorSpeed[port[1]] = int (BrickPi.MotorSpeed[port[1]]-smulti)\n elif int (BrickPi.MotorSpeed[port[1]]-smulti) < 0 :\n #Target speed is outside the bounds of -255 to 255\n BrickPi.MotorSpeed[port[1]] = -255\n else 
:\n BrickPi.MotorSpeed[port[1]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 1 speed : \" + `BrickPi.MotorSpeed[port[1]]`\n print \"Speed Multiplier : \" + `smulti`\n\n else :\n #Motor 0 CAN be sped up\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to speed up Motor 0\n if int(abs(BrickPi.MotorSpeed[port[0]]+smulti)) <= 255 : \n #Target speed is inside the bounds of Motor speed\n BrickPi.MotorSpeed[port[0]] = int (BrickPi.MotorSpeed[port[0]]+smulti)\n elif int (BrickPi.MotorSpeed[port[0]]+smulti) < 0 :\n #Target speed is outside the bounds of -255 to 255\n BrickPi.MotorSpeed[port[0]] = -255 \n else :\n BrickPi.MotorSpeed[port[0]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 0 speed : \" + `BrickPi.MotorSpeed[port[0]]`\n print \"Speed Multiplier : \" + `smulti`\n\n\n elif (last_encod[0] - curr_val[0]) > abs(last_encod[1] - curr_val[1]) :\n #Motor 0 is going faster\n delta= abs(curr_val[0]-last_encod[0])- abs(curr_val[1]-last_encod[1]) \n idelta = (abs(curr_val[0]-init_val[0]) - abs(curr_val[1]-init_val[1]))/alpha\n if debug :\n print \"Motor 0 is faster by \" + `delta`\n print \"last_encod = \" + `last_encod[0]` + \" , \" + `last_encod[1]`\n print \"idelta = \" + `idelta`\n print \"Current Encode = \" + `curr_val[0]` + \" , \" + `curr_val[1]`\n\n if abs(BrickPi.MotorSpeed[port[1]]) == 255 :\n #Motor 1 CANNOT be sped up, SLOW DOWN Motor 0\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to slow down Motor 0\n if int(abs(BrickPi.MotorSpeed[port[0]]-smulti)) <= 255 :\n #Target speed is inside the bounds of Motor\n BrickPi.MotorSpeed[port[0]] = int (BrickPi.MotorSpeed[port[0]]-smulti)\n elif int (BrickPi.MotorSpeed[port[0]]-smulti) < 0 :\n #Target speed is outside the -255 to 255 bounds\n BrickPi.MotorSpeed[port[0]] = -255\n else : \n BrickPi.MotorSpeed[port[0]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 0 speed : \" + `BrickPi.MotorSpeed[port[0]]`\n print \"Speed Multiplier : \" + `smulti`\n\n else :\n #Motor 1 CAN be sped up SPEED UP Motor 1\n smulti=BrickPi.MotorSpeed[port[0]]*delta*gain+idelta*gain\n #Speed Multiplier: the amount we want to speed up Motor 1\n if int(abs (BrickPi.MotorSpeed[port[1]]+smulti)) <= 255 :\n #Target speed is inside the bounds of Motor\n BrickPi.MotorSpeed[port[1]] = int (BrickPi.MotorSpeed[port[1]]+smulti)\n elif int (BrickPi.MotorSpeed[port[1]]+smulti) < 0 :\n #Target speed is outside the -255 to 255 bounds\n BrickPi.MotorSpeed[port[1]] = -255\n else :\n BrickPi.MotorSpeed[port[1]] = 255\n BrickPiUpdateValues()\n if debug : \n print \"Motor 1 speed : \" + `BrickPi.MotorSpeed[port[1]]`\n print \"Speed Multiplier : \" + `smulti`\n \n last_encod[0] = curr_val[0]\n last_encod[1] = curr_val[1]\n BrickPi.MotorEnable[MOTOR1] = 1\n BrickPi.MotorEnable[MOTOR2] = 1\n return 0", "def test_get_speed_limit():\n center = Coordinates(1 , 1)\n radius = 10\n speed_limit = 20\n\n assert get_speed_limit(center, radius, speed_limit) != center\n assert get_speed_limit(center, radius, speed_limit) != radius\n assert get_speed_limit(center, radius, speed_limit) == speed_limit", "def azimuth_update(self):\n self.current_azimuth = self.azimuth_encoder.get_degrees()\n azimuth_error = self.azimuth - float(self.current_azimuth)\n # print('goal azimuth', self.azimuth, 'current azimuth', self.azimuth_encoder.get_degrees(), 'difference in azimuth', azimuth_error)\n if azimuth_error >0:\n # print('positive azimuth')\n 
self.azimuth_motor.set_direction(1)\n elif azimuth_error > 0:\n # print('negative azimuth')\n self.azimuth_motor.set_direction(0)\n azimuth_error = abs(azimuth_error)\n self.azimuth_error = azimuth_error\n if azimuth_error >= 0:\n self.azimuth_motor.set_speed(0)\n if azimuth_error >= 35:\n self.azimuth_motor.set_speed(1)\n if azimuth_error >= 40:\n self.azimuth_motor.set_speed(2)\n if azimuth_error >= 80:\n self.azimuth_motor.set_speed(3)\n if azimuth_error >= 160:\n self.azimuth_motor.set_speed(4)\n if azimuth_error >= 280:\n self.azimuth_motor.set_speed(5)\n self.azimuth_error = azimuth_error\n print('debug_azimuth', self.current_azimuth, self.azimuth_error, self.azimuth_motor.speed)\n return self.azimuth_error", "def test_Z_start(self):\t\t\n self.assertAlmostEqual(attempt.Z[0], 40)", "def test_out_follows_rates(self):\n tmax = 40.0\n dt = 0.1\n\n tutor = SimpleNeurons(2, out_fct=lambda _: [100.0, 60.0])\n reward = MockReward(lambda _: 0.0)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=80.0, learning_rate=0.1,\n use_tutor_baseline=False)\n\n nsteps = int_r(tmax/dt)\n tutor_rule.rates = np.zeros((nsteps, 2))\n\n tutor_rule.rates[:, 0] = np.linspace(0, 1, nsteps)\n tutor_rule.rates[:, 1] = np.linspace(1, 0, nsteps)\n\n M = simulation.StateMonitor(tutor_rule, 'out')\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, M, dt=dt)\n sim.run(tmax)\n\n self.assertLess(np.max(np.abs(M.out[0] - np.linspace(0, 1, nsteps))), 1e-6)\n self.assertLess(np.max(np.abs(M.out[1] - np.linspace(1, 0, nsteps))), 1e-6)", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def _case4_test_failures_actual_rate_leads_target_rate(mock_get_next_ripe_time):\n mock_get_next_ripe_time.side_effect = (\n _mock_get_next_ripe_time_actual_rate_leads(rl)\n )\n\n advancer = self.__create_fake_clock_advancer_thread(\n rl, [threading.currentThread()]\n )\n advancer.start()\n\n counter = 0\n while True:\n token = rl.acquire_token()\n\n # Actual rate is always None because\n old_target_rate = rl._current_cluster_rate\n old_actual_rate = rl._get_actual_cluster_rate()\n if old_actual_rate is not None:\n self.assertGreater(old_actual_rate, old_target_rate)\n\n # Token grant a 100 initial successes followed by all failures\n counter += 1\n if counter <= required_successes:\n rl.release_token(token, True)\n continue\n else:\n rl.release_token(token, False)\n\n # assert that the new rate is calculated based on the (higher/more conservative) actual rate\n if backoff_factor * old_target_rate > min_cluster_rate:\n self.assertEqual(\n round(rl._current_cluster_rate, 2),\n round(backoff_factor * old_target_rate, 2),\n )\n else:\n # assert that new rate never goes lower than min rate\n self.assertEqual(rl._current_cluster_rate, min_cluster_rate)\n break\n advancer.stop(wait_on_join=False)", "def spin_until_facing(self, signature, x, delta, speed):\n print('testing spin until')\n while True:\n print('in while loop')\n if self.robot.sensor_system.camera.get_biggest_blob().center.x > \\\n x + delta:\n print('turn left until', self.robot.sensor_system.camera\n .get_biggest_blob().center.x,\n (x - delta))\n self.robot.drive_system.right_motor.turn_on(speed)\n self.robot.drive_system.left_motor.turn_on((-speed))\n\n\n elif self.robot.sensor_system.camera.get_biggest_blob().center.x \\\n < x - delta:\n print('turn right until',\n self.robot.sensor_system.camera.get_biggest_blob().center.x,\n (x + 
delta))\n self.robot.drive_system.right_motor.turn_on(-speed)\n self.robot.drive_system.left_motor.turn_on(speed)\n\n\n else:\n self.robot.drive_system.right_motor.turn_off()\n self.robot.drive_system.left_motor.turn_off()\n break", "def test_scenario(timestep_per_pi, int_method):\n\n #determine BC and IC\n x0 = 0.0 #init pos\n v0 = 1.0 #init vel\n t0 = 0.0 #start-time\n tn = 4.0*np.pi #end-time\n tau = timestep_per_pi*np.pi #timesteps\n n = (tn-t0)/tau + 1 #number of timesteps\n \n time = np.linspace(t0, tn, n) #time-array\n\n #acceleration of point particle with k=m=1\n acc1 = lambda x,v,t: -1.0*x #function must take three arguments!\n\n pos, vel, time = integrate_time(func=acc1,\n init=(x0,v0),\n timearray=time,\n method=int_method)\n\n #analytical solutions\n pos_an = np.sin(time)\n vel_an = np.cos(time)\n\n return time, pos, pos_an, vel, vel_an", "def __init__(self, init_pose=None, init_velocities=None, \n init_angle_velocities=None, runtime=5., target_pos=None):\n # Simulation\n self.success=0\n self.success_rate=deque(maxlen=10) # to record agent's success/failure for 10 consecutive episodes \n self.sim = PhysicsSim(init_pose, init_velocities, init_angle_velocities, runtime) \n self.action_repeat = 3\n\n self.state_size = self.action_repeat * 6\n self.action_low = 0\n self.action_high = 900\n self.action_size = 4\n self.runtime=runtime\n self.x_lower_bound=self.sim.lower_bounds[0]\n self.y_lower_bound=self.sim.lower_bounds[1]\n self.z_lower_bound=self.sim.lower_bounds[2]\n self.x_upper_bound=self.sim.upper_bounds[0]\n self.y_upper_bound=self.sim.upper_bounds[1]\n self.z_upper_bound=self.sim.upper_bounds[2]\n \n #Initial pos\n self.start_pos=init_pose[:3]\n \n # Goal\n self.target_pos = target_pos if target_pos is not None else np.array([0., 0., 10.]) \n \n # distance between initial position and target position \n self.total_distance= (np.dot(self.target_pos-self.start_pos, self.target_pos-self.start_pos))**(0.5)\n # target_margin : if the quad is within a distance of target_margin from the target, then it is declared successful \n self.target_margin=self.total_distance/50", "def rotate(self):\n pass", "def manual_control_once(\n self, rotation: int, velocity: float, duration: int=1500):\n number_of_tries = 3\n self.manual_start()\n while number_of_tries > 0:\n if self.status().state_code == 7:\n time.sleep(5)\n self.manual_control(rotation, velocity, duration)\n time.sleep(5)\n return self.manual_stop()\n\n time.sleep(2)\n number_of_tries -= 1", "def test_motor_error(self):\n # reproducible arbitrariness\n np.random.seed(12325)\n\n nsteps = 10\n nchan = 3\n tmax = nsteps*self.dt\n sequence = np.random.randn(nsteps, self.N)\n\n target = np.random.randn(nchan, nsteps)\n controller = LinearController(self.G, target, tau=None)\n controller.W = np.random.randn(*controller.W.shape)\n\n self.G.out_fct = lambda i: sequence[i]\n\n class MotorErrorGrabber(object):\n def __init__(self, target):\n self.target = target\n self.order = 10\n \n def prepare(self, tmax, dt):\n nsteps = int_r(tmax/dt)\n self.motor_error = np.zeros((nsteps, self.target.N))\n\n def evolve(self, t, dt):\n i = int_r(t/dt)\n self.motor_error[i, :] = self.target.get_motor_error()\n\n M = MotorErrorGrabber(controller)\n M1 = simulation.StateMonitor(controller, 'out')\n\n sim = simulation.Simulation(self.G, controller, M, M1, dt=self.dt)\n sim.run(tmax)\n\n for i in xrange(int_r(tmax/self.dt)):\n diff = M1.out[:, i] - target[:, i]\n self.assertTrue(np.allclose(M.motor_error[i], diff))", "def 
test_SetMultipleMovingLoadsWithVelocityFunctionConfigurationNegative(self):\n\n # create nodes\n second_coord = [1, 0, 0.0]\n self.mp.CreateNewNode(1,0.0,0.0,0.0)\n self.mp.CreateNewNode(2,second_coord[0],second_coord[1],0.0)\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [1,2], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n parameters[\"velocity\"]=KratosMultiphysics.Parameters(self.time_dependent_velocity)\n parameters.AddVector(\"configuration\", [-0.25])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.25)\n\n process = GMA.SetMultipleMovingLoadsProcess(self.mp, parameters)\n cond = self.cmp.GetCondition(2)\n\n # initialise and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0, 0)\n rhs = KratosMultiphysics.Vector(0)\n\n # set load on node\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, 0.0, 0.0, 0.0])\n\n # change time and recalculate load\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, 0.0, 0.0, 0.0])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0.5)\n\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -2.0, 0.0, 0.0])", "def test_moist_lapse_uniform():\n temp = moist_lapse(np.array([900., 900., 900.]) * units.hPa, 20. * units.degC)\n assert_almost_equal(temp, np.array([20., 20., 20.]) * units.degC, 7)", "def testRadial(self):\n self.doTest(afwGeom.makeRadialTransform([0, 1.01, 1e-7]))", "def testRadial(self):\n self.doTest(afwGeom.makeRadialTransform([0, 1.01, 1e-7]))", "def run(self, curr_x, goal_x, curr_y, goal_y, curr_z, goal_z, curr_rot_z, goal_rot_z, current_time=None):\n \n error_x = goal_x - curr_x\n error_y = goal_y - curr_y\n error_z = goal_z - curr_z\n error_rot_z = goal_rot_z - curr_rot_z\n\n self.output_x = 0.0\n self.output_y = 0.0\n self.output_z = 0.0\n self.output_rot_z = 0.0\n\n # self.current_time = current_time if current_time is not None else rospy.get_rostime()\n # self.current_time = time.time()\n self.current_time = rospy.get_time()\n delta_time = self.current_time - self.last_time\n delta_error_x = error_x - self.last_error_x\n delta_error_y = error_y - self.last_error_y\n delta_error_z = error_z - self.last_error_z\n delta_error_rot_z = error_rot_z - self.last_error_rot_z\n\n # print(\"delta_time:\", delta_time)\n\n if (delta_time >= self.sample_time):\n self.PTerm_x = self.Kp_x * error_x\n self.PTerm_y = self.Kp_y * error_y\n self.PTerm_z = self.Kp_z * error_z\n self.PTerm_rot_z = self.Kp_rotz * error_rot_z\n\n self.ITerm_x += error_x * delta_time\n self.ITerm_y += error_y * delta_time\n self.ITerm_z += error_z * delta_time\n self.ITerm_rot_z += error_rot_z * delta_time\n\n if (self.ITerm_x < -self.windup_guard):\n self.ITerm_x = -self.windup_guard\n elif (self.ITerm_x > self.windup_guard):\n self.ITerm_x = self.windup_guard\n \n if (self.ITerm_y < -self.windup_guard):\n self.ITerm_y = -self.windup_guard\n elif (self.ITerm_y > self.windup_guard):\n self.ITerm_y = self.windup_guard\n\n if (self.ITerm_z < -self.windup_guard):\n self.ITerm_z = 
-self.windup_guard\n elif (self.ITerm_z > self.windup_guard):\n self.ITerm_z = self.windup_guard\n \n if (self.ITerm_rot_z < -self.windup_guard):\n self.ITerm_rot_z = -self.windup_guard\n elif (self.ITerm_rot_z > self.windup_guard):\n self.ITerm_rot_z = self.windup_guard\n\n self.DTerm_x = 0.0\n self.DTerm_y = 0.0\n self.DTerm_z = 0.0\n self.DTerm_rot_z = 0.0\n if delta_time > 0:\n self.DTerm_x = delta_error_x / delta_time\n self.DTerm_y = delta_error_y / delta_time\n self.DTerm_z = delta_error_z / delta_time\n self.DTerm_rot_z = delta_error_rot_z / delta_time\n\n\n # Remember last time and last error for next calculation\n self.last_time = self.current_time\n self.last_error_x = error_x\n self.last_error_y = error_y\n self.last_error_z = error_z\n self.last_error_rot_z = error_rot_z\n\n self.output_x = self.PTerm_x + (self.Ki_x * self.ITerm_x) + (self.Kd_x * self.DTerm_x)\n self.output_y = self.PTerm_y + (self.Ki_y * self.ITerm_y) + (self.Kd_y * self.DTerm_y)\n self.output_z = self.PTerm_z + (self.Ki_z * self.ITerm_z) + (self.Kd_z * self.DTerm_z)\n self.output_rot_z = self.PTerm_rot_z + (self.Ki_rotz * self.ITerm_rot_z) + (self.Kd_rotz * self.DTerm_rot_z)\n \n return self.output_x, self.output_y, self.output_z, self.output_rot_z", "def steps_to_angle():\n pass", "def test_minvar_rotation(self):\n vrot, v, w = minvar(self.rdata)\n # Determinant of rotation matrix should be = 1\n self.assertTrue((np.linalg.det(v) - 1) < self.tol)", "def test_relaxation_end(self):\n tau = 50.0\n mrate = 40.0\n Mrate = 120.0\n\n tmax = 50.0\n dt = 0.1\n relaxation = 20.0\n\n tutor = SimpleNeurons(2, out_fct=lambda _: Mrate*np.random.rand())\n reward = MockReward(lambda t: np.sin(10*t/tmax))\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=tau,\n constrain_rates=True, min_rate=mrate, max_rate=Mrate,\n learning_rate=0.1, relaxation=relaxation, use_tutor_baseline=False)\n\n # reproducible arbitrariness\n np.random.seed(1)\n\n M = simulation.StateMonitor(tutor_rule, 'out')\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, M, dt=dt)\n sim.run(tmax)\n\n mask = (M.t > tmax - relaxation/2)\n mavg = 0.5*(mrate + Mrate)\n\n self.assertAlmostEqual(np.mean(np.abs(M.out[:, mask] - mavg)), 0.0)", "def test_dry_lapse_2_levels():\n temps = dry_lapse(np.array([1000., 500.]) * units.mbar, 293. * units.kelvin)\n assert_array_almost_equal(temps, [293., 240.3583] * units.kelvin, 4)", "def rotates(self, maze, game_display):\n if self.lidars[0].get_sense() <= self.lidars[0].radius // 3:\n if uniform(0, 1) > 0.7:\n self.rotate_right(angle=45, maze=maze, game_display=game_display)\n else:\n self.rotate_left(angle=45, maze=maze, game_display=game_display)\n # fix to left.\n if self.lidars[1].get_sense() <= 2 * self.lidars[1].radius // 3:\n self.rotate_left(angle=10, maze=maze, game_display=game_display)\n # fix to right.\n if self.lidars[2].get_sense() <= 2 * self.lidars[0].radius // 3:\n self.rotate_right(angle=10, maze=maze, game_display=game_display)" ]
[ "0.6849133", "0.66332906", "0.64928585", "0.63230497", "0.621054", "0.6208657", "0.6164278", "0.59979725", "0.5994086", "0.5906198", "0.5899471", "0.5757551", "0.5706605", "0.5610273", "0.56079453", "0.560449", "0.56034946", "0.5596018", "0.55945164", "0.55886596", "0.5548625", "0.5537282", "0.5536592", "0.55294806", "0.5526241", "0.5522117", "0.55012316", "0.5498065", "0.54821676", "0.54731655", "0.5469154", "0.5459952", "0.5453098", "0.5445251", "0.5442013", "0.5436042", "0.5431785", "0.5416726", "0.54153585", "0.5394771", "0.5389504", "0.53774446", "0.53694946", "0.5355981", "0.53420925", "0.53390455", "0.5322644", "0.5320587", "0.5318078", "0.53067756", "0.5301165", "0.5300889", "0.5289858", "0.5266019", "0.5261372", "0.5258688", "0.524941", "0.5235317", "0.52340555", "0.52337456", "0.5233659", "0.5231174", "0.5221744", "0.5216228", "0.51950157", "0.51938325", "0.5189249", "0.5187326", "0.5186245", "0.5173007", "0.5167795", "0.51625454", "0.5162328", "0.51568013", "0.5149271", "0.51435804", "0.513851", "0.51379776", "0.5137098", "0.51281655", "0.51152277", "0.5108562", "0.51002556", "0.5094182", "0.5093885", "0.5091225", "0.5085919", "0.5085105", "0.5080977", "0.50708", "0.50681424", "0.5062206", "0.50564873", "0.5054508", "0.5054508", "0.50522226", "0.50478077", "0.50472933", "0.5041763", "0.5041445", "0.5035468" ]
0.0
-1
Tests of try rotation with compress in configuration
def test_process_log_with_compress_in_configuration(self):
    with tempfile.TemporaryDirectory() as sandbox:
        with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:
            srcfile = Path(sandbox, 'pokus.log')
            srcfile.touch()
            destfile = Path(sandbox, 'backup', 'pokus.log')
            compressors = process_log(
                datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),
                {
                    'target': '{{path}}/backup/{{name}}.{{ext}}',
                    'interval': 'hourly',
                    'compress': 'gzip -9'
                },
                'hourly',
                str(srcfile),
                10
            )
            self.assertEqual(compressors, [[sandbox, 'gzip', '-9', str(destfile)]])
            self.assertFalse(srcfile.exists())
            self.assertTrue(destfile.exists())
            self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... rotating... "{src}" -> "{dest}" done.\n'.format(src=srcfile, dest=destfile))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_compress_works(self):\n tau = 45.0\n mrate = 60.0\n Mrate = 100.0\n gain = 5\n\n tmax = 50.0\n dt = 0.2\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n self.rule.gain = gain\n\n self.motor.error_fct = lambda t: (int_r(t/20.0)%3-1)*np.ones(self.Nsrc)\n\n M1 = simulation.StateMonitor(self.rule, 'out')\n\n sim1 = simulation.Simulation(self.source, self.motor, self.rule, M1, dt=dt)\n sim1.run(tmax)\n\n # make sure we normally go outside the range\n self.assertGreater(np.sum(M1.out < mrate), 0)\n self.assertGreater(np.sum(M1.out > Mrate), 0)\n\n self.rule.compress_rates = True\n\n M2 = simulation.StateMonitor(self.rule, 'out')\n\n sim2 = simulation.Simulation(self.source, self.motor, self.rule, M2, dt=dt)\n sim2.run(tmax)\n\n self.assertEqual(np.sum(M2.out < mrate), 0)\n self.assertEqual(np.sum(M2.out > Mrate), 0)", "def test_compress():\n print('Testing compress')\n\n # Cases given to test this problem\n assert_equals('c1o17l1k1a1n1g1a1r1o2',\n hw1.compress('cooooooooooooooooolkangaroo'))\n assert_equals('a3', hw1.compress('aaa'))\n assert_equals('', hw1.compress(''))\n\n # Additional cases to test this problem\n assert_equals('a1p2l1e1', hw1.compress('apple'))\n assert_equals('g1o6d1a1w1g4s3', hw1.compress('goooooodawggggsss'))", "def test_auto_compression():\n with dask.config.set({\"test123\": \"auto\"}):\n try:\n import lz4 # noqa: F401\n\n assert get_compression_settings(\"test123\") == \"lz4\"\n return\n except ImportError:\n pass\n\n try:\n import snappy # noqa: F401\n\n assert get_compression_settings(\"test123\") == \"snappy\"\n except ImportError:\n assert get_compression_settings(\"test123\") is None", "def test_compression_tanh(self):\n tau = 48.0\n mrate = 60.0\n Mrate = 100.0\n gain = 5\n\n tmax = 50.0\n dt = 0.2\n\n self.rule.tau = tau\n self.rule.min_rate = mrate\n self.rule.max_rate = Mrate\n self.rule.compress_rates = False\n self.rule.gain = gain\n\n self.motor.error_fct = lambda t: (int_r(t/20.0)%3-1)*np.ones(self.Nsrc)\n\n M1 = simulation.StateMonitor(self.rule, 'out')\n\n sim1 = simulation.Simulation(self.source, self.motor, self.rule, M1, dt=dt)\n sim1.run(tmax)\n\n self.rule.compress_rates = True\n\n M2 = simulation.StateMonitor(self.rule, 'out')\n\n sim2 = simulation.Simulation(self.source, self.motor, self.rule, M2, dt=dt)\n sim2.run(tmax)\n\n mavg = 0.5*(mrate + Mrate)\n mdiff = 0.5*(Mrate - mrate)\n\n expected = mavg + mdiff*np.tanh((M1.out - mavg)/mdiff)\n\n self.assertTrue(np.allclose(M2.out, expected), msg=\n \"mean(abs(out - expected))={}\".format(np.mean(np.abs(M2.out - expected))))", "def test_compress(self):\n self.logger.info(\"STEP: Create the workspace directory to be compressed.\")\n workspace = Workspace(Mock)\n directory = Path.cwd().joinpath(\"workspace\")\n directory.mkdir()\n workspace.workspace = directory\n\n # Create a file to verify compression.\n directory.joinpath(\"file.txt\").touch()\n\n test_folder = Path.cwd().joinpath(\"testfolder\")\n test_folder.mkdir()\n self.items.append(test_folder)\n\n self.logger.info(\"STEP: Compress the directory.\")\n workspace.compress()\n\n self.logger.info(\n \"STEP: Verify that the directory was compressed using the gztar format.\"\n )\n self.items.append(test_folder)\n compressed_workspace = Path.cwd().joinpath(\"workspace.tar.gz\")\n unpack_archive(compressed_workspace, test_folder, format=\"gztar\")\n compressed_file = test_folder.joinpath(\"workspace/file.txt\")\n self.assertTrue(compressed_file.exists() and 
compressed_file.is_file())", "def test_compress_deterministic(self):\n\n class DeterministicGZipMiddleware(GZipMiddleware):\n max_random_bytes = 0\n\n r1 = DeterministicGZipMiddleware(self.get_response)(self.req)\n r2 = DeterministicGZipMiddleware(self.get_response)(self.req)\n self.assertEqual(r1.content, r2.content)\n self.assertEqual(self.get_mtime(r1.content), 0)\n self.assertEqual(self.get_mtime(r2.content), 0)", "def test_backup_with_compress_flag(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size,\n end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backupset.backup_compressed = False\n self.backup_cluster()\n no_compression = self.get_database_file_info()\n self.log.info(\"\\nDelete old backup and do backup again with compress flag\")\n self.backup_create()\n self.backupset.backup_compressed = self.input.param(\"backup-compressed\", False)\n self.backup_cluster()\n with_compression = self.get_database_file_info()\n self.validate_backup_compressed_file(no_compression, with_compression)", "def _check_rotated_filename_candidates(self):\n # savelog(8)\n candidate = \"%s.0\" % self.filename\n if (exists(candidate) and exists(\"%s.1.gz\" % self.filename) and\n (stat(candidate).st_mtime > stat(\"%s.1.gz\" % self.filename).st_mtime)):\n return candidate\n\n # logrotate(8)\n # with delaycompress\n candidate = \"%s.1\" % self.filename\n if exists(candidate):\n return candidate\n\n # without delaycompress\n candidate = \"%s.1.gz\" % self.filename\n if exists(candidate):\n return candidate\n\n rotated_filename_patterns = (\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\",\n # for TimedRotatingFileHandler\n \".[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\",\n )\n for rotated_filename_pattern in rotated_filename_patterns:\n candidates = glob.glob(self.filename + rotated_filename_pattern)\n if candidates:\n candidates.sort()\n return candidates[-1] # return most recent\n\n # no match\n return None", "def test_compressed(self):\n try:\n import zlib\n except ImportError:\n self.skipTest('zlib is missing')\n\n ba = amf3.ByteArray()\n\n self.assertFalse(ba.compressed)\n\n z = zlib.compress(b'b' * 100)\n ba = amf3.ByteArray(z)\n\n self.assertTrue(ba.compressed)\n\n z = zlib.compress(b'\\x00' * 100)\n ba = amf3.ByteArray(z)\n\n self.assertTrue(ba.compressed)", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def perform_tests():\n print \"\\n****\\nTesting Doublecompress...\\n\"\n dc_pass = unit_doublecompress()\n if (dc_pass):\n result = 'PASS'\n else:\n result = 'FAIL'\n print \">>> \" + result\n\n return dc_pass", "def test_tamper_mutate_compress(logger):\n backup = copy.deepcopy(actions.tamper.ACTIVATED_PRIMITIVES)\n actions.tamper.ACTIVATED_PRIMITIVES = [\"compress\"]\n try:\n tamper = actions.tamper.TamperAction(None)\n assert tamper.parse(\"TCP:flags:corrupt\", logger)\n tamper._mutate_tamper_type()\n assert 
tamper.tamper_type == \"compress\"\n assert tamper.tamper_proto_str == \"DNS\"\n assert tamper.field == \"qd\"\n packet = layers.packet.Packet(IP()/TCP()/DNS()/DNSQR())\n packet2 = tamper.tamper(packet, logger)\n assert packet2 == packet\n finally:\n actions.tamper.ACTIVATED_PRIMITIVES = backup", "def test_op_no_compression(self):\n assert OP_NO_COMPRESSION == 0x20000", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_compress_cmd():\n # GIVEN a cli runner\n runner = CliRunner()\n # WHEN running the compress command with dry_run\n result = runner.invoke(compress, obj={})\n # THEN assert the command was succesful even without a valid api\n assert result.exit_code == 0", "def test_compression_level(self):\n test_compression_level = 8\n self.encoder._compression_level = test_compression_level", "def test_compress_spring(spring_tmp_path, first_tmp_file, second_tmp_file, spring_api):\n # GIVEN a spring api\n # GIVEN two existing fastq reads\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n # GIVEN a spring path that does not exist\n assert not spring_tmp_path.exists()\n\n # WHEN compressing fastq files into the spring file\n res = spring_api.compress(first_tmp_file, second_tmp_file, spring_tmp_path)\n\n # THEN assert that process was succesful\n assert res is True\n # THEN assert that the spring compression exists\n assert spring_tmp_path.exists()", "def test_mcg_data_compression(\n self, mcg_obj, awscli_pod, bucket_factory, bucketclass_dict\n ):\n download_dir = \"/aws/compression/\"\n awscli_pod.exec_cmd_on_pod(\n command=craft_s3_command(\n f\"cp s3://{constants.TEST_FILES_BUCKET}/enwik8 {download_dir}\"\n ),\n out_yaml_format=False,\n )\n bucketname = bucket_factory(1, bucketclass=bucketclass_dict)[0].name\n full_object_path = f\"s3://{bucketname}\"\n sync_object_directory(awscli_pod, download_dir, full_object_path, mcg_obj)\n # For this test, enwik8 is used in conjunction with Snappy compression\n # utilized by NooBaa. 
Snappy consistently compresses 35MB of the file.\n mcg_obj.check_data_reduction(bucketname, 35 * 1024 * 1024)", "def test_compress_2(self):\n text = 'abcdefdeabc'\n actual = LZ77.compress(text)\n expected = bytearray([3]) + bytearray(b'abcdef')\\\n + bytearray([0, 32]) + bytearray([0, 113])\n self.assertEqual(actual, expected)", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_compress_fastq_real_with_integrity_fail(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context, mocker\n):\n # GIVEN the path to a existing two existing fastq files and a non existing spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n mocker.patch.object(compare_cmd, \"compare_elements\")\n compare_cmd.compare_elements.return_value = False\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 1\n # THEN assert that the spring file was deleted\n assert not spring_tmp_path.exists()\n # THEN assert that only the original fastq files are left\n assert nr_files(dir_path) == 2", "def _should_compress(new_descriptor: Union[FileDescriptor, StreamDescriptor], ingestion_properties: IngestionProperties) -> bool:\n return not new_descriptor.is_compressed and ingestion_properties.format.compressible", "def check_compression(ctype, clevel, olevel):\n repository = Repository(archiver.repository_path, exclusive=True)\n with repository:\n manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)\n state = None\n while True:\n ids, state = repository.scan(limit=LIST_SCAN_LIMIT, state=state)\n if not ids:\n break\n for id in ids:\n chunk = repository.get(id, read_data=True)\n meta, data = manifest.repo_objs.parse(id, chunk) # will also decompress according to metadata\n m_olevel = meta.get(\"olevel\", -1)\n m_psize = meta.get(\"psize\", -1)\n print(\n hexlify(id).decode(),\n meta[\"ctype\"],\n meta[\"clevel\"],\n meta[\"csize\"],\n meta[\"size\"],\n m_olevel,\n m_psize,\n )\n # this is not as easy as one thinks due to the DecidingCompressor choosing the smallest of\n # (desired compressed, lz4 compressed, not compressed).\n assert meta[\"ctype\"] in (ctype, LZ4.ID, CNONE.ID)\n assert meta[\"clevel\"] in (clevel, 255) # LZ4 and CNONE has level 255\n if olevel != -1: # we expect obfuscation\n assert \"psize\" in meta\n 
assert m_olevel == olevel\n else:\n assert \"psize\" not in meta\n assert \"olevel\" not in meta", "def test_compress_response(self):\n r = GZipMiddleware(self.get_response)(self.req)\n self.assertEqual(self.decompress(r.content), self.compressible_string)\n self.assertEqual(r.get(\"Content-Encoding\"), \"gzip\")\n self.assertEqual(r.get(\"Content-Length\"), str(len(r.content)))", "def test_compress_fastq_real_with_integrity(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context\n):\n # GIVEN the path to a existing two existing fastq files and a non existing spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 0\n # THEN assert that the spring file was created\n assert spring_tmp_path.exists()\n # THEN assert that the files created for integrity check was removed\n assert nr_files(dir_path) == 3", "def test_compress_fastq_dry_run_integrity(first_read, second_read):\n # GIVEN the path to a existing bam file and a cli runner\n runner = CliRunner()\n assert first_read.exists()\n assert second_read.exists()\n # WHEN running the compress command with dry_run\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_read),\n \"--second-read\",\n str(second_read),\n \"--dry-run\",\n \"--check-integrity\",\n ],\n obj={},\n )\n # THEN assert the command was succesful even without a valid api\n assert result.exit_code == 0", "def check_zlib():\n\n try:\n import zlib\n zlib.compress('Compress this')\n return True\n except Exception as ex:\n LOG.error(str(ex))\n LOG.error('Failed to import zlib module.')\n return False", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_no_compress_compressed_response(self):\n self.resp[\"Content-Encoding\"] = \"deflate\"\n r = GZipMiddleware(self.get_response)(self.req)\n self.assertEqual(r.content, self.compressible_string)\n self.assertEqual(r.get(\"Content-Encoding\"), 
\"deflate\")", "def recompress_fucntion(destination_file, tmp_folder):\n def confirm_new_files(ziplocation):\n \"\"\"\n test if the file.zip/cbz has the same\n amount of files as tmp_folder\n :param ziplocation: string\n :return: bool\n \"\"\"\n try:\n zf = ZipFile(ziplocation)\n filecontents = list(zf.namelist())\n except BadZipFile:\n os.remove(ziplocation)\n print('OUTPUT FILE BROKEN')\n return False\n\n for walk in os.walk(tmp_folder):\n files = [walk[0] + '/' + x for x in walk[2]]\n if len(filecontents) < len(files):\n os.remove(ziplocation)\n shutil.rmtree(tmp_folder)\n print('FILES MISSING')\n return False\n break\n\n return True\n\n zipfile = destination_file[0:-(len('.cbz'))]\n\n if platform.system() != \"Windows\":\n os.sync()\n\n shutil.make_archive(zipfile, 'zip', tmp_folder)\n zipfile += '.zip'\n\n if platform.system() != \"Windows\":\n os.sync()\n\n if not confirm_new_files(zipfile):\n return False\n\n if not os.path.exists(zipfile) or os.path.getsize(zipfile) == 0:\n print('WRITE OUTPUT ERROR')\n if os.path.exists(zipfile):\n os.remove(zipfile)\n\n return False\n\n shutil.move(zipfile, destination_file)\n\n return True", "def test_compress_fastq_real(\n first_read, second_read, spring_tmp_path, real_base_context\n):\n # GIVEN the path to a existing bam file and a cli runner\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n # WHEN running the compress command with dry_run\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_read),\n \"--second-read\",\n str(second_read),\n \"--spring-path\",\n str(spring_tmp_path),\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 0\n # THEN assert that the spring file was created\n assert spring_tmp_path.exists()", "def test_compress_bam_real_data(real_base_context, bam_tmp_file, cram_tmp_path):\n # GIVEN the path to a bam file, a existing outpath and a cli runner\n runner = CliRunner()\n assert not cram_tmp_path.exists()\n assert bam_tmp_file.exists()\n dir_path = cram_tmp_path.parent\n assert nr_files(dir_path) == 1\n cram_api = real_base_context[\"cram_api\"]\n\n # WHEN running the compress command\n res = runner.invoke(\n bam,\n [\"--bam-path\", str(bam_tmp_file), \"--cram-path\", str(cram_tmp_path)],\n obj=real_base_context,\n )\n # THEN the progam should abort since file already exists\n assert res.exit_code == 0\n # THEN check that the cram file was created\n assert cram_tmp_path.exists()\n # THEN assert that the cram index was created\n index_path = cram_api.get_index_path(cram_tmp_path)\n assert index_path.exists()\n # THEN assert that the three files exists. 
original bam, cram and cram index\n assert nr_files(dir_path) == 3", "def testUploadWrapperCorruption(self):\n # Check that small reads still work.\n encrypted_data = \"\"\n count = 0\n with self.assertRaisesRegexp(IOError, \"HMAC not verified\"):\n while 1:\n small_read = self.encrypt_wrapper.read(2)\n if not small_read:\n break\n encrypted_data += small_read\n count += len(small_read)\n\n # Corrupt the data a little bit.\n if count == 3000:\n small_read = \"XX\"\n\n self.decrypt_wrapper.write(small_read)", "def test_compress_bam_valid_outpath(base_context, bam_path):\n # GIVEN the path to a bam file, a non existing outpath and a cli runner\n outpath = \"a_file.cram\"\n runner = CliRunner()\n # WHEN running the compress command\n result = runner.invoke(\n bam, [\"--bam-path\", str(bam_path), \"--cram-path\", outpath], obj=base_context,\n )\n # THEN assert the command was succesful\n assert result.exit_code == 0", "def test_compress_fastq_valid_spring_file(\n first_read, second_read, spring_tmp_path, base_context\n):\n # GIVEN the path to a existing bam file and a cli runner\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n # WHEN running the compress command with dry_run\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_read),\n \"--second-read\",\n str(second_read),\n \"--spring-path\",\n str(spring_tmp_path),\n ],\n obj=base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 0", "def test_no_compress_incompressible_response(self):\n self.resp.content = self.incompressible_string\n r = GZipMiddleware(self.get_response)(self.req)\n self.assertEqual(r.content, self.incompressible_string)\n self.assertIsNone(r.get(\"Content-Encoding\"))", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_decompress_spring(spring_tmp_file, spring_api):\n # GIVEN a spring file, a spring api and two paths to fastq files that does not exist\n assert spring_tmp_file.exists()\n fastq_files = fastq_outpaths(spring_tmp_file)\n first_read = fastq_files[0]\n second_read = fastq_files[1]\n\n assert not first_read.exists()\n assert not second_read.exists()\n\n # WHEN decompressing the spring file to the fastq files\n res = spring_api.decompress(spring_tmp_file, first_read, second_read)\n\n # THEN assert that the fastq files have been created\n assert res is True\n assert first_read.exists()\n assert second_read.exists()", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def test_process_log_with_min_size_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {'target': '{{path}}/backup/{{name}}.{{ext}}', 'interval': 'hourly', 'min_size': 15},\n 'hourly',\n str(srcfile),\n 10\n )\n self.assertEqual(compressors, [])\n self.assertTrue(srcfile.exists())\n self.assertFalse(destfile.exists())\n 
self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... rotation not needed.\\n'.format(src=srcfile))", "def test_encrypt_success(self):\n fake_input_file = 'fake-input-file'\n fake_output_dir = 'fake-output-dir'\n fake_output_extension = '.fake-extension'\n\n with patch('iceit.crypto.gnupg.GPG') as mock_gpg:\n mock_gpg.return_value = mock_gpg\n with patch('iceit.crypto.open', mock_open(read_data='fake input file data'), create=True) as mock_open_obj:\n with patch('os.path.exists', return_value=True):\n encryptor = self.test_init()\n\n with patch('os.path.getsize', return_value=10):\n output_file_name = encryptor.encrypt(input_file=fake_input_file, output_dir=fake_output_dir,\n output_extension=fake_output_extension)\n\n self.assertEqual(1, mock_gpg.encrypt_file.call_count)\n\n # make sure the output file name is composed correctly\n (call_name, call_args, call_kwargs) = mock_gpg.encrypt_file.mock_calls[0]\n file_name = call_kwargs['output']\n self.assertTrue(file_name.startswith(fake_output_dir))\n self.assertTrue(fake_input_file in file_name)\n self.assertTrue(file_name.endswith(fake_output_extension))\n\n self.assertEqual(file_name, output_file_name)", "def test00(self):\n a = np.arange(20)\n cnames = bcolz.blosc_compressor_list()\n if common.verbose:\n print(\"Checking compressors:\", cnames)\n # print \"\\nsize b uncompressed-->\", a.size * a.dtype.itemsize\n for cname in cnames:\n b = bcolz.carray(a, rootdir=self.rootdir,\n cparams=bcolz.cparams(clevel=9, cname=cname))\n # print \"size b compressed -->\", b.cbytes, \"with '%s'\"%cname\n self.assertTrue(sys.getsizeof(b) > b.nbytes,\n \"compression does not seem to have any overhead\")\n assert_array_equal(a, b[:], \"Arrays are not equal\")\n # Remove the array on disk before trying with the next one\n if self.disk:\n common.remove_tree(self.rootdir)", "def compress_image(filename,k):", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n self.assertEqual(mock_shutil.move.call_count, 2)", "def test_archive_run(self):\n pass", "def test_compress_fastq_dry_run(first_read, second_read):\n # GIVEN the path to a existing bam file and a cli runner\n runner = CliRunner()\n assert first_read.exists()\n assert second_read.exists()\n # WHEN running the compress command with dry_run\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_read),\n \"--second-read\",\n str(second_read),\n \"--dry-run\",\n ],\n obj={},\n )\n # THEN assert the command was succesful even without a valid api\n assert result.exit_code == 0", "def testDecompress(self):\n decompressor = xz_decompressor.LZMADecompressor()\n\n compressed_data = (\n b']\\x00\\x00\\x80\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00*\\x1a\\t\\'d\\x1c'\n b'\\x87\\x8aO\\xcaL\\xf4\\xf8!\\xda\\x88\\xd8\\xff\\xff\\xeb\\xcc\\x00')\n\n uncompressed_data, _ = decompressor.Decompress(compressed_data)\n expected_uncompressed_data = b'This is a test.'\n self.assertEqual(uncompressed_data, expected_uncompressed_data)\n\n # Test to trigger lzma raising EOFError.\n with self.assertRaises(errors.BackEndError):\n decompressor.Decompress(b'This is a test.')\n\n # Test to trigger lzma raising IOError.\n decompressor = xz_decompressor.LZMADecompressor()\n\n with 
self.assertRaises(errors.BackEndError):\n decompressor.Decompress(b'This is a test.')", "def test_compress_non_200_response(self):\n self.resp.status_code = 404\n r = GZipMiddleware(self.get_response)(self.req)\n self.assertEqual(self.decompress(r.content), self.compressible_string)\n self.assertEqual(r.get(\"Content-Encoding\"), \"gzip\")", "def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")", "def test_compress_2_idenctical_char(self):\n text = 'aa'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'aa')\n self.assertEqual(actual, expected)", "def testCompressedSize(self):\n\n uncompressed_file = tempfile.NamedTemporaryFile(delete=False)\n for line in range(200):\n uncompressed_file.write(\n 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. '\n 'Sed eleifend')\n uncompressed_file.close()\n compressed_path = uncompressed_file.name + '.compressed'\n compressor_path = os.path.join(DIR_SOURCE_ROOT, 'third_party',\n 'fuchsia-sdk', 'sdk', 'tools', 'x64',\n 'blobfs-compression')\n subprocess.call([compressor_path, uncompressed_file.name, compressed_path])\n self.assertEqual(binary_sizes.CompressedSize(uncompressed_file.name),\n os.path.getsize(compressed_path))\n os.remove(uncompressed_file.name)\n os.remove(compressed_path)", "def test_compress_4_idenctical_char(self):\n text = 'bbbb'\n actual = LZ77.compress(text)\n expected = bytearray([32]) + bytearray(b'bb') + bytearray([0, 16])\n self.assertEqual(actual, expected)", "def test_verify_unzip(self):\n assert os.path.exists(\n os.path.join(\n settings.MEDIA_ROOT,\n \"indices\",\n \"test-index\",\n \"data\",\n \"sample.txt\"\n )\n )", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_5000_225(self):\n\n n_ed = 5000.0\n m_ed = 225.0\n\n as_1 = 4.578443 * 10 ** -4\n as_2 = 23.49285 * 10 ** -4\n\n n_rd, m_rd = compression_diagnostic.main(h, b, a1, a2, m_ed, n_ed, as_1, as_2, eta_bet, lambda_bet, f_cd, f_ck)\n self.assertAlmostEqual(n_rd, n_ed, 0)\n self.assertAlmostEqual(m_rd, m_ed, 0)", "def compression(self) -> str:\n ...", "def test_compress_cram(cram_tmp_path, bam_tmp_file, cram_api, cram_tmp_index_path):\n # GIVEN a existing bam file\n assert bam_tmp_file.exists()\n # GIVEN a non existing cram path\n assert not cram_tmp_path.exists()\n # GIVEN a non existing cram index path\n cram_tmp_index_path = pathlib.Path(cram_tmp_index_path)\n assert not cram_tmp_index_path.exists()\n # GIVEN a cram api\n\n # WHEN decompressing the cram file\n res = cram_api.compress(bam_path=bam_tmp_file, cram_path=cram_tmp_path)\n\n # THEN assert that the process executed with success\n assert res is True\n # THEN assert that the cram file was created\n assert cram_tmp_path.exists()\n # THEN assert that the cram index file was created\n assert cram_tmp_index_path.exists()", "def test_unarchive_run(self):\n pass", "def augmentImageByRotation(imagePath, numRotations, originalBin, data_path):\n angles = np.linspace(0, 360, numRotations + 1, endpoint=False)[1:]\n augmentedImages = 
[]\n rgb = cv2.imread(os.path.join(data_path, imagePath))\n dt = exrToNumpy(os.path.join(os.path.dirname(os.path.join(data_path, imagePath)), 'liver_0_dt.exr'))\n dl = exrToNumpy(os.path.join(os.path.dirname(os.path.join(data_path, imagePath)), 'liver_0_dl.exr'))\n newRatings = open(new_ratings_file_path, 'a')\n generated_images = 0\n for i, angle in enumerate(angles):\n # try different offsets if exact rotation does not give the same bin as the original image\n offsets = np.linspace(0, 10, 100, endpoint=False)\n newBin = None\n save_version = False\n for offset in offsets:\n rgb_r, dt_r, dl_r = rotate_image(rgb, dt, dl, angle + offset)\n # rate image\n rating, _ = rate_tumordistance_depth.rateImage(None, None, None, num_tumors, images=[rgb_r, dt_r, dl_r])\n newBin = getBinFromRating(rating, num_bins)\n # if bins match, save image\n if originalBin == newBin:\n save_version = True\n break\n if save_version:\n rotDir = os.path.join(augmentedDataPath, os.path.dirname(imagePath) + \"_rot\" + str(i))\n os.makedirs(rotDir)\n # save images to rotDir\n rgb_path = os.path.join(rotDir, 'liver_0.png')\n dt_path = os.path.join(rotDir, 'liver_0_dt.exr')\n dl_path = os.path.join(rotDir, 'liver_0_dl.exr')\n cv2.imwrite(rgb_path, rgb_r)\n save_exr_from_numpy(dt_path, dt_r)\n save_exr_from_numpy(dl_path, dl_r)\n # make entry in new ratings file\n save_path = os.path.relpath(rgb_path, data_path)\n newRatings.write(getRatingsLine(save_path, rating))\n generated_images += 1\n newRatings.close()\n if generated_images == 0:\n print \"Could not match bins. (\" + imagePath + \")\"\n return generated_images", "def test_compress_seq_diff_9_char(self):\n text = '123456789'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'12345678') \\\n + bytearray([0]) + bytearray(b'9')\n self.assertEqual(actual, expected)", "def test01a(self):\n a = np.arange(1e5)\n cnames = bcolz.blosc_compressor_list()\n if common.verbose:\n print(\"Checking compressors:\", cnames)\n # print \"\\nsize b uncompressed-->\", a.size * a.dtype.itemsize\n for cname in cnames:\n bcolz.cparams.setdefaults(clevel=9, cname=cname)\n b = bcolz.carray(a, rootdir=self.rootdir)\n # print \"size b compressed -->\", b.cbytes, \"with '%s'\"%cname\n self.assertTrue(sys.getsizeof(b) < b.nbytes,\n \"carray does not seem to compress at all\")\n assert_array_equal(a, b[:], \"Arrays are not equal\")\n # Remove the array on disk before trying with the next one\n if self.disk:\n common.remove_tree(self.rootdir)", "def test_is_spring_decompression_needed_when_true(\n populated_compress_spring_api: CompressAPI,\n analysis_store_single_case: Store,\n case_id: str,\n sample_id: str,\n):\n\n # GIVEN a populated prepare_fastq_api\n prepare_fastq_api = PrepareFastqAPI(\n store=analysis_store_single_case, compress_api=populated_compress_spring_api\n )\n # GIVEN a store with a case that has linked samples\n case_obj: Family = analysis_store_single_case.get_case_by_internal_id(internal_id=case_id)\n assert case_obj\n # GIVEN that the case has linked samples\n link_objects = [link_obj for link_obj in case_obj.links]\n assert link_objects\n # GIVEN a that there exists a version with only spring in housekeeper\n version_object = populated_compress_spring_api.hk_api.get_latest_bundle_version(\n bundle_name=sample_id\n )\n for file in version_object.files:\n assert file.path.endswith(\".spring\")\n\n # WHEN checking if spring decompression is needed\n res = prepare_fastq_api.is_spring_decompression_needed(case_id)\n\n # THEN assert that spring decompression is 
needed since there are no fastq files\n assert res is True", "def rot_mosaic(source_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Processing/',\r\n output_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Final/',\r\n file_pattern='IID201905*.jpg', sub_dir=False, k=1, replace=False): \r\n \r\n \r\n if sub_dir:\r\n mosaics = []\r\n for root, dirnames, filenames in os.walk(source_dir):\r\n for filename in fnmatch.filter(filenames, file_pattern):\r\n mosaics.append(os.path.join(root, filename))\r\n else:\r\n mosaics = glob.glob(source_dir + file_pattern) \r\n \r\n g = 0\r\n r = 0\r\n s = 0\r\n for m in mosaics:\r\n f = output_dir + os.path.basename(m)\r\n if not os.path.exists(f):\r\n img = improc.imops.imio.imread(m)\r\n img = np.rot90(img, k=k) \r\n improc.imops.imio.imsave(f, img)\r\n print('generated: %s' % f)\r\n print('')\r\n g+=1\r\n elif replace:\r\n img = improc.imops.imio.imread(m)\r\n img = np.rot90(img, k=k)\r\n improc.imops.imio.imsave(f, img)\r\n print('replaced: %s' % f)\r\n print('')\r\n r+=1\r\n else:\r\n print('skipping: %s' % m)\r\n print('')\r\n s+=1\r\n\r\n print('generated total of %i files' % g)\r\n print('replaced total of %i files' % r)\r\n print('skipped total of %i files' % s)", "def test_uncompressed(mode, size, test_file):\n\n with Image.open(test_file) as im:\n assert im.format == \"DDS\"\n assert im.mode == mode\n assert im.size == size\n\n assert_image_equal_tofile(im, test_file.replace(\".dds\", \".png\"))", "def test_compress_bam_dry_run(bam_tmp_file, base_context):\n # GIVEN the path to a existing bam file and a cli runner\n runner = CliRunner()\n bam_path = bam_tmp_file\n assert bam_path.exists()\n # WHEN running the compress command with dry_run\n result = runner.invoke(\n bam, [\"--bam-path\", str(bam_path), \"--dry-run\"], obj=base_context\n )\n # THEN assert the command was succesful\n assert result.exit_code == 0", "def test_compress_1_char(self):\n text = 'a'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'a')\n self.assertEqual(actual, expected)", "def test_encrypt_creates_and_cleans_up_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 0\n\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_shutil.move.assert_called_once_with('enc', 'enc.bak')\n mock_os.unlink.assert_called_once_with('enc.bak')", "def __handle_compression(self, x):\n if self.__compress:\n return zlib.compress(x)\n return x", "def runTest(self):\n nc = Dataset(self.file)\n data = nc['vl'][-1]\n # check max error of compression\n err = np.abs(data - self.data)\n assert(err.max() < nc['vl'].scale_factor)\n # turn off auto-scaling\n nc.set_auto_maskandscale(False)\n data = nc['vl'][-1]\n assert(data[-1] == np.around(self.data[-1]/nc['vl'].scale_factor))\n nc.close()", "def test_random_bytes(self):\n with mock.patch(\n \"django.utils.text.secrets.randbelow\", autospec=True, return_value=3\n ):\n r = GZipMiddleware(self.get_response)(self.req)\n # The fourth byte of a gzip stream contains flags.\n self.assertEqual(r.content[3], gzip.FNAME)\n # A 3 byte filename \"aaa\" and a null byte are added.\n self.assertEqual(r.content[10:14], b\"aaa\\x00\")\n self.assertEqual(self.decompress(r.content), self.compressible_string)", "def test_run_ug_nocompress(gatk_object, datadir, clean_tmp):\n\n outfile = gatk_object.run_caller(program='UnifiedGenotyper', \n prefix=\"{0}/outdir/test\".format(datadir),\n compress=False)\n\n assert os.path.isfile(outfile) is True", "def 
test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def _check_orig(self):\n if self.is_dir():\n self._orig = False\n return\n\n parts = self._path.split('.')\n try:\n if parts[-1] == 'tgz':\n self._orig = True\n elif parts[-2] == 'tar':\n if (parts[-1] in Compressor.Opts or\n parts[-1] in Compressor.Aliases):\n self._orig = True\n except IndexError:\n self._orig = False", "def test_compress_bam_no_outpath(base_context, bam_tmp_file):\n # GIVEN the path to a bam file and a cli runner\n runner = CliRunner()\n bam_path = bam_tmp_file\n assert bam_path.exists()\n # WHEN running the compress command\n result = runner.invoke(bam, [\"--bam-path\", bam_path], obj=base_context,)\n # THEN assert the command was succesful\n assert result.exit_code == 0", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def test_compress_offset_less_len1(self):\n text = 'ababab'\n actual = LZ77.compress(text)\n expected = bytearray([32]) + bytearray(b'ab') + bytearray([0, 18])\n self.assertEqual(actual, expected)", "def test_transform_image_resize_and_crop_landscape(self):\n self.expect_open_image('SomeBlobKey', (1200, 1600))\n self.expect_crop(top_y=0.0, bottom_y=0.75)\n self.expect_resize(32)\n self.expect_encode_image('SomeImageSize32-c')\n self.mox.ReplayAll()\n self.assertEquals(('SomeImageSize32-c', 'image/jpeg'),\n self.app._transform_image('SomeBlobKey', 's32-c'))\n self.mox.VerifyAll()", "def test_archive_wrong_extension(self):\n testfile = 'wrongextension.eml'\n try:\n tmpfile = tempfile.NamedTemporaryFile(\n suffix='wrongext', prefix='fuglu-unittest', dir='/tmp')\n shutil.copy(\"%s/%s\" % (TESTDATADIR, testfile), tmpfile.name)\n\n user = '[email protected]'\n conffile = self.tempdir + \"/%s-archivenames.conf\" % user\n # the largefile in the test message is just a bunch of zeroes\n open(conffile, 'w').write(\n \"deny \\.exe$ exe detected in zip with wrong extension\")\n self.rulescache._loadrules()\n suspect = Suspect(\n '[email protected]', user, tmpfile.name)\n\n result = self.candidate.examine(suspect)\n if type(result) is tuple:\n result, message = result\n self.assertEqual(\n result, DELETE, 'exe in zip with .gz extension was not detected')\n\n finally:\n tmpfile.close()\n os.remove(conffile)", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n 
self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_compress_seq_diff_8_char(self):\n text = '12345678'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'12345678')\n self.assertEqual(actual, expected)", "def test_compress_offset_less_len2(self):\n text = 'abcdabcdab'\n actual = LZ77.compress(text)\n expected = bytearray([8]) + bytearray(b'abcd') + bytearray([0, 52])\n self.assertEqual(actual, expected)", "def test_rescale_compress_lossless_maps(self):\n height_map = 96\n width_map = 48\n bin_widths_test = numpy.array([1.5, 1.5, 1.5], dtype=numpy.float32)\n \n # In \"lossless/pseudo_data/binary_probabilities_scale_compress_invalid_0.npy\",\n # several binary probabilities are equal to `nan`\n # but the associated binary decisions may occur.\n # In \"lossless/pseudo_data/binary_probabilities_scale_compress_invalid_1.npy\",\n # several binary probabilities are either negative\n # or larger than 1.\n path_to_binary_probabilities = 'lossless/pseudo_data/binary_probabilities_scale_compress_valid.npy'\n print('The binary probabilities at \"{}\" are used.'.format(path_to_binary_probabilities))\n \n # The optional argument `loc` of the function\n # `numpy.random.normal` is set to 0.0 as the\n # data must be centered.\n centered_data_0 = numpy.random.normal(loc=0.,\n scale=5.,\n size=(1, height_map, width_map, 1)).astype(numpy.float32)\n centered_data_1 = numpy.random.normal(loc=0.,\n scale=0.2,\n size=(1, height_map, width_map, 1)).astype(numpy.float32)\n centered_data_2 = numpy.random.normal(loc=0.,\n scale=0.5,\n size=(1, height_map, width_map, 1)).astype(numpy.float32)\n centered_data = numpy.concatenate((centered_data_0, centered_data_1, centered_data_2),\n axis=3)\n expanded_centered_quantized_data = tls.quantize_per_map(centered_data, bin_widths_test)\n centered_quantized_data = numpy.squeeze(expanded_centered_quantized_data,\n axis=0)\n nb_bits = lossless.compression.rescale_compress_lossless_maps(centered_quantized_data,\n bin_widths_test,\n path_to_binary_probabilities)\n print('Number of bits in the bitstream: {}'.format(nb_bits))", "def testDecompress(self):\n decompressor = xz_decompressor.XZDecompressor()\n\n compressed_data = (\n b'\\xfd7zXZ\\x00\\x00\\x01i\"\\xde6\\x02\\xc0\\x13\\x0f!\\x01\\x16\\x00\\xc0\\xb7\\xdc'\n b'\\xe9\\x01\\x00\\x0eThis is a test.\\x00\\x00]\\xc9\\xc3\\xc6\\x00\\x01#\\x0f\\xdb'\n b'\\xdf\\x90\\x0e\\x90B\\x99\\r\\x01\\x00\\x00\\x00\\x00\\x01YZ')\n\n uncompressed_data, _ = decompressor.Decompress(compressed_data)\n expected_uncompressed_data = b'This is a test.'\n self.assertEqual(uncompressed_data, expected_uncompressed_data)\n\n # Test to trigger xz raising EOFError.\n with self.assertRaises(errors.BackEndError):\n decompressor.Decompress(b'This is a test.')\n\n # Test to trigger xz raising IOError.\n decompressor = xz_decompressor.XZDecompressor()\n\n with self.assertRaises(errors.BackEndError):\n decompressor.Decompress(b'This is a test.')", "def test_get_compressed(self):\n self.create_compressed(\"%s/one\" % (self.tests_path), \"some value\")\n self.shell.onecmd(\"get %s/one\" % (self.tests_path))\n expected_output = \"b'some value'\\n\" if PYTHON3 else \"some value\\n\"\n self.assertEqual(expected_output, self.output.getvalue())", "def compressIfNeeded(self):\n\n if self._mode == \"zip\":\n zip_folder(self._rootExportPath, 
self.getZipArchiveFullPath())", "def test_is_spring_decompression_needed_when_false(\n populated_compress_api_fastq_spring: CompressAPI,\n analysis_store_single_case: Store,\n case_id: str,\n):\n\n # GIVEN a populated prepare_fastq_api\n prepare_fastq_api = PrepareFastqAPI(\n store=analysis_store_single_case, compress_api=populated_compress_api_fastq_spring\n )\n # GIVEN a store with a case that has linked samples\n case_obj: Family = analysis_store_single_case.get_case_by_internal_id(internal_id=case_id)\n assert case_obj\n # GIVEN that the case has linked samples\n link_objects = [link_obj for link_obj in case_obj.links]\n assert link_objects\n\n # WHEN checking if spring decompression is needed\n res = prepare_fastq_api.is_spring_decompression_needed(case_id)\n\n # THEN assert that spring decompression is not needed since there are fastq files\n assert res is False", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_5000_25(self):\n\n n_ed = 5000.0\n m_ed = 25.0\n\n as_1 = 15.58093 * 10 ** -4\n as_2 = 17.00950 * 10 ** -4\n\n n_rd, m_rd = compression_diagnostic.main(h, b, a1, a2, m_ed, n_ed, as_1, as_2, eta_bet, lambda_bet, f_cd, f_ck)\n self.assertAlmostEqual(n_rd, n_ed, 0)\n self.assertAlmostEqual(m_rd, m_ed, 0)", "def compress_experiment(self, exp_id):\n exp_folder = self.um.experiment_path(str(exp_id))[:-1]\n exp_folder = os.path.join(os.path.dirname(\n os.path.realpath(__file__)), exp_folder)\n archive_name = os.path.join(os.path.dirname(os.path.realpath(__file__)),\n \"backup\", str(exp_id)+\".zip\")\n\n print exp_folder, archive_name\n retval = self.um.compress_folder_zip(exp_folder, archive_name)\n if retval:\n return \"Success\"\n else:\n return \"Failure\"", "def test_process_log_with_pre_and_post_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'gzip -9',\n 'exec_pre': '/bin/true',\n 'exec_post': '/bin/true'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n self.assertEqual(compressors, [[sandbox, 'gzip', '-9', str(destfile)]])\n self.assertFalse(srcfile.exists())\n self.assertTrue(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... rotating... 
\"{src}\" -> \"{dest}\" done.\\n'.format(src=srcfile, dest=destfile))", "def test_5_10(self):\n\n n_ed = 5.0\n m_ed = 10.0\n\n as_1 = 0.219840 * 10 ** -4\n as_2 = 1.80000 * 10 ** -4\n\n n_rd, m_rd = compression_diagnostic.main(h, b, a1, a2, m_ed, n_ed, as_1, as_2, eta_bet, lambda_bet, f_cd, f_ck)\n self.assertAlmostEqual(n_rd, n_ed, 0)\n self.assertAlmostEqual(m_rd, m_ed, 0)", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def test_compress_fastq_with_metadata(\n first_read, second_read, spring_tmp_path, base_context, metadata_tmp_path\n):\n # GIVEN the path a pair of fastqs, a spring file and a cli runner\n runner = CliRunner()\n # GIVEN a non existing spring path\n assert not spring_tmp_path.exists()\n # GIVEN a non existing metadata path\n assert not metadata_tmp_path.exists()\n # WHEN running the compress command with metadata\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_read),\n \"--second-read\",\n str(second_read),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--metadata-file\",\n ],\n obj=base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 0\n # THEN assert the metadata file was created\n assert metadata_tmp_path.exists()", "def is_compressed(self):\n return self.instance == 1", "def test_compress_max_1_seq_len(self):\n a_int = ord('a')\n seq = ''.join(map(chr, range(a_int, a_int + LZ77.max_seq + 1)))\n text = seq + '12' + seq + '1234'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(text[:8], 'utf-8')\\\n + bytearray([0]) + bytearray(text[8: 16], 'utf-8')\\\n + bytearray([12]) + bytearray(text[16: 20], 'utf-8')\\\n + bytearray([1, 63]) + bytearray([1, 49])\\\n + bytearray('34', 'utf-8')\n self.assertEqual(actual, expected)", "def __init__(self, tb=None,\n #\n RDY_ma_start_compression=0,\n RDY_ma_get_input=0,\n mav_send_compressed_value=0,\n RDY_mav_send_compressed_value=0,\n RDY_ma_end_compression=0,\n mv_compression_done=0,\n RDY_mv_compression_done=0,\n ):\n#\n if RDY_ma_start_compression is not None and isinstance(RDY_ma_start_compression, int):\n RDY_ma_start_compression = BinaryValue(\n RDY_ma_start_compression, tb.RDY_bits, False)\n if RDY_ma_get_input is not None and isinstance(RDY_ma_get_input, int):\n RDY_ma_get_input = BinaryValue(\n RDY_ma_get_input, tb.RDY_bits, False)\n if mav_send_compressed_value is not None and isinstance(mav_send_compressed_value, int):\n mav_send_compressed_value = BinaryValue(\n mav_send_compressed_value, tb.output_bits, False)\n if RDY_mav_send_compressed_value is not None and isinstance(RDY_mav_send_compressed_value, int):\n RDY_mav_send_compressed_value = BinaryValue(\n RDY_mav_send_compressed_value, tb.RDY_bits, False)\n if RDY_ma_end_compression is not None and isinstance(RDY_ma_end_compression, int):\n RDY_ma_end_compression = BinaryValue(\n RDY_ma_end_compression, tb.RDY_bits, False)\n if mv_compression_done is not None and isinstance(mv_compression_done, int):\n mv_compression_done = BinaryValue(\n mv_compression_done, tb.bool_bits, False)\n if RDY_mv_compression_done is not None and isinstance(RDY_mv_compression_done, int):\n RDY_mv_compression_done = BinaryValue(\n RDY_mv_compression_done, tb.RDY_bits, False)\n#\n self.value = (\n RDY_ma_start_compression,\n RDY_ma_get_input,\n mav_send_compressed_value,\n RDY_mav_send_compressed_value,\n RDY_ma_end_compression,\n mv_compression_done,\n 
RDY_mv_compression_done\n )", "def test02a(self):\n np.random.seed(10)\n a = np.cumsum(np.random.random_sample(100*1000)-0.5) # random walk\n if common.verbose:\n print(\"Checking quantize filter\")\n # print \"\\nsize b uncompressed-->\", a.size * a.dtype.itemsize\n cparams = bcolz.cparams(quantize=0)\n b = bcolz.carray(a, cparams=cparams, rootdir=self.rootdir)\n b_cbytes = b.cbytes\n assert_array_equal(a, b[:], \"Arrays are not equal\")\n # print \"size b compressed -->\", b_cbytes\n # Remove the array on disk before trying with the next one\n if self.disk:\n common.remove_tree(self.rootdir)\n cparams = bcolz.cparams(quantize=3)\n c = bcolz.carray(a, cparams=cparams, rootdir=self.rootdir)\n # print \"size c compressed -->\", c.cbytes\n self.assertTrue(c.cbytes < 0.7 * b_cbytes,\n \"quantize does not seem to improve compression \"\n \"significantly\")\n assert_array_almost_equal(a, c[:], 3, \"Arrays are not equal\")\n # Remove the array on disk before trying with the next one\n if self.disk:\n common.remove_tree(self.rootdir)", "def test_rotate_filenames():\n tmpdir = os.path.join(tempfile.gettempdir(), \"jade-test-tmp87alkj8ew\")\n os.makedirs(tmpdir, exist_ok=True)\n\n data = {\"A\": 1, \"B\": 2}\n json_file1 = os.path.join(tmpdir, \"a1.json\")\n dump_data(data, json_file1)\n\n json_file2 = os.path.join(tmpdir, \"a2.json\")\n dump_data(data, json_file2)\n\n rotate_filenames(tmpdir, \".json\")\n\n if os.path.exists(tmpdir):\n shutil.rmtree(tmpdir)" ]
[ "0.63947666", "0.61248237", "0.6114381", "0.6083236", "0.6082104", "0.6062771", "0.59740895", "0.59541243", "0.5930159", "0.5895802", "0.57862955", "0.5755226", "0.5731432", "0.56462026", "0.56308395", "0.5578965", "0.5572358", "0.55638254", "0.5525263", "0.5516631", "0.5515557", "0.55060184", "0.5438584", "0.54046595", "0.53942204", "0.53855133", "0.5384133", "0.53684163", "0.53375804", "0.5310259", "0.53085744", "0.52941364", "0.52827036", "0.527562", "0.5273712", "0.52484024", "0.5239204", "0.52243006", "0.5214891", "0.52023137", "0.5200744", "0.5198905", "0.51978517", "0.5190583", "0.5187111", "0.5185874", "0.5177424", "0.51754653", "0.5161851", "0.5145859", "0.5138407", "0.51152253", "0.5114881", "0.5114186", "0.51115245", "0.5110003", "0.5104747", "0.50901806", "0.5082667", "0.507658", "0.5075862", "0.5069164", "0.50666726", "0.5051912", "0.5049776", "0.5046095", "0.5041125", "0.5036428", "0.5028825", "0.5020928", "0.5015505", "0.5008175", "0.50050056", "0.50031775", "0.49940565", "0.49931735", "0.49876973", "0.49822858", "0.49814054", "0.49760666", "0.497507", "0.49726987", "0.49720937", "0.49620497", "0.49565658", "0.4945389", "0.494523", "0.4938601", "0.49304175", "0.49253708", "0.49181512", "0.49169606", "0.49095857", "0.4909215", "0.49077314", "0.49055347", "0.48940372", "0.489085", "0.4888185", "0.4887572" ]
0.62119424
1
Tests of try rotation with exec_pre in configuration
def test_process_log_with_exec_pre_in_configuration(self): with tempfile.TemporaryDirectory() as sandbox: with mock.patch('sys.stderr', new=io.StringIO()) as fake_stderr: with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout: stream_handler = logging.StreamHandler(fake_stderr) logging.getLogger().addHandler(stream_handler) try: srcfile = Path(sandbox, 'pokus.log') srcfile.touch() destfile = Path(sandbox, 'backup', 'pokus.log') compressors = process_log( datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30), { 'target': '{{path}}/backup/{{name}}.{{ext}}', 'interval': 'hourly', 'compress': 'bzip2', 'exec_pre': '/bin/false' }, 'hourly', str(srcfile), 10 ) finally: logging.getLogger().removeHandler(stream_handler) self.assertEqual(compressors, []) self.assertTrue(srcfile.exists()) self.assertFalse(destfile.exists()) self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... exec_pre failed.\n'.format(src=srcfile)) self.assertEqual(fake_stderr.getvalue(), 'exec_pre "/bin/false pokus.log" failed with code 1\n')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_relaunch_deployment_run(self):\n pass", "def test_retest_deployment_run(self):\n pass", "def test_retry_run(self):\n pass", "def test_pre_hooks(self):\n os.makedirs('/tmp/localhost/pacha_pre')\n touch_script = open('/tmp/localhost/pacha_pre/foo.sh', 'w')\n touch_script.write('''touch /tmp/localhost/pre_got_executed.txt''')\n touch_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.pre_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/pre_got_executed.txt'))", "def verify_no_snapshot_reingestion(c: Composition) -> None:\n c.run(\"testdrive\", \"wait-for-snapshot.td\", \"postgres-disable-select-permission.td\")\n\n restart_mz(c)\n\n c.run(\n \"testdrive\",\n \"delete-rows-t1.td\",\n \"delete-rows-t2.td\",\n \"alter-table.td\",\n \"alter-mz.td\",\n )", "def test_set_deployment_run_lock(self):\n pass", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n self.assertEqual(mock_shutil.move.call_count, 2)", "def run_starter(self, expect_to_fail=False):", "def test_pre_post_hooks(self):\n os.makedirs('/tmp/localhost/pacha_pre')\n os.makedirs('/tmp/localhost/pacha_post')\n pre_script = open('/tmp/localhost/pacha_pre/foo.sh', 'w')\n pre_script.write('''touch /tmp/localhost/pre_got_executed.txt''')\n pre_script.close()\n post_script = open('/tmp/localhost/pacha_post/bar.sh', 'w')\n post_script.write('''touch /tmp/localhost/post_got_executed.txt''')\n post_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.pre_hooks()\n run.post_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/post_got_executed.txt'))\n self.assertTrue(os.path.isfile('/tmp/localhost/pre_got_executed.txt'))", "def test_workflows_restart(self):\n pass", "def test_redeploy_container_asset(self):\n pass", "def test_worker_precheck_exception(self):\n assert airflow.settings.validate_session()", "def test_restore_backup():", "def test_redeploy(self):\n pass", "def test_abort_in_resourcing_mid(\n set_restart_after_abort: None,\n setup_context_monitoring_for_abort_test: None,\n composition: conf_types.Composition,\n):", "def _pre_deploy_exec(self):\n app.env['JUJU_PROVIDERTYPE'] = model_info(\n juju.get_current_model())['provider-type']\n\n pre_deploy_sh = os.path.join(app.config['spell-dir'],\n 'conjure/steps/00_pre-deploy')\n if os.path.isfile(pre_deploy_sh) \\\n and os.access(pre_deploy_sh, os.X_OK):\n utils.pollinate(app.session_id, 'J001')\n msg = \"Running pre-deployment tasks.\"\n app.log.debug(msg)\n app.ui.set_footer(msg)\n return run(pre_deploy_sh,\n shell=True,\n stdout=PIPE,\n stderr=PIPE,\n env=app.env)\n return json.dumps({'message': 'No pre deploy necessary',\n 'returnCode': 0,\n 'isComplete': True})", "def test_exec_prefix(self):\n self.chck_triple('exec_prefix')", "def test_noarg_optimize_call():\n os.chdir(pathlib.Path(__file__).parent.absolute())\n loc = shutil.which(\"parrot-optimize\")\n script_descriptor = open(os.path.abspath(loc))\n script = script_descriptor.read()\n sys.argv = [\"parrot-optimize\"]\n\n with pytest.raises(SystemExit):\n exec(script)\n\n script_descriptor.close()", "def test_abort_in_resourcing_low(\n set_restart_after_abort: None,\n setup_context_monitoring_for_abort_test: None,\n composition: conf_types.Composition,\n):", "def pre_execute(self):", "def 
PreExecute(self):\n return True", "def test_noarg_train_call():\n os.chdir(pathlib.Path(__file__).parent.absolute())\n loc = shutil.which(\"parrot-train\")\n script_descriptor = open(os.path.abspath(loc))\n script = script_descriptor.read()\n sys.argv = [\"parrot-train\"]\n\n with pytest.raises(SystemExit):\n exec(script)\n\n script_descriptor.close()", "def test_archive_run(self):\n pass", "def test_process_log_with_pre_and_post_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'gzip -9',\n 'exec_pre': '/bin/true',\n 'exec_post': '/bin/true'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n self.assertEqual(compressors, [[sandbox, 'gzip', '-9', str(destfile)]])\n self.assertFalse(srcfile.exists())\n self.assertTrue(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... rotating... \"{src}\" -> \"{dest}\" done.\\n'.format(src=srcfile, dest=destfile))", "def on_pre_execution(**kwargs):\n logging.debug(\"Calling callbacks: %s\", __pre_exec_callbacks)\n for callback in __pre_exec_callbacks:\n try:\n callback(**kwargs)\n except Exception:\n logging.exception(\"Failed on pre-execution callback using %s\", callback)", "def test_unarchive_run(self):\n pass", "def test_execute_deployment(self):\n pass", "def test_restart(self):\n restart_path = os.path.join(arc_path, 'arc', 'testing', 'restart(H,H2O2,N2H3,CH3CO2).yml')\n project = 'arc_project_for_testing_delete_after_usage2'\n project_directory = os.path.join(arc_path, 'Projects', project)\n arc1 = ARC(project=project, input_dict=restart_path, project_directory=project_directory)\n arc1.execute()\n\n with open(os.path.join(project_directory, 'output', 'thermo.info'), 'r') as f:\n thermo_sft_ccsdtf12_bac = False\n for line in f.readlines():\n if 'thermo_DFT_CCSDTF12_BAC' in line:\n thermo_sft_ccsdtf12_bac = True\n break\n self.assertTrue(thermo_sft_ccsdtf12_bac)\n\n with open(os.path.join(project_directory, 'arc_project_for_testing_delete_after_usage2.info'), 'r') as f:\n sts, n2h3, oet, lot, ap = False, False, False, False, False\n for line in f.readlines():\n if 'Considered the following species and TSs:' in line:\n sts = True\n elif 'Species N2H3' in line:\n n2h3 = True\n elif 'Overall time since project initiation:' in line:\n oet = True\n elif 'Levels of theory used:' in line:\n lot = True\n elif 'ARC project arc_project_for_testing_delete_after_usage2' in line:\n ap = True\n self.assertTrue(sts)\n self.assertTrue(n2h3)\n self.assertTrue(oet)\n self.assertTrue(lot)\n self.assertTrue(ap)\n\n with open(os.path.join(project_directory, 'arc.log'), 'r') as f:\n aei, ver, git, spc, rtm, ldb, therm, src, ter =\\\n False, False, False, False, False, False, False, False, False\n for line in f.readlines():\n if 'ARC execution initiated on' in line:\n aei = True\n elif '# Version:' in line:\n ver = True\n elif 'The current git HEAD for ARC is:' in line:\n git = True\n elif 'Considering species: CH3CO2_rad' in line:\n spc = True\n elif 'All jobs for species N2H3 successfully converged. Run time' in line:\n rtm = True\n elif 'Loading the RMG database...' 
in line:\n ldb = True\n elif 'Thermodynamics for H2O2' in line:\n therm = True\n elif 'Sources of thermoproperties determined by RMG for the parity plots:' in line:\n src = True\n elif 'ARC execution terminated on' in line:\n ter = True\n self.assertTrue(aei)\n self.assertTrue(ver)\n self.assertTrue(git)\n self.assertTrue(spc)\n self.assertTrue(rtm)\n self.assertTrue(ldb)\n self.assertTrue(therm)\n self.assertTrue(src)\n self.assertTrue(ter)\n\n self.assertTrue(os.path.isfile(os.path.join(project_directory, 'output', 'thermo_parity_plots.pdf')))\n\n with open(os.path.join(project_directory, 'output', 'Species', 'H2O2', 'species_dictionary.txt'), 'r') as f:\n lines = f.readlines()\n adj_list = ''\n for line in lines:\n if 'H2O2' not in line:\n adj_list += line\n if line == '\\n':\n break\n mol1 = Molecule().fromAdjacencyList(str(adj_list))\n self.assertEqual(mol1.toSMILES(), str('OO'))\n\n thermo_library_path = os.path.join(project_directory, 'output', 'RMG libraries', 'thermo',\n 'arc_project_for_testing_delete_after_usage2.py')\n new_thermo_library_path = os.path.join(settings['database.directory'], 'thermo', 'libraries',\n 'arc_project_for_testing_delete_after_usage2.py')\n # copy the generated library to RMG-database\n shutil.copyfile(thermo_library_path, new_thermo_library_path)\n db = RMGDatabase()\n db.load(\n path=settings['database.directory'],\n thermoLibraries=[str('arc_project_for_testing_delete_after_usage2')],\n transportLibraries=[],\n reactionLibraries=[],\n seedMechanisms=[],\n kineticsFamilies='none',\n kineticsDepositories=[],\n statmechLibraries=None,\n depository=False,\n solvation=False,\n testing=True,\n )\n\n spc2 = Species().fromSMILES(str('CC([O])=O'))\n spc2.generate_resonance_structures()\n spc2.thermo = db.thermo.getThermoData(spc2)\n self.assertAlmostEqual(spc2.getEnthalpy(298), -178003.44650359568, 1)\n self.assertAlmostEqual(spc2.getEntropy(298), 283.5983103176096, 1)\n self.assertAlmostEqual(spc2.getHeatCapacity(1000), 118.99753808225603, 1)\n self.assertTrue('arc_project_for_testing_delete_after_usage2' in spc2.thermo.comment)\n\n # delete the generated library from RMG-database\n os.remove(new_thermo_library_path)", "def test_run_started(self):", "def test_fleur_relax_continue_converged(self, run_with_cache, mock_code_factory):\n assert False", "def test_process_log_with_exec_post_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stderr', new=io.StringIO()) as fake_stderr:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n stream_handler = logging.StreamHandler(fake_stderr)\n logging.getLogger().addHandler(stream_handler)\n try:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'bzip2',\n 'exec_post': '/bin/false'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n finally:\n logging.getLogger().removeHandler(stream_handler)\n self.assertEqual(compressors, [])\n self.assertFalse(srcfile.exists())\n self.assertTrue(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... rotating... 
\"{src}\" -> \"{dest}\" exec_post failed.\\n'.format(src=srcfile, dest=destfile))\n self.assertEqual(fake_stderr.getvalue(), 'exec_post \"/bin/false {dest}\" failed with code 1\\n'.format(dest=destfile))", "def test_compress_fastq_real_with_integrity_fail(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context, mocker\n):\n # GIVEN the path to a existing two existing fastq files and a non existing spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n mocker.patch.object(compare_cmd, \"compare_elements\")\n compare_cmd.compare_elements.return_value = False\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 1\n # THEN assert that the spring file was deleted\n assert not spring_tmp_path.exists()\n # THEN assert that only the original fastq files are left\n assert nr_files(dir_path) == 2", "def testPreProcess(self):\n self.grr_hunt_file_collector.PreProcess()\n self.assertEqual(\n self.grr_hunt_file_collector.file_path_list,\n ['/etc/passwd', '/etc/shadow', '/etc/hosts'])", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def test_restart_with_permission(self):\n self.create_user_with_role(\n self.user.name, self.user.email, self.user.password, Role.tester)\n self.create_forktest(\"own-fork-commit\", TestPlatform.linux, regression_tests=[2])\n self.create_completed_regression_t_entries(3, [2])\n with self.app.test_client() as c:\n response = c.post(\n '/account/login', data=self.create_login_form_data(self.user.email, self.user.password))\n response = c.get('/test/restart_test/3')\n test = Test.query.filter(Test.id == 3).first()\n self.assertEqual(test.finished, False)", "def main_method_setup_execution(monkeypatch, s3_setup, tmpdir, argument_dict):\n monkeypatch.setattr(bootstrapper.OpUtil, \"parse_arguments\", lambda x: argument_dict)\n monkeypatch.setattr(bootstrapper.OpUtil, \"package_install\", mock.Mock(return_value=True))\n\n monkeypatch.setenv(\"AWS_ACCESS_KEY_ID\", \"minioadmin\")\n monkeypatch.setenv(\"AWS_SECRET_ACCESS_KEY\", \"minioadmin\")\n monkeypatch.setenv(\"TEST_ENV_VAR1\", \"test_env_var1\")\n\n s3_setup.fput_object(\n bucket_name=argument_dict[\"cos-bucket\"],\n object_name=\"test-directory/test-file.txt\",\n file_path=os.path.join(RESOURCES_DIR, \"test-requirements-elyra.txt\"),\n )\n s3_setup.fput_object(\n bucket_name=argument_dict[\"cos-bucket\"],\n object_name=\"test-directory/test,file.txt\",\n file_path=os.path.join(RESOURCES_DIR, \"test-bad-requirements-elyra.txt\"),\n )\n s3_setup.fput_object(\n bucket_name=argument_dict[\"cos-bucket\"],\n object_name=\"test-directory/test-archive.tgz\",\n file_path=os.path.join(RESOURCES_DIR, \"test-archive.tgz\"),\n )\n\n 
with tmpdir.as_cwd():\n bootstrapper.main()\n test_file_list = [\n \"test-archive.tgz\",\n \"test-file.txt\",\n \"test,file.txt\",\n \"test-file/test-file-copy.txt\",\n \"test-file/test,file/test,file-copy.txt\",\n \"test-notebookA.ipynb\",\n \"test-notebookA-output.ipynb\",\n \"test-notebookA.html\",\n ]\n # Ensure working directory has all the files.\n for file in test_file_list:\n assert os.path.isfile(file)\n # Ensure upload directory has all the files EXCEPT the output notebook\n # since it was it is uploaded as the input notebook (test-notebookA.ipynb)\n # (which is included in the archive at start).\n for file in test_file_list:\n if file != \"test-notebookA-output.ipynb\":\n assert s3_setup.stat_object(\n bucket_name=argument_dict[\"cos-bucket\"], object_name=\"test-directory/\" + file\n )\n if file == \"test-notebookA.html\":\n with open(\"test-notebookA.html\") as html_file:\n assert \"TEST_ENV_VAR1: test_env_var1\" in html_file.read()", "def run_test_second():\n os.system(\n \"sed -n '/(Failed)$/p' test_op_log.txt | awk '{print $3}' >& rerun_op.txt\"\n )\n rerun_list = get_op_list('rerun_op.txt')\n if len(rerun_list):\n print(\n \"-------there are \"\n + str(len(rerun_list))\n + \" op(s) need to rerun!!!-------\"\n )\n for failed_op in rerun_list:\n os.system(\"ctest -R \\\"(\" + failed_op + \")\\\" \")\n else:\n print(\"-------all op passed successfully!!!-------\")", "def test_delete_deployment_run(self):\n pass", "def test_release_deployment_run(self):\n pass", "def can_dry_run(self):\r\n return False", "def at_pre_cmd(self):\n pass", "def test_encrypt_creates_and_cleans_up_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 0\n\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_shutil.move.assert_called_once_with('enc', 'enc.bak')\n mock_os.unlink.assert_called_once_with('enc.bak')", "def test_overwrite_corrupted_files(overwrite_on_tape_topology, core_config_mock, caches_mock):\n rse1_id, rse2_id, rse3_id, did1, did2 = overwrite_on_tape_topology(did1_corrupted=True, did2_corrupted=True)\n all_rses = [rse1_id, rse2_id, rse3_id]\n\n class _FTSWrapper(FTSWrapper):\n @staticmethod\n def on_receive(job_params):\n for job in (job_params if isinstance(job_params, list) else [job_params]):\n for file in job.get('files', []):\n if (file.get('file_metadata', {}).get('dst_type') == 'TAPE'\n and file.get('file_metadata', {}).get('dst_file', {}).get('file_on_tape') is not None):\n # Fake that dst_file metadata contains file_on_tape == True\n # As we don't really have tape RSEs in our tests, file_on_tape is always false\n file['file_metadata']['dst_file']['file_on_tape'] = True\n return job_params\n\n with patch('rucio.daemons.conveyor.poller.FTS3Transfertool', _FTSWrapper):\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Both transfers must be marked as failed because the file size is incorrect\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit the failed requests. 
They must fail again, because overwrite_corrupted_files is False\n # 2 runs: for multihop, finisher works one hop at a time\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Set overwrite to True before running the poller or finisher\n core_config.set('transfers', 'overwrite_corrupted_files', True)\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit one more time. Now the destination file must be overwritten\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'", "def test_run_and_restore(self):\n # Run for 5 algo-calls\n testargs = [\"python\", \"scripts/smac\", \"--scenario_file\",\n self.scenario_one, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()\n # Increase limit and run for 10 (so 5 more) by using restore_state\n testargs = [\"python\", \"scripts/smac\", \"--restore_state\",\n self.output_one, \"--scenario_file\",\n self.scenario_two, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()", "def test_AbortAndRestart(self, dir_mock):\n from furious.async import Async\n from furious.context._execution import _ExecutionContext\n from furious.errors import AbortAndRestart\n from furious.processors import run_job\n\n dir_mock.side_effect = AbortAndRestart\n mock_success = Mock()\n mock_error = Mock()\n\n work = Async(target='dir',\n callbacks={'success': mock_success,\n 'error': mock_error})\n\n with _ExecutionContext(work):\n self.assertRaises(AbortAndRestart, run_job)\n\n self.assertFalse(mock_success.called)\n self.assertFalse(mock_error.called)", "def preRunSetup(self):\n self.logDesc(\"Pre Run Setup\") \n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)", "def preRunSetup(self):\n self.logDesc(\"Pre Run Setup\") \n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n 
self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_pre_cli_init(run):\n out, err = run(dork.cli.the_predork_cli, [], *(\"\", \"-i\", \"test\"))\n assert \"test\" in out, \\\n \"Failed run the dork.cli.the_predork_cli method: {err}\"\\\n .format(err=err)\n out, err = run(dork.cli.the_predork_cli, [], *(\"\", \"-i\", \":test\"))\n assert \"does not exist\" in out, \\\n \"Failed run the dork.cli.the_predork_cli method: {err}\"\\\n .format(err=err)", "def test_rollback():", "def fxt_set_restart_after_abort(sut_settings: SutTestSettings):\n sut_settings.restart_after_abort = True", "def test_functional_decrypts_encrypted_deployment_yml_file_on_startup(self):\n\n # 0) Preparation of configuration files\n self.prepare_valid_deployment_yml()\n update_task = UpdateFilesTask()\n update_task.download_roles = lambda *args, **kwargs: None\n self.execute_mocked_task_and_get_output(update_task,\n args={\n '--ask-vault-pass': False,\n '--vault-passwords': '',\n '--ask-ssh-login': False,\n '--ask-ssh-pass': False,\n '--ask-ssh-key-path': False,\n '--ask-sudo-pass': False\n },\n env={})\n\n ansible_call = []\n passphrase_file_path = self.get_test_env_subdirectory('.rkd') + '/tmp-secret.txt'\n\n # 1) Write test password\n with open(passphrase_file_path, 'w') as f:\n f.write('International-Workers-Association')\n\n # 2) Encrypt a file\n deployment_yml_path = self.get_test_env_subdirectory('') + '/deployment.yml'\n subprocess.check_call(\n ['ansible-vault encrypt --vault-password-file=%s %s' % (passphrase_file_path, deployment_yml_path)],\n shell=True)\n\n # 3) Run a deployment while the deployment.yml is encrypted\n deployment_task = DeploymentTask()\n deployment_task.spawn_ansible = lambda *args, **kwargs: ansible_call.append(args)\n\n out = self.execute_mocked_task_and_get_output(deployment_task,\n args={'--playbook': 'harbor.playbook.yml',\n '--inventory': 'harbor.inventory.yml',\n '--git-key': '',\n '--branch': 'master',\n '--profile': '',\n '--debug': False,\n '--vault-passwords': passphrase_file_path,\n '--ask-vault-pass': False,\n '--ask-ssh-login': False,\n '--ask-ssh-pass': False,\n '--ask-ssh-key-path': False,\n '--ask-sudo-pass': False\n },\n env={})\n\n self.assertIn('--vault-password-file=', ansible_call[0][0])\n self.assertIn('.rkd/tmp-secret.txt', ansible_call[0][0])\n self.assertIn('TASK_EXIT_RESULT=True', out)", "def test_g_asignar_rol(self):", "def run_skip(self):\n pass", "def test_verify_error(self):\n task = Task(\"uid\", False, False, \"echo\", \"does_not_exist\", \".\", \"A\")\n task._checkpoint_dir = tmp_checkpoint_dir()\n with self.assertRaisesRegexp(RuntimeError, \".*executing Task's verification:.*\"):\n task.run()\n task.shell = True\n with self.assertRaisesRegexp(RuntimeError, \".*executing Task's verification:.*\"):\n task.run()\n task._dry_run = True\n task.run() # No longer raises RuntimeError", "def reprovision_and_retry(func):\n @functools.wraps(func)\n def wrapper(*a, **kw):\n errback = kw.get('errback', None)\n if errback is None:\n def errback(e):\n raise e\n def errback_wrapper(e):\n if isinstance(e, UnknownAppID) and 'INITIAL' in OPTIONS:\n try:\n for initial in OPTIONS['INITIAL']:\n provision(*initial) # retry provisioning the initial setup\n func(*a, **kw) # and try the function once more\n except Exception(new_exc):\n errback(new_exc) # throwing the new exception\n else:\n errback(e) # not an instance of UnknownAppID 
- nothing we can do here\n kw['errback'] = errback_wrapper\n return func(*a, **kw)\n return wrapper", "def test_error_prefork(self, mocker):\n expected_msg = [\n mocker.call('Cannot call resume')\n ]\n\n filename = os.path.join(os.path.dirname(__file__), '../integration/files', 'file2.yaml')\n factory = get_factory('localhost', config={'splitFile': filename})\n try:\n factory.block_until_ready(1)\n except:\n pass\n _logger = mocker.Mock()\n mocker.patch('splitio.client.factory._LOGGER', new=_logger)\n factory.resume()\n assert _logger.warning.mock_calls == expected_msg\n factory.destroy()", "def onPreFork(self):", "def retry(self):\n # XXX: check whether it is possible to distingish \n # between the error conditions and set meaningfull exitcode\n return False", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def pytest_runtest_setup(item):\n if \"incremental\" in item.keywords:\n previousfailed = getattr(item.parent, \"_previousfailed\", None)\n if previousfailed is not None:\n pytest.xfail(\"previous test failed (%s)\" % previousfailed.name)", "def preRunSetup(self):\n self.logDesc(\"Pre Run Setup\") \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)", "def on_before_execution(self):\n pass", "def testFailure():\n run(\"chariot-me\") #Start management-engine without initial deplflag\n egress()", "def testError(self):\n cmds = \"\"\"chown 0 missingFile\npwd\nexit\n\"\"\"\n\n def _cbCheckResult(res):\n self.assertNotIn(self.testDir.asBytesMode().path, res)\n\n d = self._getBatchOutput(cmds)\n d.addCallback(_cbCheckResult)\n return d", "def test_dry_run():\n config = get_config(\"delete.conf\")\n path = get_config_path(config)\n test_file = make_test_file(path)\n\n console.pushbroom(config, dry_run=True)\n assert test_file.exists()\n\n console.pushbroom(config)\n assert not test_file.exists()\n\n path.rmdir()", "def test_execution(self):\n self.assertTrue(True)", "def test_retrieve_files_with_pre_hook(self):\n os.makedirs('/tmp/remote_pacha/localhost/etc')\n os.mkdir('/tmp/remote_pacha/localhost/home')\n remote_file = open('/tmp/remote_pacha/localhost/etc/etc.conf', 'w')\n remote_file.write(\"remote second file\")\n remote_file.close()\n remote_file = open('/tmp/remote_pacha/localhost/home/home.conf', 'w')\n remote_file.write(\"remote file\")\n remote_file.close()\n os.makedirs('/tmp/remote_pacha/localhost/pacha_pre')\n touch_script = open('/tmp/remote_pacha/localhost/pacha_pre/foo.sh', 'w')\n touch_script.write('''touch /tmp/remote_pacha/localhost/pre_got_executed.txt''')\n touch_script.close()\n server = \"%s@%s\" % (self.username, host.hostname()) \n run = rebuild.Rebuild(server=server,\n hostname='localhost', \n source='/tmp/remote_pacha')\n run.retrieve_files()\n result_1 = os.path.isfile('/tmp/localhost/etc/etc.conf')\n result_2 = os.path.isfile('/tmp/localhost/home/home.conf')\n line = open('/tmp/localhost/etc/etc.conf')\n remote_line = line.readline()\n self.assertEqual(remote_line, \"remote second file\")\n self.assertTrue(result_2)\n self.assertTrue(result_1)\n self.assertTrue(os.path.isfile('/tmp/remote_pacha/localhost/pre_got_executed.txt'))", "def test_compress_fastq_real_with_integrity(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context\n):\n # GIVEN the path to a existing two existing fastq files and a non existing 
spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 0\n # THEN assert that the spring file was created\n assert spring_tmp_path.exists()\n # THEN assert that the files created for integrity check was removed\n assert nr_files(dir_path) == 3", "def ensure_container():\n return exec_fn(_init_container)", "def _transaction(context, stage, target_repoids, tasks, plugin_info, xfs_info,\n test=False, cmd_prefix=None, on_aws=False):\n\n # we do not want\n if stage not in ['dry-run', 'upgrade']:\n create_config(\n context=context,\n target_repoids=target_repoids,\n debug=config.is_debug(),\n test=test, tasks=tasks,\n on_aws=on_aws\n )\n backup_config(context=context)\n\n # FIXME: rhsm\n with guards.guarded_execution(guards.connection_guard(), guards.space_guard()):\n cmd_prefix = cmd_prefix or []\n common_params = []\n if config.is_verbose():\n common_params.append('-v')\n if rhsm.skip_rhsm():\n common_params += ['--disableplugin', 'subscription-manager']\n if plugin_info:\n for info in plugin_info:\n if stage in info.disable_in:\n common_params += ['--disableplugin', info.name]\n env = {}\n if get_target_major_version() == '9':\n # allow handling new RHEL 9 syscalls by systemd-nspawn\n env = {'SYSTEMD_SECCOMP': '0'}\n\n # We need to reset modules twice, once before we check, and the second time before we actually perform\n # the upgrade. Not more often as the modules will be reset already.\n if stage in ('check', 'upgrade') and tasks.modules_to_reset:\n # We shall only reset modules that are not going to be enabled\n # This will make sure it is so\n modules_to_reset = {(module.name, module.stream) for module in tasks.modules_to_reset}\n modules_to_enable = {(module.name, module.stream) for module in tasks.modules_to_enable}\n module_reset_list = [module[0] for module in modules_to_reset - modules_to_enable]\n # Perform module reset\n cmd = ['/usr/bin/dnf', 'module', 'reset', '--enabled', ] + module_reset_list\n cmd += ['--disablerepo', '*', '-y', '--installroot', '/installroot']\n try:\n context.call(\n cmd=cmd_prefix + cmd + common_params,\n callback_raw=utils.logging_handler,\n env=env\n )\n except (CalledProcessError, OSError):\n api.current_logger().debug('Failed to reset modules via dnf with an error. Ignoring.',\n exc_info=True)\n\n cmd = [\n '/usr/bin/dnf',\n 'rhel-upgrade',\n stage,\n DNF_PLUGIN_DATA_PATH\n ]\n try:\n context.call(\n cmd=cmd_prefix + cmd + common_params,\n callback_raw=utils.logging_handler,\n env=env\n )\n except OSError as e:\n api.current_logger().error('Could not call dnf command: Message: %s', str(e), exc_info=True)\n raise StopActorExecutionError(\n message='Failed to execute dnf. 
Reason: {}'.format(str(e))\n )\n except CalledProcessError as e:\n api.current_logger().error('Cannot calculate, check, test, or perform the upgrade transaction.')\n _handle_transaction_err_msg(stage, xfs_info, e, is_container=False)\n finally:\n if stage == 'check':\n backup_debug_data(context=context)", "def test_first_run(dbbackup, plugin, update, version_file=None, orig_version=None):\n\n if version_file:\n os.unlink(version_file)\n\n cli.initialize()\n update.assert_called_once()\n dbbackup.assert_not_called()\n\n # Check that it got called for each default plugin\n from kolibri.core.settings import DEFAULT_PLUGINS\n\n assert plugin.call_count == len(DEFAULT_PLUGINS)", "def test_run_configuration_dry_run_diff_no_early_exit_shard_err(mocker: MockerFixture):\n succeeding_shard = \"succeed\" # success\n another_succeeding_shard = \"succeed as well\" # success\n failing_shard = \"fail\" # fail\n sys_exit_1_shard = \"sys-exit-1\" # fail\n sys_exit_true_shard = \"sys-exit-true\" # fail\n sys_exit_0_shard = \"sys-exit-0\" # success\n sys_exit_false_shard = \"sys-exit-false\" # success\n\n def integration_run_func(self: ShardableTestIntegration, dry_run: bool) -> None:\n if self.params.shard == failing_shard:\n raise Exception(f\"shard {self.params.shard} failed\")\n if self.params.shard == sys_exit_1_shard:\n sys.exit(1)\n if self.params.shard == sys_exit_false_shard:\n sys.exit(False)\n if self.params.shard == sys_exit_0_shard:\n sys.exit(0)\n if self.params.shard == sys_exit_true_shard:\n sys.exit(True)\n\n integration_run_func_mock = mocker.patch.object(\n ShardableTestIntegration, \"run\", side_effect=integration_run_func, autospec=True\n )\n\n shardable_test_integration = ShardableTestIntegration(\n params=ShardableTestIntegrationParams()\n )\n\n affected_shards = {\n succeeding_shard,\n another_succeeding_shard,\n failing_shard,\n sys_exit_1_shard,\n sys_exit_false_shard,\n sys_exit_0_shard,\n sys_exit_true_shard,\n }\n\n with pytest.raises(SystemExit) as e:\n _integration_dry_run(\n shardable_test_integration,\n DesiredStateDiff(\n current_desired_state={},\n previous_desired_state={},\n diff_found=True,\n affected_shards=affected_shards,\n ),\n )\n\n # the SystemExit exception contains the nr of failed shards as code\n assert e.value.code == 3\n\n # make sure the run method has been called once per shard\n assert integration_run_func_mock.call_count == len(affected_shards)\n called_sharded_params = [\n c[0][0].params for c in integration_run_func_mock.call_args_list\n ]\n for shard in affected_shards:\n sharded_params = shardable_test_integration.params.copy_and_update(\n {\"shard\": shard}\n )\n assert sharded_params in called_sharded_params", "def test_clean_before_run(self):\n env = pike.Environment()\n with self.assertRaises(ValueError):\n env.clean('.')", "def testIgnoredError(self):\n cmds = \"\"\"-chown 0 missingFile\npwd\nexit\n\"\"\"\n def _cbCheckResult(res):\n self.assertIn(self.testDir.asBytesMode().path, res)\n\n d = self._getBatchOutput(cmds)\n d.addCallback(_cbCheckResult)\n return d", "def test_syntax(self):\n\t\ttheResult = False\n\t\ttry:\n\t\t\tfrom .context import code\n\t\t\tfrom code import restart_service\n\t\t\ttheResult = True\n\t\texcept Exception:\n\t\t\ttheResult = False\n\t\tassert theResult", "def test_modePreservation(self):\n open(self.path, \"w\").close()\n os.chmod(self.path, 0o707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n log.rotate()\n 
self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])", "def test_run_ended(self):", "def skip_or_run_test_tarantool_call(self, required_tt_version, msg):\n\n skip_or_run_test_tarantool_impl(self, required_tt_version, msg)", "def precheck(ctx):\n rc = run_playbook(precheck_cmd(ctx.obj))\n if rc != 0:\n print_error_msg(\"Upgrade prechecks failed!!!\")\n sys.exit(1)\n print_success_msg(\"Upgrade prechecks ran successfully\")", "def test_dont_archive_commitlog(self):\n self.run_archive_commitlog(restore_point_in_time=False, restore_archived_commitlog=False)", "def test_auto_rollback(self):\n self.mocked_cursor.execute.side_effect = psycopg2.Error('testing')\n\n db = database.Database()\n try:\n db.execute(sql=\"SELECT * from FOO WHERE bar LIKE 'baz'\")\n except database.DatabaseError:\n pass\n\n self.assertEqual(self.mocked_connection.rollback.call_count, 1)", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def skip_or_run_test_tarantool(func, required_tt_version, msg):\n\n @functools.wraps(func)\n def wrapper(self, *args, **kwargs):\n if func.__name__ == 'setUp':\n func(self, *args, **kwargs)\n\n skip_or_run_test_tarantool_impl(self, required_tt_version, msg)\n\n if func.__name__ != 'setUp':\n func(self, *args, **kwargs)\n\n return wrapper", "def pre_setup(self) -> None:\n if self.__setup_done:\n self.base_logger.error(\"pre_setup was erroneously called twice\")\n raise SetupAlreadyDoneError()", "def test_backup_failure(self):\n program = RsyncSystemBackup(\n destination='0.0.0.0::module/directory',\n sudo_enabled=False,\n )\n self.assertRaises(ExternalCommandFailed, program.execute)", "def before_scenario(context, _):\n context.backup_rotation = br\n context.backup_root_raw = tempfile.TemporaryDirectory()\n context.backup_root = context.backup_root_raw.name\n context.created_files = {}\n\n logging.info(\"Creating %s\" , context.backup_root)\n\n for bucket in [\"yearly\", \"monthly\", \"daily\"]:\n os.mkdir(os.path.join(context.backup_root, bucket))\n context.created_files[bucket] = {\"backup\": [], \"miscellaneous\": []}", "def __call__(self, result=None):\n self._pre_setup()\n super(TestCase, self).__call__(result)\n self._post_tearDown()", "def test_exploding_core_lock_captured(ppg2_per_test):\n a = ppg.FileGeneratingJob(\n \"a\", lambda of: of.write_text(\"a\"), resources=ppg.Resources._RaiseInCoreLock\n )\n with pytest.raises(ppg.FatalGraphException):\n ppg.run()\n assert \"Count == 0\" in str(a.exception)", "def test_pre_no_operation(capsys):\n a = K()\n with pytest.raises(TypeError):\n assert a.pre_order()", "def test_recheck_fails(self):\n raise NotImplementedError", "def test_revert_3(self):\n self.image_create(self.rurl)\n some_files = [\"dev/xxx\", \"dev/yyy\", \"dev/zzz\",\n \"dev/dir1/aaaa\", \"dev/dir1/bbbb\", \"dev/dir2/cccc\",\n 
\"dev/cfg/ffff\", \"dev/cfg/gggg\",\n \"dev/cfg/dir3/iiii\", \"dev/cfg/dir3/jjjj\"]\n\n some_dirs = [\"dev/dir1/\", \"dev/dir1/\", \"dev/dir2/\", \"dev/cfg/dir3/\"]\n self.pkg(\"install dev dev2\")\n self.pkg(\"verify\")\n self.files_are_all_missing(some_dirs + some_files)\n self.create_some_files(some_dirs + some_files)\n self.files_are_all_there(some_dirs + some_files)\n self.pkg(\"verify -v\")\n self.damage_files([\"dev/cfg/bar2\"])\n self.pkg(\"revert -vvv --tagged init-dev\")\n self.pkg(\"verify -v\")\n self.files_are_all_missing(some_dirs + some_files)", "def test_seqprep_assembly(self):\n self.writeTmpFastq(self.test_fn1, self.test_fn2)\n\n ### Suggested default settings ###\n params = {}\n params['-f'] = self.test_fn1\n params['-r'] = self.test_fn2\n params['-s'] = self.temp_dir_string + 'assembled.gz'\n params['-1'] = self.temp_dir_string + 'unassembled.reads1out.gz'\n params['-2'] = self.temp_dir_string + 'unassembled.reads2out.gz'\n params['-o'] = 15\n params['-m'] = 0.02\n params['-n'] = 0.9\n params['-y'] = 'J'\n\n sp_app = SeqPrep(params = params,\n WorkingDir=self.temp_dir_string)\n\n sp_res = sp_app()\n\n # since output is gzipped by default we need to convert to\n # raw text before testing our results. \n assembly_result = GzipFile(fileobj=sp_res['Assembled']).read()\n self.assertEqual(assembly_result, default_expected_assembly_workaround) \n\n unass_reads1_result = GzipFile(fileobj=\n sp_res['UnassembledReads1']).read()\n self.assertEqual(unass_reads1_result, \n expected_default_unassembled_reads1) \n\n unass_reads2_result = GzipFile(fileobj=\n sp_res['UnassembledReads2']).read()\n self.assertEqual(unass_reads2_result, \n expected_default_unassembled_reads2) \n sp_res.cleanUp() \n \n\n ### Alt settings ###\n params_alt = {}\n params_alt['-f'] = self.test_fn1\n params_alt['-r'] = self.test_fn2\n params_alt['-s'] = self.temp_dir_string + 'assembled.gz'\n params_alt['-1'] = self.temp_dir_string + 'unassembled.reads1out.gz'\n params_alt['-2'] = self.temp_dir_string + 'unassembled.reads2out.gz'\n params_alt['-o'] = 30\n params_alt['-m'] = 0.01\n params_alt['-n'] = 0.95\n params_alt['-y'] = 'J'\n \n sp_app2 = SeqPrep(params = params_alt,\n WorkingDir=self.temp_dir_string)\n sp_res2 = sp_app2()\n\n assembly_result = GzipFile(fileobj=sp_res2['Assembled']).read()\n self.assertEqual(assembly_result, expected_assembly_altered_params) \n\n unassembled_reads1_result2 = GzipFile(fileobj=\n sp_res2['UnassembledReads1']).read()\n self.assertEqual(unassembled_reads1_result2, \n expected_unassembled_reads1_altered_params) \n\n unassembled_reads2_result2 = GzipFile(fileobj=\n sp_res2['UnassembledReads2']).read()\n self.assertEqual(unassembled_reads2_result2, \n expected_unassembled_reads2_altered_params) \n\n sp_res2.cleanUp() \n shutil.rmtree(self.temp_dir_string)", "def test_launch_deployment(self):\n pass", "def test_cmd_error(self):\n task = Task(\"uid\", False, False, \"does_not_exist\", None, \".\")\n task._checkpoint_dir = tmp_checkpoint_dir()\n with self.assertRaisesRegexp(RuntimeError, \".*executing Task's command:.*\"):\n task.run()\n task.shell = True\n with self.assertRaisesRegexp(RuntimeError, \".*executing Task's command:.*\"):\n task.run()\n task._dry_run = True\n task.run() # No longer raises RuntimeError", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly 
what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def test_renamer_dryrun(monkeypatch, param_fs, src, dest):\n monkeypatch.setattr(\"builtins.input\", lambda: \"Y\")\n os.chdir(param_fs)\n table = renamer.generate_rentable(src, dest)\n queue = renamer.print_rentable(table)\n renamer.rename_queue(queue, dryrun=True)\n for s, d in zip(src, dest):\n f = param_fs / s\n assert f.read_text() == s", "def startTestHook(self):", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def test_cache_results(self):\n kbase_sdk.init_context.cache_clear()\n config_path = os.path.join(self.test_app_dir, 'kbase.yaml')\n # Move kbase.yaml to kbase.yaml.bak\n context1 = kbase_sdk.init_context(self.test_app_dir)\n shutil.move(config_path, config_path + '.bak')\n context2 = kbase_sdk.init_context(self.test_app_dir)\n # If it's not caching, then MissingPath would be raised\n self.assertEqual(context1, context2)\n shutil.move(config_path + '.bak', config_path)" ]
[ "0.62804943", "0.62302554", "0.5996128", "0.59816766", "0.58999544", "0.57469875", "0.5720267", "0.5683897", "0.5645453", "0.55972165", "0.5577958", "0.55499387", "0.54937345", "0.54752177", "0.5470032", "0.53967464", "0.5395424", "0.53703004", "0.5367641", "0.53619397", "0.53227764", "0.53099126", "0.53001875", "0.5293087", "0.5292406", "0.52699286", "0.52648944", "0.52525276", "0.5249659", "0.5248163", "0.52343684", "0.5233103", "0.52188736", "0.5214909", "0.5186725", "0.5170813", "0.51696897", "0.51612383", "0.5160959", "0.51586187", "0.5152921", "0.514696", "0.51403755", "0.5138457", "0.5136797", "0.513304", "0.513304", "0.5131182", "0.5108282", "0.5084219", "0.5080383", "0.506365", "0.5052212", "0.50255644", "0.50192744", "0.5016967", "0.50085187", "0.50000477", "0.49977055", "0.4996324", "0.4994787", "0.499355", "0.4989168", "0.49833968", "0.4969239", "0.49666375", "0.4960292", "0.4954985", "0.49538583", "0.49493954", "0.49429274", "0.4936174", "0.4935423", "0.4934042", "0.493316", "0.49248022", "0.492281", "0.4918908", "0.491792", "0.49166033", "0.4905049", "0.49049997", "0.4897522", "0.4895965", "0.4895926", "0.48944435", "0.48899338", "0.48897216", "0.4885697", "0.48839247", "0.48691323", "0.48686108", "0.48670772", "0.4865888", "0.48623446", "0.48608285", "0.4858467", "0.4840191", "0.48352394", "0.48338926" ]
0.6106966
2
Tests of try rotation with exec_post in configuration
def test_process_log_with_exec_post_in_configuration(self): with tempfile.TemporaryDirectory() as sandbox: with mock.patch('sys.stderr', new=io.StringIO()) as fake_stderr: with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout: stream_handler = logging.StreamHandler(fake_stderr) logging.getLogger().addHandler(stream_handler) try: srcfile = Path(sandbox, 'pokus.log') srcfile.touch() destfile = Path(sandbox, 'backup', 'pokus.log') compressors = process_log( datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30), { 'target': '{{path}}/backup/{{name}}.{{ext}}', 'interval': 'hourly', 'compress': 'bzip2', 'exec_post': '/bin/false' }, 'hourly', str(srcfile), 10 ) finally: logging.getLogger().removeHandler(stream_handler) self.assertEqual(compressors, []) self.assertFalse(srcfile.exists()) self.assertTrue(destfile.exists()) self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... rotating... "{src}" -> "{dest}" exec_post failed.\n'.format(src=srcfile, dest=destfile)) self.assertEqual(fake_stderr.getvalue(), 'exec_post "/bin/false {dest}" failed with code 1\n'.format(dest=destfile))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_relaunch_deployment_run(self):\n pass", "def test_retest_deployment_run(self):\n pass", "def test_retry_run(self):\n pass", "def test_redeploy(self):\n pass", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n self.assertEqual(mock_shutil.move.call_count, 2)", "def test_set_deployment_run_lock(self):\n pass", "def test_redeploy_container_asset(self):\n pass", "def test_process_log_with_exec_pre_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stderr', new=io.StringIO()) as fake_stderr:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n stream_handler = logging.StreamHandler(fake_stderr)\n logging.getLogger().addHandler(stream_handler)\n try:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'bzip2',\n 'exec_pre': '/bin/false'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n finally:\n logging.getLogger().removeHandler(stream_handler)\n self.assertEqual(compressors, [])\n self.assertTrue(srcfile.exists())\n self.assertFalse(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... exec_pre failed.\\n'.format(src=srcfile))\n self.assertEqual(fake_stderr.getvalue(), 'exec_pre \"/bin/false pokus.log\" failed with code 1\\n')", "def test_post_hooks(self):\n os.makedirs('/tmp/localhost/pacha_post')\n touch_script = open('/tmp/localhost/pacha_post/bar.sh', 'w')\n touch_script.write('''touch /tmp/localhost/post_got_executed.txt''')\n touch_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.post_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/post_got_executed.txt'))", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. 
The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def test_archive_run(self):\n pass", "def test_unarchive_run(self):\n pass", "def _post_run_hook(self, runtime):\n pass", "def retry(self):\n # XXX: check whether it is possible to distingish \n # between the error conditions and set meaningfull exitcode\n return False", "def verify_no_snapshot_reingestion(c: Composition) -> None:\n c.run(\"testdrive\", \"wait-for-snapshot.td\", \"postgres-disable-select-permission.td\")\n\n restart_mz(c)\n\n c.run(\n \"testdrive\",\n \"delete-rows-t1.td\",\n \"delete-rows-t2.td\",\n \"alter-table.td\",\n \"alter-mz.td\",\n )", "def test_restore_backup():", "def test_pre_post_hooks(self):\n os.makedirs('/tmp/localhost/pacha_pre')\n os.makedirs('/tmp/localhost/pacha_post')\n pre_script = open('/tmp/localhost/pacha_pre/foo.sh', 'w')\n pre_script.write('''touch /tmp/localhost/pre_got_executed.txt''')\n pre_script.close()\n post_script = open('/tmp/localhost/pacha_post/bar.sh', 'w')\n post_script.write('''touch /tmp/localhost/post_got_executed.txt''')\n post_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.pre_hooks()\n run.post_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/post_got_executed.txt'))\n self.assertTrue(os.path.isfile('/tmp/localhost/pre_got_executed.txt'))", "def test_overwrite_corrupted_files(overwrite_on_tape_topology, core_config_mock, caches_mock):\n rse1_id, rse2_id, rse3_id, did1, did2 = overwrite_on_tape_topology(did1_corrupted=True, did2_corrupted=True)\n all_rses = [rse1_id, rse2_id, rse3_id]\n\n class _FTSWrapper(FTSWrapper):\n @staticmethod\n def on_receive(job_params):\n for job in (job_params if isinstance(job_params, list) else [job_params]):\n for file in job.get('files', []):\n if (file.get('file_metadata', {}).get('dst_type') == 'TAPE'\n and file.get('file_metadata', {}).get('dst_file', {}).get('file_on_tape') is not None):\n # Fake that dst_file metadata contains file_on_tape == True\n # As we don't really have tape RSEs in our tests, file_on_tape is always false\n file['file_metadata']['dst_file']['file_on_tape'] = True\n return job_params\n\n with patch('rucio.daemons.conveyor.poller.FTS3Transfertool', _FTSWrapper):\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Both transfers must be marked as failed because the file size is incorrect\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit the failed requests. 
They must fail again, because overwrite_corrupted_files is False\n # 2 runs: for multihop, finisher works one hop at a time\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Set overwrite to True before running the poller or finisher\n core_config.set('transfers', 'overwrite_corrupted_files', True)\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit one more time. Now the destination file must be overwritten\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def test_AbortAndRestart(self, dir_mock):\n from furious.async import Async\n from furious.context._execution import _ExecutionContext\n from furious.errors import AbortAndRestart\n from furious.processors import run_job\n\n dir_mock.side_effect = AbortAndRestart\n mock_success = Mock()\n mock_error = Mock()\n\n work = Async(target='dir',\n callbacks={'success': mock_success,\n 'error': mock_error})\n\n with _ExecutionContext(work):\n self.assertRaises(AbortAndRestart, run_job)\n\n self.assertFalse(mock_success.called)\n self.assertFalse(mock_error.called)", "def postRun(self):\n pass", "async def test_unpacker_run_exception(config, mocker, path_map_mock):\n logger_mock = mocker.MagicMock()\n p = Unpacker(config, logger_mock)\n p.last_work_end_timestamp = None\n p._do_work = AsyncMock()\n p._do_work.side_effect = [Exception(\"bad thing happen!\")]\n await p.run()\n p._do_work.assert_called()\n assert p.last_work_end_timestamp", "def test_execute_deployment(self):\n pass", "def test_delete_deployment_run(self):\n pass", "def test_terminate_run(self):\n pass", "def postRunCleanup(self):\n self.logDesc(\"Post Run 
Cleanup\")", "def post_run_func_checked(driver: HammerDriver) -> None:\n if post_run_func is not None:\n post_run_func(driver)", "def test_release_deployment_run(self):\n pass", "def fxt_set_restart_after_abort(sut_settings: SutTestSettings):\n sut_settings.restart_after_abort = True", "def test_process_log_with_pre_and_post_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'gzip -9',\n 'exec_pre': '/bin/true',\n 'exec_post': '/bin/true'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n self.assertEqual(compressors, [[sandbox, 'gzip', '-9', str(destfile)]])\n self.assertFalse(srcfile.exists())\n self.assertTrue(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... rotating... \"{src}\" -> \"{dest}\" done.\\n'.format(src=srcfile, dest=destfile))", "def test_run_ended(self):", "async def test_unpacker_do_work_bundle_once_and_die(config, mocker, path_map_mock):\n once = config.copy()\n once[\"RUN_ONCE_AND_DIE\"] = \"True\"\n logger_mock = mocker.MagicMock()\n claim_mock = mocker.patch(\"lta.unpacker.Unpacker._do_work_claim\", new_callable=AsyncMock)\n claim_mock.return_value = False\n sys_exit_mock = mocker.patch(\"sys.exit\")\n p = Unpacker(once, logger_mock)\n assert not await p._do_work()\n sys_exit_mock.assert_not_called()", "async def test_setup_failed_update_reauth(\n hass: HomeAssistant, ufp: MockUFPFixture\n) -> None:\n\n await hass.config_entries.async_setup(ufp.entry.entry_id)\n await hass.async_block_till_done()\n assert ufp.entry.state == ConfigEntryState.LOADED\n\n # reauth should not be triggered until there are 10 auth failures in a row\n # to verify it is not transient\n ufp.api.update = AsyncMock(side_effect=NotAuthorized)\n for _ in range(10):\n await time_changed(hass, DEFAULT_SCAN_INTERVAL)\n assert len(hass.config_entries.flow._progress) == 0\n\n assert ufp.api.update.call_count == 10\n assert ufp.entry.state == ConfigEntryState.LOADED\n\n await time_changed(hass, DEFAULT_SCAN_INTERVAL)\n assert ufp.api.update.call_count == 11\n assert len(hass.config_entries.flow._progress) == 1", "def test_pre_hooks(self):\n os.makedirs('/tmp/localhost/pacha_pre')\n touch_script = open('/tmp/localhost/pacha_pre/foo.sh', 'w')\n touch_script.write('''touch /tmp/localhost/pre_got_executed.txt''')\n touch_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.pre_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/pre_got_executed.txt'))", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")\n self.logout()", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")\n self.logout()", "def run_starter(self, expect_to_fail=False):", "def reprovision_and_retry(func):\n @functools.wraps(func)\n def wrapper(*a, **kw):\n errback = kw.get('errback', None)\n if errback is None:\n def errback(e):\n raise e\n def errback_wrapper(e):\n if isinstance(e, UnknownAppID) and 'INITIAL' in OPTIONS:\n try:\n for initial in OPTIONS['INITIAL']:\n provision(*initial) # retry provisioning the initial setup\n func(*a, **kw) # and try the function once more\n except Exception(new_exc):\n errback(new_exc) # throwing the new exception\n else:\n errback(e) # not an instance of 
UnknownAppID - nothing we can do here\n kw['errback'] = errback_wrapper\n return func(*a, **kw)\n return wrapper", "def test_fleur_relax_continue_converged(self, run_with_cache, mock_code_factory):\n assert False", "def task_cleanup(e):\n if e.info['task']['mode'] == 'docker' and '_tempdir' in e.info['kwargs']:\n tmpdir = e.info['kwargs']['_tempdir']\n chmod_writable(tmpdir)", "def test_abort_in_resourcing_mid(\n set_restart_after_abort: None,\n setup_context_monitoring_for_abort_test: None,\n composition: conf_types.Composition,\n):", "def postcondition(self, result, exc_info, *args, **kwargs):\n pass", "def test_workflows_restart(self):\n pass", "def test_backup_failure(self):\n program = RsyncSystemBackup(\n destination='0.0.0.0::module/directory',\n sudo_enabled=False,\n )\n self.assertRaises(ExternalCommandFailed, program.execute)", "def can_dry_run(self):\r\n return False", "def postRunCleanup(self):\n self.logDesc(\"Post Run Cleanup\")\n #logout of application\n self.logout()", "def test_overwrite_on_tape(overwrite_on_tape_topology, caches_mock):\n rse1_id, rse2_id, rse3_id, did1, did2 = overwrite_on_tape_topology(did1_corrupted=False, did2_corrupted=True)\n all_rses = [rse1_id, rse2_id, rse3_id]\n\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n assert 'Destination file exists and overwrite is not enabled' in request['err_msg']\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n assert 'Destination file exists and overwrite is not enabled' in request['err_msg']", "def _retry_occurred(self):", "def main_method_setup_execution(monkeypatch, s3_setup, tmpdir, argument_dict):\n monkeypatch.setattr(bootstrapper.OpUtil, \"parse_arguments\", lambda x: argument_dict)\n monkeypatch.setattr(bootstrapper.OpUtil, \"package_install\", mock.Mock(return_value=True))\n\n monkeypatch.setenv(\"AWS_ACCESS_KEY_ID\", \"minioadmin\")\n monkeypatch.setenv(\"AWS_SECRET_ACCESS_KEY\", \"minioadmin\")\n monkeypatch.setenv(\"TEST_ENV_VAR1\", \"test_env_var1\")\n\n s3_setup.fput_object(\n bucket_name=argument_dict[\"cos-bucket\"],\n object_name=\"test-directory/test-file.txt\",\n file_path=os.path.join(RESOURCES_DIR, \"test-requirements-elyra.txt\"),\n )\n s3_setup.fput_object(\n bucket_name=argument_dict[\"cos-bucket\"],\n object_name=\"test-directory/test,file.txt\",\n file_path=os.path.join(RESOURCES_DIR, \"test-bad-requirements-elyra.txt\"),\n )\n s3_setup.fput_object(\n bucket_name=argument_dict[\"cos-bucket\"],\n object_name=\"test-directory/test-archive.tgz\",\n file_path=os.path.join(RESOURCES_DIR, \"test-archive.tgz\"),\n )\n\n with tmpdir.as_cwd():\n bootstrapper.main()\n test_file_list = [\n \"test-archive.tgz\",\n \"test-file.txt\",\n \"test,file.txt\",\n \"test-file/test-file-copy.txt\",\n \"test-file/test,file/test,file-copy.txt\",\n \"test-notebookA.ipynb\",\n \"test-notebookA-output.ipynb\",\n \"test-notebookA.html\",\n ]\n # Ensure working directory has all the files.\n for file in test_file_list:\n assert os.path.isfile(file)\n # Ensure upload directory has all the files EXCEPT the output notebook\n # since it was it is uploaded as the input notebook (test-notebookA.ipynb)\n # (which is included in the archive at start).\n for file in test_file_list:\n if file != \"test-notebookA-output.ipynb\":\n assert 
s3_setup.stat_object(\n bucket_name=argument_dict[\"cos-bucket\"], object_name=\"test-directory/\" + file\n )\n if file == \"test-notebookA.html\":\n with open(\"test-notebookA.html\") as html_file:\n assert \"TEST_ENV_VAR1: test_env_var1\" in html_file.read()", "def onerror(func, path, exc_info):\r\n import stat\r\n print \"Retrying \" + path + \" after chmod\"\r\n os.chmod(path, stat.S_IWRITE)\r\n func(path)", "def test_run_and_restore(self):\n # Run for 5 algo-calls\n testargs = [\"python\", \"scripts/smac\", \"--scenario_file\",\n self.scenario_one, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()\n # Increase limit and run for 10 (so 5 more) by using restore_state\n testargs = [\"python\", \"scripts/smac\", \"--restore_state\",\n self.output_one, \"--scenario_file\",\n self.scenario_two, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()", "async def run(self) -> Optional[BaseException]: # pylint: disable=too-many-branches,too-many-statements\n active = Invocation.active.get(self.name)\n if active is not None:\n return await self.done(self.wait_for(active))\n\n self._become_current()\n Logger.trace(\"Call\")\n\n global rebuild_changed_actions # pylint: disable=invalid-name\n if rebuild_changed_actions.value:\n self.new_persistent_actions.append(PersistentAction())\n self.read_old_persistent_actions()\n\n assert self.name not in Invocation.active\n Invocation.active[self.name] = self\n self.collect_initial_outputs()\n\n try:\n assert self.step is not None\n try:\n await self.done(self.step.function(**self.kwargs))\n except RestartException:\n self._restart()\n await self.done(self.step.function(**self.kwargs))\n await self.done(self.sync())\n await self.done(self.collect_final_outputs())\n\n except StepException as exception: # pylint: disable=broad-except\n self.exception = exception\n\n finally:\n self._become_current()\n\n if self.exception is None:\n assert not self.async_actions\n if self.new_persistent_actions:\n if len(self.new_persistent_actions) > 1 and self.new_persistent_actions[-1].is_empty():\n self.new_persistent_actions.pop()\n\n if not self.did_skip_actions:\n self.write_new_persistent_actions()\n elif len(self.new_persistent_actions) < len(self.old_persistent_actions):\n Logger.warning(\"Skipped some action(s) \" \"even though changed to remove some final action(s)\")\n\n if self.did_run_actions:\n Logger.trace(\"Done\")\n elif self.did_skip_actions:\n Logger.trace(\"Skipped\")\n else:\n Logger.trace(\"Complete\")\n\n else:\n while self.async_actions:\n try:\n await self.done(self.async_actions.pop())\n except StepException:\n pass\n if self.did_run_actions:\n self.poison_all_outputs()\n self.remove_old_persistent_data()\n if not isinstance(self.exception, DryRunException):\n Logger.trace(\"Fail\")\n\n del Invocation.active[self.name]\n if self.condition is not None:\n await self.done(self.condition.acquire())\n self.condition.notify_all()\n self.condition.release()\n\n global failure_aborts_build # pylint: disable=invalid-name\n if self.exception is not None and failure_aborts_build.value:\n no_additional_complaints()\n raise self.exception\n\n return self.exception", "def pytest_finished_handling_group(session, worker):", "async def test_deleter_run_exception(config, mocker):\n logger_mock = mocker.MagicMock()\n p = Deleter(config, logger_mock)\n p.last_work_end_timestamp = None\n p._do_work = AsyncMock()\n p._do_work.side_effect = [Exception(\"bad thing 
happen!\")]\n await p.run()\n p._do_work.assert_called()\n assert p.last_work_end_timestamp", "def test_restart_with_permission(self):\n self.create_user_with_role(\n self.user.name, self.user.email, self.user.password, Role.tester)\n self.create_forktest(\"own-fork-commit\", TestPlatform.linux, regression_tests=[2])\n self.create_completed_regression_t_entries(3, [2])\n with self.app.test_client() as c:\n response = c.post(\n '/account/login', data=self.create_login_form_data(self.user.email, self.user.password))\n response = c.get('/test/restart_test/3')\n test = Test.query.filter(Test.id == 3).first()\n self.assertEqual(test.finished, False)", "def test_archive_run(self, mock_move):\n run_dir = 'data/nanopore_data/run4/done_demuxing/20200104_1412_MN19414_AAU644_68125dc2'\n np_run = Nanopore(run_dir)\n np_run.archive_dir = '/some/dir'\n np_run.archive_run()\n mock_move.assert_called_once()", "def testFailure():\n run(\"chariot-me\") #Start management-engine without initial deplflag\n egress()", "def test_run_configuration_dry_run_diff_no_early_exit_shard_err(mocker: MockerFixture):\n succeeding_shard = \"succeed\" # success\n another_succeeding_shard = \"succeed as well\" # success\n failing_shard = \"fail\" # fail\n sys_exit_1_shard = \"sys-exit-1\" # fail\n sys_exit_true_shard = \"sys-exit-true\" # fail\n sys_exit_0_shard = \"sys-exit-0\" # success\n sys_exit_false_shard = \"sys-exit-false\" # success\n\n def integration_run_func(self: ShardableTestIntegration, dry_run: bool) -> None:\n if self.params.shard == failing_shard:\n raise Exception(f\"shard {self.params.shard} failed\")\n if self.params.shard == sys_exit_1_shard:\n sys.exit(1)\n if self.params.shard == sys_exit_false_shard:\n sys.exit(False)\n if self.params.shard == sys_exit_0_shard:\n sys.exit(0)\n if self.params.shard == sys_exit_true_shard:\n sys.exit(True)\n\n integration_run_func_mock = mocker.patch.object(\n ShardableTestIntegration, \"run\", side_effect=integration_run_func, autospec=True\n )\n\n shardable_test_integration = ShardableTestIntegration(\n params=ShardableTestIntegrationParams()\n )\n\n affected_shards = {\n succeeding_shard,\n another_succeeding_shard,\n failing_shard,\n sys_exit_1_shard,\n sys_exit_false_shard,\n sys_exit_0_shard,\n sys_exit_true_shard,\n }\n\n with pytest.raises(SystemExit) as e:\n _integration_dry_run(\n shardable_test_integration,\n DesiredStateDiff(\n current_desired_state={},\n previous_desired_state={},\n diff_found=True,\n affected_shards=affected_shards,\n ),\n )\n\n # the SystemExit exception contains the nr of failed shards as code\n assert e.value.code == 3\n\n # make sure the run method has been called once per shard\n assert integration_run_func_mock.call_count == len(affected_shards)\n called_sharded_params = [\n c[0][0].params for c in integration_run_func_mock.call_args_list\n ]\n for shard in affected_shards:\n sharded_params = shardable_test_integration.params.copy_and_update(\n {\"shard\": shard}\n )\n assert sharded_params in called_sharded_params", "def run_test_second():\n os.system(\n \"sed -n '/(Failed)$/p' test_op_log.txt | awk '{print $3}' >& rerun_op.txt\"\n )\n rerun_list = get_op_list('rerun_op.txt')\n if len(rerun_list):\n print(\n \"-------there are \"\n + str(len(rerun_list))\n + \" op(s) need to rerun!!!-------\"\n )\n for failed_op in rerun_list:\n os.system(\"ctest -R \\\"(\" + failed_op + \")\\\" \")\n else:\n print(\"-------all op passed successfully!!!-------\")", "def test_encrypt_creates_and_cleans_up_backup(\n self,\n mock_os,\n mock_shutil,\n 
mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 0\n\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_shutil.move.assert_called_once_with('enc', 'enc.bak')\n mock_os.unlink.assert_called_once_with('enc.bak')", "def _postprocess(self):", "def after_scenario(context, _):\n context.backup_root_raw.cleanup()", "def post_process(done_exec, temp_file):\n\n if done_exec.suite == \"renaissance\":\n assert temp_file is not None\n return post_process_renaissance(done_exec, temp_file)\n elif done_exec.suite == \"dacapo\":\n assert temp_file is None\n return post_process_dacapo(done_exec)\n elif done_exec.suite == \"specjvm\":\n assert temp_file is None\n return post_process_specjvm(done_exec)\n else:\n raise ValueError(\"unknown suite %s\" % done_exec.suite)", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_state_after_failure(self):\n pass", "def test_worker_precheck_exception(self):\n assert airflow.settings.validate_session()", "def test_and_swap(self, config):\n LOGGER.info('Attempting to apply new configuration')\n backup = self.backup_config()\n # We have backed up ALL config files (not just the ones we might\n # replace). 
If any error occurs from here out, we will need to restore\n # our config, so we will use exception handling.\n try:\n self.install_config(config)\n\n # We have now merged in our new configuration files, lets test this\n # config.\n if self.test_command(quiet=False):\n LOGGER.debug('Configuration good, reloading')\n self.reload_command()\n self.remove_config(backup)\n\n else:\n LOGGER.info('Configuration bad, restoring')\n self.restore_config(backup)\n\n except Exception:\n LOGGER.exception('Failure, restoring config', exc_info=True)\n self.restore_config(backup)", "def teardown(self, exception):", "def test_noarg_optimize_call():\n os.chdir(pathlib.Path(__file__).parent.absolute())\n loc = shutil.which(\"parrot-optimize\")\n script_descriptor = open(os.path.abspath(loc))\n script = script_descriptor.read()\n sys.argv = [\"parrot-optimize\"]\n\n with pytest.raises(SystemExit):\n exec(script)\n\n script_descriptor.close()", "def on_reset_after_execution(self):\n pass", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "async def post_behavior_run(self) -> None:", "def test_base_replica_repair_with_contention(self):\n self._base_replica_repair_test(fail_mv_lock=True)", "def pytest_runtest_teardown(item, nextitem):\n\n outcome = yield\n\n # No revert after last test\n if nextitem is None or item.session.shouldstop:\n return\n\n # No revert on KeyboardInterrupt\n if outcome.excinfo is not None and outcome.excinfo[0] is KeyboardInterrupt:\n return\n\n reverted = False\n\n # Test fail or teardown fail\n failed = (item.failed_count_before != item.session.testsfailed or\n outcome.excinfo is not None)\n\n destructive = 'undestructive' not in item.keywords\n env_name = item.config.getoption(\"--env\")\n snapshot_name = item.config.getoption(\"--snapshot\")\n if destructive or failed:\n if all([env_name, snapshot_name]):\n revert_snapshot(env_name, snapshot_name)\n\n finalizers = [\n x\n for y in item.session._setupstate._finalizers.values()\n for x in y if hasattr(x, 'im_self')\n ]\n clean_finalizers(item._request, finalizers)\n\n parent = item\n while parent != item.session:\n if parent in item.session._setupstate._finalizers:\n del item.session._setupstate._finalizers[parent]\n if parent.cls and issubclass(parent.cls, unittest.TestCase):\n parent.setup()\n parent = parent.parent\n if item in item.session._setupstate._finalizers:\n del item.session._setupstate._finalizers[item]\n reverted = True\n\n setattr(nextitem._request.session, 'reverted', reverted)", "def retry(self):\n return False", "def test_recheck_fails(self):\n raise NotImplementedError", "def run_autostig(self):\n raise NotImplementedError", "def test_delete_run(self):\n pass", "def test_retry_errors_sooner(self):\n config_manager, json_file = self._setup_config_manager(\n 'socorro.unittest.cron.test_crontabber.BarBackfillJob|1d\\n'\n 'socorro.unittest.cron.test_crontabber.FooBackfillJob|1d\\n'\n 'socorro.unittest.cron.test_crontabber.FooBarBackfillJob|1d',\n extra_value_source={\n # crontabber already has a good default for this but by\n # being explict like this we not only show that it can be\n # changed, we also make it clear what the unit test is\n # supposed to do.\n 'crontabber.error_retry_time': '3600' # 1 hour\n }\n )\n\n # first we need to hack-about so that BarBackfillJob fails only\n # once.\n\n class SomeError(Exception):\n pass\n\n 
def nosy_run(self, date):\n dates_used[self.__class__].append(date)\n if self.__class__ == BarBackfillJob:\n if len(dates_used[self.__class__]) == 1:\n # first time run, simulate trouble\n raise SomeError(\"something went wrong\")\n return originals[self.__class__](self, date)\n\n classes = BarBackfillJob, FooBackfillJob, FooBarBackfillJob\n originals = {}\n dates_used = collections.defaultdict(list)\n for klass in classes:\n originals[klass] = klass.run\n klass.run = nosy_run\n\n try:\n with config_manager.context() as config:\n tab = crontabber.CronTabber(config)\n tab.run_all()\n self.assertEqual(len(dates_used[FooBackfillJob]), 1)\n self.assertEqual(len(dates_used[FooBackfillJob]), 1)\n # never gets there because dependency fails\n self.assertEqual(len(dates_used[FooBarBackfillJob]), 0)\n\n structure = json.load(open(json_file))\n assert structure['foo-backfill']\n assert not structure['foo-backfill']['last_error']\n next_date = utc_now() + datetime.timedelta(days=1)\n assert (\n next_date.strftime('%Y-%m-%d %H:%M') in\n structure['foo-backfill']['next_run']\n )\n\n assert structure['bar-backfill']\n assert structure['bar-backfill']['last_error']\n next_date = utc_now() + datetime.timedelta(hours=1)\n assert (\n next_date.strftime('%Y-%m-%d %H:%M') in\n structure['bar-backfill']['next_run']\n )\n\n assert 'foobar-backfill' not in structure\n\n # Now, let the magic happen, we pretend time passes by 2 hours\n # and run all jobs again\n self._wind_clock(json_file, hours=2)\n # this forces in crontabber instance to reload the JSON file\n tab._database = None\n\n # here, we go two hours later\n tab.run_all()\n\n # Here's the magic sauce! The FooBarBackfillJob had to wait\n # two hours to run after FooBackfillJob but it should\n # have been given the same date input as when FooBackfillJob\n # ran.\n self.assertEqual(len(dates_used[FooBackfillJob]), 1)\n self.assertEqual(len(dates_used[FooBackfillJob]), 1)\n self.assertEqual(len(dates_used[FooBarBackfillJob]), 1)\n\n # use this formatter so that we don't have to compare\n # datetimes with microseconds\n format = lambda x: x.strftime('%Y%m%d %H:%M %Z')\n self.assertEqual(\n format(dates_used[FooBackfillJob][0]),\n format(dates_used[FooBarBackfillJob][0])\n )\n # also check the others\n self.assertEqual(\n format(dates_used[BarBackfillJob][0]),\n format(dates_used[FooBarBackfillJob][0])\n )\n\n structure = json.load(open(json_file))\n self.assertTrue(structure['foo-backfill'])\n self.assertTrue(not structure['foo-backfill']['last_error'])\n self.assertTrue(structure['bar-backfill'])\n self.assertTrue(not structure['bar-backfill']['last_error'])\n self.assertTrue(structure['foobar-backfill'])\n self.assertTrue(not structure['foobar-backfill']['last_error'])\n\n finally:\n for klass in classes:\n klass.run = originals[klass]", "def test_rollback():", "def test_cleanup_on_failure_when_preparing_file(self, mocker):\n remove_spy = mocker.spy(os, 'remove')\n self._retryable.side_effect = requests.HTTPError('Fail')\n\n payload = dict(id=\"B\", data={\"some\": \"data\"}, ai_service='A')\n headers = {'x-rh-identity': 'ABC'}\n self.client.post(self.url, json=payload, headers=headers)\n\n remove_spy.assert_called_once()", "def test_attempt_to_overwrite(fake_meas, tmpdir):\n fake_meas.name = 'test'\n fake_meas.id = '001'\n fake_meas.root_task.default_path = str(tmpdir)\n\n with open(str(tmpdir.join('test_001.meas.ini')), 'wb'):\n pass\n\n fake_meas.dependencies.collect_runtimes()\n res, err = fake_meas.run_checks()\n assert res\n assert 
'exopy.internal_checks' in err\n assert 'duplicate' in err['exopy.internal_checks']", "def post_cleanup(self):\n pass", "def test_mount_failure(self):\n with prepared_image_file(create_filesystem=False):\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `mount' fails it should exit with a nonzero exit code,\n # thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToMountError exception that\n # we're interested in. The check=False option enables our\n # `last resort error handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToMountError, program.execute)", "def test_publish_deployment_run(self):\n pass", "def test_mpdrmq_after_execute_exception(self):\n agentconf={}\n telnetconf={\"host\":\"telnet.lan\"}\n rmqconf={\"host\":\"rmq.lan\"}\n agent=TelnetRmqAgent(agentconf, telnetconf, rmqconf)\n\n #Setup generic mock for others methods wich are not tested here\n ignoredmocks=Mock()\n agent.telnetclient=ignoredmocks\n agent.rmqclient=ignoredmocks\n \n agent.telnetclient.disconnect.side_effect=Exception(\"In your face\")\n\n with self.assertRaises(TelnetRmqAgenException):\n agent.ensure_after_execute()", "def PostExecute(self):\n return True", "def __exit__(self, type, value, trace):\n # Did we exit cleanly?\n if type is None:\n logger.verbose(\"Reconfiguring VM...\")\n self.vm.ReconfigVM_Task(spec=self.spec)", "def _rotate_workers(self, worker):\n raise NotImplementedError", "def test_compress_fastq_real_with_integrity_fail(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context, mocker\n):\n # GIVEN the path to a existing two existing fastq files and a non existing spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n mocker.patch.object(compare_cmd, \"compare_elements\")\n compare_cmd.compare_elements.return_value = False\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 1\n # THEN assert that the spring file was deleted\n assert not spring_tmp_path.exists()\n # THEN assert that only the original fastq files are left\n assert nr_files(dir_path) == 2", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def always_retry(e):\n return True", "def test_dry_run():\n config = get_config(\"delete.conf\")\n path = get_config_path(config)\n test_file = make_test_file(path)\n\n console.pushbroom(config, dry_run=True)\n assert test_file.exists()\n\n console.pushbroom(config)\n assert not test_file.exists()\n\n path.rmdir()", "def test_failed():\n build()\n sh(\"%s %s --last-failed\" % (PYTHON, RUNNER_PY))", "def test_run_started(self):", "def test_abort_in_resourcing_low(\n set_restart_after_abort: None,\n setup_context_monitoring_for_abort_test: None,\n composition: conf_types.Composition,\n):", "def test_restart(self):\n restart_path = os.path.join(arc_path, 'arc', 'testing', 
'restart(H,H2O2,N2H3,CH3CO2).yml')\n project = 'arc_project_for_testing_delete_after_usage2'\n project_directory = os.path.join(arc_path, 'Projects', project)\n arc1 = ARC(project=project, input_dict=restart_path, project_directory=project_directory)\n arc1.execute()\n\n with open(os.path.join(project_directory, 'output', 'thermo.info'), 'r') as f:\n thermo_sft_ccsdtf12_bac = False\n for line in f.readlines():\n if 'thermo_DFT_CCSDTF12_BAC' in line:\n thermo_sft_ccsdtf12_bac = True\n break\n self.assertTrue(thermo_sft_ccsdtf12_bac)\n\n with open(os.path.join(project_directory, 'arc_project_for_testing_delete_after_usage2.info'), 'r') as f:\n sts, n2h3, oet, lot, ap = False, False, False, False, False\n for line in f.readlines():\n if 'Considered the following species and TSs:' in line:\n sts = True\n elif 'Species N2H3' in line:\n n2h3 = True\n elif 'Overall time since project initiation:' in line:\n oet = True\n elif 'Levels of theory used:' in line:\n lot = True\n elif 'ARC project arc_project_for_testing_delete_after_usage2' in line:\n ap = True\n self.assertTrue(sts)\n self.assertTrue(n2h3)\n self.assertTrue(oet)\n self.assertTrue(lot)\n self.assertTrue(ap)\n\n with open(os.path.join(project_directory, 'arc.log'), 'r') as f:\n aei, ver, git, spc, rtm, ldb, therm, src, ter =\\\n False, False, False, False, False, False, False, False, False\n for line in f.readlines():\n if 'ARC execution initiated on' in line:\n aei = True\n elif '# Version:' in line:\n ver = True\n elif 'The current git HEAD for ARC is:' in line:\n git = True\n elif 'Considering species: CH3CO2_rad' in line:\n spc = True\n elif 'All jobs for species N2H3 successfully converged. Run time' in line:\n rtm = True\n elif 'Loading the RMG database...' in line:\n ldb = True\n elif 'Thermodynamics for H2O2' in line:\n therm = True\n elif 'Sources of thermoproperties determined by RMG for the parity plots:' in line:\n src = True\n elif 'ARC execution terminated on' in line:\n ter = True\n self.assertTrue(aei)\n self.assertTrue(ver)\n self.assertTrue(git)\n self.assertTrue(spc)\n self.assertTrue(rtm)\n self.assertTrue(ldb)\n self.assertTrue(therm)\n self.assertTrue(src)\n self.assertTrue(ter)\n\n self.assertTrue(os.path.isfile(os.path.join(project_directory, 'output', 'thermo_parity_plots.pdf')))\n\n with open(os.path.join(project_directory, 'output', 'Species', 'H2O2', 'species_dictionary.txt'), 'r') as f:\n lines = f.readlines()\n adj_list = ''\n for line in lines:\n if 'H2O2' not in line:\n adj_list += line\n if line == '\\n':\n break\n mol1 = Molecule().fromAdjacencyList(str(adj_list))\n self.assertEqual(mol1.toSMILES(), str('OO'))\n\n thermo_library_path = os.path.join(project_directory, 'output', 'RMG libraries', 'thermo',\n 'arc_project_for_testing_delete_after_usage2.py')\n new_thermo_library_path = os.path.join(settings['database.directory'], 'thermo', 'libraries',\n 'arc_project_for_testing_delete_after_usage2.py')\n # copy the generated library to RMG-database\n shutil.copyfile(thermo_library_path, new_thermo_library_path)\n db = RMGDatabase()\n db.load(\n path=settings['database.directory'],\n thermoLibraries=[str('arc_project_for_testing_delete_after_usage2')],\n transportLibraries=[],\n reactionLibraries=[],\n seedMechanisms=[],\n kineticsFamilies='none',\n kineticsDepositories=[],\n statmechLibraries=None,\n depository=False,\n solvation=False,\n testing=True,\n )\n\n spc2 = Species().fromSMILES(str('CC([O])=O'))\n spc2.generate_resonance_structures()\n spc2.thermo = db.thermo.getThermoData(spc2)\n 
self.assertAlmostEqual(spc2.getEnthalpy(298), -178003.44650359568, 1)\n self.assertAlmostEqual(spc2.getEntropy(298), 283.5983103176096, 1)\n self.assertAlmostEqual(spc2.getHeatCapacity(1000), 118.99753808225603, 1)\n self.assertTrue('arc_project_for_testing_delete_after_usage2' in spc2.thermo.comment)\n\n # delete the generated library from RMG-database\n os.remove(new_thermo_library_path)", "def __run(self):\n try:\n sys.settrace(self.globaltrace)\n self.__run_backup()\n self.run = self.__run_backup\n except Exception, e:\n e.message = e.__class__.__name__ + ' in ' + self.getName() + ': ' + e.message\n self.__exception = e", "def after_test(self, func, *args, **kwargs):\n pass", "def IntrumentFailHook(self):\n #Restart iserver\n #If failed to restart\n #\treturn fail\n pass" ]
[ "0.642584", "0.63849103", "0.63836694", "0.5834038", "0.5756419", "0.56373674", "0.56172615", "0.5577163", "0.55671585", "0.5550985", "0.5545134", "0.5540187", "0.55376", "0.5514732", "0.5497733", "0.5491249", "0.54792863", "0.54780215", "0.5450571", "0.543589", "0.5422628", "0.54138786", "0.5409028", "0.539499", "0.53931224", "0.53667676", "0.53639525", "0.52959", "0.5287589", "0.5266576", "0.52662253", "0.5259084", "0.5259017", "0.5254766", "0.5246299", "0.5246299", "0.5227764", "0.52223825", "0.52195024", "0.5209001", "0.5194356", "0.5183865", "0.51801753", "0.51686484", "0.51659936", "0.5165698", "0.51641256", "0.5155246", "0.5143693", "0.5124165", "0.5114378", "0.51115954", "0.51106775", "0.5110105", "0.51075315", "0.5094559", "0.5094377", "0.50784904", "0.50759256", "0.50723296", "0.5061088", "0.50596845", "0.505679", "0.5054401", "0.5050446", "0.503916", "0.5036648", "0.5030574", "0.5024479", "0.50238484", "0.50226015", "0.50219995", "0.5008296", "0.5007228", "0.50029486", "0.500008", "0.49898157", "0.4972726", "0.49657536", "0.49638963", "0.49519625", "0.49409127", "0.49408802", "0.49394608", "0.49384764", "0.49350873", "0.4928388", "0.4923247", "0.49224216", "0.49219286", "0.49215028", "0.49166504", "0.4913913", "0.49122432", "0.4909278", "0.49088177", "0.49045262", "0.49042347", "0.49001577", "0.48995444" ]
0.61695516
3
Tests of try rotation with positive pre and post exec in configuration
def test_process_log_with_pre_and_post_in_configuration(self): with tempfile.TemporaryDirectory() as sandbox: with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout: srcfile = Path(sandbox, 'pokus.log') srcfile.touch() destfile = Path(sandbox, 'backup', 'pokus.log') compressors = process_log( datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30), { 'target': '{{path}}/backup/{{name}}.{{ext}}', 'interval': 'hourly', 'compress': 'gzip -9', 'exec_pre': '/bin/true', 'exec_post': '/bin/true' }, 'hourly', str(srcfile), 10 ) self.assertEqual(compressors, [[sandbox, 'gzip', '-9', str(destfile)]]) self.assertFalse(srcfile.exists()) self.assertTrue(destfile.exists()) self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... rotating... "{src}" -> "{dest}" done.\n'.format(src=srcfile, dest=destfile))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def test_retry_run(self):\n pass", "def test_g_asignar_rol(self):", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n self.assertEqual(mock_shutil.move.call_count, 2)", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def test_retest_deployment_run(self):\n pass", "def test_fleur_relax_continue_converged(self, run_with_cache, mock_code_factory):\n assert False", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def 
test_recheck_fails(self):\n raise NotImplementedError", "def test_restart(self):\n restart_path = os.path.join(arc_path, 'arc', 'testing', 'restart(H,H2O2,N2H3,CH3CO2).yml')\n project = 'arc_project_for_testing_delete_after_usage2'\n project_directory = os.path.join(arc_path, 'Projects', project)\n arc1 = ARC(project=project, input_dict=restart_path, project_directory=project_directory)\n arc1.execute()\n\n with open(os.path.join(project_directory, 'output', 'thermo.info'), 'r') as f:\n thermo_sft_ccsdtf12_bac = False\n for line in f.readlines():\n if 'thermo_DFT_CCSDTF12_BAC' in line:\n thermo_sft_ccsdtf12_bac = True\n break\n self.assertTrue(thermo_sft_ccsdtf12_bac)\n\n with open(os.path.join(project_directory, 'arc_project_for_testing_delete_after_usage2.info'), 'r') as f:\n sts, n2h3, oet, lot, ap = False, False, False, False, False\n for line in f.readlines():\n if 'Considered the following species and TSs:' in line:\n sts = True\n elif 'Species N2H3' in line:\n n2h3 = True\n elif 'Overall time since project initiation:' in line:\n oet = True\n elif 'Levels of theory used:' in line:\n lot = True\n elif 'ARC project arc_project_for_testing_delete_after_usage2' in line:\n ap = True\n self.assertTrue(sts)\n self.assertTrue(n2h3)\n self.assertTrue(oet)\n self.assertTrue(lot)\n self.assertTrue(ap)\n\n with open(os.path.join(project_directory, 'arc.log'), 'r') as f:\n aei, ver, git, spc, rtm, ldb, therm, src, ter =\\\n False, False, False, False, False, False, False, False, False\n for line in f.readlines():\n if 'ARC execution initiated on' in line:\n aei = True\n elif '# Version:' in line:\n ver = True\n elif 'The current git HEAD for ARC is:' in line:\n git = True\n elif 'Considering species: CH3CO2_rad' in line:\n spc = True\n elif 'All jobs for species N2H3 successfully converged. Run time' in line:\n rtm = True\n elif 'Loading the RMG database...' 
in line:\n ldb = True\n elif 'Thermodynamics for H2O2' in line:\n therm = True\n elif 'Sources of thermoproperties determined by RMG for the parity plots:' in line:\n src = True\n elif 'ARC execution terminated on' in line:\n ter = True\n self.assertTrue(aei)\n self.assertTrue(ver)\n self.assertTrue(git)\n self.assertTrue(spc)\n self.assertTrue(rtm)\n self.assertTrue(ldb)\n self.assertTrue(therm)\n self.assertTrue(src)\n self.assertTrue(ter)\n\n self.assertTrue(os.path.isfile(os.path.join(project_directory, 'output', 'thermo_parity_plots.pdf')))\n\n with open(os.path.join(project_directory, 'output', 'Species', 'H2O2', 'species_dictionary.txt'), 'r') as f:\n lines = f.readlines()\n adj_list = ''\n for line in lines:\n if 'H2O2' not in line:\n adj_list += line\n if line == '\\n':\n break\n mol1 = Molecule().fromAdjacencyList(str(adj_list))\n self.assertEqual(mol1.toSMILES(), str('OO'))\n\n thermo_library_path = os.path.join(project_directory, 'output', 'RMG libraries', 'thermo',\n 'arc_project_for_testing_delete_after_usage2.py')\n new_thermo_library_path = os.path.join(settings['database.directory'], 'thermo', 'libraries',\n 'arc_project_for_testing_delete_after_usage2.py')\n # copy the generated library to RMG-database\n shutil.copyfile(thermo_library_path, new_thermo_library_path)\n db = RMGDatabase()\n db.load(\n path=settings['database.directory'],\n thermoLibraries=[str('arc_project_for_testing_delete_after_usage2')],\n transportLibraries=[],\n reactionLibraries=[],\n seedMechanisms=[],\n kineticsFamilies='none',\n kineticsDepositories=[],\n statmechLibraries=None,\n depository=False,\n solvation=False,\n testing=True,\n )\n\n spc2 = Species().fromSMILES(str('CC([O])=O'))\n spc2.generate_resonance_structures()\n spc2.thermo = db.thermo.getThermoData(spc2)\n self.assertAlmostEqual(spc2.getEnthalpy(298), -178003.44650359568, 1)\n self.assertAlmostEqual(spc2.getEntropy(298), 283.5983103176096, 1)\n self.assertAlmostEqual(spc2.getHeatCapacity(1000), 118.99753808225603, 1)\n self.assertTrue('arc_project_for_testing_delete_after_usage2' in spc2.thermo.comment)\n\n # delete the generated library from RMG-database\n os.remove(new_thermo_library_path)", "def verify_no_snapshot_reingestion(c: Composition) -> None:\n c.run(\"testdrive\", \"wait-for-snapshot.td\", \"postgres-disable-select-permission.td\")\n\n restart_mz(c)\n\n c.run(\n \"testdrive\",\n \"delete-rows-t1.td\",\n \"delete-rows-t2.td\",\n \"alter-table.td\",\n \"alter-mz.td\",\n )", "def doRotation(self, delta):\n self.correctPending()\n self.rotation = (self.rotation + delta) % self.possibleRotations", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def test_rot(self):\n\n print(\"rot()\")\n obs = self.fixture\n\n # rotation(0) = identity\n for axis in [1, 2, 3]:\n # theta = 0.0\n rotation = obs.rot(0.0, axis)\n # find || eye - rot1 ||\n diff = 
np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n # theta = 2*pi\n rotation = obs.rot(2.0 * np.pi, axis)\n # find || eye - rot1 ||\n diff = np.linalg.norm(np.eye(3) - rotation)\n self.assertAlmostEqual(diff, 0.0, delta=1e-12)\n\n # perform many randomized tests\n num_tests = 100\n num_products = 10\n for _test_counter in range(num_tests):\n thetas = []\n axes = []\n base = np.eye(3)\n # we will multiply a series of rotations into \"base\"\n rot_all = base\n for _rot_counter in range(num_products):\n theta = np.random.uniform(2 * np.pi) # in [0,2 pi]\n axis = np.random.randint(3) + 1 # in {1,2,3}\n axes.append(axis)\n thetas.append(theta)\n rotation = obs.rot(theta, axis)\n # multiply rot1 into the cumulative rotation\n rot_all = np.dot(rot_all, rotation)\n # now, back all the rotations out\n for _rot_counter in range(num_products):\n theta = thetas.pop()\n axis = axes.pop()\n # apply the inverse rotation\n rotation = obs.rot(-theta, axis)\n rot_all = np.dot(rot_all, rotation)\n # find || base - rot1 * rot2 ||\n diff = np.linalg.norm(base - rot_all)\n self.assertAlmostEqual(diff, 0.0, delta=1e-10 * num_products)", "def test_relaunch_deployment_run(self):\n pass", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def retry(self):\n # XXX: check whether it is possible to distingish \n # between the error conditions and set meaningfull exitcode\n return False", "def test_run_and_restore(self):\n # Run for 5 algo-calls\n testargs = [\"python\", \"scripts/smac\", \"--scenario_file\",\n self.scenario_one, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()\n # Increase limit and run for 10 (so 5 more) by using restore_state\n testargs = [\"python\", \"scripts/smac\", \"--restore_state\",\n self.output_one, \"--scenario_file\",\n self.scenario_two, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()", "def can_dry_run(self):\r\n return False", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def test_abstractShouldRotate(self):\n log = 
logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def test_process_log_with_exec_pre_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stderr', new=io.StringIO()) as fake_stderr:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n stream_handler = logging.StreamHandler(fake_stderr)\n logging.getLogger().addHandler(stream_handler)\n try:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'bzip2',\n 'exec_pre': '/bin/false'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n finally:\n logging.getLogger().removeHandler(stream_handler)\n self.assertEqual(compressors, [])\n self.assertTrue(srcfile.exists())\n self.assertFalse(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... exec_pre failed.\\n'.format(src=srcfile))\n self.assertEqual(fake_stderr.getvalue(), 'exec_pre \"/bin/false pokus.log\" failed with code 1\\n')", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def setupAuto(self) :\n\t\tself.rotateDir = -1\n\t\tself.rotateDuration = -1\n\t\tself.moveDir = -1\n\t\tself.moveDuration = -1\n\t\tself.isAvoidingCollision = False\n\t\tself.inBigRotate = False # if True, do not move forward;\n\t\t\t\t\t # only rotate\n\t\treturn", "def test_change_provisioned_throughput_usual_case():", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")", "def test_workflows_restart(self):\n pass", "def test_restore_backup():", "def test_modePreservation(self):\n open(self.path, \"w\").close()\n os.chmod(self.path, 0o707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n log.rotate()\n self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])", "def rotate_in_place(angle):\n action = easy_cozmo._robot.turn_in_place(degrees(-1*angle),speed=degrees(df_rotate_speed))\n try:\n action.wait_for_completed()\n if action.has_succeeded:\n return True\n else:\n code, reason = action.failure_reason\n result = action.result\n print(\"WARNING RotateInPlace: code=%s reason='%s' result=%s\" % (code, reason, result))\n say_error(\"I couldn't rotate, sorry\")\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"I can't rotate, sorry\")\n try:\n while action.is_running:\n action.abort()\n time.sleep(.5)\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"Wheels faulty\")\n\n return False", "def test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in 
range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def test_set_deployment_run_lock(self):\n pass", "def run_starter(self, expect_to_fail=False):", "def should_attempt_phase(testcase, phase):\n if (phase == MinimizationPhase.ARGUMENTS and\n environment.is_engine_fuzzer_job()):\n # Should not minimize arguments list for engine based fuzzer jobs.\n return False\n\n current_phase = testcase.get_metadata(\n 'minimization_phase', default=MinimizationPhase.GESTURES)\n return phase >= current_phase", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def run_test_second():\n os.system(\n \"sed -n '/(Failed)$/p' test_op_log.txt | awk '{print $3}' >& rerun_op.txt\"\n )\n rerun_list = get_op_list('rerun_op.txt')\n if len(rerun_list):\n print(\n \"-------there are \"\n + str(len(rerun_list))\n + \" op(s) need to rerun!!!-------\"\n )\n for failed_op in rerun_list:\n os.system(\"ctest -R \\\"(\" + failed_op + \")\\\" \")\n else:\n print(\"-------all op passed successfully!!!-------\")", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def test_foo(self):\n self.ran = True\n 1 / 0", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def testStart(self):\n self.assert_(self.visionSystem.downwardSafeDetector)\n #self.assertCurrentMotion(motion.search.ForwardZigZag)", "def test_wrong_mode(self):\n self.assertRaises(ComponentErrorsEx, self.dp.setRewindingMode, 'FOO')", "def do_polar_alignment_test(self, *arg):\n if self.ready is False:\n return\n\n start_time = current_time(flatten=True)\n\n base_dir = '{}/images/drift_align/{}'.format(\n os.getenv('PANDIR'), start_time)\n plot_fn = '{}/{}_center_overlay.jpg'.format(base_dir, start_time)\n\n mount = self.pocs.observatory.mount\n\n print_info(\"Moving to home position\")\n self.pocs.say(\"Moving to home position\")\n mount.slew_to_home()\n\n # Polar Rotation\n pole_fn = polar_rotation(self.pocs, base_dir=base_dir)\n pole_fn = pole_fn.replace('.cr2', '.fits')\n\n # Mount Rotation\n rotate_fn = mount_rotation(self.pocs, base_dir=base_dir)\n rotate_fn = rotate_fn.replace('.cr2', '.fits')\n\n print_info(\"Moving back to home\")\n self.pocs.say(\"Moving back to home\")\n mount.slew_to_home()\n\n print_info(\"Solving celestial pole image\")\n self.pocs.say(\"Solving celestial pole image\")\n try:\n pole_center = polar_alignment_utils.analyze_polar_rotation(pole_fn)\n except error.SolveError:\n print_warning(\"Unable to solve pole image.\")\n print_warning(\"Will proceeed with rotation image but analysis not possible\")\n pole_center = None\n else:\n pole_center = (float(pole_center[0]), float(pole_center[1]))\n\n print_info(\"Starting analysis of rotation image\")\n self.pocs.say(\"Starting analysis 
of rotation image\")\n try:\n rotate_center = polar_alignment_utils.analyze_ra_rotation(rotate_fn)\n except Exception:\n print_warning(\"Unable to process rotation image\")\n rotate_center = None\n\n if pole_center is not None and rotate_center is not None:\n print_info(\"Plotting centers\")\n self.pocs.say(\"Plotting centers\")\n\n print_info(\"Pole: {} {}\".format(pole_center, pole_fn))\n self.pocs.say(\"Pole : {:0.2f} x {:0.2f}\".format(\n pole_center[0], pole_center[1]))\n\n print_info(\"Rotate: {} {}\".format(rotate_center, rotate_fn))\n self.pocs.say(\"Rotate: {:0.2f} x {:0.2f}\".format(\n rotate_center[0], rotate_center[1]))\n\n d_x = pole_center[0] - rotate_center[0]\n d_y = pole_center[1] - rotate_center[1]\n\n self.pocs.say(\"d_x: {:0.2f}\".format(d_x))\n self.pocs.say(\"d_y: {:0.2f}\".format(d_y))\n\n fig = polar_alignment_utils.plot_center(\n pole_fn, rotate_fn, pole_center, rotate_center)\n\n print_info(\"Plot image: {}\".format(plot_fn))\n fig.tight_layout()\n fig.savefig(plot_fn)\n\n try:\n os.unlink('/var/panoptes/images/latest.jpg')\n except Exception:\n pass\n try:\n os.symlink(plot_fn, '/var/panoptes/images/latest.jpg')\n except Exception:\n print_warning(\"Can't link latest image\")\n\n with open('/var/panoptes/images/drift_align/center.txt'.format(base_dir), 'a') as f:\n f.write('{}.{},{},{},{},{},{}\\n'.format(start_time, pole_center[0], pole_center[\n 1], rotate_center[0], rotate_center[1], d_x, d_y))\n\n print_info(\"Done with polar alignment test\")\n self.pocs.say(\"Done with polar alignment test\")", "def test_state_after_failure(self):\n pass", "def test_overwrite_corrupted_files(overwrite_on_tape_topology, core_config_mock, caches_mock):\n rse1_id, rse2_id, rse3_id, did1, did2 = overwrite_on_tape_topology(did1_corrupted=True, did2_corrupted=True)\n all_rses = [rse1_id, rse2_id, rse3_id]\n\n class _FTSWrapper(FTSWrapper):\n @staticmethod\n def on_receive(job_params):\n for job in (job_params if isinstance(job_params, list) else [job_params]):\n for file in job.get('files', []):\n if (file.get('file_metadata', {}).get('dst_type') == 'TAPE'\n and file.get('file_metadata', {}).get('dst_file', {}).get('file_on_tape') is not None):\n # Fake that dst_file metadata contains file_on_tape == True\n # As we don't really have tape RSEs in our tests, file_on_tape is always false\n file['file_metadata']['dst_file']['file_on_tape'] = True\n return job_params\n\n with patch('rucio.daemons.conveyor.poller.FTS3Transfertool', _FTSWrapper):\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Both transfers must be marked as failed because the file size is incorrect\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit the failed requests. 
They must fail again, because overwrite_corrupted_files is False\n # 2 runs: for multihop, finisher works one hop at a time\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n # Set overwrite to True before running the poller or finisher\n core_config.set('transfers', 'overwrite_corrupted_files', True)\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.FAILED\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n\n # Re-submit one more time. Now the destination file must be overwritten\n finisher(once=True, partition_wait_time=0)\n finisher(once=True, partition_wait_time=0)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.QUEUED\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.QUEUED\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n request = request_core.get_request_by_did(rse_id=rse3_id, **did1)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'\n request = request_core.get_request_by_did(rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.SUBMITTED\n assert __wait_for_fts_state(request, expected_state='ARCHIVING') == 'ARCHIVING'", "def test_seqprep_assembly(self):\n self.writeTmpFastq(self.test_fn1, self.test_fn2)\n\n ### Suggested default settings ###\n params = {}\n params['-f'] = self.test_fn1\n params['-r'] = self.test_fn2\n params['-s'] = self.temp_dir_string + 'assembled.gz'\n params['-1'] = self.temp_dir_string + 'unassembled.reads1out.gz'\n params['-2'] = self.temp_dir_string + 'unassembled.reads2out.gz'\n params['-o'] = 15\n params['-m'] = 0.02\n params['-n'] = 0.9\n params['-y'] = 'J'\n\n sp_app = SeqPrep(params = params,\n WorkingDir=self.temp_dir_string)\n\n sp_res = sp_app()\n\n # since output is gzipped by default we need to convert to\n # raw text before testing our results. 
\n assembly_result = GzipFile(fileobj=sp_res['Assembled']).read()\n self.assertEqual(assembly_result, default_expected_assembly_workaround) \n\n unass_reads1_result = GzipFile(fileobj=\n sp_res['UnassembledReads1']).read()\n self.assertEqual(unass_reads1_result, \n expected_default_unassembled_reads1) \n\n unass_reads2_result = GzipFile(fileobj=\n sp_res['UnassembledReads2']).read()\n self.assertEqual(unass_reads2_result, \n expected_default_unassembled_reads2) \n sp_res.cleanUp() \n \n\n ### Alt settings ###\n params_alt = {}\n params_alt['-f'] = self.test_fn1\n params_alt['-r'] = self.test_fn2\n params_alt['-s'] = self.temp_dir_string + 'assembled.gz'\n params_alt['-1'] = self.temp_dir_string + 'unassembled.reads1out.gz'\n params_alt['-2'] = self.temp_dir_string + 'unassembled.reads2out.gz'\n params_alt['-o'] = 30\n params_alt['-m'] = 0.01\n params_alt['-n'] = 0.95\n params_alt['-y'] = 'J'\n \n sp_app2 = SeqPrep(params = params_alt,\n WorkingDir=self.temp_dir_string)\n sp_res2 = sp_app2()\n\n assembly_result = GzipFile(fileobj=sp_res2['Assembled']).read()\n self.assertEqual(assembly_result, expected_assembly_altered_params) \n\n unassembled_reads1_result2 = GzipFile(fileobj=\n sp_res2['UnassembledReads1']).read()\n self.assertEqual(unassembled_reads1_result2, \n expected_unassembled_reads1_altered_params) \n\n unassembled_reads2_result2 = GzipFile(fileobj=\n sp_res2['UnassembledReads2']).read()\n self.assertEqual(unassembled_reads2_result2, \n expected_unassembled_reads2_altered_params) \n\n sp_res2.cleanUp() \n shutil.rmtree(self.temp_dir_string)", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def test_compress_fastq_real_with_integrity_fail(\n first_tmp_file, second_tmp_file, spring_tmp_path, real_base_context, mocker\n):\n # GIVEN the path to a existing two existing fastq files and a non existing spring\n runner = CliRunner()\n assert not spring_tmp_path.exists()\n assert first_tmp_file.exists()\n assert second_tmp_file.exists()\n\n dir_path = 
spring_tmp_path.parent\n assert nr_files(dir_path) == 2\n mocker.patch.object(compare_cmd, \"compare_elements\")\n compare_cmd.compare_elements.return_value = False\n # WHEN running the compress command with an intergrity check\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_tmp_file),\n \"--second-read\",\n str(second_tmp_file),\n \"--spring-path\",\n str(spring_tmp_path),\n \"--check-integrity\",\n ],\n obj=real_base_context,\n )\n # THEN assert the command succedes\n assert result.exit_code == 1\n # THEN assert that the spring file was deleted\n assert not spring_tmp_path.exists()\n # THEN assert that only the original fastq files are left\n assert nr_files(dir_path) == 2", "def test_process_log_with_exec_post_in_configuration(self):\n with tempfile.TemporaryDirectory() as sandbox:\n with mock.patch('sys.stderr', new=io.StringIO()) as fake_stderr:\n with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:\n stream_handler = logging.StreamHandler(fake_stderr)\n logging.getLogger().addHandler(stream_handler)\n try:\n srcfile = Path(sandbox, 'pokus.log')\n srcfile.touch()\n destfile = Path(sandbox, 'backup', 'pokus.log')\n compressors = process_log(\n datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),\n {\n 'target': '{{path}}/backup/{{name}}.{{ext}}',\n 'interval': 'hourly',\n 'compress': 'bzip2',\n 'exec_post': '/bin/false'\n },\n 'hourly',\n str(srcfile),\n 10\n )\n finally:\n logging.getLogger().removeHandler(stream_handler)\n self.assertEqual(compressors, [])\n self.assertFalse(srcfile.exists())\n self.assertTrue(destfile.exists())\n self.assertEqual(fake_stdout.getvalue(), 'Checking \"{src}\"... rotating... \"{src}\" -> \"{dest}\" exec_post failed.\\n'.format(src=srcfile, dest=destfile))\n self.assertEqual(fake_stderr.getvalue(), 'exec_post \"/bin/false {dest}\" failed with code 1\\n'.format(dest=destfile))", "def test_run_configuration_dry_run_diff_no_early_exit_shard_err(mocker: MockerFixture):\n succeeding_shard = \"succeed\" # success\n another_succeeding_shard = \"succeed as well\" # success\n failing_shard = \"fail\" # fail\n sys_exit_1_shard = \"sys-exit-1\" # fail\n sys_exit_true_shard = \"sys-exit-true\" # fail\n sys_exit_0_shard = \"sys-exit-0\" # success\n sys_exit_false_shard = \"sys-exit-false\" # success\n\n def integration_run_func(self: ShardableTestIntegration, dry_run: bool) -> None:\n if self.params.shard == failing_shard:\n raise Exception(f\"shard {self.params.shard} failed\")\n if self.params.shard == sys_exit_1_shard:\n sys.exit(1)\n if self.params.shard == sys_exit_false_shard:\n sys.exit(False)\n if self.params.shard == sys_exit_0_shard:\n sys.exit(0)\n if self.params.shard == sys_exit_true_shard:\n sys.exit(True)\n\n integration_run_func_mock = mocker.patch.object(\n ShardableTestIntegration, \"run\", side_effect=integration_run_func, autospec=True\n )\n\n shardable_test_integration = ShardableTestIntegration(\n params=ShardableTestIntegrationParams()\n )\n\n affected_shards = {\n succeeding_shard,\n another_succeeding_shard,\n failing_shard,\n sys_exit_1_shard,\n sys_exit_false_shard,\n sys_exit_0_shard,\n sys_exit_true_shard,\n }\n\n with pytest.raises(SystemExit) as e:\n _integration_dry_run(\n shardable_test_integration,\n DesiredStateDiff(\n current_desired_state={},\n previous_desired_state={},\n diff_found=True,\n affected_shards=affected_shards,\n ),\n )\n\n # the SystemExit exception contains the nr of failed shards as code\n assert e.value.code == 3\n\n # make sure the run method has been called once 
per shard\n assert integration_run_func_mock.call_count == len(affected_shards)\n called_sharded_params = [\n c[0][0].params for c in integration_run_func_mock.call_args_list\n ]\n for shard in affected_shards:\n sharded_params = shardable_test_integration.params.copy_and_update(\n {\"shard\": shard}\n )\n assert sharded_params in called_sharded_params", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_archive_run(self):\n pass", "def run1():\n #Reseting motors\n ResetRobot.reset_wheel_motors()\n ResetRobot.reset_attachment_motors()\n CalibrateRobot.calibrate_gyro()\n\n #mission M01 and M02 - space travel and solar panel\n M01_M02()\n \n #Mission M05- Extraction \n M05_M14()\n\n #Back to base before Gerhard (Remove comment if necessary)\n return_to_base1()\n\n # Must delete for competition.. This is to set up forklift to repeat run.\n Robot.attachment_left.on_for_rotations(-100, 8) #Raises Forklift ", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def test_valid_input_succeeds(self, async_patch, chan_patch):\n self.assertTrue(send_rotate_to_can(self.USER, self.BIN_NUM))\n async_patch.assert_called_once()\n chan_patch.assert_called_once()", "def test_run_ended(self):", "def test(arm1, fname, test_method):\n positions = U.load_pickle_to_list(fname) \n print(\"loaded {} positions from {}\".format(len(positions), fname))\n if test_method == 0:\n print(\"We're going to run entirely open-loop.\")\n elif test_method == 1:\n print(\"NOTE! 
You'll have to provide corrections after each movement.\")\n revised_positions = []\n arm1.close_gripper()\n\n for i,(pos,rot) in enumerate(positions):\n U.move(arm1, pos, rot)\n arm1.close_gripper()\n real_pos,real_rot = U.get_pos_rot_from_arm(arm1, nparrays=True)\n print(\"\\n({}) Target position: {},{}\".format(i,pos,rot))\n print(\" Actual position: {},{}\".format(real_pos,real_rot))\n\n if test_method == 1:\n string = \"Now correct the position as needed, then press any key,\"+ \\\n \" other than ESC (which will terminate the entire program).\"\n U.call_wait_key(cv2.imshow(string, d.left_image), exit=True)\n revised_pos,revised_rot = U.get_pos_rot_from_arm(arm1, nparrays=True)\n revised_positions.append( (revised_pos,revised_rot) )\n print(\" Revised position: {},{}\".format(revised_pos,revised_rot))\n else:\n time.sleep(2)\n\n if test_method == 1:\n new_fname = fname[:-2] + '_revised.p'\n print(\"Storing {} positions in file {}\".format(len(revised_positions), new_fname))\n U.store_pickle(new_fname, revised_positions)", "def test_case_01(self):\n if True:\n self.fail()", "def test_encrypt_creates_and_cleans_up_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 0\n\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_shutil.move.assert_called_once_with('enc', 'enc.bak')\n mock_os.unlink.assert_called_once_with('enc.bak')", "def test_worker_precheck_exception(self):\n assert airflow.settings.validate_session()", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def test_overwrite_on_tape(overwrite_on_tape_topology, caches_mock):\n rse1_id, rse2_id, rse3_id, did1, did2 = overwrite_on_tape_topology(did1_corrupted=False, did2_corrupted=True)\n all_rses = [rse1_id, rse2_id, rse3_id]\n\n submitter(once=True, rses=[{'id': rse_id} for rse_id in all_rses], group_bulk=10, partition_wait_time=0, transfertype='single', filter_transfertool=None)\n\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did1)\n assert request['state'] == 
RequestState.FAILED\n assert 'Destination file exists and overwrite is not enabled' in request['err_msg']\n request = __wait_for_state_transition(dst_rse_id=rse3_id, **did2)\n assert request['state'] == RequestState.FAILED\n assert 'Destination file exists and overwrite is not enabled' in request['err_msg']", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def test_renamer_dryrun(monkeypatch, param_fs, src, dest):\n monkeypatch.setattr(\"builtins.input\", lambda: \"Y\")\n os.chdir(param_fs)\n table = renamer.generate_rentable(src, dest)\n queue = renamer.print_rentable(table)\n renamer.rename_queue(queue, dryrun=True)\n for s, d in zip(src, dest):\n f = param_fs / s\n assert f.read_text() == s", "def testModePreservation(self):\n f = open(self.path, \"w\").close()\n os.chmod(self.path, 0707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n log.rotate()\n self.assertEquals(mode, os.stat(self.path)[stat.ST_MODE])", "async def test_setup_failed_update_reauth(\n hass: HomeAssistant, ufp: MockUFPFixture\n) -> None:\n\n await hass.config_entries.async_setup(ufp.entry.entry_id)\n await hass.async_block_till_done()\n assert ufp.entry.state == ConfigEntryState.LOADED\n\n # reauth should not be triggered until there are 10 auth failures in a row\n # to verify it is not transient\n ufp.api.update = AsyncMock(side_effect=NotAuthorized)\n for _ in range(10):\n await time_changed(hass, DEFAULT_SCAN_INTERVAL)\n assert len(hass.config_entries.flow._progress) == 0\n\n assert ufp.api.update.call_count == 10\n assert ufp.entry.state == ConfigEntryState.LOADED\n\n await time_changed(hass, DEFAULT_SCAN_INTERVAL)\n assert ufp.api.update.call_count == 11\n assert len(hass.config_entries.flow._progress) == 1", "def test_revert_3(self):\n self.image_create(self.rurl)\n some_files = [\"dev/xxx\", \"dev/yyy\", \"dev/zzz\",\n \"dev/dir1/aaaa\", \"dev/dir1/bbbb\", \"dev/dir2/cccc\",\n \"dev/cfg/ffff\", \"dev/cfg/gggg\",\n \"dev/cfg/dir3/iiii\", \"dev/cfg/dir3/jjjj\"]\n\n some_dirs = [\"dev/dir1/\", \"dev/dir1/\", \"dev/dir2/\", \"dev/cfg/dir3/\"]\n self.pkg(\"install dev dev2\")\n self.pkg(\"verify\")\n self.files_are_all_missing(some_dirs + some_files)\n self.create_some_files(some_dirs + some_files)\n self.files_are_all_there(some_dirs + some_files)\n self.pkg(\"verify -v\")\n self.damage_files([\"dev/cfg/bar2\"])\n self.pkg(\"revert -vvv --tagged init-dev\")\n self.pkg(\"verify -v\")\n self.files_are_all_missing(some_dirs + some_files)", "def test_serialize_circuit_rotations_tape(self, monkeypatch, tmpdir, test_batch_result):\n qml.enable_tape()\n dev = QeQiskitDevice(wires=1, shots=1000, backend=\"qasm_simulator\", analytic=False)\n\n circuit_history = []\n\n with qml.tape.QuantumTape() as tape1:\n qml.Hadamard(wires=[0])\n qml.expval(qml.Hadamard(0))\n\n with monkeypatch.context() as m:\n m.setattr(pennylane_orquestra.cli_actions, \"user_data_dir\", lambda *args: tmpdir)\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"gen_expval_workflow\",\n lambda component, backend_specs, circuits, operators, **kwargs: circuit_history.extend(\n circuits\n ),\n )\n\n # Disable submitting to the Orquestra platform by mocking Popen\n m.setattr(subprocess, \"Popen\", lambda *args, **kwargs: MockPopen())\n m.setattr(\n pennylane_orquestra.orquestra_device,\n 
\"loop_until_finished\",\n lambda *args, **kwargs: test_batch_result, # The exact results are not considered in the test\n )\n\n dev.execute(tape1)\n\n expected = 'OPENQASM 2.0;\\ninclude \"qelib1.inc\";\\nqreg q[1];\\ncreg c[1];\\nh q[0];\\nry(-0.7853981633974483) q[0];\\n'\n assert circuit_history[0] == expected\n qml.disable_tape()", "def test_same_dir(self):\n # Run for 5 algo-calls\n testargs = [\"python\", \"scripts/smac\", \"--scenario_file\",\n self.scenario_one, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()\n # Increase limit and run for 10 (so 5 more) by using restore_state\n testargs = [\"python\", \"scripts/smac\", \"--restore_state\",\n self.output_one, \"--scenario_file\",\n self.scenario_one, \"--verbose\", \"DEBUG\"]\n with mock.patch.object(sys, 'argv', testargs):\n self.smaccli.main_cli()", "def test_abort_in_resourcing_mid(\n set_restart_after_abort: None,\n setup_context_monitoring_for_abort_test: None,\n composition: conf_types.Composition,\n):", "def test_pre_post_hooks(self):\n os.makedirs('/tmp/localhost/pacha_pre')\n os.makedirs('/tmp/localhost/pacha_post')\n pre_script = open('/tmp/localhost/pacha_pre/foo.sh', 'w')\n pre_script.write('''touch /tmp/localhost/pre_got_executed.txt''')\n pre_script.close()\n post_script = open('/tmp/localhost/pacha_post/bar.sh', 'w')\n post_script.write('''touch /tmp/localhost/post_got_executed.txt''')\n post_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.pre_hooks()\n run.post_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/post_got_executed.txt'))\n self.assertTrue(os.path.isfile('/tmp/localhost/pre_got_executed.txt'))", "def test_pose_shifter(self):\n self.dyn_client.update_configuration({\"linear_offset_x\":0.1, \"linear_offset_y\":0.0, \"linear_offset_z\":0.05})\n pose_in = geometry_msgs.msg.PoseStamped()\n expected = geometry_msgs.msg.PoseStamped()\n pose_in.header.frame_id = \"base_link\"\n expected.header.frame_id = \"base_link\"\n\n pose_in.pose.position.x = 1.0\n pose_in.pose.position.y = 2.0\n pose_in.pose.position.z = 3.0\n pose_in.pose.orientation.x = 0.0\n pose_in.pose.orientation.y = 0.0\n pose_in.pose.orientation.z = 0.0\n pose_in.pose.orientation.w = 1.0\n\n # shift of 10 cm in X and 5 cm in Z\n expected.pose.position.x = 1.1\n expected.pose.position.y = 2.0\n expected.pose.position.z = 3.05\n expected.pose.orientation.x = 0.0\n expected.pose.orientation.y = 0.0\n expected.pose.orientation.z = 0.0\n expected.pose.orientation.w = 1.0\n\n self.pose_in_pub.publish(pose_in)\n\n while not self.wait_for_result:\n self.event_out.publish('e_start')\n\n self.assertEqual(self.result.header.frame_id, expected.header.frame_id)\n self.assertEqual(self.result.pose, expected.pose)", "def validate_orientation():\r\n ui.click_and_WAIT_for_item_with_retries('/tray/', 'Settings', True)\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Settings', role='button')\r\n time.sleep(WAIT)\r\n ui.doDefault_on_obj(name='Displays', role='link')\r\n time.sleep(WAIT)", "def fix_rotation(self):\n cube_helper = Cube()\n cube_helper.scramble = self.scramble.split()\n cube_helper.solve = self.solve.split()\n\n rotations = []\n for move in cube_helper.scramble:\n cube_helper.exe_move(move)\n for move in cube_helper.solve:\n if move not in cube_helper.rotation:\n if not self.currently_parsing_smart_cube:\n break\n cube_helper.exe_move(move)\n\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n flag = 
False\n for i in range (4):\n if (up == \"5\"):\n flag = True\n break\n rotations.append(\"x\")\n cube_helper.exe_move(\"x\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n\n if (front != \"23\" and not flag):\n rotations.append(\"z\")\n cube_helper.exe_move(\"z\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n up = str_perm[4]\n front = str_perm[22]\n\n while (up != \"5\" or front != \"23\"):\n rotations.append(\"y\")\n cube_helper.exe_move(\"y\")\n str_perm = cube_helper.perm_to_string(cube_helper.current_perm).split()\n front = str_perm[22]\n\n final_rot = []\n while len(rotations) >= 3:\n if rotations[0] == rotations[1] == rotations[2]:\n r_fix = \"{}'\".format(rotations[0]).replace(\"''\",\"\")\n final_rot.append(r_fix)\n rotations.pop(0)\n rotations.pop(0)\n rotations.pop(0)\n else:\n final_rot.append(rotations[0])\n rotations.pop(0)\n if final_rot:\n return final_rot\n return rotations", "def test_default_parameters(self):\n\n assert self.test_shape.rotation_angle == 360", "def rotate(database):\n now = time.time()\n for level, delay in sorted(LEVELS.items(), key=lambda x: x[1]):\n cmd = ['/bin/rsnapshot', '-c', CONFIG, level]\n last_run = database[level]\n if (now - last_run) > delay:\n print(\"running \" + ' '.join(cmd))\n subprocess.check_call(cmd)\n database[level] = now\n else:\n print(\"skipping \" + level)", "def test_rollback():", "def experiment3():\n raise FAKE_ERROR", "def test_reset_confirmation_failure(self):\n self._create_program_and_course_enrollment(self.program_uuid, self.user)\n\n with pytest.raises(CommandError):\n with self._replace_stdin('no'):\n call_command(self.command, self.program_uuid)\n\n self._validate_enrollments_count(1)", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def test_verification_failed(self):\n pass", "def test_restart_with_permission(self):\n self.create_user_with_role(\n self.user.name, self.user.email, self.user.password, Role.tester)\n self.create_forktest(\"own-fork-commit\", TestPlatform.linux, regression_tests=[2])\n self.create_completed_regression_t_entries(3, [2])\n with self.app.test_client() as c:\n response = c.post(\n '/account/login', data=self.create_login_form_data(self.user.email, self.user.password))\n response = c.get('/test/restart_test/3')\n test = Test.query.filter(Test.id == 3).first()\n self.assertEqual(test.finished, False)", "def rotate(self, *args, **kwargs): # real signature unknown\n pass", "def test_unix_client_account_rotate_reconciliation(users_and_roles, unix_machine_environment_config,\n agent_enrolled_unix_system_with_users, proxy_start_stop):\n\n \"\"\"\n Testrail Link:\n https://testrail.centrify.com/index.php?/cases/view/1293459\n https://testrail.centrify.com/index.php?/cases/view/1293460\n https://testrail.centrify.com/index.php?/cases/view/1293461\n \"\"\"\n\n # verfiy the test is run with single thread.\n assert 'PYTEST_XDIST_WORKER_COUNT' not in os.environ, \\\n f'This test cannot be run with multiple threads due to starting and stopping connectors'\n\n enrolledsystems = agent_enrolled_unix_system_with_users\n accounts = enrolledsystems[0][\"Accounts\"]\n resourceId = enrolledsystems[0][\"ResourceId\"]\n proxyid = enrolledsystems[0][\"ProxyId\"]\n session = enrolledsystems[0][\"Session\"]\n proxycontrol = proxy_start_stop\n\n conf = 
unix_machine_environment_config\n success_message = conf['success_message']\n\n right_data = (\"Privileged Access Service Power User\", \"role_Privileged Access Service Power User\")\n\n requester_session = users_and_roles.get_session_for_user(right_data[0])\n\n accountuser = accounts[1]\n\n logger.info(\"stop the agent\")\n ssh_manager.ssh_stop_agent(session)\n proxycontrol(proxyid, True)\n\n # count managed password change events\n filter = [['AccountName', accountuser['Name']], ['ComputerID', resourceId]]\n\n # set a different password for the user\n ssh_manager.change_sshuser_password(session, accountuser['Name'], \"differentpassword3\", success_message)\n\n logger.info(\"Rotate account and verify reconciliation, Connector is available\")\n result, success = ResourceManager.rotate_password(requester_session, accountuser[\"Id\"])\n assert result[\"Result\"], \"Reconciliation failed, Rotate password failed: \" + accountuser['Name']\n\n # verify operationMode\n rows = RedrockController.wait_for_event_by_type_filter(requester_session,\n \"Cloud.Server.LocalAccount.PasswordRotate\",\n filter=filter)\n eventcount = len(rows)\n logger.info(f\"# of Cloud.Server.LocalAccount.PasswordRotate events : {eventcount}\")\n assert rows[0][\"OperationMode\"] == \"Connector\", \"Failed to verify OperationMode is Connector\"\n\n # stop Connector, Should fail\n logger.info(\"Stopping the connector\")\n proxycontrol(proxyid, False)\n # set a different password for the user\n ssh_manager.change_sshuser_password(session, accountuser['Name'], \"differentpassword4\", success_message)\n\n logger.info(f\"# of Cloud.Server.LocalAccount.PasswordRotate events : {eventcount}\")\n result, success = ResourceManager.rotate_password(requester_session, accountuser[\"Id\"])\n assert result[\n \"Result\"] == False, f\"Reconciliation success, Rotate password is successfull: {accountuser['Name']}\"\n\n # Start agent\n logger.info(\"Starting the agent\")\n ssh_manager.ssh_start_agent(session, True)\n # set a different password for the user\n ssh_manager.change_sshuser_password(session, accountuser['Name'], \"differentpassword5\", success_message)\n\n logger.info(\"Rotate account and verify reconciliation, Agent is available\")\n result, success = ResourceManager.rotate_password(requester_session, accountuser[\"Id\"])\n assert result[\"Result\"], \"Reconciliation failed, Rotate password failed: \" + accountuser['Name']\n\n # verify operationMode\n rows = RedrockController.wait_for_event_by_type_filter(requester_session,\n \"Cloud.Server.LocalAccount.PasswordRotate\",\n filter=filter, count=eventcount + 1)\n eventcount = len(rows)\n logger.info(f\"# of Cloud.Server.LocalAccount.PasswordRotate events : {eventcount}\")\n assert rows[0][\"OperationMode\"] == \"Client\", \"Failed to verify OperationMode is Client\"\n\n # verify account again, both connector and agent are running\n logger.info(\"Starting connector\")\n proxycontrol(proxyid, True)\n # set a different password\n ssh_manager.change_sshuser_password(session, accountuser['Name'], \"differentpassword3\", success_message)\n\n logger.info(\"Rotate account and verify reconciliation, client and connector are available\")\n result, success = ResourceManager.rotate_password(requester_session, accountuser[\"Id\"])\n assert result[\"Result\"], \"Reconciliation failed, Rotate password failed: \" + accountuser['Name']\n\n # verify operationMode\n rows = RedrockController.wait_for_event_by_type_filter(requester_session,\n \"Cloud.Server.LocalAccount.PasswordRotate\",\n filter=filter, 
count=eventcount + 1)\n eventcount = len(rows)\n logger.info(f\"# of Cloud.Server.LocalAccount.PasswordRotate events {eventcount}\")\n assert rows[0][\"OperationMode\"] == \"Client\", \"Failed to verify OperationMode is Client\"", "def correct_rotation(k_rotations):\n\n for key, value in Chunk.global_piece_rotations.items():\n Chunk.global_piece_rotations[key] = (k_rotations + value) % 4\n # Should I correct it for the side rotations also?", "def test_execution(self):\n self.assertTrue(True)", "def test_attempt_to_overwrite(fake_meas, tmpdir):\n fake_meas.name = 'test'\n fake_meas.id = '001'\n fake_meas.root_task.default_path = str(tmpdir)\n\n with open(str(tmpdir.join('test_001.meas.ini')), 'wb'):\n pass\n\n fake_meas.dependencies.collect_runtimes()\n res, err = fake_meas.run_checks()\n assert res\n assert 'exopy.internal_checks' in err\n assert 'duplicate' in err['exopy.internal_checks']", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0o555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir, \"xxx\"), \"w\")\n except OSError:\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEqual(f.tell(), 6)\n f.seek(0, 0)\n self.assertEqual(f.read(), b\"abcdef\")", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir,\"xxx\"), \"w\")\n except (OSError, IOError):\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEquals(f.tell(), 6)\n f.seek(0, 0)\n self.assertEquals(f.read(), \"abcdef\")\n log.close()", "def test_restart_redeeming(\n self,\n get_config: GetConfig,\n now: datetime,\n voucher: bytes,\n before_restart: int,\n after_restart: int,\n public_key: PublicKey,\n ) -> None:\n # Divide redemption into some groups that will succeed before a\n # restart and some that must succeed after a restart.\n num_redemption_groups = before_restart + after_restart\n # Give it enough tokens so each group can have one.\n num_tokens = num_redemption_groups\n\n store = self.useFixture(TemporaryVoucherStore(lambda: now, get_config)).store\n\n def first_try() -> None:\n # It will let `before_restart` attempts succeed before hanging.\n redeemers: list[IRedeemer] = []\n redeemers.extend([DummyRedeemer(public_key)] * before_restart)\n redeemers.extend([NonRedeemer()] * after_restart)\n\n controller = PaymentController(\n Clock(),\n store,\n IndexedRedeemer(redeemers),\n default_token_count=num_tokens,\n num_redemption_groups=num_redemption_groups,\n allowed_public_keys={public_key},\n )\n self.assertThat(\n Deferred.fromCoroutine(controller.redeem(voucher)),\n has_no_result(),\n )\n\n def second_try() -> PaymentController:\n redeemers: list[IRedeemer] = []\n # It will succeed only for the higher counter values which did\n # not succeed or did not get started on the first try.\n redeemers.extend([NonRedeemer()] * before_restart)\n redeemers.extend([DummyRedeemer(public_key)] * after_restart)\n\n # The controller will find the voucher in the voucher store and\n # restart redemption on its 
own.\n return PaymentController(\n Clock(),\n store,\n IndexedRedeemer(redeemers),\n # The default token count for this new controller doesn't\n # matter. The redemption attempt already started with some\n # token count. That token count must be respected on\n # resumption.\n default_token_count=0,\n # The number of redemption groups must not change for\n # redemption of a particular voucher.\n num_redemption_groups=num_redemption_groups,\n allowed_public_keys={public_key},\n )\n\n first_try()\n controller = second_try()\n\n persisted_voucher = controller.get_voucher(voucher)\n self.assertThat(\n persisted_voucher.state,\n Equals(\n model_Redeemed(\n finished=now,\n token_count=num_tokens,\n ),\n ),\n )", "def test_archive_run(self, mock_move):\n run_dir = 'data/nanopore_data/run4/done_demuxing/20200104_1412_MN19414_AAU644_68125dc2'\n np_run = Nanopore(run_dir)\n np_run.archive_dir = '/some/dir'\n np_run.archive_run()\n mock_move.assert_called_once()", "def test_compress_fastq_dry_run_integrity(first_read, second_read):\n # GIVEN the path to a existing bam file and a cli runner\n runner = CliRunner()\n assert first_read.exists()\n assert second_read.exists()\n # WHEN running the compress command with dry_run\n result = runner.invoke(\n fastq,\n [\n \"--first-read\",\n str(first_read),\n \"--second-read\",\n str(second_read),\n \"--dry-run\",\n \"--check-integrity\",\n ],\n obj={},\n )\n # THEN assert the command was succesful even without a valid api\n assert result.exit_code == 0", "def rotate(self):\n pass" ]
[ "0.60960656", "0.6015227", "0.6008511", "0.5992133", "0.55971855", "0.553978", "0.5513998", "0.55035573", "0.54847586", "0.5416907", "0.5409339", "0.5401802", "0.5393093", "0.5367785", "0.53527", "0.53498495", "0.5327782", "0.53196454", "0.5276392", "0.5270697", "0.52541757", "0.5241361", "0.52249384", "0.5192725", "0.5190621", "0.51894", "0.51886064", "0.5186892", "0.51803666", "0.51765317", "0.51721364", "0.5159973", "0.51571804", "0.51548135", "0.51519597", "0.51310027", "0.51205426", "0.51174456", "0.51170236", "0.50989085", "0.50955534", "0.50789213", "0.50742185", "0.5064677", "0.5060282", "0.5057544", "0.5053621", "0.50494635", "0.5049433", "0.50294656", "0.5023008", "0.50228405", "0.5021761", "0.50212586", "0.5014173", "0.50136083", "0.50082815", "0.50003546", "0.5000082", "0.49962708", "0.4995634", "0.49918407", "0.49765942", "0.49741384", "0.49707013", "0.49697077", "0.49683747", "0.49582183", "0.49548024", "0.4939774", "0.49383584", "0.4936882", "0.49283066", "0.49269062", "0.49254137", "0.4912927", "0.49114436", "0.49077523", "0.490513", "0.49014944", "0.48947033", "0.4882727", "0.48820165", "0.4878449", "0.4865565", "0.4858931", "0.48566368", "0.48536417", "0.48534274", "0.48504218", "0.48500395", "0.4847368", "0.48469704", "0.48433432", "0.48429826", "0.48359114", "0.482493", "0.48174974", "0.4816152", "0.48104754" ]
0.5355824
14
Tests of try rotation with target exists
def test_process_log_with_target_exists(self):
    with tempfile.TemporaryDirectory() as sandbox:
        with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout:
            srcfile = Path(sandbox, 'pokus.log')
            srcfile.touch()
            destfile = Path(sandbox, 'backup', 'pokus.log')
            destfile.mkdir(parents=True)
            compressors = process_log(
                datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30),
                {
                    'target': '{{path}}/backup/{{name}}.{{ext}}',
                    'interval': 'hourly',
                    'compress': 'gzip -9',
                },
                'hourly',
                str(srcfile),
                10
            )
            self.assertEqual(compressors, [])
            self.assertTrue(srcfile.exists())
            self.assertTrue(destfile.exists())
            self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... rotating... "{src}" -> "{dest}" target already exists!\n'.format(src=srcfile, dest=destfile))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def check_rotation_fault(self, current_pos, target_pos):\n \n fault_pos = 340.\n \n def cw_dist(A, B):\n return (B-A)%360.\n def ccw_dist(A,B):\n return (A-B)%360.\n def fast_dist(A,B):\n return min(ccw_dist(A,B), cw_dist(A,B))\n def fast_dir(A,B):\n if ccw_dist(A,B) > cw_dist(A,B): return +1\n else: return -1\n \n def dist(A,B, direction):\n if direction > 0: return cw_dist(A,B)\n if direction < 0: return ccw_dist(A,B)\n \n print(\"A->B Fast dir {}\".format(fast_dir(current_pos,target_pos)))\n \n print(\"A->F fast\", fast_dist(current_pos, fault_pos), fast_dir(current_pos, fault_pos))\n print(\"F->B fast\", fast_dist(fault_pos,target_pos), fast_dir(fault_pos, current_pos))\n d = fast_dir(current_pos,target_pos)\n print(\"A->F\", dist(current_pos, fault_pos, d), dist(current_pos, fault_pos, -d))\n print(\"F->B\", dist(fault_pos, target_pos, d) , dist(fault_pos, target_pos, -d))\n \n if dist(current_pos, fault_pos, d)+ dist(fault_pos, target_pos,d) >= 180.:\n return [target_pos]\n else:\n middle_target = current_pos + (360 - fast_dist(current_pos, target_pos))/2\n middle_target %=360\n print(\"A->M->B\", fast_dist(current_pos, middle_target), fast_dist(middle_target, target_pos))\n return [middle_target, target_pos]", "def verify_legal_rotation(self, direction):\n test_figure = None\n if direction == \"CW\":\n test_figure = self.get_block_positions(self.active_piece.get_cw_rotation())\n elif direction == \"CCW\":\n test_figure = self.get_block_positions(self.active_piece.get_ccw_rotation())\n\n for b_x, b_y in test_figure:\n if b_x < 0 or b_x >= self.WIDTH:\n return False\n\n if b_y < 0 or b_y >= self.HEIGHT:\n return False\n\n if self.board[b_y][b_x] != 0:\n return False\n return True", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def can_rotate(self) -> (bool, list, list):\n arms, axis = self.get_arms()\n rotation = self.get_rotation()\n if rotation[1][0] == 0:\n return False\n coord_axis = np.array([[axis.x_obj], [axis.y_obj]])\n coord_arms = [np.array([[arm.x_obj], [arm.y_obj]])\n for arm in arms]\n coord_new_arms = []\n # Collecting arm coordinates in the situation there turnstile rotates\n for i in range(len(arms)):\n coord_arm = coord_arms[i]\n coord_new_arms.append(\n np.dot(rotation, coord_arm - coord_axis) + coord_axis)\n can_rotate = True\n for i in range(len(arms)):\n coord_arm = coord_arms[i]\n coord_new_arm = coord_new_arms[i]\n # Object turnstile should push\n coord_front = coord_arm + coord_new_arm - coord_axis\n coord_character = np.array(\n [[self.moving_character.x_obj], [self.moving_character.y_obj]])\n obj_front = self.grid.obj_list[\n coord_front[0][0], coord_front[1][0]]\n if not (isinstance(obj_front, ob.Void) or (coord_front == coord_character).all()):\n can_rotate = False\n # Object being at the destination of the arm\n obj_target = self.grid.obj_list[\n coord_new_arm[0][0], coord_new_arm[1][0]]\n if not isinstance(obj_target, (ob.Void, ob.TurnstileBloc)):\n can_rotate = False\n return can_rotate, coord_arms, coord_new_arms", "def test_skel_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1, aef=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def __checkDestination(self):\n return os.path.exists(self.__targetPath)", "def 
test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def match(cube):\n \n #M1'\n M1 = (cube[1,1,0] & cube[1,1,1] & \n (not cube[0,0,2]) & (not cube[1,0,2]) & (not cube[2,0,2]) &\n (not cube[0,1,2]) & (not cube[1,1,2]) & (not cube[2,1,2]) &\n (not cube[0,2,2]) & (not cube[1,2,2]) & (not cube[2,2,2]));\n if M1:\n return True;\n \n # gerate rotations around z/vertical axis\n cuberots = [rotate(cube, axis = 2, steps = rot) for rot in range(4)];\n #print('Cube rotations:');\n #[printCube(c) for c in cuberots] \n \n # M2' and all rotations\n for curo in cuberots:\n M2 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]));\n if M2:\n return True;\n \n # M3' and all rotations\n for curo in cuberots:\n M3 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] & curo[2,1,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]));\n if M3:\n return True;\n \n # M4' and all rotations\n for curo in cuberots:\n M4 = (curo[1,1,0] & curo[1,1,1] & curo[2,2,1] & curo[2,2,2] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]));\n if M4:\n return True;\n \n # M5' and all rotations\n for curo in cuberots:\n M5 = (curo[1,2,0] & curo[1,1,1] & \n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M5:\n return True;\n \n # M6' and all rotations\n for curo in cuberots:\n M6 = (curo[2,1,0] & curo[1,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) &\n (not curo[0,1,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M6:\n return True;\n \n # M7' and all rotations\n for curo in cuberots:\n M7 = (curo[2,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) & (not curo[2,1,0]) &\n (not curo[0,2,0]) & (not curo[1,2,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,1,1]) & (not curo[2,1,1]) &\n (not curo[0,2,1]) & (not curo[1,2,1]) & (not curo[2,2,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M7:\n return True;\n \n return False;", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def verifyShiftFile(self):\n if self['refimage'] and fu.findFile(self['refimage']):\n return True\n 
else: return False", "def can_rotate(self, current_piece):\n rotated_shape = current_piece.template[current_piece.next_rotation()] \n for x in range(pieces.Piece.TEMPLATE_WIDTH):\n for y in range(pieces.Piece.TEMPLATE_HEIGHT):\n board_x = current_piece.get_pos_x() + x\n board_y = current_piece.get_pos_y() + y \n if board_x < 0 and rotated_shape[y][x]:\n return False\n if board_y < 0 and rotated_shape[y][x]:\n return False\n if board_x >= game_config.BOARD_BOX_COUNT_X:\n return False\n if board_y >= game_config.BOARD_BOX_COUNT_Y:\n return False\n if self.board.get_cell(board_x, board_y) and rotated_shape[y][x]:\n return False\n return True", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def _check_rotation(spec_nest, path):\n spec = _get_from_nest(spec_nest, path)\n if spec is not None and not isinstance(spec, primitives_pb2.RotationType):\n raise InvalidSpecError(\n f'{\"/\".join(path)} was expected to be of type Rotation, but is instead '\n f'{type(spec)}')", "def test_rotation(self, tol):\n theta = 0.98\n S = symplectic.rotation(theta)\n expected = np.block([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n np.allclose(S, expected, atol=tol, rtol=0)", "def interaction_turnstile(self) -> None:\n if self.get_rotation()[1][0] != 0:\n condition = self.can_rotate()[0]\n if condition:\n self.rotate()", "def check_angle(self):\n self.find_pixels()\n alpha_theta=np.deg2rad(70)\n alpha_phi=np.deg2rad(70)\n extreme_values=self.compute_extreme_values(alpha_phi, alpha_theta)\n x=np.linspace(extreme_values[0], extreme_values[1], self.number_of_pix[1])\n y=np.linspace(extreme_values[2], extreme_values[3], self.number_of_pix[0])\n phi_0=20\n phi_0=np.deg2rad(phi_0)\n j, diff=self.compute_phi(\"find_orient.png\")\n print \"j=\", j\n print \"diff=\", diff", "def test_g_asignar_rol(self):", "def is_orientation_ok(image,k=2,is_first=True):\n\n mid_x, mid_y = int(0.5*image.shape[1]), int(0.5*image.shape[0])\n\n # Get moment for first body half \n image_0 = np.array(image)\n image_0[:,:int(mid_x)] = 0\n image_0 = image_0[:,int(mid_x):]\n moment_0 = get_moment(image_0,k)\n\n # Get moment for second body half\n image_1 = np.array(image)\n image_1[:,int(mid_x):] = 0\n image_1 = np.fliplr(image_1)\n image_1 = image_1[:,int(mid_x):]\n moment_1 = get_moment(image_1,k)\n\n # Compute descriminant and flip flag\n discrim = (moment_0 - moment_1)/(moment_0 + moment_1)\n if discrim < 0:\n ok = False\n else:\n ok = True \n return ok, discrim", "def check_random_rotation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [degrees, resample, expand, center, fill_value], _ = parse_user_args(method, *args, **kwargs)\n check_degrees(degrees)\n\n if resample is not None:\n type_check(resample, (Inter,), \"resample\")\n if expand is not None:\n type_check(expand, (bool,), \"expand\")\n if center is not None:\n check_2tuple(center, \"center\")\n if fill_value is not None:\n check_fill_value(fill_value)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def check_shot_on_target(self, shot):\n # Defining a few variables to ease the reading\n # Here we define the x and y interval of the goal's segment\n x_min = 
min(self.s_pos.x, self.e_pos.x)\n x_max = max(self.s_pos.x, self.e_pos.x)\n\n y_min = min(self.s_pos.y, self.e_pos.y)\n y_max = max(self.s_pos.y, self.e_pos.y)\n\n # Shortening variables names\n o_x = shot.opponent.pos.x\n o_y = shot.opponent.pos.y\n\n # If the angle = pi / 2 or - pi / 2, then tan(angle) is undefined\n # In these cases, the shot is vertical, therefore it is valid\n # iff the x coordinate of the opponent is in the goal's x interval\n if abs(shot.angle) == math.pi / 2:\n return self.is_in_interval(x_min, x_max, o_x)\n\n # If the angle = 0, pi or -pi, then tan(angle) is 0 which can lead to \n # undefined intersection points (if the goal is vertical for example)\n # although there is an intersection point\n # \n # In these cases, the shot is horizontal, therefore it is valid\n # iff the y coordinate of the opponent is in the goal's y interval\n if abs(shot.angle) == math.pi or shot.angle == 0:\n return self.is_in_interval(y_min, y_max, o_y)\n\n # Using tan the least amount of time possible, for this is a slow function\n tan_theta = math.tan(shot.angle)\n\n # Define the LE of the shot\n le1 = LinearEquation(tan_theta, o_y - tan_theta * o_x)\n le2 = None\n\n # If the goal is vertical, finding the intersection point\n # is not possible using the normal way\n #\n # That being said, unless the LE of the shot is vertical too (which it \n # isn't as it is checked before hand) there has to be an intersection point\n # This intersection must happen when at the x coodinate of the goal's segment\n # therefore, it is possible to compute the y coordinate of the intersection by\n # computing the application of the shot's LE on this ex coordinate\n #\n # Then, the resulting y is valid iff it is in the goal's segment interval\n if self.e_pos.x - self.s_pos.x == 0:\n y = le1.apply(self.e_pos.x)\n return self.is_in_interval(y_min, y_max, y)\n\n # The normal way of solving the intersection of these two LEs\n else:\n\n # Shortening variables by computing the coefficient of the goal's LE\n ratio = (self.e_pos.y - self.s_pos.y) / (self.e_pos.x - self.s_pos.x)\n\n # If the lines are parallels (have the same coefficient) return False\n if math.tan(shot.angle) == ratio:\n return False\n\n # Defining the goal's LE\n le2 = LinearEquation(ratio, self.e_pos.y - self.e_pos.x * ratio)\n\n # Finding the intersection point of the two LEs\n # If there isn't one, return False (but there should be one\n # given all the asserts we do before hand, this is just for completion sake)\n p_intersect = le1.intersection(le2)\n if p_intersect == None:\n return False\n\n # If the intersection point's abscissa is in the goal's x interval, then it is\n # a valid abstracted shot going \n return self.is_in_interval(x_min, x_max, p_intersect.x)", "def test_extract_rot_angle():\n v = np.zeros((4,2))\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Zero velocities not allowed.\"\n \n v[:,1] = 1.\n try:\n angle = extract_rot_angle(v,min_points=0)\n except AssertionError,err:\n assert err.args[0]==\"Failed to get both forward and backward directions.\"\n\n # Forwards-backwards motion.\n v[:,1] = 0.\n v[:2,0] = -1.1\n v[2:,0] = 1.2\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,np.pi)\n\n # Forwards-backwards motion.\n v[:,0] = 0.\n v[:2,1] = -.9\n v[2:,1] = .8\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2)\n\n # Forwards-backwards motion with noise.\n v[:2,1] += (np.random.rand(2)*2-1)/10\n v[2:,1] += 
(np.random.rand(2)*2-1)/10\n angle = extract_rot_angle(v,min_points=0)\n assert np.isclose(angle,-np.pi/2,atol=.1)", "def test_asssert_rotation_matrix_behaves_like_check_matrix():\n random_state = np.random.RandomState(2345)\n for _ in range(5):\n a = pr.random_axis_angle(random_state)\n R = pr.matrix_from_axis_angle(a)\n original_value = R[2, 2]\n for error in [0, 1e-8, 1e-7, 1e-5, 1e-4, 1]:\n R[2, 2] = original_value + error\n try:\n pr.assert_rotation_matrix(R)\n pr.check_matrix(R)\n except AssertionError:\n assert_raises_regexp(\n ValueError, \"Expected rotation matrix\", pr.check_matrix, R)", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def check_ball_on_target():\n\n pass", "def test_arbitrary_rotation(self):\n \n # This test is run a bunch of times on various intervals, ranging from 50% to 1/6\n\t\t# (16.667%).\n for i in range(2, 7):\n \n interval = 1 / i # The amount to increase each qubit's probability by, relative to the previous qubit\n step_string = \"{:.4f}\".format(100 / i) # The decimal representation of the interval, as a percent\n target_probabilities = [0] * (i + 1) # This will store the desired probabilities of each qubit\n for j in range(0, i + 1):\n target_probability = j * interval\n target_probabilities[j] = target_probability\n\n # Run the test\n self.run_test(self.arbitrary_rotation_function, f\"Rotation with steps of 1/{i} ({step_string}%)\", 2000, target_probabilities, 0.05)", "def valid(self, target):", "def _test_stick_position(self, target):\n\n cue = np.array(self.cue_coords)\n target = np.array(target)\n\n # Get rotation matrix\n delta = target - cue\n l = np.linalg.norm(delta)\n rotation = np.array([[delta[1] / l, -delta[0] / l], [delta[0] / l, delta[1] / l]])\n\n rot_start = rotation.dot(target)\n rot_end = rotation.dot(cue)\n\n for ball in self.other_balls:\n rot_ball = rotation.dot(np.array(ball))\n dist = np.abs(rot_ball[0] - rot_start[0])\n if dist < 2.1 * self.ball_radius:\n return False\n\n return True", "def check(x):\n rotation_a = rotation_b = 0\n for a, b in zip(A, B):\n if x != a and x != b:\n return -1\n elif x != a:\n # x != a and x == b\n rotation_a += 1\n elif x != b:\n # x == a and x != b\n rotation_b += 1\n # else\n # x == a and x == b\n # do nothing since no rotation\n\n # Minimum rotation to have all elements equals x in A or B\n return min(rotation_a, rotation_b)", "def can_transform(\n self,\n target_frame: str,\n source_frame: str,\n time: Time,\n timeout: Duration = Duration()\n ) -> bool:\n try:\n self.lookup_transform(target_frame, source_frame, time, timeout)\n return True\n except tf2.TransformException:\n return False", "def test_rotation_angle_warning(self):\n\n def warning_trigger():\n try:\n paramak.CenterColumnStudyReactor(\n inner_bore_radial_thickness=20,\n inboard_tf_leg_radial_thickness=50,\n center_column_shield_radial_thickness_mid=50,\n center_column_shield_radial_thickness_upper=100,\n inboard_firstwall_radial_thickness=20,\n divertor_radial_thickness=100,\n inner_plasma_gap_radial_thickness=80,\n plasma_radial_thickness=200,\n outer_plasma_gap_radial_thickness=90,\n # first number must be between plasma inner/outer radius\n plasma_high_point=(245, 240),\n plasma_gap_vertical_thickness=40,\n center_column_arc_vertical_thickness=520,\n 
rotation_angle=360)\n\n except BaseException:\n pass\n\n with warnings.catch_warnings(record=True) as w:\n warnings.simplefilter(\"always\")\n warning_trigger()\n assert len(w) == 1\n assert issubclass(w[-1].category, UserWarning)\n assert \"360 degree rotation may result in a Standard_ConstructionError or AttributeError\" in str(\n w[-1].message)", "def correct_angle(self, tank_angle, target_angle):\n angle_diff = periodic_difference_of_angles(target_angle, tank_angle)\n if abs(angle_diff) <= MIN_ANGLE_DIF:\n self.tank.stop_turning()\n return True\n else:\n return False", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_returns_single_phrase_if_len_one(self):\n result = find_rotation_point([\"nathan\"])\n self.assertEqual(result, 0)", "def isFinished(self):\n current = self.robot.drivetrain.get_gyro_angle()\n # If abs(target - current) < threshold then return true\n return math.fabs(self._target_degrees - current) <= self._degree_threshold or self.isTimedOut()", "def test_rotation_isometry(self):\n import numpy\n\n # test for all kinds of curvatures K\n for k in (0, 1, -1, 1/11, -1/11, 11, -2):\n \n s = space(curvature=k)\n\n # use a small enough magnitude to not break math for very negative K\n magic = 0.33377777373737737777\n # 1/sqrt(2)\n s2_ref = 0.707106781186547524400844362104785\n\n o = s.make_origin(2)\n p = s.make_point((1, 0), magic)\n q = s.make_point((s2_ref, s2_ref), magic)\n\n rot = space_point_transform(\n numpy.array([[1,0,0],[0,s2_ref,-s2_ref],[0,s2_ref,s2_ref]]),\n curvature=k,\n math = common_math\n )\n\n f, g, i = map(space_point_transform, (p, q, o))\n\n def check_transform_eq(t1, t2, invert=False):\n for ref in (\n s.make_point((5/13, 12/13), magic),\n s.make_point((-3/5, 4/5), magic)\n ):\n self.assertTrue(invert ^ point_isclose(\n t1(ref),\n t2(ref),\n abs_tol = 1e-12\n ))\n\n # 1/8 turn, times 8\n check_transform_eq(rot*8, i)\n\n # rotate, shift, rotate\n check_transform_eq(g, rot + f + rot * -1)\n\n # the other way\n check_transform_eq(f, rot * -1 + g + rot)", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def is_rotation_running(self) -> bool:\n\n return self.send(self.cmd.GET_ROTATION_RUNNING)", "def check_orientation(self) -> None:\n if self.compute_volume() > 0:\n raise (\n BaseException(\n \"The volume within the surface is negative. 
It seems that you faces\"\n \"are not oriented correctly according to the clockwise flag\"\n )\n )", "def check_shot_direction(self, shot):\n return Vector.v_from_a(shot.angle) * self.dir < 0", "def is_valid_angle(self, theta_step):\n return not (self.angle % theta_step)", "def determine_rotation(arm, d, tip_data, rot_data):\n n_t = np.zeros(3)\n for this_n_t in tip_data['pos_ntip_wrt_r']:\n n_t += this_n_t\n n_t /= len(tip_data['pos_ntip_wrt_r'])\n print(\"Our n_t to use in this stage: {}\".format(n_t))\n\n K = len(rot_data['pos_ntip_wrt_s'])\n errors_zyz = []\n errors_zyx = []\n\n for k in range(K):\n lhs = rot_data['pos_ntip_wrt_s'][k]\n t_st = rot_data['pos_tool_wrt_s_code'][k]\n ypr = rot_data['rot_tool_wrt_s_code'][k]\n yaw, pitch, roll = ypr[0], ypr[1], ypr[2]\n\n # R_zyz\n R_z1 = U.rotation_matrix_3x3_axis(angle=roll, axis='z')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z2 = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyz = R_z2.dot(R_y).dot(R_z1)\n\n # R_zyx\n R_x = U.rotation_matrix_3x3_axis(angle=roll, axis='x')\n R_y = U.rotation_matrix_3x3_axis(angle=pitch, axis='y')\n R_z = U.rotation_matrix_3x3_axis(angle=yaw, axis='z')\n R_zyx = R_z.dot(R_y).dot(R_x)\n\n # Evaluate!\n rhs_zyz = t_st + R_zyz.dot( n_t )\n rhs_zyx = t_st + R_zyx.dot( n_t )\n err_zyz = np.linalg.norm(lhs - rhs_zyz)\n err_zyx = np.linalg.norm(lhs - rhs_zyx)\n errors_zyz.append( err_zyz )\n errors_zyx.append( err_zyx )\n print(\"\\nerr_zyz: {:.3f} for {}-th sample\".format(err_zyz, k))\n print(\"err_zyx: {:.3f} for {}-th sample\".format(err_zyx, k))\n print(\"R_zyz:\\n{}\".format(R_zyz))\n print(\"R_zyx:\\n{}\".format(R_zyx))\n\n print(\"\\nDone with evaluation!\")\n print(\"zyz has avg error {:.5f}\".format(np.mean(errors_zyz)))\n print(\"zyx has avg error {:.5f}\".format(np.mean(errors_zyx)))", "def test_can_info_does_not_exist(self):\n fake_user = User(username='Fake', password='')\n self.assertFalse(send_rotate_to_can(fake_user, self.BIN_NUM))", "def test_minvar_rotation(self):\n vrot, v, w = minvar(self.rdata)\n # Determinant of rotation matrix should be = 1\n self.assertTrue((np.linalg.det(v) - 1) < self.tol)", "def no_orientation(self) -> float:\n\n pbar = tqdm(total=len(self.positions) * 2)\n correct_no_orientation: int = 0\n total_no_orientation: int = 0\n for arm in [Arm.left, Arm.right]:\n for i in range(len(self.positions)):\n # Reload the scene.\n self.init_scene()\n orientations = self._get_ik_orientations(arm=arm, target=self.positions[i])\n if len(orientations) == 0:\n target_orientation = TargetOrientation.none\n orientation_mode = OrientationMode.none\n else:\n target_orientation = orientations[0].target_orientation\n orientation_mode = orientations[0].orientation_mode\n\n # Reach for the target.\n status = self.reach_for(target=TDWUtils.array_to_vector3(self.positions[i]),\n arm=arm,\n arrived_at=self.arrived_at,\n target_orientation=target_orientation,\n orientation_mode=orientation_mode)\n # Record how often we correctly guess that there's no solution (the action should fail).\n if len(orientations) == 0:\n total_no_orientation += 1\n if status != ActionStatus.success:\n correct_no_orientation += 1\n pbar.update(1)\n pbar.close()\n if total_no_orientation > 0:\n return correct_no_orientation / total_no_orientation\n else:\n return -1", "def check_already_extracted(video_parts):\n train_or_test, classname, filename_no_ext, _ = video_parts\n return bool(os.path.exists(os.path.join(\"/data/niteshku001/Ravdess\", train_or_test, classname,\n filename_no_ext + 
'-0001.jpg')))", "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def has_target(self):\n return self.target is not None", "def _check_integrity(self):\n root = self.root\n for scene_name in self.scene_list:\n if not(os.path.isdir(os.path.join(root,scene_name)) and \n os.path.isdir(os.path.join(root,scene_name, images_dir)) and\n os.path.isfile(os.path.join(root,scene_name,annotation_filename))):\n return False\n return True", "def _check_integrity(self):\n root = self.root\n for scene_name in self.scene_list:\n if not(os.path.isdir(os.path.join(root,scene_name)) and \n os.path.isdir(os.path.join(root,scene_name, images_dir)) and\n os.path.isfile(os.path.join(root,scene_name,annotation_filename))):\n return False\n return True", "def isRotationMatrix(self, R):\n Rt = np.transpose(R)\n shouldBeIdentity = np.dot(Rt, R)\n I = np.identity(3, dtype=R.dtype)\n n = np.linalg.norm(I - shouldBeIdentity)\n return n < 1e-6", "def test_xform_rotation(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1, aef=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertAlmostEqual(0.0, values[-1])", "def get_in_good_starting_position(arm, which='arm1'):\n assert which == 'arm1'\n pos, rot = U.pos_rot_arm(arm, nparrays=True)\n print(\"(starting method) starting position and rotation:\")\n print(pos, rot)\n U.move(arm, HOME_POS_ARM1, HOME_ROT_ARM1, speed='slow')\n time.sleep(2)\n print(\"(starting method) position and rotation after moving:\")\n pos, rot = U.pos_rot_arm(arm, nparrays=True)\n print(pos, rot)\n print(\"(Goal was: {} and {}\".format(HOME_POS_ARM1, HOME_ROT_ARM1))\n R = U.rotation_matrix_3x3_axis(angle=180, axis='z')\n print(\"With desired rotation matrix:\\n{}\".format(R))\n print(\"Now exiting...\")\n sys.exit()", "def reached_angle(self, angle, tol):\n if self.ros_node.get_data(\"/auto/turret/current/angle\") is None:\n rospy.logerr(\"The topic /auto/turret/current/angle has not been published yet\")\n else:\n neg_angle_diff = self.wrap_angle(self.ros_node.get_data(\"/auto/turret/current/angle\") - angle)\n pos_angle_diff = self.wrap_angle(angle - self.ros_node.get_data(\"/auto/turret/current/angle\"))\n\n if pos_angle_diff <= tol or neg_angle_diff <= tol:\n return True\n return False", "def is_target_in(self, newtarget):\n from .utils.shape import HAS_SHAPELY\n # Test if shapely\n if not HAS_SHAPELY:\n print(\"WARNING: could not test if the target is in the image since you do not have SHAPELY\")\n return True\n # Test if WCS \n if not self.has_wcs():\n print(\"WARNING: because there is no wcs solution, \"+\\\n \"I can't test the inclusion of the new astrotarget\")\n return True\n \n return self.wcs.coordsAreInImage(*newtarget.radec)", "def _check_rotated_filename_candidates(self):\n # savelog(8)\n candidate = \"%s.0\" % self.filename\n if (exists(candidate) and exists(\"%s.1.gz\" % self.filename) and\n (stat(candidate).st_mtime > stat(\"%s.1.gz\" % self.filename).st_mtime)):\n return candidate\n\n # logrotate(8)\n # with delaycompress\n candidate = \"%s.1\" % self.filename\n if exists(candidate):\n return candidate\n\n # without delaycompress\n candidate = \"%s.1.gz\" % self.filename\n if exists(candidate):\n return candidate\n\n rotated_filename_patterns = (\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`\n 
\"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\",\n # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`\n \"-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\",\n # for TimedRotatingFileHandler\n \".[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\",\n )\n for rotated_filename_pattern in rotated_filename_patterns:\n candidates = glob.glob(self.filename + rotated_filename_pattern)\n if candidates:\n candidates.sort()\n return candidates[-1] # return most recent\n\n # no match\n return None", "def check_already_extracted(video_parts):\n filename_no_ext, _ = video_parts\n return bool(os.path.exists(os.path.join(output_dir,\n filename_no_ext + '-0030.jpg')))", "def _is_rotation_matrix(self, R):\n Rt = np.transpose(R)\n shouldBeIdentity = np.dot(Rt, R)\n I = np.identity(3, dtype=R.dtype)\n n = np.linalg.norm(I - shouldBeIdentity)\n return n < 1e-6", "def test_returns_none_if_empty_list(self):\n result = find_rotation_point([])\n self.assertIsNone(result)", "def orientation(pointA, pointB, target):\n if target in (pointA, pointB):\n return -1\n buf = np.array([1, pointA.X, pointA.Y, 1, pointB.X, pointB.Y, 1, target.X, target.Y]).reshape(3,-1)\n buf = np.linalg.det(buf)\n if abs(buf) < Drawable._comparisonLimit:\n return 0\n if buf < 0:\n return -1\n return 1", "def is_rot_plug(m_plug):\n valid_plugs = ('rotate', 'rotateX', 'rotateY', 'rotateZ')\n plug_name = m_plug.name().split(\".\")[-1]\n\n output = plug_name in valid_plugs\n\n return output", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def lander_failure(self):\n failures_list = [\"Right Rotation\",\n \"Left Rotation\",\n \"Thrust\"\n ]\n if self.failure_ticks == 0:\n self.failure = 0\n if random.uniform(0, 1) < FAILURE_CHANCE:\n self.failure_ticks += FAILURE_DURATION\n self.failure = random.choice(failures_list)\n return False\n else:\n self.failure_ticks -= 1\n return True", "def test_exists_mol_res_spin_data(self):\n\n # This should be True.\n self.failUnless(mol_res_spin.exists_mol_res_spin_data())", "def has_guardian(self):\n return self.tiles.count(3) > 0", "def test_id_rot():\n assert_array_almost_equal(pr.R_id, pr.matrix_from_axis_angle(pr.a_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_quaternion(pr.q_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_euler_xyz(pr.e_xyz_id))\n assert_array_almost_equal(pr.R_id, pr.matrix_from_euler_zyx(pr.e_zyx_id))", "def is_same_target(self, candidate):\n return great_circle_distance(self.ra, self.dec,\n candidate.ra, candidate.dec) < (1.0 / 3600.0)", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n 
log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def check_already_extracted(video_parts):\n train_or_test, filename_no_ext, _ = video_parts\n return bool(os.path.exists(os.path.join('demo_frames',\n filename_no_ext + '-0001.jpg')))", "def can_transform_full(\n self,\n target_frame: str,\n target_time: Time,\n source_frame: str,\n source_time: Time,\n fixed_frame: str,\n timeout: Duration = Duration()\n ) -> bool:\n try:\n self.lookup_transform_full(target_frame, target_time, source_frame, source_time, fixed_frame, timeout)\n return True\n except tf2.TransformException:\n return False", "def check():", "def check_if_anim_exist(name, ext=vext, figpath=figpath):\n return not(os.path.isfile(format_filename(name, ext, figpath)))", "def exists(self):\n\t\tif self.hasUdim:\n\t\t\treturn len( self.udimPaths ) != 0\n\t\treturn super( textureFile, self ).exists", "def check_transforms_match(self, transform: Mapping) -> None:\n xform_id = transform.get(TraceKeys.ID, \"\")\n if xform_id == id(self):\n return\n # TraceKeys.NONE to skip the id check\n if xform_id == TraceKeys.NONE:\n return\n xform_name = transform.get(TraceKeys.CLASS_NAME, \"\")\n warning_msg = transform.get(TraceKeys.EXTRA_INFO, {}).get(\"warn\")\n if warning_msg:\n warnings.warn(warning_msg)\n # basic check if multiprocessing uses 'spawn' (objects get recreated so don't have same ID)\n if torch.multiprocessing.get_start_method() in (\"spawn\", None) and xform_name == self.__class__.__name__:\n return\n raise RuntimeError(\n f\"Error {self.__class__.__name__} getting the most recently \"\n f\"applied invertible transform {xform_name} {xform_id} != {id(self)}.\"\n )", "def test_rotating_phantom(self):\n cheese = TomoCheese.from_demo_images()\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -0.25, abs_tol=0.05)\n for img in cheese.dicom_stack:\n img.array = rotate(img.array, angle=3, mode=\"edge\")\n cheese.analyze()\n assert math.isclose(cheese.catphan_roll, -3.25, abs_tol=0.05)", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def is_at_target_position(self, position, tolerance=0.0):\n x, _ = position\n return x > self.corridor_length - tolerance", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def is_done(self):\n\n x, y = self.position\n\n if x <= -CAR_LENGTH \\\n and (self.spawn_position[0] > 0 or y != self.spawn_position[1]) \\\n and self.is_horizontal:\n return True\n elif x >= MAP_SIZE \\\n and (self.spawn_position[0] < MAP_SIZE or y != self.spawn_position[1]) \\\n and self.is_horizontal:\n return True\n elif y <= -CAR_LENGTH \\\n and (self.spawn_position[1] > 0 or x != self.spawn_position[0]) \\\n and not self.is_horizontal:\n return True\n 
elif y >= MAP_SIZE \\\n and (self.spawn_position[1] < MAP_SIZE or x != self.spawn_position[0]) \\\n and not self.is_horizontal:\n return True\n else:\n return False", "def test_exists(self):\n self.assertTrue(bool(self.photo))", "def test_rotated(self):\n d = np.random.random((100, 3))\n d_emb = tadasets.embed(d, 10)\n assert np.all(np.var(d_emb, axis=0) > 0)", "def check4move(st, selected_unit, direction):\n return 1", "def target_exists(self, target_id=0):\n try:\n target = self.target(target_id=target_id)\n except Exception as e:\n log.error(\"Exception checking if target exists: {} {}\".format(type(e), e))\n return False\n return target is not None", "def _check_if_lz4_exists(location, filename):\n # This just isn't a very readable syntax so I abstracted it\n return Path(location + _change_lzma_to_lz4_name(filename)).is_file()", "def test_from_rotation_angle_coordinate_of_phi(rotationangle):\n\n # Get the coordinate at phi\n phi_dash = rotationangle[\"phi\"]\n c3 = rotationangle[\"cs\"].from_rotation_angle(phi_dash)\n\n # Ensure that it is at the origin\n assert c3 == pytest.approx(0.0)", "def rotation(self, *args, **kwargs) -> Any:\n pass", "def has_invalid_targets(self):\n return len(self._combined_invalid_versioned_targets.targets) > 0", "def matches(self, tgt_residence_dir: str) -> bool:", "def get_opt_rotate(obj_img, back_img,\n back_center_x, back_center_y,\n obj_center_x, obj_center_y,\n prev_rot_angle=0.,\n is_erosion=False):\n width = obj_img.shape[0]\n rot_img = ndimage.rotate(obj_img, prev_rot_angle, reshape=False)\n induce_x, induce_y = int(back_center_x - obj_center_x), int(back_center_y - obj_center_y)\n combine_img = back_img.copy()\n combine_img[induce_y:induce_y + width, induce_x:induce_x + width] -= rot_img\n neg_count = len(np.argwhere(combine_img < 0))\n if is_erosion:\n angle_amount = 4.\n else:\n angle_amount = 16.\n # check combine_img.dtype; rot_img.dtype; back_img\n curr_angle = prev_rot_angle\n while angle_amount > 0.5:\n angle_amount /= 2.\n\n rotate_1 = ndimage.rotate(obj_img, curr_angle + angle_amount, reshape=False)\n combine_img = back_img.copy()\n combine_img[induce_y:induce_y+width, induce_x:induce_x+width] -= rotate_1\n neg_count_1 = len(np.argwhere(combine_img < 0))\n\n rotate_2 = ndimage.rotate(obj_img, curr_angle - angle_amount, reshape=False)\n combine_img = back_img.copy()\n combine_img[induce_y:induce_y + width, induce_x:induce_x + width] -= rotate_2\n neg_count_2 = len(np.argwhere(combine_img < 0))\n\n if neg_count_1 < neg_count_2:\n if neg_count_1 < neg_count:\n neg_count = neg_count_1\n curr_angle = curr_angle + angle_amount\n else:\n if neg_count_2 < neg_count:\n neg_count = neg_count_2\n curr_angle = curr_angle - angle_amount\n # print(curr_angle)\n # print(neg_count, neg_count_1, neg_count_2)\n # print('Negative Pix Count Rotation: %d.' 
% neg_count)\n # print('Optimal Rotation: ', curr_angle)\n return curr_angle, neg_count", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def victory_checker() -> bool:\r\n conflict_check()\r\n for x in range(shape):\r\n for y in range(shape):\r\n if conflict_space[x, y] != 0:\r\n return False\r\n if separation_crawler(False):\r\n return False\r\n return True", "def rotate_in_place(angle):\n action = easy_cozmo._robot.turn_in_place(degrees(-1*angle),speed=degrees(df_rotate_speed))\n try:\n action.wait_for_completed()\n if action.has_succeeded:\n return True\n else:\n code, reason = action.failure_reason\n result = action.result\n print(\"WARNING RotateInPlace: code=%s reason='%s' result=%s\" % (code, reason, result))\n say_error(\"I couldn't rotate, sorry\")\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"I can't rotate, sorry\")\n try:\n while action.is_running:\n action.abort()\n time.sleep(.5)\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"Wheels faulty\")\n\n return False", "def row0_invariant(self, target_col):\n \n # Returns False if zero tile is NOT in target position (0, target_col).\n if self.get_number(0, target_col) != 0:\n return False\n \n # Returns False if tiles to the right of target_col are NOT positioned correctly.\n if target_col < self.get_width():\n for col in range(target_col + 1, self.get_width()):\n if self.get_number(0, col) != col:\n return False\n \n # Returns False if tiles to the right of target_col in row 1 are NOT positioned correctly.\n for col in range(target_col, self.get_width()):\n if self.get_number(1, col) != col + self.get_width():\n return False\n\n # Returns False if tiles in rows 2 and below are NOT positioned correctly.\n if 1 < self.get_height():\n for row in range(2, self.get_height()):\n for col in range(self.get_width()):\n if self.get_number(row, col) != col + (row * self.get_width()):\n return False\n\n return True", "def test_to_rotation(self):\r\n q = np.array([-1, 1, 3, 2])\r\n q = q / np.linalg.norm(q)\r\n R_gt = np.array([\r\n [-1/3., -14/15., -2/15.],\r\n [2/3., -1/3., 2/3.],\r\n [-2/3., 2/15., 11/15.]]).T\r\n R = to_rotation(q)\r\n\r\n zero_matrix = R - R_gt\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n for _ in range(20):\r\n q = np.random.randn(4)\r\n q /= np.linalg.norm(q)\r\n q_inv = quaternion_conjugate(q)\r\n\r\n R = to_rotation(q)\r\n R_inv = to_rotation(q_inv)\r\n\r\n zero_matrix = R @ R_inv - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n # orthogonal matrix\r\n zero_matrix = R @ R.T - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)", "def check_file_existing(k_out, sphere_radius,full_path):\n cwd = full_path\n file_names = ['/T_ns_', '/Energy_', '/Distance-e-to-e_',\n '/chain_on_iterations_']\n\n kout = f'{k_out:.3f}'\n spr_rad = f'{sphere_radius:.3f}'\n\n current = \"kout_\" + kout + \"_\" + \"spr_rad_\" + spr_rad\n for file_name in file_names:\n #print (cwd + file_name + current + \".pkl\")\n if not os.path.exists(cwd + file_name + current + \".pkl\"):\n return False\n return True", "def test_orientation_vector():\n\topening_angle = 
geom_instance.source_opening_angle\n\ttest_orientation = o_gen_instance.generate_orientation_vector()\n\tassert test_orientation[0] < np.cos(opening_angle)\n\tassert test_orientation[1] < np.sin(opening_angle)", "def calculate_translation(reference_im:np.ndarray, \n target_im:np.ndarray,\n ref_to_tar_rotation:np.ndarray=None,\n use_autocorr:bool=True,\n alignment_kwargs:dict={},\n verbose:bool=True,\n ):\n from math import pi\n import cv2\n ## quality check\n # images\n if np.shape(reference_im) != np.shape(target_im):\n raise IndexError(f\"two images should be of the same shape\")\n # rotation matrix\n if ref_to_tar_rotation is None:\n ref_to_tar_rotation = np.diag([1,1])\n elif np.shape(ref_to_tar_rotation) != tuple([2,2]):\n raise IndexError(f\"wrong shape for rotation matrix, should be 2x2. \")\n # get dimensions\n _dz,_dx,_dy = np.shape(reference_im)\n # calculate angle\n if verbose:\n print(f\"-- start calculating drift with rotation between images\")\n _rotation_angle = np.arcsin(ref_to_tar_rotation[0,1])/pi*180\n _temp_new_rotation_M = cv2.getRotationMatrix2D((_dx/2, _dy/2), _rotation_angle, 1) # temporary rotation angle\n # rotate image\n if _rotation_angle != 0:\n _rot_target_im = np.array([cv2.warpAffine(_lyr, _temp_new_rotation_M, \n _lyr.shape, borderMode=cv2.BORDER_DEFAULT) \n for _lyr in target_im], dtype=reference_im.dtype)\n else:\n _rot_target_im = target_im\n # calculate drift \n _drift, _drift_flag = align_image(\n _rot_target_im,\n reference_im,\n precision_fold=10,\n use_autocorr=use_autocorr,\n verbose=verbose,\n #detailed_verbose=verbose,\n **alignment_kwargs,)\n\n if verbose:\n print(f\"--- drift: {np.round(_drift,2)} pixels\")\n \n return _rot_target_im, ref_to_tar_rotation, _drift", "def test_stickers(self):\n rotation, _ = list(self.cube.scramble_cube(15))\n\n unique, counts = np.unique(rotation, return_counts=True)\n dictionary = dict(zip(unique, counts))\n\n self.assertEqual(all(value == 4 for value in dictionary.values()), True)", "def verify():", "def is_on_course_with(start, heading, target):\n v = Volant(*(tuple(start) + (heading,)))\n\n if start == target:\n return True\n \n for _ in range(distance_between_hex_cells(start, target)):\n v = v.advance()\n if v.xyh == (tuple(target) + (heading,)):\n return True\n\n return False" ]
[ "0.63469565", "0.633046", "0.5920605", "0.58786756", "0.58433646", "0.58242995", "0.57923526", "0.5692897", "0.5678962", "0.5639551", "0.55446136", "0.5513295", "0.5480883", "0.5480366", "0.5454172", "0.5421362", "0.5417596", "0.54171485", "0.53870094", "0.53733045", "0.5360865", "0.53129077", "0.531185", "0.5309398", "0.5300208", "0.52919096", "0.5263992", "0.5261818", "0.52590674", "0.52448046", "0.5235926", "0.5232548", "0.5211219", "0.5189282", "0.5180785", "0.51716137", "0.5165395", "0.5155577", "0.5153615", "0.51404667", "0.5127945", "0.51233405", "0.51098615", "0.5071817", "0.5071754", "0.5068044", "0.5057442", "0.5042777", "0.50389344", "0.50389344", "0.50384325", "0.5032601", "0.50322247", "0.5031794", "0.5026637", "0.5021535", "0.5018094", "0.5011191", "0.5005615", "0.49932882", "0.49902987", "0.4986506", "0.4986013", "0.49837852", "0.49685133", "0.49618712", "0.49556687", "0.49554926", "0.49514645", "0.49416465", "0.49298292", "0.491862", "0.49151447", "0.49148062", "0.49130693", "0.491218", "0.49063307", "0.49052745", "0.48951468", "0.48927313", "0.48905838", "0.48872164", "0.48830548", "0.48815984", "0.48709053", "0.4869071", "0.48688647", "0.4868229", "0.48670667", "0.4863486", "0.4858311", "0.4857554", "0.48529634", "0.48473746", "0.4840076", "0.4833022", "0.4830098", "0.4825876", "0.48186362", "0.4816019" ]
0.5124146
41
Test of log rotation when an OS error occurs while moving the file
def test_process_log_with_os_error_at_move(self): with tempfile.TemporaryDirectory() as sandbox: with mock.patch('sys.stdout', new=io.StringIO()) as fake_stdout: with self.assertLogs() as logger: srcfile = Path(sandbox, 'pokus.log') srcfile.touch() destpath = Path(sandbox, 'backup') destpath.touch() compressors = process_log( datetime.datetime(year=2019, month=1, day=10, hour=21, minute=30), { 'target': '{{path}}/backup/{{name}}.{{ext}}', 'interval': 'hourly', 'compress': 'gzip -9', }, 'hourly', str(srcfile), 10 ) self.assertEqual(compressors, []) self.assertTrue(srcfile.exists()) self.assertEqual(fake_stdout.getvalue(), 'Checking "{src}"... rotating... '.format(src=srcfile)) self.assertIn("FileExistsError: [Errno 17] File exists: '{}'".format(destpath), logger.output[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_rotatePermissionFileNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.path, 0o444)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_move_badtgzfile(self):\n dbf = self.createDummyDBF('badtar.tgz')\n\n real_ans = (os.path.join(self.td, 'badtar.tgz'),\n os.path.join(self.td, 'L1', 'badtar.tgz'))\n self.assertFalse(os.path.isdir(os.path.join(self.td, 'L1')))\n # Method return may not be helpful but this is it for now\n self.assertEqual(real_ans, dbf.move())\n self.assertTrue(os.path.isdir(os.path.join(self.td, 'L1')))", "def test_rotatePermissionDirectoryNotOk(self):\n log = logfile.DailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n os.chmod(log.directory, 0o444)\n # Restore permissions so tests can be cleaned up.\n self.addCleanup(os.chmod, log.directory, 0o755)\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_move_nulltgzfile(self):\n dbf = self.createDummyDBF('emptytar.tgz')\n\n real_ans = (os.path.join(self.td, 'emptytar.tgz'),\n os.path.join(self.td, 'L1', 'emptytar.tgz'))\n self.assertFalse(os.path.isdir(os.path.join(self.td, 'L1')))\n # Method return may not be helpful but this is it for now\n self.assertEqual(real_ans, dbf.move())\n self.assertTrue(os.path.isdir(os.path.join(self.td, 'L1')))", "def test_rotateAlreadyExists(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n\n # Build a new file with the same name as the file which would be created\n # if the log file is to be rotated.\n newFilePath = \"{}.{}\".format(log.path, log.suffix(log.lastDate))\n with open(newFilePath, \"w\") as fp:\n fp.write(\"123\")\n previousFile = log._file\n log.rotate()\n self.assertEqual(previousFile, log._file)", "def test_rotation(self):\n log = RiggedDailyLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n days = [(self.path + \".\" + log.suffix(day * 86400)) for day in range(3)]\n\n # test automatic rotation\n log._clock = 0.0 # 1970/01/01 00:00.00\n log.write(\"123\")\n log._clock = 43200 # 1970/01/01 12:00.00\n log.write(\"4567890\")\n log._clock = 86400 # 1970/01/02 00:00.00\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(days[0]))\n self.assertFalse(os.path.exists(days[1]))\n log._clock = 172800 # 1970/01/03 00:00.00\n log.write(\"\")\n self.assertTrue(os.path.exists(days[0]))\n self.assertTrue(os.path.exists(days[1]))\n self.assertFalse(os.path.exists(days[2]))\n log._clock = 259199 # 1970/01/03 23:59.59\n log.write(\"3\")\n self.assertFalse(os.path.exists(days[2]))", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir,\"xxx\"), \"w\")\n except (OSError, IOError):\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEquals(f.tell(), 6)\n f.seek(0, 0)\n self.assertEquals(f.read(), \"abcdef\")\n log.close()", "def test_noPermission(self):\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n\n # change permissions so rotation would fail\n os.chmod(self.dir, 0o555)\n\n # if this succeeds, chmod doesn't restrict us, so we can't\n # do the test\n try:\n f = open(os.path.join(self.dir, \"xxx\"), 
\"w\")\n except OSError:\n pass\n else:\n f.close()\n return\n\n log.rotate() # this should not fail\n\n log.write(\"def\")\n log.flush()\n\n f = log._file\n self.assertEqual(f.tell(), 6)\n f.seek(0, 0)\n self.assertEqual(f.read(), b\"abcdef\")", "def test_move_goodtgzfile(self):\n dbf = self.createDummyDBF('goodtar.tgz')\n\n real_ans = (os.path.join(self.td, 'goodtar.tgz'),\n os.path.join(self.td, 'L1', 'goodtar.tgz'))\n self.assertFalse(os.path.isdir(os.path.join(self.td, 'L1')))\n self.assertEqual(real_ans, dbf.move())\n self.assertTrue(os.path.isdir(os.path.join(self.td, 'L1')))\n # Verify that archive was expanded\n self.assertTrue(os.path.isfile(os.path.join(self.td, 'tar1.txt')))\n self.assertTrue(os.path.isfile(os.path.join(self.td, 'tar2.txt')))", "def test_encrypt_creates_and_restores_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 1\n\n with self.assertRaises(RuntimeError):\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_os.unlink.assert_not_called()\n mock_shutil.move.assert_called_with('enc.bak', 'enc')\n self.assertEqual(mock_shutil.move.call_count, 2)", "def wrap_move_file_folder(src, dst):\r\n try:\r\n if os.path.exists(dst):\r\n if os.path.isdir(dst):\r\n shutil.rmtree(dst)\r\n else:\r\n os.remove(dst)\r\n except Exception:\r\n pass\r\n for i in range(5):\r\n try:\r\n shutil.move(src, dst)\r\n break\r\n except Exception:\r\n time.sleep(10)", "def wrap_move_file_folder(src, dst):\r\n try:\r\n if os.path.exists(dst):\r\n if os.path.isdir(dst):\r\n shutil.rmtree(dst)\r\n else:\r\n os.remove(dst)\r\n except Exception:\r\n pass\r\n for i in range(5):\r\n try:\r\n shutil.move(src, dst)\r\n break\r\n except Exception:\r\n time.sleep(10)", "def test_rotation(self):\n # this logfile should rotate every 10 bytes\n with contextlib.closing(\n logfile.LogFile(self.name, self.dir, rotateLength=10)\n ) as log:\n\n # test automatic rotation\n log.write(\"123\")\n log.write(\"4567890\")\n log.write(\"1\" * 11)\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.2\".format(self.path)))\n log.write(\"\")\n self.assertTrue(os.path.exists(\"{}.1\".format(self.path)))\n self.assertTrue(os.path.exists(\"{}.2\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n log.write(\"3\")\n self.assertFalse(os.path.exists(\"{}.3\".format(self.path)))\n\n # test manual rotation\n log.rotate()\n self.assertTrue(os.path.exists(\"{}.3\".format(self.path)))\n self.assertFalse(os.path.exists(\"{}.4\".format(self.path)))\n\n self.assertEqual(log.listLogs(), [1, 2, 3])", "def test_move_to_trash(self):\n os.chdir(\"testimages/\")\n shutil.copyfile(\"arch_001.jpg\", \"image_to_edit.jpg\")\n filename = os.path.abspath(\"image_to_edit.jpg\")\n files = [filename]\n fileactions.move_to_trash(files, self.trashdir)\n trashed_file = os.path.join(self.trashdir, \"image_to_edit.jpg\")\n self.assertTrue(os.path.isfile(trashed_file))\n # Repeat, to check if backing up works\n shutil.copyfile(\"arch_001.jpg\", \"image_to_edit.jpg\")\n fileactions.move_to_trash(files, self.trashdir)\n trashed_file1 = os.path.join(self.trashdir, \"image_to_edit.jpg.1\")\n self.assertTrue(os.path.isfile(trashed_file1))\n shutil.copyfile(\"arch_001.jpg\", \"image_to_edit.jpg\")\n fileactions.move_to_trash(files, self.trashdir)\n trashed_file2 = os.path.join(self.trashdir, \"image_to_edit.jpg.2\")\n self.assertTrue(os.path.isfile(trashed_file2))\n # Clear the files\n 
os.remove(trashed_file)\n os.remove(trashed_file1)", "def move_old_excel():\n timestr = get_time()\n\n try:\n if not os.listdir('old_excel'):\n print('Folder empty no need to remove files')\n except FileNotFoundError:\n os.mkdir('old_excel')\n\n print(\"passing here\")\n try:\n if not os.listdir('excel'):\n print('Folder empty no need to remove files')\n else:\n os.rename('excel', 'old_excel/excel_' + timestr)\n os.mkdir('excel')\n print(\"created folder\")\n except FileNotFoundError:\n os.mkdir('excel')\n print(\"created folder within exception\")", "def test_abstractShouldRotate(self):\n log = logfile.BaseLogFile(self.name, self.dir)\n self.addCleanup(log.close)\n self.assertRaises(NotImplementedError, log.shouldRotate)", "def moveFile(sourceFullPath,targetDir):\n\n thisFunc = inspect.currentframe().f_code.co_name\n try:\n shutil.move(sourceFullPath,targetDir)\n return True\n except Exception as e:\n print(f\"{thisFunc} issue: {e}\")\n return False", "def test_modePreservation(self):\n open(self.path, \"w\").close()\n os.chmod(self.path, 0o707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n self.addCleanup(log.close)\n log.write(\"abc\")\n log.rotate()\n self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])", "def testError(self):\n cmds = \"\"\"chown 0 missingFile\npwd\nexit\n\"\"\"\n\n def _cbCheckResult(res):\n self.assertNotIn(self.testDir.asBytesMode().path, res)\n\n d = self._getBatchOutput(cmds)\n d.addCallback(_cbCheckResult)\n return d", "def test_skel_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.skel_file, ani=1)\n\n values = cmds.keyframe('joint1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def moveFile(source, dest):\n try:\n shutil.move(source, dest) \n except IOError as e:\n print (\"Unable to move file. 
%s\" %(e))", "def test_xform_rotation_fail(self):\n cmds.file(f=1, new=1)\n cmds.mayaUSDImport(file=self.xform_file, ani=1)\n\n values = cmds.keyframe('pCube1.rx', q=1, vc=1)\n self.assertNotAlmostEqual(0.0, values[-1])", "def move_file(original_path,final_path,max_attempts=30):\n assert_is_string(original_path)\n assert_is_string(final_path)\n\n attempt_counter = 0\n while attempt_counter < max_attempts:\n attempt_counter += 1\n if attempt_counter > 1:\n # Pause if something went wrong, (yt-dl is a suspect, might not be closing files?)\n time.sleep(attempt_counter)\n logging.debug(\"Attempt \"+repr(attempt_counter)+\" to move \"+repr(original_path)+\" to \"+repr(final_path))\n try:\n # Make sure output folder exists\n output_dir = os.path.dirname(final_path)\n if not os.path.exists(output_dir):\n os.makedirs(output_dir)\n assert(os.path.exists(output_dir))\n # Move file\n shutil.move(original_path, final_path)\n assert(not os.path.exists(original_path))\n assert(os.path.exists(final_path))\n return\n except WindowsError, err:\n logging.exception(err)\n logging.error(\"Failed to move file: \"+repr(original_path)+\" to \"+repr(final_path))\n continue\n # If we get here we already have an exception to re-raise\n logging.critical(\"move_file() Too many failed attempts to move a file!\")\n logging.critical(\"move_file()\"+repr(locals()))\n raise", "def test_atomic_failure(self):\n with TemporaryDirectory() as tmp:\n fp = os.path.join(tmp, \"asdf.txt\")\n\n # raise fake error while writing file atomically\n with self.assertRaises(FakeFileFailure):\n with atomic_write(fp, \"w\") as f:\n tmpfile = f.name\n assert os.path.exists(tmpfile)\n raise FakeFileFailure()\n\n # ensure both the temp and destination files do not exist\n assert not os.path.exists(tmpfile)\n assert not os.path.exists(fp)", "def _do_move(self, artist, album, song):\n try:\n move_to = \"{0}{1}/{2}/\".format(self.dupe_dir, \n artist, album)\n if not os.path.exists(move_to):\n os.makedirs(move_to)\n \n shutil.move(song['path'], move_to)\n self.moved.append(song)\n return 1\n except:\n self.logger.error(\"Could not move file: {0}\".format(str(song['path'])))\n return 0", "def testModePreservation(self):\n f = open(self.path, \"w\").close()\n os.chmod(self.path, 0707)\n mode = os.stat(self.path)[stat.ST_MODE]\n log = logfile.LogFile(self.name, self.dir)\n log.write(\"abc\")\n log.rotate()\n self.assertEquals(mode, os.stat(self.path)[stat.ST_MODE])", "def test_move_file_new_path(self, mock_message, mock_move):\n\n volume_path = os.path.join('the', 'volume', 'path')\n file_path_1 = os.path.join('my_dir', 'my_file.txt')\n file_path_2 = os.path.join('my_dir', 'my_file.json')\n full_path_file_1 = os.path.join(volume_path, file_path_1)\n full_path_file_2 = os.path.join(volume_path, file_path_2)\n\n file_1 = storage_test_utils.create_file(file_path=file_path_1, workspace=self.old_workspace)\n file_2 = storage_test_utils.create_file(file_path=file_path_2, workspace=self.old_workspace)\n file_ids = [file_1.id, file_2.id]\n\n # Call function\n move_files(file_ids, new_workspace=None, new_file_path='/test/path')\n\n # Check results\n mock_move.assert_called()", "def test_archive_run(self, mock_move):\n run_dir = 'data/nanopore_data/run4/done_demuxing/20200104_1412_MN19414_AAU644_68125dc2'\n np_run = Nanopore(run_dir)\n np_run.archive_dir = '/some/dir'\n np_run.archive_run()\n mock_move.assert_called_once()", "def _process_file_movement(src:str, dest:str, is_move=False)->bool:\n debug_str = \"move\" if (is_move) else \"copy\"\n \n objects = 
_list_objects(src) # list objects\n for obj in objects:\n if _is_dir(dest) or _is_dir(src):\n temp_dest = _append_object(dest, _get_dest_obj_name(src, obj))\n else:\n temp_dest = dest\n \n if _is_s3(src) and _is_s3(dest): #s3 to s3\n src_bucket, _ = _extract_bucket_key(src)\n dest_bucket, dest_key = _extract_bucket_key(temp_dest)\n print(f\"{debug_str} file s3://{src_bucket}/{obj} to {temp_dest}\")\n status = _copy_s3_to_s3(src_bucket, obj, dest_bucket, dest_key)\n if status and is_move:\n aws_s3_rm(f\"s3://{src_bucket}/{obj}\")\n elif _is_s3(src): # s3 to local\n src_bucket, _ = _extract_bucket_key(src)\n _create_local_dir(temp_dest) # create dir if doesn't exist\n print(f\"{debug_str} file s3://{src_bucket}/{obj} to {temp_dest}\")\n status = _copy_s3_to_local(src_bucket, obj, temp_dest)\n if status and is_move:\n aws_s3_rm(f\"s3://{src_bucket}/{obj}\")\n elif _is_s3(dest): # local to s3\n dest_bucket, dest_key = _extract_bucket_key(temp_dest)\n print(f\"{debug_str} file {obj} to {temp_dest}\")\n status = _copy_local_to_s3(obj, dest_bucket, dest_key)\n if status and is_move:\n os.remove(obj) \n \n if not status:\n raise Error(f\"S3 {debug_str} failed.\")\n return True", "def safe_move(src: str, dst: str) -> None:\n try:\n os.rename(src, dst)\n except OSError as err:\n\n if err.errno == errno.EXDEV:\n # Generate a unique ID, and copy `<src>` to the target directory\n # with a temporary name `<dst>.<ID>.tmp`. Because we're copying\n # across a filesystem boundary, this initial copy may not be\n # atomic. We intersperse a random UUID so if different processes\n # are copying into `<dst>`, they don't overlap in their tmp copies.\n copy_id = uuid4()\n tmp_dst = \"%s.%s.tmp\" % (dst, copy_id)\n shutil.copyfile(src, tmp_dst)\n\n # Then do an atomic rename onto the new name, and clean up the\n # source image.\n os.rename(tmp_dst, dst)\n os.unlink(src)\n else:\n raise", "def move_file(src, dst):\n # Sanity checkpoint\n src = re.sub('[^\\w/\\-\\.\\*]', '', src)\n dst = re.sub('[^\\w/\\-\\.\\*]', '', dst)\n if len(re.sub('[\\W]', '', src)) < 5 or len(re.sub('[\\W]', '', dst)) < 5:\n debug.log(\"Error: Moving file failed. Provided paths are invalid! src='%s' dst='%s'\"%(src, dst))\n else:\n # Check destination\n check = False\n if dst[-1] == '/':\n if os.path.exists(dst):\n check = True # Valid Dir\n else:\n debug.log(\"Error: Moving file failed. Destination directory does not exist (%s)\"%(dst)) #DEBUG\n elif os.path.exists(dst):\n if os.path.isdir(dst):\n check = True # Valid Dir\n dst += '/' # Add missing slash\n else:\n debug.log(\"Error: Moving file failed. %s exists!\"%dst)\n elif os.path.exists(os.path.dirname(dst)):\n check = True # Valid file path\n else:\n debug.log(\"Error: Moving file failed. %s is an invalid distination!\"%dst)\n if check:\n # Check source\n files = glob.glob(src)\n if len(files) != 0:\n debug.log(\"Moving File(s)...\", \"Move from %s\"%src, \"to %s\"%dst)\n for file_ in files:\n # Check if file contains invalid symbols:\n invalid_chars = re.findall('[^\\w/\\-\\.\\*]', os.path.basename(file_))\n if invalid_chars:\n debug.graceful_exit((\"Error: File %s contains invalid \"\n \"characters %s!\"\n )%(os.path.basename(file_), invalid_chars))\n continue\n # Check file exists\n if os.path.isfile(file_):\n debug.log(\"Moving file: %s\"%file_)\n shutil.move(file_, dst)\n else:\n debug.log(\"Error: Moving file failed. %s is not a regular file!\"%file_)\n else: debug.log(\"Error: Moving file failed. No files were found! 
(%s)\"%src)", "def test_make_sure_path_exists_correctly_handle_os_error(mocker):\n mocker.patch(\"pathlib.Path.mkdir\", side_effect=OSError)\n with pytest.raises(OSError) as err:\n utils.make_sure_path_exists(Path('protected_path'))\n assert str(err.value) == \"Unable to create directory at protected_path\"", "def move_from_temp_directory(self):", "async def test_sync_move_to_error(self):\n await self.cog.cog_load()\n failing_member, members = self.create_erroneous_members()\n\n await self.cog._force_voice_sync(MockVoiceChannel(members=members))\n for member in members:\n self.assertEqual(member.move_to.call_count, 1 if member == failing_member else 2)", "def _test_align_file_existance(self):\n if len(self._pathcreator.get_read_files()) == 0:\n self._write_err_msg_and_quit(\"Error! No read libraries given!\\n\")\n if len(self._ref_seq_files) == 0:\n self._write_err_msg_and_quit(\n \"Error! No reference sequence files given!\\n\"\n )", "def test_rotate_filenames():\n tmpdir = os.path.join(tempfile.gettempdir(), \"jade-test-tmp87alkj8ew\")\n os.makedirs(tmpdir, exist_ok=True)\n\n data = {\"A\": 1, \"B\": 2}\n json_file1 = os.path.join(tmpdir, \"a1.json\")\n dump_data(data, json_file1)\n\n json_file2 = os.path.join(tmpdir, \"a2.json\")\n dump_data(data, json_file2)\n\n rotate_filenames(tmpdir, \".json\")\n\n if os.path.exists(tmpdir):\n shutil.rmtree(tmpdir)", "def move_file(self, ctx):\n pass", "def test_encrypt_creates_and_cleans_up_backup(\n self,\n mock_os,\n mock_shutil,\n mock_subprocess,\n ):\n mock_subprocess.run.return_value.returncode = 0\n\n self.mikla.encrypt('Chunky Hunky', 'plain', 'enc')\n\n mock_shutil.move.assert_called_once_with('enc', 'enc.bak')\n mock_os.unlink.assert_called_once_with('enc.bak')", "def test_mount_failure(self):\n with prepared_image_file(create_filesystem=False):\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `mount' fails it should exit with a nonzero exit code,\n # thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToMountError exception that\n # we're interested in. 
The check=False option enables our\n # `last resort error handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToMountError, program.execute)", "def movefile(destpath,filename,sourcepath):\n\n\tcommand = 'mv ' + filename + ' ' + destpath\n\t\n\ttry :\n\t\tst = commands.getstatusoutput(command)\n\texcept Exception:\n\t\traise", "def _move_cleanup(self, ok, elog, start_pos, goal_pos):\n if not ok:\n self.stop()\n if elog:\n self._record_elog_move(start_pos, goal_pos)\n return ok", "def test_file_conflict(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo\")\n self.sync_all()\n\n self.write_file(dir0, \"foo\", \"bar\")\n time.sleep(0.1)\n self.write_file(dir1, \"foo\", \"baz\")\n self.sync_all()\n # File with later mtime wins\n self.assertFile(dir0, \"foo\", \"baz\")\n self.assertFile(dir1, \"foo\", \"baz\")", "def test_need_to_rotate_log(self):\n self.assertTrue(need_to_rotate_log(0, 20, 'daily', 15, 'daily'), 'rotate log by time')\n self.assertFalse(need_to_rotate_log(10, 20, 'daily', 15, 'hourly'), 'do not rotate log by time')\n self.assertTrue(need_to_rotate_log(10, 20, 'daily', 25, None), 'rotate log by max size')\n self.assertFalse(need_to_rotate_log(10, 20, 'hourly', 5, 'hourly'), 'do not rotate log by min size')", "def test_file_dir_conflict(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo\")\n self.write_file(dir1, \"foo/bar\", \"baz\")\n self.sync_all()\n # Directory wins. File is deleted in dir0\n self.assertFile(dir0, \"foo/bar\", \"baz\")\n self.assertFile(dir1, \"foo/bar\", \"baz\")", "def testTempDirWhenDeleteEncounterError(self):\n self.Patch(os, \"chmod\")\n self.Patch(tempfile, \"mkdtemp\", return_value=\"/tmp/tempdir\")\n expected_error = OSError(\"Expected OS Error\")\n self.Patch(shutil, \"rmtree\", side_effect=expected_error)\n\n def _Call():\n with utils.TempDir():\n pass\n\n # Verify OSError should be raised.\n self.assertRaises(OSError, _Call)\n tempfile.mkdtemp.assert_called_once() #pylint: disable=no-member\n shutil.rmtree.assert_called_with(\"/tmp/tempdir\") #pylint: disable=no-member", "def test_move_file_new_workspace(self, mock_message, mock_delete, mock_upload, mock_download, mock_paths):\n\n volume_path = os.path.join('the', 'volume', 'path')\n file_path_1 = os.path.join('my_dir', 'my_file.txt')\n file_path_2 = os.path.join('my_dir', 'my_file.json')\n full_path_file_1 = os.path.join(volume_path, file_path_1)\n full_path_file_2 = os.path.join(volume_path, file_path_2)\n\n file_1 = storage_test_utils.create_file(file_path=file_path_1, workspace=self.old_workspace)\n file_2 = storage_test_utils.create_file(file_path=file_path_2, workspace=self.old_workspace)\n file_ids = [file_1.id, file_2.id]\n\n # Call function\n move_files(file_ids, new_workspace=self.new_workspace, new_file_path=None)\n\n # Check results\n mock_download.assert_called()\n mock_upload.assert_called()\n mock_delete.assert_called()", "def test_move_file_new_workspace_without_download(self, mock_message, mock_delete, mock_upload, mock_download, mock_paths):\n\n volume_path = os.path.join('the', 'volume', 'path')\n file_path_1 = os.path.join('my_dir', 'my_file.txt')\n file_path_2 = os.path.join('my_dir', 'my_file.json')\n full_path_file_1 = os.path.join(volume_path, file_path_1)\n full_path_file_2 = os.path.join(volume_path, file_path_2)\n\n file_1 = storage_test_utils.create_file(file_path=file_path_1, workspace=self.old_workspace)\n file_2 = 
storage_test_utils.create_file(file_path=file_path_2, workspace=self.old_workspace)\n file_ids = [file_1.id, file_2.id]\n\n # Call function\n move_files(file_ids, new_workspace=self.new_workspace, new_file_path=None)\n\n # Check results\n mock_download.assert_not_called()\n mock_upload.assert_called()\n mock_delete.assert_called()", "def test_malformed(self):\n fdesc, fname = tempfile.mkstemp()\n tfile = os.fdopen(fdesc, 'w')\n tfile.write(self.file_str2)\n tfile.close()\n assert_raises(Exception, grades.writers.GradesFile, fname)\n os.unlink(fname)", "def move_file(path):\n new_path = os.path.join(TEST_DIR, TEST_FILE)\n command = ['mv', TEST_FILE, new_path]\n file_operation(path, command)", "def test_logfile_recreates_after_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n self.conveyer.rotate_logs()\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"fourth\\\"}\"))\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"fourth\\\"}\")\n self.assertTrue(self.renamerCalled)", "def test_raise_file_not_found_error_for_fileio_delete() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n # Write to the temporary file\n output_file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Instantiate the file-io\n file_io = PyArrowFileIO()\n\n # Delete the non-existent file using the file-io implementations delete method\n with pytest.raises(FileNotFoundError) as exc_info:\n file_io.delete(output_file_location)\n\n assert \"Cannot delete file\" in str(exc_info.value)\n\n # Confirm that the file no longer exists\n assert not os.path.exists(output_file_location)", "def move_file(self, path: PathLike, dest: PathLike, force: bool = False):", "def test_delete_write_fail(self):\n self.task_storage.add(self.my_task)\n os.chmod(self.test_task_filename, 0400)\n\n self.assertRaises(IOError, self.task_storage.delete, self.my_task.key)", "def test_6e_move_data_btw_folders(self):\n if (not GST.logged_in) or (not GST.data_testing_swift_mounted):\n raise unittest.SkipTest(\"Skipped for failed login or failed mounting container.\")\n elif (GST.default_folder_to_be_used):\n if not (default_folders_exists):\n raise unittest.SkipTest(\"Skipped for failed to prepare default directories\")\n elif (not GST.dir1_exists) or (not GST.dir2_exists):\n raise unittest.SkipTest(\"Skipped for failed to prepare dirs\")\n elif not GST.moving_data_test_ready:\n raise unittest.SkipTest(\"Skipped for failed to prepare moving data tests.\")\n self.dismiss_dialogs()\n function = js_func[\"move_file\"] % (GST.gs_file_paths[\"file_to_move_to_folder_source_path\"], GST.gs_file_paths[\"move_to_folder_target_path\"])\n try:\n self.send_request(function, \"move_file()\")\n except Exception as e:\n raise MoveException(\"Failed to move the data between folders. \\n\" + e.__str__())\n try:\n response = self.get_response()\n assert \"Success\" in response\n self.refresh_page()\n except AssertionError:\n raise MoveException(\"Failed to move the data between folders. 
\\n\" + response)", "def test_file_update_delete_conflict(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n\n self.write_file(dir0, \"foo\", \"baz\")\n self.delete_file(dir1, \"foo\")\n self.sync_all()\n self.assertFileAbsent(dir0, \"foo\")\n self.assertFileAbsent(dir1, \"foo\")", "def test_rotate(self):\n rotable = TestRotable()\n command = RotateCommand(rotable)\n collinear_to_new_direction = rotable.get_direction() + rotable.get_angular_velocity()\n\n command()\n\n ratio = norm(rotable.get_direction()) / norm(collinear_to_new_direction)\n self.assertTrue(allclose(collinear_to_new_direction * ratio, rotable.get_direction()))\n self.assertTrue(isclose(norm(rotable.get_direction()), 1))", "def _rename_ondisk(self):\n if not self.has_moved or not self.renames_remaining:\n return\n\n try:\n os.rename(self.rename_phase_src, self.rename_phase_dst)\n except Exception:\n sys.stderr.write(\"Failed to renamed '%s' to '%s'\\n\" %\n (self.rename_phase_src,\n self.rename_phase_dst))\n raise\n\n self._rename_phase += 1", "def _maybe_move(self, source_chunk, target_chunk, path_index, move_func):\n if len(source_chunk.paths) <= 1:\n return False\n\n move_time = source_chunk.paths[path_index].time\n\n new_source_badness = self._badness(source_chunk.time - move_time)\n new_target_badness = self._badness(target_chunk.time + move_time)\n\n delta_badness = ((new_source_badness + new_target_badness) -\n (source_chunk.badness + target_chunk.badness))\n if delta_badness < 0:\n move_func()\n return True\n\n return False", "def testMoveAndCopyFile(self):\n try:\n remoteLocator = self.__pathPdbxDictionaryFile\n fn = self.__fileU.getFileName(remoteLocator)\n # _, fn = os.path.split(remoteLocator)\n lPath = os.path.join(self.__workPath, fn)\n ok = self.__fileU.get(remoteLocator, lPath)\n self.assertTrue(ok)\n # Test copy file\n dPath2 = os.path.join(self.__workPath, \"tdir\")\n ok = self.__fileU.mkdir(dPath2)\n self.assertTrue(ok)\n lPath2 = os.path.join(dPath2, fn)\n ok = self.__fileU.put(lPath, lPath2)\n self.assertTrue(ok)\n ok = self.__fileU.exists(lPath)\n self.assertTrue(ok)\n ok = self.__fileU.exists(lPath2)\n self.assertTrue(ok)\n # Remove copied file (to test moving file next)\n ok = self.__fileU.remove(lPath2)\n self.assertTrue(ok)\n ok = self.__fileU.exists(lPath2)\n self.assertFalse(ok)\n # Test move file\n ok = self.__fileU.replace(lPath, lPath2)\n self.assertTrue(ok)\n ok = self.__fileU.exists(lPath)\n self.assertFalse(ok)\n ok = self.__fileU.exists(lPath2)\n self.assertTrue(ok)\n # Now clean up files and dirs\n ok = self.__fileU.remove(lPath)\n self.assertTrue(ok)\n ok = self.__fileU.remove(dPath2)\n self.assertTrue(ok)\n\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n self.fail()", "def test_rotated(self):\n self._calibration_test(\"rotated\")", "def on_moved(self, event):\n print(\"Moved\")\n time.sleep(5)\n self.moveFile(event.dest_path)", "def test_illegal_file_path_decryption(self):\n nonce = globals.generate_random_nonce()\n file_path = pl.Path(pl.Path.cwd() / \"..\")\n enc_file_path = self.file_crypt.encrypt_relative_file_path(file_path, nonce)\n self.assertRaises(PermissionError, self.file_crypt.decrypt_relative_file_path, enc_file_path, nonce)", "def test_make_output_folder_exists_with_timestamp_fails(self,\n mock_makedirs):\n mock_makedirs.side_effect = [OSError]\n test_object = Maic()\n path = \"my_path-1960-04-04--15-00\"\n try:\n test_object.make_output_folder(output_folder=path)\n except OSError:\n pass\n 
except BaseException:\n self.fail(\"Should get an OSError\")", "def move_invalid():\n check50.run(run_command).stdin(\"EAST\").stdout(\"Invalid command.\")", "def test_change_file_to_dir_without_file(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"bar\")\n self.assertFile(dir1, \"foo\", \"bar\")\n\n self.delete_file(dir0, \"foo\")\n self.write_dir(dir0, \"foo\")\n self.sync_all()\n self.assertDirPresent(dir0, \"foo\")\n self.assertDirPresent(dir1, \"foo\")", "def test_verify_move(self):\n self._verify([self.applied_commands['move']])", "def testIgnoredError(self):\n cmds = \"\"\"-chown 0 missingFile\npwd\nexit\n\"\"\"\n def _cbCheckResult(res):\n self.assertIn(self.testDir.asBytesMode().path, res)\n\n d = self._getBatchOutput(cmds)\n d.addCallback(_cbCheckResult)\n return d", "def test_rotate_without_moving(controller):\n distance = math.pi / 2 * (DISTANCE_BETWEEN_WHEELS / 2)\n revolution = distance / (2 * math.pi * WHEEL_RADIUS)\n ticks = revolution * TICK_PER_REVOLUTION\n pos, angle = controller.odometry(\n round(10 - ticks),\n round(10 + ticks),\n Vector2(0, 0),\n 0,\n )\n\n # Rotate 90 degrees without moving.\n assert pos == Vector2(0, 0)\n assert round(math.pi / 2 / angle, 1) == 1\n\n # Rotate back to 0 degrees without moving.\n pos, angle = controller.odometry(10, 10, Vector2(0, 0), 0)\n assert pos == Vector2(0, 0)\n assert round(-math.pi / 2 / angle, 1) == 1", "def test_file_deleted(self):\n try:\n with get_temp_file() as (fd, name):\n os.unlink(name)\n except Exception as err:\n self.fail('Failed with exception \"{}\"'.format(err))", "def test_move(self):\n # Shouldn't be able to run a move with this sampler\n self.assertRaises(NotImplementedError, lambda: gcmc_system_sampler.move(gcmc_system_simulation.context))\n\n return None", "def test_log_rotation(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"second\\\"}\"))\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"third\\\"}\"))\n filename = self.conveyer.rotate_logs()\n self.assertEquals(self.conveyer.logfile, None)\n self.assertEquals(filename, \"testfile.dat.rotated\")", "def test_move_overwrite(remote,AB,all_):\n testpath = os.path.join(os.path.abspath(os.path.split(__file__)[0]),\n 'test_dirs','pp','test_move_overwrite')\n try:\n shutil.rmtree(testpath)\n except:\n pass\n os.makedirs(testpath)\n testutil = testutils.Testutils(testpath=testpath)\n\n # Init\n testutil.write('A/fileA0',text='fileA0')\n testutil.write('A/fileB0',text='fileB0')\n\n # copy over\n testutil.copy_tree()\n\n # Start it\n config = testutil.get_config(remote=remote)\n testutil.init(config)\n\n # Apply actions\n testutil.write('A/fileA1',text='fileA1')\n testutil.move('A/fileA0','A/fileB1')\n\n testutil.write('B/fileB1',text='fileB1')\n testutil.move('B/fileB0','B/fileA1')\n\n # Sync\n if AB == 'A':\n mode = 'push'\n else:\n mode='pull'\n\n if all_:\n mode += '_all'\n\n testutil.run(config,mode=mode)\n\n # Check it -- Only need to check A\n diff = testutil.compare_tree()\n\n if all_:\n assert len(diff) == 0\n # In the end, all files are either moved or overwritten. 
We do not\n # expect there to be any differences\n elif AB == 'A': # Check backups in B\n assert diff == [('missing_inB', 'fileB0')] # Never gets pushed\n \n elif AB == 'B': # Check backups in B\n assert diff == [('missing_inA', 'fileA0')] # Never gets pulled", "def testTempDirOrininalErrorRaised(self):\n self.Patch(os, \"chmod\")\n self.Patch(tempfile, \"mkdtemp\", return_value=\"/tmp/tempdir\")\n expected_error = OSError(\"Expected OS Error\")\n self.Patch(shutil, \"rmtree\", side_effect=expected_error)\n\n class ExpectedException(Exception):\n \"\"\"Expected exception.\"\"\"\n\n def _Call():\n with utils.TempDir():\n raise ExpectedException(\"Expected Exception\")\n\n # Verify.\n # ExpectedException should be raised, and OSError\n # should not be raised.\n self.assertRaises(ExpectedException, _Call)\n tempfile.mkdtemp.assert_called_once() #pylint: disable=no-member\n shutil.rmtree.assert_called_with(\"/tmp/tempdir\") #pylint: disable=no-member", "def test_update_write_fail(self):\n self.task_storage.add(self.my_task)\n os.chmod(self.test_task_filename, 0400)\n\n self.assertRaises(IOError, self.task_storage.update, self.my_task)", "def rotate_file(cls, main_dir, temp_dir):\n\t\tif(os.path.isfile(main_dir)):\n\t\t\tos.remove(main_dir)\n\t\tcopyfile(temp_dir, main_dir)\n\t\tos.remove(temp_dir)", "def test_calc_rotation(self):\n t = AioBaseTurtle()\n t.speed(speed=2)\n orient, steps, delta = t._calc_rotation(120)\n self.assertEqual(steps, 21)\n self.assertAlmostEqual(delta, 120.0 / 21.0)\n self.assertAlmostEqual(orient[0], math.cos(math.radians(120)))\n self.assertAlmostEqual(orient[1], math.sin(math.radians(120)))", "def test_verify_corrupt_archive(self):\n self.backup(u\"full\", u\"testfiles/various_file_types\", options=[])\n output_files = os.listdir(\"testfiles/output\")\n archives = [elem for elem in output_files if \"vol\" in elem]\n for archive in archives:\n # Edit source file\n with open(\"testfiles/output/\" + archive, 'r+') as f:\n f.write('This writes text into each archive file to corrupt it.')\n # Test verify for the file\n try:\n self.verify(u'testfiles/various_file_types/executable', file_to_verify=u'executable', options=[])\n except CmdError as e:\n # Should return a 21 error code for \"hash mismatch\"\n self.assertEqual(e.exit_status, 21, str(e))\n else:\n self.fail('Expected Hash Mismatch Error not thrown')", "def check_rotation_fault(self, current_pos, target_pos):\n \n fault_pos = 340.\n \n def cw_dist(A, B):\n return (B-A)%360.\n def ccw_dist(A,B):\n return (A-B)%360.\n def fast_dist(A,B):\n return min(ccw_dist(A,B), cw_dist(A,B))\n def fast_dir(A,B):\n if ccw_dist(A,B) > cw_dist(A,B): return +1\n else: return -1\n \n def dist(A,B, direction):\n if direction > 0: return cw_dist(A,B)\n if direction < 0: return ccw_dist(A,B)\n \n print(\"A->B Fast dir {}\".format(fast_dir(current_pos,target_pos)))\n \n print(\"A->F fast\", fast_dist(current_pos, fault_pos), fast_dir(current_pos, fault_pos))\n print(\"F->B fast\", fast_dist(fault_pos,target_pos), fast_dir(fault_pos, current_pos))\n d = fast_dir(current_pos,target_pos)\n print(\"A->F\", dist(current_pos, fault_pos, d), dist(current_pos, fault_pos, -d))\n print(\"F->B\", dist(fault_pos, target_pos, d) , dist(fault_pos, target_pos, -d))\n \n if dist(current_pos, fault_pos, d)+ dist(fault_pos, target_pos,d) >= 180.:\n return [target_pos]\n else:\n middle_target = current_pos + (360 - fast_dist(current_pos, target_pos))/2\n middle_target %=360\n print(\"A->M->B\", fast_dist(current_pos, middle_target), 
fast_dist(middle_target, target_pos))\n return [middle_target, target_pos]", "def test_rotate_and_move_right(controller):\n pos, angle = controller.odometry(11, 10, Vector2(0, 0), 0)\n assert pos.x > 0 # Moved forward.\n assert pos.y < 0 # Went a bit down.\n assert angle < 0 # Turned right.", "def test_move(self):\n # Shouldn't be able to run a move with this sampler\n self.assertRaises(NotImplementedError, lambda: gcmc_sphere_sampler.move(gcmc_sphere_simulation.context))\n\n return None", "def test_existing_file_after_assert_error(exist_of_file):\n try:\n assert read_magic_number(exist_of_file)\n except AssertionError:\n print(\"Now lets do check of existing file\")", "def rotate_file(filename, format=\"%(basename)s.%(number)03i%(extension)s\"):\n\tif os.path.exists(filename):\n\t\tpathlocation, basename, extension = filename_split(filename)\n\t\tfn = lambda n: os.path.join(pathlocation,format%{'basename':basename, 'extension':extension, 'number':n})\n\t\tn = 1\n\t\twhile os.path.exists(fn(n)):\n\t\t\tn += 1\n\t\twhile n > 1:\n\t\t\tos.rename(fn(n-1),fn(n))\n\t\t\tn -= 1\n\t\tos.rename(filename,fn(1))\n\telse:\n\t\traise FileNotFoundError(\"File %s does not exist\"%filename)", "def test_game_move_negative():\n\n file=\"/home/unit_test_grids/test_game_grids.txt\"\n my_game=Game(file)\n \n result=my_game.move('s')\n \n assert my_game.listOfMoves==[],\"The move function of game is changing \"\\\n \"the initial list of moves when the\"\\\n \"configuration file does not exists\"\n \n \n assert my_game.numberOfMoves==0,\"The move function of game is changing \"\\\n \"the initial number of moves when the\"\\\n \"configuration file does not exists\"\n \n \n assert result==None,\"The move function is not working\"\\\n \"correctly when the configuration \"\\\n \"file does not exists\"", "def act_move_file(self, file_source, file_target):\n try:\n if not os.path.isfile(file_source):\n return\n path = os.path.dirname(file_target)\n if not os.path.exists(path):\n os.makedirs(path)\n shutil.move(file_source, file_target)\n #shutil.copy2(file_source, file_target)\n #os.remove(file_source)\n self.logger.debug('%s: Action: <move> %s -> %s', self.name, file_source, file_target)\n except:\n self.logger.exception('Error on file move: %s -> %s', file_source, file_target)", "def _optimise_rotation(self):\n logger.info(\n f\"Minimising dimer rotation up to \"\n f'δϕ = {self.phi_tol.to(\"degrees\"):.4f}º'\n )\n\n for i in range(self._ratio_rot_iters):\n\n result = self._rotate()\n\n if (\n result == _StepResult.skipped_rotation\n or abs(self._coords.phi) < self.phi_tol\n ):\n break\n\n logger.info(\n f\"Micro iteration: {i}.\"\n f' ϕ={self._coords.phi.to(\"degrees\"):.2f}º'\n )\n\n return None", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. 
The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def test_force_delete(mocker, tmp_path):\n ro_file = Path(tmp_path, 'bar')\n ro_file.write_text(\"Test data\")\n make_readonly(ro_file)\n\n rmtree = mocker.Mock()\n utils.force_delete(rmtree, ro_file, sys.exc_info())\n\n assert (ro_file.stat().st_mode & stat.S_IWRITE) == stat.S_IWRITE\n rmtree.assert_called_once_with(ro_file)\n\n utils.rmtree(tmp_path)", "def rot_mosaic(source_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Processing/',\r\n output_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Final/',\r\n file_pattern='IID201905*.jpg', sub_dir=False, k=1, replace=False): \r\n \r\n \r\n if sub_dir:\r\n mosaics = []\r\n for root, dirnames, filenames in os.walk(source_dir):\r\n for filename in fnmatch.filter(filenames, file_pattern):\r\n mosaics.append(os.path.join(root, filename))\r\n else:\r\n mosaics = glob.glob(source_dir + file_pattern) \r\n \r\n g = 0\r\n r = 0\r\n s = 0\r\n for m in mosaics:\r\n f = output_dir + os.path.basename(m)\r\n if not os.path.exists(f):\r\n img = improc.imops.imio.imread(m)\r\n img = np.rot90(img, k=k) \r\n improc.imops.imio.imsave(f, img)\r\n print('generated: %s' % f)\r\n print('')\r\n g+=1\r\n elif replace:\r\n img = improc.imops.imio.imread(m)\r\n img = np.rot90(img, k=k)\r\n improc.imops.imio.imsave(f, img)\r\n print('replaced: %s' % f)\r\n print('')\r\n r+=1\r\n else:\r\n print('skipping: %s' % m)\r\n print('')\r\n s+=1\r\n\r\n print('generated total of %i files' % g)\r\n print('replaced total of %i files' % r)\r\n print('skipped total of %i files' % s)", "def reorder_folder(source, destination, alog, order_format=None):\n\n # Induct the folder first to begin reordering.\n try:\n alog.rlog = 'reorder_folder() calling induct_folder().'\n folders, files = MuzikArkive.induct_folder(source, alog)\n except CoreError as er:\n er.elog = 'induct_folder() failed.'\n raise er\n\n alog.rlog = str(len(files)) + ' files to re-order.'\n\n # Now the actual reordering happens, one item at a time.\n # Due to potentially high number of reorders, errors do not\n # stop the process, the item in question is skipped.\n # Errors are counted, so user intervention can be an option\n # should a high number occur.\n er_count = 0\n for i in range(0, len(files)):\n try:\n alog.rlog = 'Formatting new name for ' + files[i]\n new_name = MuzikArkive.filename_formatter(\n source, destination, files[i], order_format)\n except CoreError as er:\n er_count += 1\n alog.elog = 'filename_formatter() failed on file: ' + files[i] \\\n + ' Error count at: ' + str(er_count)\n alog.elogger(er.elog)\n else:\n try:\n alog.rlog = 'Renaming ' + files[i]\n MuzikArkive.rename_file(files[i], new_name, alog)\n except CoreError as er:\n er_count += 1\n alog.elog = 'rename_file() failed on file: ' + files[i] \\\n + ' Error count at: ' + str(er_count)\n alog.elogger(er.elog)\n\n # Should 10 errors occur, the user shall be notified, and\n # allowed to continue, abort, or view what errors have\n # occurred during reordering.\n if er_count > 10:\n print('!'*20)\n print('10 rename fails have occurred.\\nContinue?')\n\n while True:\n option = input('Yes\\nNo\\nShow fail renames')\n if option == 'Y':\n er_count = 0\n break\n elif option == 'N':\n raise CoreError(\n 'User terminated: Multiple rename fails.'\n )\n elif option == 'S':\n print('-'*20)\n for i in range(0, len(alog.elog)):\n 
print(alog.elog[i])\n print('-'*20)\n\n alog.rlog = 'reorder_folder completed.'", "def MovePath(options, src, dst):\n # if the destination is not an existing directory, then overwrite it\n if os.path.isdir(dst):\n dst = os.path.join(dst, os.path.basename(src))\n\n # If the destination exists, the remove it\n if os.path.exists(dst):\n if options.force:\n Remove(['-vfr', dst])\n if os.path.exists(dst):\n raise OSError('mv: FAILED TO REMOVE ' + dst)\n else:\n raise OSError('mv: already exists ' + dst)\n for _ in range(5):\n try:\n os.rename(src, dst)\n break\n except OSError as error:\n print('Failed on %s with %s, retrying' % (src, error))\n time.sleep(5)\n else:\n print('Gave up.')\n raise OSError('mv: ' + error)", "def test_fileAlreadyExistsNoOverwrite(self):\n fp = FilePath(self.mktemp())\n fp.touch()\n\n self.assertRaises(OSError, self.makeConnectedDccFileReceive, fp.path)", "def test_change_file_to_dir_with_file(self):\n #TODO: File must be removed before directory is created\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"bar\")\n self.assertFile(dir1, \"foo\", \"bar\")\n\n self.delete_file(dir0, \"foo\")\n self.write_file(dir0, \"foo/bar\", \"baz\")\n self.sync_all()\n self.assertFile(dir0, \"foo/bar\", \"baz\")\n self.assertFile(dir1, \"foo/bar\", \"baz\")", "def test_6f_move_data_btw_containers(self):\n if (not GST.logged_in) or (not GST.data_testing_swift_mounted):\n raise unittest.SkipTest(\"Skipped for failed login or failed mounting container.\")\n elif (GST.default_folder_to_be_used):\n if not (default_folders_exists):\n raise unittest.SkipTest(\"Skipped for failed to prepare default directories\")\n elif (not GST.dir1_exists):\n raise unittest.SkipTest(\"Skipped for failed to prepare dir1\")\n elif not GST.moving_data_test_ready:\n raise unittest.SkipTest(\"Skipped for failed to prepare moving data tests.\")\n self.dismiss_dialogs()\n function = js_func[\"move_file\"] % (GST.gs_file_paths[\"file_to_move_to_container_source_path\"], GST.gs_file_paths[\"move_to_container_target_path\"])\n try:\n self.send_request(function, \"move_file()\")\n except Exception as e:\n raise MoveException(\"Failed to move the data between containers. \\n\" + e.__str__())\n try:\n response = self.get_response()\n assert \"Success\" in response\n self.refresh_page()\n except AssertionError:\n raise MoveException(\"Failed to move the data between containers. 
\\n\" + response)", "def test_file_integrity_remove_file_in_case_of_fail():\n test_file = open('./testfile.tmp', 'a')\n test_file.close()\n test_file_path = os.path.realpath('./testfile.tmp')\n test_file_md5 = hashlib.md5(open(test_file_path, 'rb').read()).hexdigest()\n\n bad_md5 = 'some_noise_%s' % test_file_md5\n\n PackageDownloadHelper.check_file_integrity(test_file_path, bad_md5)\n\n assert not os.path.isfile(test_file_path)", "def rotate_in_place(angle):\n action = easy_cozmo._robot.turn_in_place(degrees(-1*angle),speed=degrees(df_rotate_speed))\n try:\n action.wait_for_completed()\n if action.has_succeeded:\n return True\n else:\n code, reason = action.failure_reason\n result = action.result\n print(\"WARNING RotateInPlace: code=%s reason='%s' result=%s\" % (code, reason, result))\n say_error(\"I couldn't rotate, sorry\")\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"I can't rotate, sorry\")\n try:\n while action.is_running:\n action.abort()\n time.sleep(.5)\n except Exception as e:\n import traceback\n print(e)\n traceback.print_exc()\n say_error(\"Wheels faulty\")\n\n return False", "def test_change_dir_to_file(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_dir(dir0, \"foo\")\n self.sync_all()\n self.assertDirPresent(dir0, \"foo\")\n self.assertDirPresent(dir1, \"foo\")\n\n self.delete_dir(dir0, \"foo\")\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"bar\")\n self.assertFile(dir1, \"foo\", \"bar\")", "def checks(self, event):\n if ListenerContainer.is_syncing:\n if ListenerContainer.move_to_folder:\n try:\n ListenerContainer.client.delete_folder(ListenerContainer.move_to_folder)\n except error_perm or error_reply: #TODO\n reset()\n # pass # nothing to delete\n # except error_reply:\n # reset()\n ListenerContainer.move_to_folder = None\n if ListenerContainer.move_to_file:\n try:\n ListenerContainer.client.delete_file(ListenerContainer.move_to_file)\n except error_perm or error_reply: # TODO\n reset()\n # pass # nothing to delete\n # except error_reply:\n # reset()\n ListenerContainer.move_to_file = None\n if event.pathname[-1] == '~': # Temp file\n return False\n else:\n return True\n else:\n timer = now()\n if ListenerContainer.move_to_folder:\n x = [timer, 'DELFOLDER', event.pathname]\n ListenerContainer.sync_db.quick_push(x)\n ListenerContainer.move_to_folder = None\n if ListenerContainer.move_to_file:\n x = [timer, 'DELFILE', event.pathname]\n ListenerContainer.sync_db.quick_push(x)\n ListenerContainer.move_to_file = None\n if event.pathname[-1] == '~': # Temp file\n return False\n else:\n return True", "def __shutil_fix(func, path, exc):\n # If the function is rmdir, remove or unlink and is an access error\n if func in (os.rmdir, os.remove, os.unlink) and exc[1].errno == errno.EACCES:\n # Set 777 as the permissions and call the function again\n os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)\n func(path)\n # Otherwise, just raise the exception again\n else:\n raise", "def test_reopen_changed_inode(tmp_path):\n\n path1 = tmp_path / \"file\"\n path2 = tmp_path / \"changed_file\"\n\n with open(path1, \"w\") as f:\n for i in range(1000):\n print(f\"{i}\", file=f)\n\n with open(path2, \"w\") as f:\n for i in range(2000):\n print(f\"{i}\", file=f)\n\n file_info = LogFileInfo(\n filename=path1,\n size_when_last_opened=0,\n file_position=0,\n file_handle=None,\n is_err_file=False,\n job_id=None,\n worker_pid=None,\n )\n\n file_info.reopen_if_necessary()\n for _ in 
range(1000):\n file_info.file_handle.readline()\n\n orig_file_pos = file_info.file_handle.tell()\n file_info.file_position = orig_file_pos\n\n # NOTE: On windows, an open file can't be deleted.\n file_info.file_handle.close()\n os.remove(path1)\n os.rename(path2, path1)\n\n file_info.reopen_if_necessary()\n\n assert file_info.file_position == orig_file_pos\n assert file_info.file_handle.tell() == orig_file_pos", "def testUploadWrapperCorruption(self):\n # Check that small reads still work.\n encrypted_data = \"\"\n count = 0\n with self.assertRaisesRegexp(IOError, \"HMAC not verified\"):\n while 1:\n small_read = self.encrypt_wrapper.read(2)\n if not small_read:\n break\n encrypted_data += small_read\n count += len(small_read)\n\n # Corrupt the data a little bit.\n if count == 3000:\n small_read = \"XX\"\n\n self.decrypt_wrapper.write(small_read)" ]
[ "0.684734", "0.6769702", "0.67119235", "0.6446284", "0.6391524", "0.6390081", "0.6323381", "0.6310103", "0.62937844", "0.6224313", "0.6208974", "0.6208974", "0.61933196", "0.619122", "0.6079188", "0.6045162", "0.598325", "0.59720737", "0.5965603", "0.5938034", "0.58546984", "0.5813051", "0.5810931", "0.5790306", "0.5756723", "0.57204556", "0.57194954", "0.569263", "0.56898737", "0.56779087", "0.56660485", "0.5642347", "0.56322587", "0.56281066", "0.56251836", "0.56246346", "0.56086093", "0.5574949", "0.5573983", "0.5545598", "0.55328184", "0.5489819", "0.5489373", "0.5479602", "0.54794157", "0.5471543", "0.5459487", "0.5457777", "0.54509974", "0.5450731", "0.54243654", "0.5424253", "0.5424079", "0.54183996", "0.5403872", "0.5401604", "0.5401305", "0.5400766", "0.5397335", "0.5396577", "0.5389417", "0.53871566", "0.5378436", "0.5371566", "0.53683674", "0.5364705", "0.5359734", "0.5357573", "0.5357194", "0.53439635", "0.53360456", "0.5330632", "0.5321108", "0.53172946", "0.5311898", "0.5299386", "0.52956796", "0.5291681", "0.52857685", "0.5269409", "0.5269273", "0.5261172", "0.5257264", "0.52440166", "0.52434623", "0.52430236", "0.52351916", "0.5232917", "0.52321005", "0.5229691", "0.52288955", "0.5228377", "0.5221997", "0.5221727", "0.5218099", "0.52140707", "0.52004635", "0.5194356", "0.51933634", "0.5192484" ]
0.6581009
3
Test get_spec_config on empty conf
def test_get_spec_config_empty(self):
    spec_conf = get_spec_config({}, '')
    self.assertEqual(spec_conf, {})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_spec_config_defaults(self):\n spec_conf = get_spec_config({\n 'defaults': {\n 'foo': 'bar'\n }\n }, '')\n self.assertEqual(spec_conf, {'foo': 'bar'})", "def test_config_spec(self):\n spec = self._gen.config_spec()\n self.assertIn('Number of examples', spec)\n self.assertIn('Maximum number of columns to change', spec)\n self.assertIn('Regression threshold', spec)\n self.assertIn('Prediction key', spec)", "def test_get_empty_config():\n\n testutils.deploy_config_raw(\"\")\n\n with pytest.raises(prop.PropertyError):\n prop.get_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def get_config_spec(cls):\n return False", "def test_config_file_empty(get_empty_config, monkeypatch, get_root, conf_obj):\n path = os.path.join(get_root, 'res', 'missing.yml')\n\n with pytest.raises(FileNotFoundError):\n get_empty_config(conf_obj, path)", "def test_get_spec_config_match(self):\n spec_conf = get_spec_config({\n 'defaults': {\n 'default_foo': 'default_bar',\n 'foo': 'bar'\n },\n 'specific': [\n {'mask': ['filenomatch'], 'foo': 'bar_nomatch'},\n {'mask': ['filematch'], 'foo': 'match'},\n {'mask': ['filenomatch2'], 'foo': 'bar_nomatch2'}\n ]\n }, 'filematch')\n self.assertEqual(spec_conf, {'default_foo': 'default_bar', 'foo': 'match', 'mask': ['filematch']})", "def test_get_with_empty_value(self):\n self.assertEqual(self.config.get('none_types','other_value'),None)\n self.assertEqual(self.config.get('none_types','other_value','something'),'something')", "def test_config_class():\n assert config is not None", "def test_test_empty_config():\n\n testutils.deploy_config_raw(\"\")\n\n assert prop.test_prop('info', 'sdk') == 0\n\n testutils.undeploy()\n\n return 0", "def _expected_config(self) -> Dict[str, Optional[str]]:\n return EXPECTED_CONFIG", "def _expected_config(self) -> Dict[str, Optional[str]]:\n return EXPECTED_CONFIG", "def test_no_config_keyword(self):\n args = self.get_args()\n config = {\n \"site\": {\n \"username\": \"\",\n \"name\": \"\",\n \"ip_address\": \"\",\n \"password\": \"\",\n \"local\": \"\",\n \"use_https\": \"\"\n }\n }\n temp = sys.stdout\n fake_out = FakeStdio()\n sys.stdout = fake_out\n\n config_filename = 'testsuite_cfg.json'\n args.config = config_filename\n config_file = open(config_filename, 'w')\n config_file.write(str(json.dumps(config)))\n config_file.close()\n\n execute_tool(args, test_mode=True)\n sys.stdout = temp\n self.assertTrue(fake_out.verify_output(['%% Invalid configuration file', '\\n']))", "def test_defaults():\n config = Config(\n env_var='DO_NOT_USE',\n env_prefix='DO_NOT_USE',\n entry_point_name='DO_NOT_USE',\n )\n\n assert not config.keys()", "def test_config_option_not_required_no_default():\n class Config(config.Config):\n a = config.option(int, help=\"\")\n\n c = config.structure({}, Config)\n assert c.a is None", "async def test_full_config(hass: HomeAssistant, mock_client) -> None:\n config = {\n prometheus.DOMAIN: {\n \"namespace\": \"ns\",\n \"default_metric\": \"m\",\n \"override_metric\": \"m\",\n \"requires_auth\": False,\n \"component_config\": {\"fake.test\": {\"override_metric\": \"km\"}},\n \"component_config_glob\": {\"fake.time_*\": {\"override_metric\": \"h\"}},\n \"component_config_domain\": {\"climate\": {\"override_metric\": \"°C\"}},\n \"filter\": {\n \"include_domains\": [\"climate\"],\n \"include_entity_globs\": [\"fake.time_*\"],\n \"include_entities\": [\"fake.test\"],\n \"exclude_domains\": [\"script\"],\n \"exclude_entity_globs\": [\"climate.excluded_*\"],\n \"exclude_entities\": 
[\"fake.time_excluded\"],\n },\n }\n }\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()", "def test_no_default(self):\n with self.assertRaises(ConfigError) as cm:\n imageroller.main.read_config(\n self._cmd_args,\n imageroller.test.get_config_parser(self._no_default))\n # ConcurrentWorkers is the first value that is checked\n self.assertEqual(str(cm.exception),\n \"Config must contain ConcurrentWorkers\")", "def test_config_device_init_with_defaults(get_config, monkeypatch):\n notbase_config = {'not_presented': 1}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', notbase_config)\n cfg = get_config(DeviceConfig, base_config)\n\n assert isinstance(cfg, DeviceConfig), 'wrong class'\n assert cfg.data == notbase_config, 'bad config loaded'", "def test_get_config_default_value(configs):\n assert get_config('SOURCE_FOLDER') == configs['SOURCE_FOLDER']", "def test_config_device_restore_empty(get_config, write_config_fixture, monkeypatch):\n fname = 'will_be_empty.yml'\n # saving normal conf\n is_default = {'test': 'conf'}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', is_default)\n cfg = get_config(DeviceConfig, base_config, fname=fname)\n cfg.save()\n write_config_fixture('', fname)\n should_be_default = cfg.read()\n\n assert should_be_default == is_default, 'configs not matched'", "def test_required_config_none(self):\n base_config = BaseConfig()\n setattr(base_config, 'required_config', ['TEST_CONF'])\n setattr(base_config, 'TEST_CONF', None)\n\n self.assertRaises(Exception, base_config.check_required_config)", "def test_config_option_required_no_default():\n class Config(config.Config):\n a = config.option(int, required=True, help=\"\")\n\n with pytest.raises(config.ConfigError):\n config.structure({}, Config)\n\n with pytest.raises(config.ConfigError):\n config.structure({\"a\": None}, Config)\n\n c = config.structure({\"a\": 12}, Config)\n assert c.a == 12", "def test_config_option_not_required_default():\n class Config(config.Config):\n a = config.option(int, default=12, required=False, help=\"\")\n\n c = config.structure({\"a\": None}, Config)\n assert c.a is None", "def test_validate_config_empty_config(self):\n\n sample_config = {}\n\n expected_config = {\n 'hosts': [],\n 'syncs': [],\n 'recursive': False,\n 'tags': [],\n }\n\n result = syncme.validate_config(sample_config)\n self.assertTrue(result)\n self.assertDictEqual(sample_config, expected_config)", "def test_no_config(self):\n exit_code = self.run_beat()\n\n assert exit_code == 1\n assert self.log_contains(\"error loading config file\") is True", "def check_config(cfg):", "def test_config_option_required_default():\n class Config(config.Config):\n a = config.option(int, required=True, default=12, help=\"\")\n\n c = config.structure({}, Config)\n assert c.a == 12\n\n c = config.structure({\"a\": 23}, Config)\n assert c.a == 23\n\n with pytest.raises(config.ConfigError):\n config.structure({\"a\": None}, Config)", "def test_none_in_config(self, mocked_callable_loader, mocked_load_config):\n config_filename = 'aconfigfile'\n importer.Finder(config_filename)", "def test_get_config(self):\r\n config = self.profile.get_config('testing.conf', TestConfig, storage_args=['this_section'])\r\n self.assertIsInstance(config, TestConfig)\r\n self.assertIsNone(config.save())", "def test_get_with_None_value(self):\n self.assertEqual(self.config.get('none_types','some_value'),None)\n 
self.assertEqual(self.config.get('none_types','some_value','something'),'something')", "def test_missing_paths():\n with pytest.raises(InputError):\n make_config([])", "async def test_minimal_config(hass: HomeAssistant, mock_client) -> None:\n config = {prometheus.DOMAIN: {}}\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()", "async def test_full_config(hass, mock_client):\n config = {\n prometheus.DOMAIN: {\n \"namespace\": \"ns\",\n \"default_metric\": \"m\",\n \"override_metric\": \"m\",\n \"component_config\": {\"fake.test\": {\"override_metric\": \"km\"}},\n \"component_config_glob\": {\"fake.time_*\": {\"override_metric\": \"h\"}},\n \"component_config_domain\": {\"climate\": {\"override_metric\": \"°C\"}},\n \"filter\": {\n \"include_domains\": [\"climate\"],\n \"include_entity_globs\": [\"fake.time_*\"],\n \"include_entities\": [\"fake.test\"],\n \"exclude_domains\": [\"script\"],\n \"exclude_entity_globs\": [\"climate.excluded_*\"],\n \"exclude_entities\": [\"fake.time_excluded\"],\n },\n }\n }\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()\n assert hass.bus.listen.called\n assert hass.bus.listen.call_args_list[0][0][0] == EVENT_STATE_CHANGED", "def test_broken_config(broken_config):\n with pytest.raises(RuntimeError, match=\"Error reading config.yml\"):\n abcconfig.get_config(broken_config)", "def test_get_config(default_config, tmp_path):\n abcconfig.write_config(default_config, configpath=tmp_path)\n config = abcconfig.get_config(configpath=tmp_path)\n assert config == default_config", "def test_config_prefix_none_value(self, mock_get_secret):\n kwargs = {'config_prefix': None}\n\n ssm_backend = SystemsManagerParameterStoreBackend(**kwargs)\n\n assert ssm_backend.get_config(\"config\") is None\n mock_get_secret.assert_not_called()", "def check_config(config):\n pass", "def test_config():\n if not os.path.exists(CONFIG_DIR):\n raise mupub.BadConfiguration('Configuration folder not found.')\n if not os.path.exists(_CONFIG_FNM):\n raise mupub.BadConfiguration('Configuration file not found.')\n if not os.path.exists(getDBPath()):\n raise mupub.BadConfiguration('Local database not found.')\n if len(CONFIG_DICT) == 0:\n raise mupub.BadConfiguration('Configuration was not loaded.')", "def testLoadConfigs_noConfigs(self):\n config_path = GetTestFilePath('no_config')\n with six.assertRaisesRegex(\n self, lab_config.ConfigError,\n r'.* no lab config files under the path.'):\n pool = lab_config.LabConfigPool(\n lab_config.LocalFileEnumerator(config_path, lab_config.IsYaml))\n pool.LoadConfigs()", "def test_find_config_nonexist(self):\n with pytest.raises(scuba.config.ConfigError):\n scuba.config.find_config()", "def test_invalid_config() -> None:\n config = {\"statsd\": {\"host1\": \"host1\"}}\n\n with pytest.raises(vol.Invalid):\n statsd.CONFIG_SCHEMA(None)\n with pytest.raises(vol.Invalid):\n statsd.CONFIG_SCHEMA(config)", "def test_config_setup(hass, loop):\n loop.run_until_complete(async_setup_component(hass, 'config', {}))\n assert 'config' in hass.config.components", "def test_config_spec(self):\n spec = self.ci.config_spec()\n self.assertIn('Label', spec)\n self.assertIsInstance(spec['Label'], lit_types.CategoryLabel)", "def test_no_server(self):\n with self.assertRaises(ConfigError) as cm:\n imageroller.main.read_config(\n self._cmd_args,\n imageroller.test.get_config_parser(self._no_server))\n self.assertEqual(str(cm.exception),\n \"You must configure at least one 
server\")", "def test_config_filename_not_given_envvar_is_empty(monkeypatch):\n monkeypatch.delenv(ENV_CONFIG_FILE)\n with pytest.raises(ValueError):\n Config()", "def test_config_option_implicitly_required():\n class Config(config.Config):\n a = config.option(int, default=12, help=\"\")\n\n c = config.structure({}, Config)\n assert c.a == 12\n\n with pytest.raises(config.ConfigError):\n config.structure({\"a\": None}, Config)", "def test_SpecConfig_class_minimal():\n res = SpecConfig(path=PATH_SPECS_2_YAML)\n assert res.path_out == PATH_SPECS_2_YAML_MODIFIED", "def test_with_no_cfg(tmp_path):\n # Arange\n cfg = os.path.join(tmp_path, \"gcbo.json\")\n\n # Act\n rm = gcbo.RepoManager(cfg=cfg)\n\n # Assert\n assert rm.has_cfg is False", "def test_config_device_restore_missing(get_config, config_dict, monkeypatch, write_config_fixture):\n fname = 'will_be_missing.yml'\n # saving normal conf\n is_default = {'test': 'conf'}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', is_default)\n cfg = get_config(DeviceConfig, config_dict, fname=fname)\n cfg.save()\n path = write_config_fixture('corrupted_string', fname)\n os.remove(path)\n # trying to read from non-existent file\n should_be_default = cfg.read()\n\n assert should_be_default == is_default, 'configs not matched'", "def test_nonexisting_config(self):\n try:\n tempdir = tempfile.mkdtemp()\n filename = os.path.join(tempdir, \"Config.yaml\")\n with self.assertRaises(easydms.config.ErrorNoConfiguration):\n easydms.config.Config(filename)\n\n with self.assertRaises(SystemExit) as cm:\n sys.argv = [\"prog\", \"-c\", filename]\n easydms.cli.main()\n self.assertNotEqual(cm.exception.code, 0)\n\n finally:\n shutil.rmtree(tempdir)", "def test_nonexistent_config_options(self, capsys):\n with open(self.config_file, \"w\") as f:\n f.write(\n textwrap.dedent(\n \"\"\"\n [DEFAULT]\n foo=bar\n \"\"\"\n )\n )\n with pytest.raises(SystemExit) as excinfo:\n self.config_parser.parse_config_options(self.namespace, configs=[self.config_file])\n out, err = capsys.readouterr()\n assert not out\n assert \"failed loading config: unknown arguments: --foo=bar\" in err\n assert excinfo.value.code == 2", "def test_get_with_default(self):\n self.assertEqual(self.config.get('basic','salutation'),None)\n self.assertEqual(self.config.get('basic','salutation','bonjour'),\n 'bonjour')", "def test_get_configs_with_filter(self) -> None:\n config1 = self.integration.create_config(name='Config 1',\n enabled=True,\n save=True)\n self.integration.create_config(name='Config 2',\n enabled=True,\n save=True)\n\n # Add some configs that shouldn't be returned.\n integration2 = \\\n self.manager.register_integration_class(DummyIntegration2)\n self.integration.create_config(name='Config 3', save=True)\n integration2.create_config(name='Config 4', save=True)\n\n self.assertEqual(self.integration.get_configs(name='Config 1'),\n [config1])", "async def test_optional_conf_keys(hass, mock_hub):\n register_config = {}\n await run_test(\n hass,\n mock_hub,\n register_config,\n SENSOR_DOMAIN,\n register_words=[0x8000],\n expected=\"-32768\",\n )", "def testInitEmpty():\n conf = naiveConf.NaiveConf()\n with pytest.raises(KeyError):\n print conf.x\n conf.x = 5\n assert conf.x == 5", "def testNoPrograms(self):\n\n # Get our formatted ini file\n self.configFile = CONFIG_NO_PROGRAMS\n\n # Build our config\n with tempfile.NamedTemporaryFile(mode='r+b') as f:\n f.write(self.configFile)\n # Calling readlines on the temp file. Without this Config fails to\n # read it. 
I have no idea why.\n f.readlines()\n self.assertRaises(\n ValueError,\n tools.Config,\n f.name\n )", "def test_no_config_file(self):\r\n instance = self._makeOne()\r\n\r\n # No default config file search in case they would exist\r\n self.assertTrue(len(instance.searchpaths) > 0)\r\n instance.searchpaths = []\r\n\r\n instance.exit = dummy_exit()\r\n\r\n instance.realize(args=['-s', 'http://localhost:9001', '-u', 'chris',\r\n '-p', '123'])\r\n\r\n self.assertEqual(instance.interactive, 1)\r\n self.assertEqual(instance.serverurl, 'http://localhost:9001')\r\n self.assertEqual(instance.username, 'chris')\r\n self.assertEqual(instance.password, '123')", "def _check_config(self):", "async def test_api_get_config(hass: HomeAssistant, mock_api_client: TestClient) -> None:\n resp = await mock_api_client.get(const.URL_API_CONFIG)\n result = await resp.json()\n if \"components\" in result:\n result[\"components\"] = set(result[\"components\"])\n if \"whitelist_external_dirs\" in result:\n result[\"whitelist_external_dirs\"] = set(result[\"whitelist_external_dirs\"])\n if \"allowlist_external_dirs\" in result:\n result[\"allowlist_external_dirs\"] = set(result[\"allowlist_external_dirs\"])\n if \"allowlist_external_urls\" in result:\n result[\"allowlist_external_urls\"] = set(result[\"allowlist_external_urls\"])\n\n assert hass.config.as_dict() == result", "def test_set_defaults(self):\r\n self.assertEqual(self.config.values['option1'], 1337)\r\n self.assertNotIn('option2', self.config.values)", "def test_renderer_discovers_special_config(self):\n datastore = Mock(spec=DatastoreVersion)\n datastore.datastore_name = 'mysql'\n datastore.name = 'mysql-test'\n datastore.manager = 'mysql'\n config = template.SingleInstanceConfigTemplate(datastore,\n self.flavor_dict,\n self.server_id)\n self.validate_template(config.render(), \"hyper\",\n {'ram': 0}, self.server_id)", "def ignor_test_load_default_config(self):\n config = AnnotatorConfig()\n assert config[\"config\"] == \"config.json\"", "def test_config_opts(sc):\n assert sc.server_name is not None\n assert sc.deployment == Deployment.stg\n assert sc.admins is not None\n assert sc.command_handler is not None\n assert sc.command_handler_work_dir is not None\n assert sc.command_handler_pvc_env_var is not None\n assert sc.command_handler_image_reference is not None\n assert sc.command_handler_k8s_namespace is not None\n assert sc.fas_password is not None\n assert sc.testing_farm_secret is not None\n assert sc.github_requests_log_path is not None\n assert sc.webhook_secret is not None\n assert sc.validate_webhooks is not None\n assert sc.gitlab_token_secret is not None", "def test_blank_config_doesnt_crash(tmpdir):\n config = tmpdir.join(\"config.yml\")\n config.write('')\n util.read_config(tmpdir)", "def test_load_config(self):\n config = copyclipper.LoadConfig()\n self.assertTrue(len(config) > 0)", "def check_configs(self):\n\n pass", "def test_config_no_file(self):\n if os.path.isfile(\"test_config.conf\"):\n os.remove(\"test_config.conf\")\n self.assertRaises(ConfigError, Config, \"test_config.conf\")", "def test_config_device_reset(get_config, monkeypatch):\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', {'test': 'conf'})\n cfg = get_config(DeviceConfig, base_config)\n cfg.save()\n cfg.write_default()\n new_conf = cfg.load()\n\n assert cfg.data == cfg.minimal_essential_conf, 'failed to apply default config'\n assert new_conf == cfg.minimal_essential_conf, 'failed to load default config'", "def test_config_must_exist(cls, values):\n configs = 
[c.config for c in values.get('configs')]\n for test in values.get('tests'):\n if test.config not in configs:\n raise ValueError(\n f\"Test '{test.test}' gave the config '{test.config}', but \"\n \"this config does not exist in the file \"\n f\"'{values.get('yaml')}'. Configs detected : {configs} \\n\")\n return values", "def testConfigA(self):\n assert type(self.config) == dict, \"Read setting not returning a dictionary\"", "def test_get(self):\n self.assertEqual(self.config.get('basic','greeting'),'hello')", "def test_bogus_configs():\n with pytest.raises(FileNotFoundError):\n phmdoctest.main.generate_using(config_file=Path(\"bogus.toml\"))\n with pytest.raises(ValueError):\n # Can't generate from a .py file.\n phmdoctest.main.generate_using(config_file=Path(\"setup.py\"))", "def testParse_noHostConfigs(self):\n # No host_configs is fine.\n config_path = GetTestFilePath('valid/config_without_host_configs.yaml')\n with open(config_path, 'r') as f:\n lab_config_pb = lab_config.Parse(f)\n self.assertEqual(0, len(lab_config_pb.cluster_configs[0].host_configs))", "async def test_setup_missing_config(hass: HomeAssistant) -> None:\n assert await async_setup_component(\n hass, SENSOR_DOMAIN, {SENSOR_DOMAIN: {\"platform\": DOMAIN}}\n )\n await hass.async_block_till_done()\n assert len(hass.states.async_all(SENSOR_DOMAIN)) == 0", "def test_config_ok_config(self):\n test_data = (\"[gnupg]\\n\"\n \"recipients = [email protected]\\n\"\n \"signer = [email protected]\\n\"\n \"\\n\"\n \"[amazon-s3]\\n\"\n \"access_key = ACCESSKEY\\n\"\n \"secret_access_key = SECRETACCESSKEY\\n\"\n \"\\n\"\n \"[data]\\n\"\n \"\\n\"\n \"bucket = DATABUCKET\\n\"\n \"[metadata]\\n\"\n \"bucket = METADATABUCKET\\n\"\n \"\\n\")\n if os.path.isfile(\"test_config.conf\"):\n os.remove(\"test_config.conf\")\n file(\"test_config.conf\", \"wb\").write(test_data)\n config = Config(\"test_config.conf\")\n self.assertIn(\"gnupg\", config.config.sections())\n self.assertIn(\"amazon-s3\", config.config.sections())\n self.assertEqual(config.config.get(\n \"gnupg\", \"recipients\"), \"[email protected]\")\n self.assertEqual(config.config.get(\n \"gnupg\", \"signer\"), \"[email protected]\")\n self.assertEqual(config.config.get(\n \"amazon-s3\", \"access_key\"), \"ACCESSKEY\")\n self.assertEqual(config.config.get(\n \"amazon-s3\", \"secret_access_key\"), \"SECRETACCESSKEY\")\n self.assertEqual(config.config.get(\n \"data\", \"bucket\"), \"DATABUCKET\")\n self.assertEqual(config.config.get(\n \"metadata\", \"bucket\"), \"METADATABUCKET\")\n os.remove(\"test_config.conf\")", "def test_getJsonConfig(self) -> None:\n jsonConf = getJsonConfig()\n self.assertTrue(\"idSrvDiscoUrl\" in jsonConf)\n self.assertTrue(\"accessTokenFetchAudience\" in jsonConf)", "def test_no_such_conf_section(self):\n del self.oslo_config_dict['heat']\n self.assert_service_disabled(\n 'orchestration',\n \"No section for project 'heat' (service type 'orchestration') was \"\n \"present in the config.\",\n )", "def test_config_is_loaded(config):\n assert config[\"DEBUG\"] is False", "def test_config_option_required_example():\n class Config(config.Config):\n a = config.option(int, required=True, example=12, help=\"\")\n\n with pytest.raises(config.ConfigError):\n config.structure({}, Config)\n\n c = config.make_example(Config)\n assert c.a == 12", "def mock_config():\n from .. 
import config\n\n _old_fs = os.getenv('FREESURFER_HOME')\n if not _old_fs:\n os.environ['FREESURFER_HOME'] = mkdtemp()\n\n filename = Path(pkgrf('fmriprep', 'data/tests/config.toml'))\n settings = loads(filename.read_text())\n for sectionname, configs in settings.items():\n if sectionname != 'environment':\n section = getattr(config, sectionname)\n section.load(configs, init=False)\n config.nipype.omp_nthreads = 1\n config.nipype.init()\n config.loggers.init()\n config.init_spaces()\n\n config.execution.work_dir = Path(mkdtemp())\n config.execution.bids_dir = Path(pkgrf('fmriprep', 'data/tests/ds000005')).absolute()\n config.execution.fmriprep_dir = Path(mkdtemp())\n config.execution.init()\n\n yield\n\n shutil.rmtree(config.execution.work_dir)\n shutil.rmtree(config.execution.fmriprep_dir)\n\n if not _old_fs:\n del os.environ[\"FREESURFER_HOME\"]", "def test_get_configs():\n\n configs = application_services.get_configs()\n\n assert isinstance(configs, dict)\n assert len(configs) > 0", "def test_SpecConfig_class():\n res = SpecConfig(**SPEC_CONFIG)\n assert res.path_out == SPEC_CONFIG['path_out']", "def test_component_specifications_exist(self):\r\n\t\tself.assertTrue(not (self._configuration_[\"AddWordDefinitionTask\"] is None\r\n\t\t or self._configuration_[\"ListWordDefinitionsTask\"] is None or\r\n\t\t self._configuration_[\"RemoveWordDefinitionTask\"] is None))", "def test_config():\n assert not create_app().testing\n assert create_app(TestConfig).testing", "def test_get_reg_ex_config(self):\n pass", "def default_empty_config_env(monkeypatch):\n monkeypatch.delenv(\"SOPEL_CONFIG\", raising=False)\n monkeypatch.delenv(\"SOPEL_CONFIG_DIR\", raising=False)", "def testFakeLongHandConfigurables(self):\n config = self.getBaseConfiguration()\n # The following should be ignored by the configure step\n config.Webtools.section_('foo')\n config.Webtools.foo.bar = 'baz'\n config.Webtools.section_('stuff')\n config.Webtools.stuff = 'things'\n\n server = Root(config)\n server.start(blocking=False)\n\n self.assertFalse('foo' in cpconfig, 'non-standard configurable passed to server')\n self.assertFalse('stuff' in cpconfig, 'non-standard configurable passed to server')\n\n server.stop()", "def test_minimal_configuration(self):\n args = argparse.Namespace(cfg=os.path.join(TEST_DATA_DIR, 'minimal-cfg-file.ini'))\n self.cfg = configure(args)\n cfg = ElasticBlastConfig(self.cfg, task = ElbCommand.SUBMIT)\n\n self.assertTrue(cfg.blast.db_source)\n self.assertEqual(cfg.blast.db_source, DBSource.GCP)\n\n self.assertTrue(cfg.blast.batch_len)\n self.assertEqual(cfg.blast.batch_len, 10000)\n\n self.assertTrue(cfg.blast.mem_request)\n self.assertEqual(cfg.blast.mem_request, '0.5G')\n\n self.assertTrue(cfg.blast.mem_limit)\n expected_mem_limit = f'{get_machine_properties(cfg.cluster.machine_type).memory - SYSTEM_MEMORY_RESERVE}G'\n self.assertEqual(cfg.blast.mem_limit, expected_mem_limit)\n\n self.assertTrue(cfg.timeouts.init_pv > 0)\n self.assertTrue(cfg.timeouts.blast_k8s > 0)\n\n ElasticBlastConfig(self.cfg, task = ElbCommand.SUBMIT)", "def test_get_property_missing(self):\r\n try:\r\n value = self.config.option2\r\n assert value\r\n except Exception as e:\r\n self.assertIsInstance(e, OptionValueNotSetError)\r\n self.assertNotIn('option2', self.config.values)", "def test_no_adapter_opts(self):\n self.oslo_config_dict['heat'] = None\n self.assert_service_disabled(\n 'orchestration',\n \"Encountered an exception attempting to process config for \"\n \"project 'heat' (service type 'orchestration'): no such 
option\",\n )", "def test_read_no_env_config():\n data = \"\"\n with mock.patch(\n \"configparser.open\",\n # \"builtins.open\",\n mock.mock_open(read_data=data),\n ):\n testargs = [\"yessssms\", \"-m\", \"Bilde mir nicht ein was rechts zu wissen\"]\n with (mock.patch.object(sys, \"argv\", testargs)):\n with pytest.raises(SystemExit) as wrapped_e:\n cli = CLI()\n assert cli.read_env_config() is None\n assert wrapped_e.type == SystemExit\n assert wrapped_e.value.code == 2", "def test_read_config_option(self):\n # set up config\n config.set_config_file(os.path.join(path_to_module, \"test_config.conf\"))\n config.setup()\n # Test that all the parameters loaded from file are correct\n self.assertEqual(config.read_config_option('client_id'), 'uploader')\n self.assertEqual(config.read_config_option('client_secret'), 'secret')\n self.assertEqual(config.read_config_option('username'), 'admin')\n self.assertEqual(config.read_config_option('password'), 'password1')\n self.assertEqual(config.read_config_option('base_url'), 'http://localhost:8080/irida-latest/api/')\n self.assertEqual(config.read_config_option('parser'), 'miseq')\n self.assertEqual(config.read_config_option('readonly', bool), False)", "def test_spectrum_section_config(tardis_config_verysimple):\n tardis_config_verysimple[\"spectrum\"][\"start\"] = Quantity(\"2500 angstrom\")\n tardis_config_verysimple[\"spectrum\"][\"stop\"] = Quantity(\"500 angstrom\")\n with pytest.raises(ValueError):\n conf = Configuration.from_config_dict(\n tardis_config_verysimple, validate=True, config_dirname=\"test\"\n )", "def test_collect_configuration(self):\n sample_config = \"\"\"[dyndnsc]\nconfigs = testconfig\n\n[testconfig]\nuse_preset = testpreset\nupdater-userid = bob\nupdater-password = XYZ\n# test overwriting a preset value:\ndetector-url = http://myip.example.com/\n\n[preset:testpreset]\nupdater = fubarUpdater\nupdater-url = https://update.example.com/nic/update\nupdater-moreparam = some_stuff\ndetector = webcheck4\ndetector-family = INET\ndetector-url = http://ip.example.com/\ndetector-parser = plain\n \"\"\"\n p = configparser.ConfigParser()\n p.readfp(StringIO(sample_config)) # XXX readfp() is deprecated since py 3.2\n config = collect_config(p)\n self.assertEqual(dict, type(config))\n self.assertTrue('testconfig' in config)\n self.assertTrue('detector' in config['testconfig'])\n self.assertTrue(isinstance(config['testconfig']['detector'], list))\n self.assertEqual(1, len(config['testconfig']['detector']))\n detector, detector_opts = config['testconfig']['detector'][-1]\n self.assertEqual(detector, \"webcheck4\") # from the preset\n self.assertEqual(detector_opts['url'], \"http://myip.example.com/\") # from the user conf\n self.assertTrue('updater' in config['testconfig'])\n self.assertTrue(isinstance(config['testconfig']['updater'], list))\n self.assertEqual(1, len(config['testconfig']['updater']))\n updater = config['testconfig']['updater'][0]\n self.assertEqual(\"fubarUpdater\", updater[0])\n self.assertTrue(\"url\" in updater[1])\n self.assertTrue(\"moreparam\" in updater[1])\n self.assertEqual(\"some_stuff\", updater[1][\"moreparam\"])", "def test_load_configs_testing(self):\n global locator, config_paths\n locator.load_config(config_paths[0])\n\n self.assertEqual(locator.config['routines'], ['debug'])\n self.assertEqual(locator.config['driver'],\n {\n 'type': 'TestDriver',\n 'kwargs': {\n 'verbose': False\n }\n })", "def test_empty_azure_config_dir():\n pass", "def test_config():\n args = Namespace(molecule=\"nucleotide\", 
verbose=False)\n config = core.Config.from_args(args)\n assert config.verbose is False\n assert config.molecule == 'nucleotide'\n assert config.extended_validation == 'none'\n\n args = Namespace(molecule=\"protein\", verbose=True)\n config = core.Config.from_args(args)\n assert config.verbose is True\n assert config.molecule == 'protein'", "async def test_config_not_ready(hass):\n entry = MockConfigEntry(\n domain=DOMAIN,\n title=\"Home\",\n unique_id=\"55.55-122.12\",\n data={\n \"api_key\": \"foo\",\n \"latitude\": 55.55,\n \"longitude\": 122.12,\n \"name\": \"Home\",\n },\n )\n\n with patch(\"airly._private._RequestsHandler.get\", side_effect=ConnectionError()):\n entry.add_to_hass(hass)\n await hass.config_entries.async_setup(entry.entry_id)\n assert entry.state == ENTRY_STATE_SETUP_RETRY", "def test_required_config(self, ckan_config, monkeypatch, option):\n monkeypatch.delitem(ckan_config, option)\n plugin = p.get_plugin(\"cloudstorage\")\n with pytest.raises(RuntimeError, match=\"configuration option\"):\n plugin.configure(ckan_config)", "def test_SpecConfig_class_extra_arg():\n with pytest.raises(ValidationError):\n SpecConfig(non_existing=PATH_SPECS_2_YAML)", "def valid_config(hass: HomeAssistant, requests_mock):\n requests_mock.get(\n \"https://api.vultr.com/v1/account/info?api_key=ABCDEFG1234567\",\n text=load_fixture(\"account_info.json\", \"vultr\"),\n )\n\n with patch(\n \"vultr.Vultr.server_list\",\n return_value=json.loads(load_fixture(\"server_list.json\", \"vultr\")),\n ):\n # Setup hub\n vultr.setup(hass, VALID_CONFIG)" ]
[ "0.76028246", "0.7311133", "0.72477543", "0.7058012", "0.69974715", "0.69667923", "0.68360406", "0.6766845", "0.67532086", "0.6709966", "0.6709966", "0.66910833", "0.66852343", "0.6642581", "0.6638022", "0.6632264", "0.66242176", "0.661909", "0.661713", "0.6613062", "0.6608504", "0.6528488", "0.6495036", "0.6484634", "0.64743805", "0.6469067", "0.6463956", "0.64562863", "0.64150214", "0.64059645", "0.64014703", "0.6394963", "0.637511", "0.63688815", "0.63655484", "0.6364919", "0.6364624", "0.63416684", "0.6336756", "0.632237", "0.6317474", "0.6309018", "0.63068664", "0.6296431", "0.6290606", "0.62682384", "0.62607455", "0.62601495", "0.6257157", "0.6246787", "0.62422013", "0.623475", "0.6233505", "0.6216738", "0.6216206", "0.62020046", "0.6200825", "0.6190963", "0.61834", "0.6179036", "0.6178279", "0.6174811", "0.6171315", "0.617123", "0.6152853", "0.61462337", "0.61297643", "0.61281925", "0.6120559", "0.6109957", "0.6108061", "0.61058223", "0.60974663", "0.6095098", "0.6094373", "0.60890603", "0.6085901", "0.6066362", "0.60660183", "0.6064786", "0.6057915", "0.6053373", "0.6036368", "0.60314107", "0.6028487", "0.602689", "0.6025844", "0.6024449", "0.6010527", "0.60094637", "0.60023", "0.5996854", "0.5996512", "0.5987969", "0.5982394", "0.5974462", "0.5970802", "0.59702075", "0.5967744", "0.5960062" ]
0.84538144
0
Test get_spec_config on conf with defaults
def test_get_spec_config_defaults(self):
    spec_conf = get_spec_config({
        'defaults': {
            'foo': 'bar'
        }
    }, '')
    self.assertEqual(spec_conf, {'foo': 'bar'})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_spec_config_match(self):\n spec_conf = get_spec_config({\n 'defaults': {\n 'default_foo': 'default_bar',\n 'foo': 'bar'\n },\n 'specific': [\n {'mask': ['filenomatch'], 'foo': 'bar_nomatch'},\n {'mask': ['filematch'], 'foo': 'match'},\n {'mask': ['filenomatch2'], 'foo': 'bar_nomatch2'}\n ]\n }, 'filematch')\n self.assertEqual(spec_conf, {'default_foo': 'default_bar', 'foo': 'match', 'mask': ['filematch']})", "def test_config_spec(self):\n spec = self._gen.config_spec()\n self.assertIn('Number of examples', spec)\n self.assertIn('Maximum number of columns to change', spec)\n self.assertIn('Regression threshold', spec)\n self.assertIn('Prediction key', spec)", "def test_get_spec_config_empty(self):\n spec_conf = get_spec_config({}, '')\n self.assertEqual(spec_conf, {})", "def test_get_config_default_value(configs):\n assert get_config('SOURCE_FOLDER') == configs['SOURCE_FOLDER']", "def test_config_device_init_with_defaults(get_config, monkeypatch):\n notbase_config = {'not_presented': 1}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', notbase_config)\n cfg = get_config(DeviceConfig, base_config)\n\n assert isinstance(cfg, DeviceConfig), 'wrong class'\n assert cfg.data == notbase_config, 'bad config loaded'", "def test_get_config(default_config, tmp_path):\n abcconfig.write_config(default_config, configpath=tmp_path)\n config = abcconfig.get_config(configpath=tmp_path)\n assert config == default_config", "def test_get_with_default(self):\n self.assertEqual(self.config.get('basic','salutation'),None)\n self.assertEqual(self.config.get('basic','salutation','bonjour'),\n 'bonjour')", "def _expected_config(self) -> Dict[str, Optional[str]]:\n return EXPECTED_CONFIG", "def _expected_config(self) -> Dict[str, Optional[str]]:\n return EXPECTED_CONFIG", "def test_collect_configuration(self):\n sample_config = \"\"\"[dyndnsc]\nconfigs = testconfig\n\n[testconfig]\nuse_preset = testpreset\nupdater-userid = bob\nupdater-password = XYZ\n# test overwriting a preset value:\ndetector-url = http://myip.example.com/\n\n[preset:testpreset]\nupdater = fubarUpdater\nupdater-url = https://update.example.com/nic/update\nupdater-moreparam = some_stuff\ndetector = webcheck4\ndetector-family = INET\ndetector-url = http://ip.example.com/\ndetector-parser = plain\n \"\"\"\n p = configparser.ConfigParser()\n p.readfp(StringIO(sample_config)) # XXX readfp() is deprecated since py 3.2\n config = collect_config(p)\n self.assertEqual(dict, type(config))\n self.assertTrue('testconfig' in config)\n self.assertTrue('detector' in config['testconfig'])\n self.assertTrue(isinstance(config['testconfig']['detector'], list))\n self.assertEqual(1, len(config['testconfig']['detector']))\n detector, detector_opts = config['testconfig']['detector'][-1]\n self.assertEqual(detector, \"webcheck4\") # from the preset\n self.assertEqual(detector_opts['url'], \"http://myip.example.com/\") # from the user conf\n self.assertTrue('updater' in config['testconfig'])\n self.assertTrue(isinstance(config['testconfig']['updater'], list))\n self.assertEqual(1, len(config['testconfig']['updater']))\n updater = config['testconfig']['updater'][0]\n self.assertEqual(\"fubarUpdater\", updater[0])\n self.assertTrue(\"url\" in updater[1])\n self.assertTrue(\"moreparam\" in updater[1])\n self.assertEqual(\"some_stuff\", updater[1][\"moreparam\"])", "def get_config_spec(cls):\n return False", "def test_config_spec(self):\n spec = self.ci.config_spec()\n self.assertIn('Label', spec)\n self.assertIsInstance(spec['Label'], 
lit_types.CategoryLabel)", "async def test_full_config(hass: HomeAssistant, mock_client) -> None:\n config = {\n prometheus.DOMAIN: {\n \"namespace\": \"ns\",\n \"default_metric\": \"m\",\n \"override_metric\": \"m\",\n \"requires_auth\": False,\n \"component_config\": {\"fake.test\": {\"override_metric\": \"km\"}},\n \"component_config_glob\": {\"fake.time_*\": {\"override_metric\": \"h\"}},\n \"component_config_domain\": {\"climate\": {\"override_metric\": \"°C\"}},\n \"filter\": {\n \"include_domains\": [\"climate\"],\n \"include_entity_globs\": [\"fake.time_*\"],\n \"include_entities\": [\"fake.test\"],\n \"exclude_domains\": [\"script\"],\n \"exclude_entity_globs\": [\"climate.excluded_*\"],\n \"exclude_entities\": [\"fake.time_excluded\"],\n },\n }\n }\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()", "def test_SpecConfig_class():\n res = SpecConfig(**SPEC_CONFIG)\n assert res.path_out == SPEC_CONFIG['path_out']", "def test_set_defaults(self):\r\n self.assertEqual(self.config.values['option1'], 1337)\r\n self.assertNotIn('option2', self.config.values)", "def test_get(self):\n self.assertEqual(self.config.get('basic','greeting'),'hello')", "def test_config():\n args = Namespace(molecule=\"nucleotide\", verbose=False)\n config = core.Config.from_args(args)\n assert config.verbose is False\n assert config.molecule == 'nucleotide'\n assert config.extended_validation == 'none'\n\n args = Namespace(molecule=\"protein\", verbose=True)\n config = core.Config.from_args(args)\n assert config.verbose is True\n assert config.molecule == 'protein'", "def mock_config():\n real_configuration = pymod.config.config\n cfg = pymod.config.Configuration()\n basename = pymod.names.config_file_basename\n default_config_file = os.path.join(pymod.paths.etc_path, \"defaults\", basename)\n defaults = pymod.config.load_config(default_config_file)\n cfg.push_scope(\"defaults\", defaults)\n\n dirname = py.path.local(tempfile.mkdtemp())\n pymod.paths.user_config_path = dirname.strpath\n pymod.paths.user_cache_path = dirname.strpath\n\n pymod.config.config = cfg\n\n yield pymod.config.config\n\n pymod.config.config = real_configuration", "def test_load_configs_testing(self):\n global locator, config_paths\n locator.load_config(config_paths[0])\n\n self.assertEqual(locator.config['routines'], ['debug'])\n self.assertEqual(locator.config['driver'],\n {\n 'type': 'TestDriver',\n 'kwargs': {\n 'verbose': False\n }\n })", "def test_config_opts(sc):\n assert sc.server_name is not None\n assert sc.deployment == Deployment.stg\n assert sc.admins is not None\n assert sc.command_handler is not None\n assert sc.command_handler_work_dir is not None\n assert sc.command_handler_pvc_env_var is not None\n assert sc.command_handler_image_reference is not None\n assert sc.command_handler_k8s_namespace is not None\n assert sc.fas_password is not None\n assert sc.testing_farm_secret is not None\n assert sc.github_requests_log_path is not None\n assert sc.webhook_secret is not None\n assert sc.validate_webhooks is not None\n assert sc.gitlab_token_secret is not None", "def test_config_option_required_default():\n class Config(config.Config):\n a = config.option(int, required=True, default=12, help=\"\")\n\n c = config.structure({}, Config)\n assert c.a == 12\n\n c = config.structure({\"a\": 23}, Config)\n assert c.a == 23\n\n with pytest.raises(config.ConfigError):\n config.structure({\"a\": None}, Config)", "def test_config_class():\n assert config is not None", "def 
test_load_configs_simulation(self):\n global locator, config_paths\n locator.load_config(config_paths[1])\n\n self.assertEqual(locator.config['routines'], ['simulate'])\n self.assertEqual(locator.config['driver'],\n {\n 'type': 'SimDriver',\n 'kwargs': {\n \"arg\": \"val\"\n }\n })", "def test_defaults():\n config = Config(\n env_var='DO_NOT_USE',\n env_prefix='DO_NOT_USE',\n entry_point_name='DO_NOT_USE',\n )\n\n assert not config.keys()", "def test_read_config_option(self):\n # set up config\n config.set_config_file(os.path.join(path_to_module, \"test_config.conf\"))\n config.setup()\n # Test that all the parameters loaded from file are correct\n self.assertEqual(config.read_config_option('client_id'), 'uploader')\n self.assertEqual(config.read_config_option('client_secret'), 'secret')\n self.assertEqual(config.read_config_option('username'), 'admin')\n self.assertEqual(config.read_config_option('password'), 'password1')\n self.assertEqual(config.read_config_option('base_url'), 'http://localhost:8080/irida-latest/api/')\n self.assertEqual(config.read_config_option('parser'), 'miseq')\n self.assertEqual(config.read_config_option('readonly', bool), False)", "def ignor_test_load_default_config(self):\n config = AnnotatorConfig()\n assert config[\"config\"] == \"config.json\"", "def mock_config():\n from .. import config\n\n _old_fs = os.getenv('FREESURFER_HOME')\n if not _old_fs:\n os.environ['FREESURFER_HOME'] = mkdtemp()\n\n filename = Path(pkgrf('fmriprep', 'data/tests/config.toml'))\n settings = loads(filename.read_text())\n for sectionname, configs in settings.items():\n if sectionname != 'environment':\n section = getattr(config, sectionname)\n section.load(configs, init=False)\n config.nipype.omp_nthreads = 1\n config.nipype.init()\n config.loggers.init()\n config.init_spaces()\n\n config.execution.work_dir = Path(mkdtemp())\n config.execution.bids_dir = Path(pkgrf('fmriprep', 'data/tests/ds000005')).absolute()\n config.execution.fmriprep_dir = Path(mkdtemp())\n config.execution.init()\n\n yield\n\n shutil.rmtree(config.execution.work_dir)\n shutil.rmtree(config.execution.fmriprep_dir)\n\n if not _old_fs:\n del os.environ[\"FREESURFER_HOME\"]", "def test_loads_a_config_file(self):\n from test.resources import config\n self.assertIsInstance(config, type(sys))\n self.assertIsNotNone(config.example)\n self.assertEqual(config.example.config_option, 'config-value')", "def test_SpecConfig_class_minimal():\n res = SpecConfig(path=PATH_SPECS_2_YAML)\n assert res.path_out == PATH_SPECS_2_YAML_MODIFIED", "def test_get_config(self):\r\n config = self.profile.get_config('testing.conf', TestConfig, storage_args=['this_section'])\r\n self.assertIsInstance(config, TestConfig)\r\n self.assertIsNone(config.save())", "def testConfigA(self):\n assert type(self.config) == dict, \"Read setting not returning a dictionary\"", "def test_default_config():\n\n from app.config import Default\n \n assert Default.DEBUG == False\n assert Default.TESTING == False\n assert Default.JWT_BLACKLIST_ENABLED == True\n assert Default.JWT_BLACKLIST_TOKEN_CHECKS == ['access', 'refresh']\n assert Default.SQLALCHEMY_TRACK_MODIFICATIONS == False", "async def test_full_config(hass, mock_client):\n config = {\n prometheus.DOMAIN: {\n \"namespace\": \"ns\",\n \"default_metric\": \"m\",\n \"override_metric\": \"m\",\n \"component_config\": {\"fake.test\": {\"override_metric\": \"km\"}},\n \"component_config_glob\": {\"fake.time_*\": {\"override_metric\": \"h\"}},\n \"component_config_domain\": {\"climate\": {\"override_metric\": 
\"°C\"}},\n \"filter\": {\n \"include_domains\": [\"climate\"],\n \"include_entity_globs\": [\"fake.time_*\"],\n \"include_entities\": [\"fake.test\"],\n \"exclude_domains\": [\"script\"],\n \"exclude_entity_globs\": [\"climate.excluded_*\"],\n \"exclude_entities\": [\"fake.time_excluded\"],\n },\n }\n }\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()\n assert hass.bus.listen.called\n assert hass.bus.listen.call_args_list[0][0][0] == EVENT_STATE_CHANGED", "def testGetConfig():\n configs = GetConfig()\n # print(configs.host_ip)\n # print(configs.proxy_local)\n \n # print(configs.proxy_online)\n # print(configs.user_img_url)\n # print(configs.user_login_url)\n print(configs.user_start_id)\n\n # assert isinstance(configs.proxy_getter_functions, list)\n # print(configs.proxy_getter_functions)", "def setUpConfig(self):\n pass", "def test_get_yaml_spec(self):\n pass", "def mocked_config():\n mocked_config = mock.create_autospec(Config)\n mocked_config._reportportal_configured = True\n return mocked_config", "def test_init_from(config):\n\n config.init_from()\n config.init_from(file='../../config.cfg')", "def test_config_setup(hass, loop):\n loop.run_until_complete(async_setup_component(hass, 'config', {}))\n assert 'config' in hass.config.components", "def test_operato_defaults(monkeypatch, tmpdir):\n monkeypatch.chdir(os.path.abspath(os.path.dirname(__file__)))\n\n output = cookiecutter(\n '.', no_input=True, output_dir=str(tmpdir), config_file='config.yaml'\n )\n\n assert output['list'] == 'cats'", "def test_config_device_reset(get_config, monkeypatch):\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', {'test': 'conf'})\n cfg = get_config(DeviceConfig, base_config)\n cfg.save()\n cfg.write_default()\n new_conf = cfg.load()\n\n assert cfg.data == cfg.minimal_essential_conf, 'failed to apply default config'\n assert new_conf == cfg.minimal_essential_conf, 'failed to load default config'", "async def test_api_get_config(hass: HomeAssistant, mock_api_client: TestClient) -> None:\n resp = await mock_api_client.get(const.URL_API_CONFIG)\n result = await resp.json()\n if \"components\" in result:\n result[\"components\"] = set(result[\"components\"])\n if \"whitelist_external_dirs\" in result:\n result[\"whitelist_external_dirs\"] = set(result[\"whitelist_external_dirs\"])\n if \"allowlist_external_dirs\" in result:\n result[\"allowlist_external_dirs\"] = set(result[\"allowlist_external_dirs\"])\n if \"allowlist_external_urls\" in result:\n result[\"allowlist_external_urls\"] = set(result[\"allowlist_external_urls\"])\n\n assert hass.config.as_dict() == result", "def config(self, **kw):\n self.cfg_fixture.config(**kw)", "def test_config_device_restore_empty(get_config, write_config_fixture, monkeypatch):\n fname = 'will_be_empty.yml'\n # saving normal conf\n is_default = {'test': 'conf'}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', is_default)\n cfg = get_config(DeviceConfig, base_config, fname=fname)\n cfg.save()\n write_config_fixture('', fname)\n should_be_default = cfg.read()\n\n assert should_be_default == is_default, 'configs not matched'", "def test_renderer_discovers_special_config(self):\n datastore = Mock(spec=DatastoreVersion)\n datastore.datastore_name = 'mysql'\n datastore.name = 'mysql-test'\n datastore.manager = 'mysql'\n config = template.SingleInstanceConfigTemplate(datastore,\n self.flavor_dict,\n self.server_id)\n self.validate_template(config.render(), \"hyper\",\n {'ram': 0}, self.server_id)", 
"def test_config_create_file_with_default_dict(get_root, get_empty_config, monkeypatch):\n path = os.path.join(get_root, 'res', 'non_existent.yml')\n test_dict = {'this': 'test', 'test': 'de'}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', test_dict)\n cfg = get_empty_config(DeviceConfig, path)\n\n with open(path, 'r') as fh:\n content = yaml.load(fh, Loader=get_yaml_loader())\n\n assert cfg.config_path == path, \"config path is incorrect\"\n assert cfg.minimal_essential_conf == test_dict, \"bad minimal running\"\n assert content == test_dict, \"config not written to file\"\n assert cfg.data == test_dict, \"default config not loaded from file\"", "def mock_config(temp_dir):\n get_config_mock = Mock()\n config = base_config()\n config['skills']['priority_skills'] = ['foobar']\n config['data_dir'] = str(temp_dir)\n config['server']['metrics'] = False\n config['enclosure'] = {}\n\n get_config_mock.return_value = config\n return get_config_mock", "def test_config_option_required_no_default():\n class Config(config.Config):\n a = config.option(int, required=True, help=\"\")\n\n with pytest.raises(config.ConfigError):\n config.structure({}, Config)\n\n with pytest.raises(config.ConfigError):\n config.structure({\"a\": None}, Config)\n\n c = config.structure({\"a\": 12}, Config)\n assert c.a == 12", "def lava_gen_get_config_subset(config,\n default=True,\n core=True,\n regression=True):\n\n from copy import deepcopy\n cfg = deepcopy(config)\n tests = deepcopy(config[\"tests\"])\n\n # Remove all configs not requests by the caller\n if not default:\n tests.pop(\"Default\")\n if not core:\n tests.pop(\"CoreIPC\")\n tests.pop(\"CoreIPCTfmLevel2\")\n tests.pop(\"CoreIPCTfmLevel3\")\n if not regression:\n tests.pop(\"Regression\")\n\n cfg[\"tests\"] = tests\n return cfg", "def get_test_config(cls, cluster, role, env, job, filler=''):\n return cls.CONFIG_BASE % {'job': job, 'role': role, 'env': env, 'cluster': cluster,\n 'inner': filler}", "def test_everything():\n # TODO: split this up and write better tests\n\n @make_config()\n class Config:\n \"\"\"The test configuration for configurave.\"\"\"\n\n root_url: str = ce(\n comment=\"The root url configuration for the application\",\n description=\"A long ass multiline description goes here about all the options\"\n \" you could potentially decide upon using.\",\n )\n\n c = Config(\n sources=[ # in order of priority\n \"tests/test-config/config.toml\",\n \"ENV\", # Temporarily enabled, needs seperate optional dotenv test\n ]\n )\n\n assert \"root_url\" in str(c._crve_configs)\n assert c.root_url == \"test url\"\n\n default_toml = (\n \"# The test configuration for configurave.\\n\"\n \"# This is an autogenerated default configuration file written by Configurave\\n\\n\"\n \"# (str): The root url configuration for the application\\n\"\n \"# root_url = \\n\"\n \"# Description: A long ass multiline description goes here about all the\\n\"\n \"# options you could potentially decide upon using.\\n\"\n )\n assert c.defaults_toml() == default_toml", "def test_get_property_success(self):\r\n self.assertEqual(self.config.option1, 1337)", "def config():", "def config():", "def test_get_with_empty_value(self):\n self.assertEqual(self.config.get('none_types','other_value'),None)\n self.assertEqual(self.config.get('none_types','other_value','something'),'something')", "def test_config(app):\n assert app.testing", "def test_config_as_dict():\n c = core.Config(foo='bar')\n\n # check custom configuration\n assert c['foo'] == 'bar'\n \n # check len and iter 
behavior\n i = 0\n for k in c:\n i += 1\n assert len(c) == i\n assert 'datapath' in c._keys\n\n # check default get behavior\n assert c.get('doesNotExist') is None\n assert c.get('doesNotExists', 'foobar') == 'foobar'", "def init_config() -> Config:\n ...", "def test_all_configs_values():\n\n app_configs = application_services.get_configs()\n\n assert app_configs['TITLE'] == 'pyrin_unit_tests'\n assert app_configs['ENCODING'] == 'utf-8'\n assert app_configs['FLASK_LOG_LEVEL'] == 'DEBUG'\n assert app_configs['SERVER_NAME'] is None\n assert app_configs['SERVER_HOST'] == '127.0.0.1'\n assert app_configs['SERVER_PORT'] == 5001\n assert app_configs['ENV'] == 'testing'\n assert app_configs['DEBUG'] is False\n assert app_configs['TESTING'] is True\n assert app_configs['UNIT_TESTING'] is True", "def test_config_option_not_required_default():\n class Config(config.Config):\n a = config.option(int, default=12, required=False, help=\"\")\n\n c = config.structure({\"a\": None}, Config)\n assert c.a is None", "def test_config_ok_config(self):\n test_data = (\"[gnupg]\\n\"\n \"recipients = [email protected]\\n\"\n \"signer = [email protected]\\n\"\n \"\\n\"\n \"[amazon-s3]\\n\"\n \"access_key = ACCESSKEY\\n\"\n \"secret_access_key = SECRETACCESSKEY\\n\"\n \"\\n\"\n \"[data]\\n\"\n \"\\n\"\n \"bucket = DATABUCKET\\n\"\n \"[metadata]\\n\"\n \"bucket = METADATABUCKET\\n\"\n \"\\n\")\n if os.path.isfile(\"test_config.conf\"):\n os.remove(\"test_config.conf\")\n file(\"test_config.conf\", \"wb\").write(test_data)\n config = Config(\"test_config.conf\")\n self.assertIn(\"gnupg\", config.config.sections())\n self.assertIn(\"amazon-s3\", config.config.sections())\n self.assertEqual(config.config.get(\n \"gnupg\", \"recipients\"), \"[email protected]\")\n self.assertEqual(config.config.get(\n \"gnupg\", \"signer\"), \"[email protected]\")\n self.assertEqual(config.config.get(\n \"amazon-s3\", \"access_key\"), \"ACCESSKEY\")\n self.assertEqual(config.config.get(\n \"amazon-s3\", \"secret_access_key\"), \"SECRETACCESSKEY\")\n self.assertEqual(config.config.get(\n \"data\", \"bucket\"), \"DATABUCKET\")\n self.assertEqual(config.config.get(\n \"metadata\", \"bucket\"), \"METADATABUCKET\")\n os.remove(\"test_config.conf\")", "def test_no_default(self):\n with self.assertRaises(ConfigError) as cm:\n imageroller.main.read_config(\n self._cmd_args,\n imageroller.test.get_config_parser(self._no_default))\n # ConcurrentWorkers is the first value that is checked\n self.assertEqual(str(cm.exception),\n \"Config must contain ConcurrentWorkers\")", "def _get_test(self, config):\n expected_options = {'goodpassword', 'badpassword'}\n _warn_on_extra(set(config.options('test')) - expected_options -\n self.defaults, 'test section option(s)')\n\n get = partial(config.get, 'test')\n\n self.goodpassword = get('GOODPASSWORD')\n self.badpassword = get('BADPASSWORD')", "def test_define():\n client = TestClient()\n client.run(\"config set general.fakeos=Linux\")\n conf_file = load(client.cache.conan_conf_path)\n assert \"fakeos = Linux\" in conf_file\n\n client.run('config set general.compiler=\"Other compiler\"')\n conf_file = load(client.cache.conan_conf_path)\n assert 'compiler = Other compiler' in conf_file\n\n client.run('config set general.compiler.version=123.4.5')\n conf_file = load(client.cache.conan_conf_path)\n assert 'compiler.version = 123.4.5' in conf_file\n assert \"14\" not in conf_file\n\n client.run('config set general.new_setting=mysetting')\n conf_file = load(client.cache.conan_conf_path)\n assert 'new_setting = 
mysetting' in conf_file\n\n client.run('config set proxies.https=myurl')\n conf_file = load(client.cache.conan_conf_path)\n assert \"https = myurl\" in conf_file.splitlines()", "def check_config(cfg):", "def config():\n data = \"\"\"[YESSSSMS]\nLOGIN = 03211234567\nPASSWD = MySecr3t\nDEFAULT_TO = +43664123123123\nMVNO = YESSS\n\"\"\"\n with mock.patch(\n \"configparser.open\",\n # \"builtins.open\",\n mock.mock_open(read_data=data),\n ):\n yield", "def _init_config(self):\n self.config = self.config_template.specialize()\n print('MMH CONFIG:\\n' + str(self.config))", "def test_default(config, expected):\n if isinstance(expected, dict):\n formatted_config = parse_default(config, MODEL)\n try:\n assert expected == formatted_config\n except AssertionError:\n for k, d in formatted_config[\"model\"][\"layers\"].items():\n for opt in [\"user_vals\"]:\n try:\n assert (\n d[\"options\"][opt]\n is expected[\"model\"][\"layers\"][k][\"options\"][opt]\n ), f\"layer {k} does not have matching {opt}\"\n except AssertionError:\n for i, a in enumerate(d[\"options\"][opt]):\n b = expected[\"model\"][\"layers\"][k][\"options\"][opt][i]\n try:\n assert (\n a is b\n ), f\"layer {k} does not have matching {opt} for {a} != {b}\"\n except AssertionError:\n if issubclass(\n type(b), tf.keras.regularizers.Regularizer\n ):\n # TODO: implement more in depth check\n assert issubclass(\n type(a), tf.keras.regularizers.Regularizer\n )\n elif issubclass(\n type(b), tf.keras.initializers.Initializer\n ):\n # TODO: implement more in depth check\n assert issubclass(\n type(a), tf.keras.initializers.Initializer\n )\n else:\n assert (\n a == b\n ), f\"{opt} in layer {k} does not match: {a} != {b}\"\n for opt in [\"func\", \"func_args\", \"func_defaults\"]:\n assert (\n d[\"layer_base\"][opt]\n == expected[\"model\"][\"layers\"][k][\"layer_base\"][opt]\n ), f\"layer {k} does not have matching {opt}\"\n for opt in [\"layer_in_name\"]:\n # print(d[opt])\n assert (\n d[opt] == expected[\"model\"][\"layers\"][k][opt]\n ), f\"layer {k} does not have matching {opt}\"\n\n elif isinstance(expected, ValueError):\n with pytest.raises(ValueError):\n formatted_config = parse_default(config, MODEL)\n elif isinstance(expected, TypeError):\n with pytest.raises(TypeError):\n formatted_config = parse_default(config, MODEL)", "def test_get_feature_config(self):\n tools.eq_(\n self.old_manifest.get_feature_config(\"sub\").to_dict(),\n {\n \"url\": \"git://github.com/Toumorokoshi/sub.git\",\n \"formula\": \"sprinter.formula.git\",\n \"depends\": \"git\",\n \"branch\": \"yusuke\",\n \"rc\": \"temp=`pwd`; cd %(sub:root_dir)s/libexec && . sub-init2 && cd $tmp\",\n \"bc\": \"temp=`pwd`; cd %(sub:testvar)s/libexec && . sub-init2 && cd $tmp\",\n },\n )", "async def test_minimal_config(hass: HomeAssistant, mock_client) -> None:\n config = {prometheus.DOMAIN: {}}\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()", "def test_config_option_required_example():\n class Config(config.Config):\n a = config.option(int, required=True, example=12, help=\"\")\n\n with pytest.raises(config.ConfigError):\n config.structure({}, Config)\n\n c = config.make_example(Config)\n assert c.a == 12", "def test_init_with_custom_config(py_recipe):\n # actual parser doesn't matter. 
It's used for initialization only\n parser = argparse.ArgumentParser()\n subparser = parser.add_subparsers()\n init_obj = cli.Init(subparser)\n recipe = py_recipe.recipe\n # expected args object has\n\n with open(os.path.join(recipe, \"recipe\", \"conda-forge.yml\"), \"w\") as fp:\n fp.write(\n dedent(\n \"\"\"\\\n bot:\n automerge: true\n run_deps_from_wheel: true\n \"\"\"\n )\n )\n\n args = InitArgs(\n recipe_directory=os.path.join(recipe, \"recipe\"),\n feedstock_directory=os.path.join(recipe, \"{package.name}-feedstock\"),\n temporary_directory=os.path.join(recipe, \"temp\"),\n )\n init_obj(args)\n destination = os.path.join(recipe, \"py-test-feedstock\")\n assert os.path.isdir(destination)\n data = yaml.safe_load(\n open(os.path.join(destination, \"conda-forge.yml\"), \"r\").read()\n )\n assert data.get(\"bot\") != None\n assert data[\"bot\"][\"automerge\"] == True\n assert data[\"bot\"][\"run_deps_from_wheel\"] == True", "def test_get_empty_config():\n\n testutils.deploy_config_raw(\"\")\n\n with pytest.raises(prop.PropertyError):\n prop.get_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def test_default_config(self):\n registry = getUtility(IRegistry)\n settings = registry.forInterface(IImageWatchDogSettings)\n self.assertEqual(settings.source_formats, ['JPEG', 'GIF'])\n self.assertFalse(settings.optimize)\n self.assertFalse(settings.enabled)", "def test_stable_config(tmp_path, config, defaultenv):\n\n # Set environment variables that some of the configs expect. Using a\n # complex ROLE_CLAIM_KEY to make sure quoting works.\n env = {\n **defaultenv,\n \"ROLE_CLAIM_KEY\": '.\"https://www.example.com/roles\"[0].value',\n \"POSTGREST_TEST_SOCKET\": \"/tmp/postgrest.sock\",\n \"POSTGREST_TEST_PORT\": \"80\",\n \"JWT_SECRET_FILE\": \"a_file\",\n }\n\n # Some configs expect input from stdin, at least on base64.\n stdin = b\"Y29ubmVjdGlvbl9zdHJpbmc=\"\n\n dumped = dumpconfig(config, env=env, stdin=stdin)\n\n tmpconfigpath = tmp_path / \"config\"\n tmpconfigpath.write_text(dumped)\n redumped = dumpconfig(tmpconfigpath, env=env)\n\n assert dumped == redumped", "def test_conf(self):\n self.TESTED_UNIT = 'ceph-fs/0'\n\n def _get_conf():\n \"\"\"get/parse ceph daemon response into dict for specified configs.\n\n :returns dict: conf options selected from configs\n :rtype: dict\n \"\"\"\n configs = [\"mds_cache_memory_limit\",\n \"mds_cache_reservation\",\n \"mds_health_cache_threshold\"]\n holder = {}\n for config in configs:\n cmd = \"sudo ceph daemon mds.\" \\\n \"$HOSTNAME config show | grep {}\".format(config)\n conf = model.run_on_unit(self.TESTED_UNIT, cmd)\n for i in (conf['Stdout'].replace('\"', '')\n .replace(',', '')\n .strip()\n .split(\"\\n\")):\n key, val = i.split(\":\")\n holder[key] = val.strip()\n return holder\n\n @retry(wait=wait_exponential(multiplier=1, min=4, max=10),\n stop=stop_after_attempt(10))\n def _change_conf_check(mds_config):\n \"\"\"Change configs, then assert to ensure config was set.\n\n Doesn't return a value.\n \"\"\"\n loop = asyncio.get_event_loop()\n crt = model.async_set_application_config('ceph-fs', mds_config)\n loop.run_until_complete(crt)\n results = _get_conf()\n self.assertEquals(\n results['mds_cache_memory_limit'],\n mds_config['mds-cache-memory-limit'])\n self.assertAlmostEqual(\n float(results['mds_cache_reservation']),\n float(mds_config['mds-cache-reservation']))\n self.assertAlmostEqual(\n float(results['mds_health_cache_threshold']),\n float(mds_config['mds-health-cache-threshold']))\n\n # ensure defaults are set\n 
_get_conf()\n mds_config = {'mds-cache-memory-limit': '4294967296',\n 'mds-cache-reservation': '0.05',\n 'mds-health-cache-threshold': '1.5'}\n _change_conf_check(mds_config)\n\n # change defaults\n mds_config = {'mds-cache-memory-limit': '8589934592',\n 'mds-cache-reservation': '0.10',\n 'mds-health-cache-threshold': '2'}\n _change_conf_check(mds_config)\n\n # Restore config to keep tests idempotent\n mds_config = {'mds-cache-memory-limit': '4294967296',\n 'mds-cache-reservation': '0.05',\n 'mds-health-cache-threshold': '1.5'}\n _change_conf_check(mds_config)", "def test_none_in_config(self, mocked_callable_loader, mocked_load_config):\n config_filename = 'aconfigfile'\n importer.Finder(config_filename)", "def test_config():\n assert not sample.create_app().testing\n assert sample.create_app({\"TESTING\": True}).testing", "def config_entry() -> MockConfigEntry:\n return MockConfigEntry(\n domain=DOMAIN,\n unique_id=\"mock_radio_id\",\n data={CONF_WEBFSAPI_URL: \"http://1.1.1.1:80/webfsapi\", CONF_PIN: \"1234\"},\n )", "def testInitConfFromFile():\n\n conf = naiveConf.NaiveConf(exampleConfFname)\n assert type(conf.x) == datetime.date\n assert type(conf.y) == str\n dt = datetime.datetime(2013,1,1)\n assert conf.x == dt.date()\n assert conf.y == str(dt.date())\n\n assert type(conf.L) == list\n assert conf.L[-1]['a'] is None\n\n assert conf.x == conf['x']", "def bootstrap_default():\n\treturn default_configuration", "def test_get_with_None_value(self):\n self.assertEqual(self.config.get('none_types','some_value'),None)\n self.assertEqual(self.config.get('none_types','some_value','something'),'something')", "def test_no_config_keyword(self):\n args = self.get_args()\n config = {\n \"site\": {\n \"username\": \"\",\n \"name\": \"\",\n \"ip_address\": \"\",\n \"password\": \"\",\n \"local\": \"\",\n \"use_https\": \"\"\n }\n }\n temp = sys.stdout\n fake_out = FakeStdio()\n sys.stdout = fake_out\n\n config_filename = 'testsuite_cfg.json'\n args.config = config_filename\n config_file = open(config_filename, 'w')\n config_file.write(str(json.dumps(config)))\n config_file.close()\n\n execute_tool(args, test_mode=True)\n sys.stdout = temp\n self.assertTrue(fake_out.verify_output(['%% Invalid configuration file', '\\n']))", "def test_get(self):\n cf = ConfigFile()\n cf[\"k1\"] = \"v1\"\n\n self.assertEqual(cf.get(\"k1\"), \"v1\")\n self.assertEqual(cf.get(\"k2\"), None)\n self.assertEqual(cf.get(\"k3\", default=3), 3)", "def test_broken_config(broken_config):\n with pytest.raises(RuntimeError, match=\"Error reading config.yml\"):\n abcconfig.get_config(broken_config)", "def test_pkgutil(self):\n print(utilities.CONFIG_FILE)\n assert utilities.get_config('ROBINHOOD', 'oauth_endpoint')", "def configuration():", "def test_required_config(self, ckan_config, monkeypatch, option):\n monkeypatch.delitem(ckan_config, option)\n plugin = p.get_plugin(\"cloudstorage\")\n with pytest.raises(RuntimeError, match=\"configuration option\"):\n plugin.configure(ckan_config)", "def test_get_reg_ex_config(self):\n pass", "def convert_spec(spec):\n config = configobj.ConfigObj(configspec=spec)\n\n return config.configspec", "def test_config_device_restore_broken(get_config, write_config_fixture, monkeypatch):\n fname = 'will_be_broken.yml'\n # saving normal conf\n is_default = {'test': 'conf'}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', is_default)\n cfg = get_config(DeviceConfig, base_config, fname=fname)\n cfg.save()\n write_config_fixture('<< EOF >>', fname)\n should_be_default = cfg.read()\n\n assert 
should_be_default == is_default, 'configs not matched'", "def testFakeLongHandConfigurables(self):\n config = self.getBaseConfiguration()\n # The following should be ignored by the configure step\n config.Webtools.section_('foo')\n config.Webtools.foo.bar = 'baz'\n config.Webtools.section_('stuff')\n config.Webtools.stuff = 'things'\n\n server = Root(config)\n server.start(blocking=False)\n\n self.assertFalse('foo' in cpconfig, 'non-standard configurable passed to server')\n self.assertFalse('stuff' in cpconfig, 'non-standard configurable passed to server')\n\n server.stop()", "def test_getboolean_with_default(self):\n self.assertEqual(self.config.getboolean('advanced','p'),None)\n self.assertEqual(self.config.getboolean('advanced','p',True),True)", "def test_config_option_not_required_no_default():\n class Config(config.Config):\n a = config.option(int, help=\"\")\n\n c = config.structure({}, Config)\n assert c.a is None", "def test_config_endpoint(self):\n endpoint = settings.CONFIG_ENDPOINT\n access_token = config.ACCESS_TOKEN\n self.assertValidGetOicJsonEndpoint(endpoint, access_token)", "def test_generateconfig(self):\n args = mock.Mock()\n args.debug = None\n args.generateconfig = True\n args.config = None\n expected_text = ('Sample configuration file written to sample_config.json\\n'\n \"Replicate the site JSON for each site.\\n\"\n \" Valid values for use_https and local are 'True' and 'False'\\n\"\n \" One site must have local set to 'True'\\n\"\n 'Replicate the export JSON for each exported contract.\\n')\n with mock.patch('sys.stdout', new=StringIO()) as fake_out:\n execute_tool(args)\n self.assertEqual(fake_out.getvalue(), expected_text)", "def testDefault():\n\n conf = naiveConf.NaiveConf(exampleConfFname)\n oldX = conf.x\n conf.default('x', None)\n conf.default('Z', 5)\n\n assert conf.x == oldX\n assert conf.Z == 5", "def test_configuration(self):\n self.assertEqual(self.Test.adapter_config['write'],\n { 'adapter': TestAdapter, 'foo': 'bar' })", "def test_returns_configured_apiurl_over_default(self):\n arguments = {'--api-url': None}\n config = {'api_url': 'configured_stuff'}\n result = get_api_url(arguments, config)\n self.assertEqual(result, 'configured_stuff')\n self.mock_sanitize_host.assert_not_called()", "def test_config_device_restore_missing(get_config, config_dict, monkeypatch, write_config_fixture):\n fname = 'will_be_missing.yml'\n # saving normal conf\n is_default = {'test': 'conf'}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', is_default)\n cfg = get_config(DeviceConfig, config_dict, fname=fname)\n cfg.save()\n path = write_config_fixture('corrupted_string', fname)\n os.remove(path)\n # trying to read from non-existent file\n should_be_default = cfg.read()\n\n assert should_be_default == is_default, 'configs not matched'" ]
[ "0.77164143", "0.7436098", "0.7076511", "0.7013907", "0.6881537", "0.6805723", "0.678276", "0.6729274", "0.6729274", "0.66931385", "0.66794413", "0.6565056", "0.6563163", "0.65287656", "0.6503387", "0.64884305", "0.64868605", "0.6484734", "0.6454509", "0.6409027", "0.63939387", "0.63753587", "0.6355658", "0.6340941", "0.63258415", "0.632423", "0.6320525", "0.6308311", "0.62858176", "0.6272889", "0.62703544", "0.6256636", "0.6243779", "0.6232077", "0.6221565", "0.6210976", "0.62010866", "0.6186721", "0.6182758", "0.6177886", "0.6174423", "0.6167704", "0.6164074", "0.6158715", "0.61577624", "0.61305636", "0.61166805", "0.61140746", "0.61051244", "0.61050856", "0.6104759", "0.6102821", "0.60954237", "0.60954237", "0.6094436", "0.6094097", "0.60814637", "0.6066284", "0.6054225", "0.6042857", "0.6041123", "0.6040495", "0.60369724", "0.60335964", "0.60292566", "0.6025991", "0.6025899", "0.60176873", "0.601732", "0.6013619", "0.6005006", "0.5993381", "0.59908134", "0.59765345", "0.5972314", "0.5967961", "0.5964263", "0.59627044", "0.59612846", "0.5942066", "0.59418285", "0.59351563", "0.5933552", "0.5917525", "0.59165454", "0.5915233", "0.59139574", "0.59118074", "0.5910684", "0.59103703", "0.59003294", "0.58959204", "0.5895509", "0.589338", "0.5891388", "0.58872503", "0.5881208", "0.58798295", "0.58797294", "0.58768374" ]
0.8287769
0
Test get_spec_config on matching conf
def test_get_spec_config_match(self):
    spec_conf = get_spec_config({
        'defaults': {
            'default_foo': 'default_bar',
            'foo': 'bar'
        },
        'specific': [
            {'mask': ['filenomatch'], 'foo': 'bar_nomatch'},
            {'mask': ['filematch'], 'foo': 'match'},
            {'mask': ['filenomatch2'], 'foo': 'bar_nomatch2'}
        ]
    }, 'filematch')
    self.assertEqual(spec_conf, {'default_foo': 'default_bar', 'foo': 'match', 'mask': ['filematch']})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_config_spec(self):\n spec = self._gen.config_spec()\n self.assertIn('Number of examples', spec)\n self.assertIn('Maximum number of columns to change', spec)\n self.assertIn('Regression threshold', spec)\n self.assertIn('Prediction key', spec)", "def test_get_spec_config_defaults(self):\n spec_conf = get_spec_config({\n 'defaults': {\n 'foo': 'bar'\n }\n }, '')\n self.assertEqual(spec_conf, {'foo': 'bar'})", "def test_config_spec(self):\n spec = self.ci.config_spec()\n self.assertIn('Label', spec)\n self.assertIsInstance(spec['Label'], lit_types.CategoryLabel)", "def test_get_spec_config_empty(self):\n spec_conf = get_spec_config({}, '')\n self.assertEqual(spec_conf, {})", "def test_SpecConfig_class():\n res = SpecConfig(**SPEC_CONFIG)\n assert res.path_out == SPEC_CONFIG['path_out']", "def get_config_spec(cls):\n return False", "def test_collect_configuration(self):\n sample_config = \"\"\"[dyndnsc]\nconfigs = testconfig\n\n[testconfig]\nuse_preset = testpreset\nupdater-userid = bob\nupdater-password = XYZ\n# test overwriting a preset value:\ndetector-url = http://myip.example.com/\n\n[preset:testpreset]\nupdater = fubarUpdater\nupdater-url = https://update.example.com/nic/update\nupdater-moreparam = some_stuff\ndetector = webcheck4\ndetector-family = INET\ndetector-url = http://ip.example.com/\ndetector-parser = plain\n \"\"\"\n p = configparser.ConfigParser()\n p.readfp(StringIO(sample_config)) # XXX readfp() is deprecated since py 3.2\n config = collect_config(p)\n self.assertEqual(dict, type(config))\n self.assertTrue('testconfig' in config)\n self.assertTrue('detector' in config['testconfig'])\n self.assertTrue(isinstance(config['testconfig']['detector'], list))\n self.assertEqual(1, len(config['testconfig']['detector']))\n detector, detector_opts = config['testconfig']['detector'][-1]\n self.assertEqual(detector, \"webcheck4\") # from the preset\n self.assertEqual(detector_opts['url'], \"http://myip.example.com/\") # from the user conf\n self.assertTrue('updater' in config['testconfig'])\n self.assertTrue(isinstance(config['testconfig']['updater'], list))\n self.assertEqual(1, len(config['testconfig']['updater']))\n updater = config['testconfig']['updater'][0]\n self.assertEqual(\"fubarUpdater\", updater[0])\n self.assertTrue(\"url\" in updater[1])\n self.assertTrue(\"moreparam\" in updater[1])\n self.assertEqual(\"some_stuff\", updater[1][\"moreparam\"])", "def check_config(cfg):", "async def test_full_config(hass: HomeAssistant, mock_client) -> None:\n config = {\n prometheus.DOMAIN: {\n \"namespace\": \"ns\",\n \"default_metric\": \"m\",\n \"override_metric\": \"m\",\n \"requires_auth\": False,\n \"component_config\": {\"fake.test\": {\"override_metric\": \"km\"}},\n \"component_config_glob\": {\"fake.time_*\": {\"override_metric\": \"h\"}},\n \"component_config_domain\": {\"climate\": {\"override_metric\": \"°C\"}},\n \"filter\": {\n \"include_domains\": [\"climate\"],\n \"include_entity_globs\": [\"fake.time_*\"],\n \"include_entities\": [\"fake.test\"],\n \"exclude_domains\": [\"script\"],\n \"exclude_entity_globs\": [\"climate.excluded_*\"],\n \"exclude_entities\": [\"fake.time_excluded\"],\n },\n }\n }\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()", "def _get_configspec():\n files = sorted(pkg_resources.resource_listdir(__name__, \"\"))\n # NOTE:\n # Explicit convert the filter results to a list, since the returned\n # iterator can ONLY be used ONCE.\n specfiles = list(filter(lambda fn: 
fn.endswith(\".conf.spec\"), files))\n if os.environ.get(\"DEBUG_FG21SIM\"):\n print(\"DEBUG: Found config specifications: %s\" % \", \".join(specfiles),\n file=sys.stderr)\n # NOTE:\n # `resource_string()` returns the resource in *binary/bytes* string\n configspec = \"\\n\".join([\n pkg_resources.resource_string(__name__, fn).decode(\"utf-8\")\n for fn in specfiles\n ]).split(\"\\n\")\n return configspec", "def test_read_config_option(self):\n # set up config\n config.set_config_file(os.path.join(path_to_module, \"test_config.conf\"))\n config.setup()\n # Test that all the parameters loaded from file are correct\n self.assertEqual(config.read_config_option('client_id'), 'uploader')\n self.assertEqual(config.read_config_option('client_secret'), 'secret')\n self.assertEqual(config.read_config_option('username'), 'admin')\n self.assertEqual(config.read_config_option('password'), 'password1')\n self.assertEqual(config.read_config_option('base_url'), 'http://localhost:8080/irida-latest/api/')\n self.assertEqual(config.read_config_option('parser'), 'miseq')\n self.assertEqual(config.read_config_option('readonly', bool), False)", "def test_get(self):\n self.assertEqual(self.config.get('basic','greeting'),'hello')", "def test_get_yaml_spec(self):\n pass", "def test_compliance_configuration(self, evidence):\n evidence_config = json.loads(evidence.content)\n if evidence_config != self.config.raw_config:\n evidence = json.dumps(evidence_config, indent=2).split('\\n')\n config = json.dumps(self.config.raw_config, indent=2).split('\\n')\n self.add_failures(\n 'Differences found',\n {\n 'Fetcher Configuration': evidence,\n 'Check Configuration': config\n }\n )", "def test_load_configs_testing(self):\n global locator, config_paths\n locator.load_config(config_paths[0])\n\n self.assertEqual(locator.config['routines'], ['debug'])\n self.assertEqual(locator.config['driver'],\n {\n 'type': 'TestDriver',\n 'kwargs': {\n 'verbose': False\n }\n })", "def _expected_config(self) -> Dict[str, Optional[str]]:\n return EXPECTED_CONFIG", "def _expected_config(self) -> Dict[str, Optional[str]]:\n return EXPECTED_CONFIG", "async def test_full_config(hass, mock_client):\n config = {\n prometheus.DOMAIN: {\n \"namespace\": \"ns\",\n \"default_metric\": \"m\",\n \"override_metric\": \"m\",\n \"component_config\": {\"fake.test\": {\"override_metric\": \"km\"}},\n \"component_config_glob\": {\"fake.time_*\": {\"override_metric\": \"h\"}},\n \"component_config_domain\": {\"climate\": {\"override_metric\": \"°C\"}},\n \"filter\": {\n \"include_domains\": [\"climate\"],\n \"include_entity_globs\": [\"fake.time_*\"],\n \"include_entities\": [\"fake.test\"],\n \"exclude_domains\": [\"script\"],\n \"exclude_entity_globs\": [\"climate.excluded_*\"],\n \"exclude_entities\": [\"fake.time_excluded\"],\n },\n }\n }\n assert await async_setup_component(hass, prometheus.DOMAIN, config)\n await hass.async_block_till_done()\n assert hass.bus.listen.called\n assert hass.bus.listen.call_args_list[0][0][0] == EVENT_STATE_CHANGED", "def test_get_config_default_value(configs):\n assert get_config('SOURCE_FOLDER') == configs['SOURCE_FOLDER']", "def test_get_reg_ex_config(self):\n pass", "def test_SpecConfig_class_minimal():\n res = SpecConfig(path=PATH_SPECS_2_YAML)\n assert res.path_out == PATH_SPECS_2_YAML_MODIFIED", "def test_get_property_success(self):\r\n self.assertEqual(self.config.option1, 1337)", "def test_valid_configuration(self):\n\n conf = [\n 'gasoline', '228i', 'model_luxury_line', 'silver', 'rims_384',\n 'tapistry_black', 
'steptronic', 'smoker_package', 'tow_hook'\n ]\n\n attr_val_ids = self.get_attr_val_ids(conf)\n validation = self.cfg_tmpl.validate_configuration(attr_val_ids)\n self.assertTrue(validation, \"Valid configuration failed validation\")", "def testConfigA(self):\n assert type(self.config) == dict, \"Read setting not returning a dictionary\"", "async def test_api_get_config(hass: HomeAssistant, mock_api_client: TestClient) -> None:\n resp = await mock_api_client.get(const.URL_API_CONFIG)\n result = await resp.json()\n if \"components\" in result:\n result[\"components\"] = set(result[\"components\"])\n if \"whitelist_external_dirs\" in result:\n result[\"whitelist_external_dirs\"] = set(result[\"whitelist_external_dirs\"])\n if \"allowlist_external_dirs\" in result:\n result[\"allowlist_external_dirs\"] = set(result[\"allowlist_external_dirs\"])\n if \"allowlist_external_urls\" in result:\n result[\"allowlist_external_urls\"] = set(result[\"allowlist_external_urls\"])\n\n assert hass.config.as_dict() == result", "def test_config_ok_config(self):\n test_data = (\"[gnupg]\\n\"\n \"recipients = [email protected]\\n\"\n \"signer = [email protected]\\n\"\n \"\\n\"\n \"[amazon-s3]\\n\"\n \"access_key = ACCESSKEY\\n\"\n \"secret_access_key = SECRETACCESSKEY\\n\"\n \"\\n\"\n \"[data]\\n\"\n \"\\n\"\n \"bucket = DATABUCKET\\n\"\n \"[metadata]\\n\"\n \"bucket = METADATABUCKET\\n\"\n \"\\n\")\n if os.path.isfile(\"test_config.conf\"):\n os.remove(\"test_config.conf\")\n file(\"test_config.conf\", \"wb\").write(test_data)\n config = Config(\"test_config.conf\")\n self.assertIn(\"gnupg\", config.config.sections())\n self.assertIn(\"amazon-s3\", config.config.sections())\n self.assertEqual(config.config.get(\n \"gnupg\", \"recipients\"), \"[email protected]\")\n self.assertEqual(config.config.get(\n \"gnupg\", \"signer\"), \"[email protected]\")\n self.assertEqual(config.config.get(\n \"amazon-s3\", \"access_key\"), \"ACCESSKEY\")\n self.assertEqual(config.config.get(\n \"amazon-s3\", \"secret_access_key\"), \"SECRETACCESSKEY\")\n self.assertEqual(config.config.get(\n \"data\", \"bucket\"), \"DATABUCKET\")\n self.assertEqual(config.config.get(\n \"metadata\", \"bucket\"), \"METADATABUCKET\")\n os.remove(\"test_config.conf\")", "def test_get_feature_config(self):\n tools.eq_(\n self.old_manifest.get_feature_config(\"sub\").to_dict(),\n {\n \"url\": \"git://github.com/Toumorokoshi/sub.git\",\n \"formula\": \"sprinter.formula.git\",\n \"depends\": \"git\",\n \"branch\": \"yusuke\",\n \"rc\": \"temp=`pwd`; cd %(sub:root_dir)s/libexec && . sub-init2 && cd $tmp\",\n \"bc\": \"temp=`pwd`; cd %(sub:testvar)s/libexec && . 
sub-init2 && cd $tmp\",\n },\n )", "def test_get_configs_with_filter(self) -> None:\n config1 = self.integration.create_config(name='Config 1',\n enabled=True,\n save=True)\n self.integration.create_config(name='Config 2',\n enabled=True,\n save=True)\n\n # Add some configs that shouldn't be returned.\n integration2 = \\\n self.manager.register_integration_class(DummyIntegration2)\n self.integration.create_config(name='Config 3', save=True)\n integration2.create_config(name='Config 4', save=True)\n\n self.assertEqual(self.integration.get_configs(name='Config 1'),\n [config1])", "def test_load_configs_simulation(self):\n global locator, config_paths\n locator.load_config(config_paths[1])\n\n self.assertEqual(locator.config['routines'], ['simulate'])\n self.assertEqual(locator.config['driver'],\n {\n 'type': 'SimDriver',\n 'kwargs': {\n \"arg\": \"val\"\n }\n })", "def test_config_class():\n assert config is not None", "def test_loads_a_config_file(self):\n from test.resources import config\n self.assertIsInstance(config, type(sys))\n self.assertIsNotNone(config.example)\n self.assertEqual(config.example.config_option, 'config-value')", "async def test_get_device_config(hass, hass_client):\n with patch.object(config, \"SECTIONS\", [\"automation\"]):\n await async_setup_component(hass, \"config\", {})\n\n client = await hass_client()\n\n def mock_read(path):\n \"\"\"Mock reading data.\"\"\"\n return [{\"id\": \"sun\"}, {\"id\": \"moon\"}]\n\n with patch(\"homeassistant.components.config._read\", mock_read):\n resp = await client.get(\"/api/config/automation/config/moon\")\n\n assert resp.status == 200\n result = await resp.json()\n\n assert result == {\"id\": \"moon\"}", "def test_conf(self):\n self.TESTED_UNIT = 'ceph-fs/0'\n\n def _get_conf():\n \"\"\"get/parse ceph daemon response into dict for specified configs.\n\n :returns dict: conf options selected from configs\n :rtype: dict\n \"\"\"\n configs = [\"mds_cache_memory_limit\",\n \"mds_cache_reservation\",\n \"mds_health_cache_threshold\"]\n holder = {}\n for config in configs:\n cmd = \"sudo ceph daemon mds.\" \\\n \"$HOSTNAME config show | grep {}\".format(config)\n conf = model.run_on_unit(self.TESTED_UNIT, cmd)\n for i in (conf['Stdout'].replace('\"', '')\n .replace(',', '')\n .strip()\n .split(\"\\n\")):\n key, val = i.split(\":\")\n holder[key] = val.strip()\n return holder\n\n @retry(wait=wait_exponential(multiplier=1, min=4, max=10),\n stop=stop_after_attempt(10))\n def _change_conf_check(mds_config):\n \"\"\"Change configs, then assert to ensure config was set.\n\n Doesn't return a value.\n \"\"\"\n loop = asyncio.get_event_loop()\n crt = model.async_set_application_config('ceph-fs', mds_config)\n loop.run_until_complete(crt)\n results = _get_conf()\n self.assertEquals(\n results['mds_cache_memory_limit'],\n mds_config['mds-cache-memory-limit'])\n self.assertAlmostEqual(\n float(results['mds_cache_reservation']),\n float(mds_config['mds-cache-reservation']))\n self.assertAlmostEqual(\n float(results['mds_health_cache_threshold']),\n float(mds_config['mds-health-cache-threshold']))\n\n # ensure defaults are set\n _get_conf()\n mds_config = {'mds-cache-memory-limit': '4294967296',\n 'mds-cache-reservation': '0.05',\n 'mds-health-cache-threshold': '1.5'}\n _change_conf_check(mds_config)\n\n # change defaults\n mds_config = {'mds-cache-memory-limit': '8589934592',\n 'mds-cache-reservation': '0.10',\n 'mds-health-cache-threshold': '2'}\n _change_conf_check(mds_config)\n\n # Restore config to keep tests idempotent\n mds_config = 
{'mds-cache-memory-limit': '4294967296',\n 'mds-cache-reservation': '0.05',\n 'mds-health-cache-threshold': '1.5'}\n _change_conf_check(mds_config)", "def test_invalid_config(self):\n # configuration has no auth section\n self.mock_config.get_config.return_value = {}\n with self.assertRaisesRegex(\n RuntimeError, 'No ldap configuration section found'):\n ldap.MANAGER()\n\n # configuration has auth section but no ldap sub-section\n self.mock_config.get_config.return_value = {'auth': None}\n with self.assertRaisesRegex(\n RuntimeError, 'No ldap configuration section found'):\n ldap.MANAGER()\n\n # configuration has ldap section but invalid parameters\n self.mock_config.get_config.return_value = {\n 'auth': {\n 'ldap': 'foo'\n }\n }\n self.assertRaises(jsonschema.exceptions.ValidationError, ldap.MANAGER)\n\n # configuration has missing parameters\n self.mock_config.get_config.return_value = {\n 'auth': {\n 'ldap': 'host'\n }\n }\n self.assertRaises(jsonschema.exceptions.ValidationError, ldap.MANAGER)\n\n # specified group filter but no group base\n self.mock_config.get_config.return_value = {\n 'auth': {\n 'ldap': {\n 'host': 'foo.com',\n 'user_base': 'ou=base,o=foo.com',\n 'group_filter': '(cn=foo-users)',\n }\n }\n }\n self.assertRaisesRegex(\n RuntimeError,\n 'group_filter requires group_base parameter',\n ldap.MANAGER)\n\n # specified group filter and group base but no group membership\n # attribute\n self.mock_config.get_config.return_value = {\n 'auth': {\n 'ldap': {\n 'host': 'foo.com',\n 'user_base': 'ou=base,o=foo.com',\n 'group_filter': '(cn=foo-users)',\n 'group_base': 'ou=foogroups,o=foo.com',\n }\n }\n }\n self.assertRaisesRegex(\n RuntimeError,\n 'group_filter requires group_membership_attr parameter',\n ldap.MANAGER)", "def test_config_setup(hass, loop):\n loop.run_until_complete(async_setup_component(hass, 'config', {}))\n assert 'config' in hass.config.components", "def testGetConfig():\n configs = GetConfig()\n # print(configs.host_ip)\n # print(configs.proxy_local)\n \n # print(configs.proxy_online)\n # print(configs.user_img_url)\n # print(configs.user_login_url)\n print(configs.user_start_id)\n\n # assert isinstance(configs.proxy_getter_functions, list)\n # print(configs.proxy_getter_functions)", "def test_config_opts(sc):\n assert sc.server_name is not None\n assert sc.deployment == Deployment.stg\n assert sc.admins is not None\n assert sc.command_handler is not None\n assert sc.command_handler_work_dir is not None\n assert sc.command_handler_pvc_env_var is not None\n assert sc.command_handler_image_reference is not None\n assert sc.command_handler_k8s_namespace is not None\n assert sc.fas_password is not None\n assert sc.testing_farm_secret is not None\n assert sc.github_requests_log_path is not None\n assert sc.webhook_secret is not None\n assert sc.validate_webhooks is not None\n assert sc.gitlab_token_secret is not None", "def test_valid_hook_with_config_file(self):\n with mock.patch(\n 'detect_secrets_server.core.usage.common.output.ALL_HOOKS',\n [\n HookDescriptor(\n display_name='config_needed',\n module_name='will_be_mocked',\n class_name='ConfigFileRequiredHook',\n config_setting=HookDescriptor.CONFIG_REQUIRED,\n ),\n ],\n ), mock.patch(\n 'detect_secrets_server.core.usage.common.output.import_module',\n return_value=Module(\n ConfigFileRequiredHook=ConfigFileRequiredMockClass,\n ),\n ):\n args = self.parse_args(\n 'scan '\n '--output-hook config_needed '\n '--output-config examples/pysensu.config.yaml '\n 'examples '\n )\n\n with 
open('examples/pysensu.config.yaml') as f:\n content = f.read()\n\n assert args.output_hook.config == content", "def valid_config(hass: HomeAssistant, requests_mock):\n requests_mock.get(\n \"https://api.vultr.com/v1/account/info?api_key=ABCDEFG1234567\",\n text=load_fixture(\"account_info.json\", \"vultr\"),\n )\n\n with patch(\n \"vultr.Vultr.server_list\",\n return_value=json.loads(load_fixture(\"server_list.json\", \"vultr\")),\n ):\n # Setup hub\n vultr.setup(hass, VALID_CONFIG)", "def test_all_configs_values():\n\n app_configs = application_services.get_configs()\n\n assert app_configs['TITLE'] == 'pyrin_unit_tests'\n assert app_configs['ENCODING'] == 'utf-8'\n assert app_configs['FLASK_LOG_LEVEL'] == 'DEBUG'\n assert app_configs['SERVER_NAME'] is None\n assert app_configs['SERVER_HOST'] == '127.0.0.1'\n assert app_configs['SERVER_PORT'] == 5001\n assert app_configs['ENV'] == 'testing'\n assert app_configs['DEBUG'] is False\n assert app_configs['TESTING'] is True\n assert app_configs['UNIT_TESTING'] is True", "def test_config(app):\n assert app.testing", "def test_broken_config(broken_config):\n with pytest.raises(RuntimeError, match=\"Error reading config.yml\"):\n abcconfig.get_config(broken_config)", "def check_config(config):\n pass", "def config_entry() -> MockConfigEntry:\n return MockConfigEntry(\n domain=DOMAIN,\n unique_id=\"mock_radio_id\",\n data={CONF_WEBFSAPI_URL: \"http://1.1.1.1:80/webfsapi\", CONF_PIN: \"1234\"},\n )", "def test_getJsonConfig(self) -> None:\n jsonConf = getJsonConfig()\n self.assertTrue(\"idSrvDiscoUrl\" in jsonConf)\n self.assertTrue(\"accessTokenFetchAudience\" in jsonConf)", "def test_config_endpoint(self):\n endpoint = settings.CONFIG_ENDPOINT\n access_token = config.ACCESS_TOKEN\n self.assertValidGetOicJsonEndpoint(endpoint, access_token)", "def test_config_metadata(self):\n result = self.driver.get_config_metadata()\n self.assert_(isinstance(result, dict))\n\n self.assert_(isinstance(result[ConfigMetadataKey.DRIVER], dict))\n self.assert_(isinstance(result[ConfigMetadataKey.COMMANDS], dict))\n self.assert_(isinstance(result[ConfigMetadataKey.PARAMETERS], dict))\n\n self.assertEquals(len(result[ConfigMetadataKey.DRIVER]), 1)\n self.assertEquals(result[ConfigMetadataKey.DRIVER],\n {DriverDictKey.VENDOR_SW_COMPATIBLE: True})\n\n # Check a few in the cmd list...the leaves in the structure are\n # tested in the cmd dict test cases\n self.assertEquals(len(result[ConfigMetadataKey.COMMANDS]), 2)\n self.assert_(\"cmd1\" in result[ConfigMetadataKey.COMMANDS].keys())\n self.assert_(\"cmd2\" in result[ConfigMetadataKey.COMMANDS].keys())\n\n # Check a few in the param list...the leaves in the structure are\n # tested in the param dict test cases\n self.assertEquals(len(result[ConfigMetadataKey.PARAMETERS]), 4)\n self.assert_(\"foo\" in result[ConfigMetadataKey.PARAMETERS].keys())\n self.assert_(\"bar\" in result[ConfigMetadataKey.PARAMETERS].keys())\n self.assert_(\"baz\" in result[ConfigMetadataKey.PARAMETERS].keys())\n self.assert_(\"bat\" in result[ConfigMetadataKey.PARAMETERS].keys())", "def test_configuration_map(self):\n config = load_configuration(package_dir=self.dir, gitconfig_file=self.gitconfig)\n mapping = config.configuration_map()\n self.failUnless('cirrus' in mapping)\n self.failUnless('credentials' in mapping['cirrus'])\n self.failUnless('configuration' in mapping['cirrus'])\n self.failUnless('github_credentials' in mapping['cirrus']['credentials'])\n self.assertEqual(\n mapping['cirrus']['credentials']['github_credentials'],\n 
{'github_user': None, 'github_token': None}\n )\n self.assertEqual(\n mapping['cirrus']['configuration']['package']['name'], 'cirrus_tests'\n )", "def test_config():\n args = Namespace(molecule=\"nucleotide\", verbose=False)\n config = core.Config.from_args(args)\n assert config.verbose is False\n assert config.molecule == 'nucleotide'\n assert config.extended_validation == 'none'\n\n args = Namespace(molecule=\"protein\", verbose=True)\n config = core.Config.from_args(args)\n assert config.verbose is True\n assert config.molecule == 'protein'", "def _check_config(self):", "def test_get_configs():\n\n configs = application_services.get_configs()\n\n assert isinstance(configs, dict)\n assert len(configs) > 0", "def convert_spec(spec):\n config = configobj.ConfigObj(configspec=spec)\n\n return config.configspec", "def test_renderer_discovers_special_config(self):\n datastore = Mock(spec=DatastoreVersion)\n datastore.datastore_name = 'mysql'\n datastore.name = 'mysql-test'\n datastore.manager = 'mysql'\n config = template.SingleInstanceConfigTemplate(datastore,\n self.flavor_dict,\n self.server_id)\n self.validate_template(config.render(), \"hyper\",\n {'ram': 0}, self.server_id)", "def test_define():\n client = TestClient()\n client.run(\"config set general.fakeos=Linux\")\n conf_file = load(client.cache.conan_conf_path)\n assert \"fakeos = Linux\" in conf_file\n\n client.run('config set general.compiler=\"Other compiler\"')\n conf_file = load(client.cache.conan_conf_path)\n assert 'compiler = Other compiler' in conf_file\n\n client.run('config set general.compiler.version=123.4.5')\n conf_file = load(client.cache.conan_conf_path)\n assert 'compiler.version = 123.4.5' in conf_file\n assert \"14\" not in conf_file\n\n client.run('config set general.new_setting=mysetting')\n conf_file = load(client.cache.conan_conf_path)\n assert 'new_setting = mysetting' in conf_file\n\n client.run('config set proxies.https=myurl')\n conf_file = load(client.cache.conan_conf_path)\n assert \"https = myurl\" in conf_file.splitlines()", "def test_get_config(default_config, tmp_path):\n abcconfig.write_config(default_config, configpath=tmp_path)\n config = abcconfig.get_config(configpath=tmp_path)\n assert config == default_config", "async def test_config_sensor(hass, method_discovery, do_config):\n sensor_name = \"test_sensor\"\n config_sensor = {\n CONF_NAME: sensor_name,\n **do_config,\n }\n await base_config_test(\n hass,\n config_sensor,\n sensor_name,\n SENSOR_DOMAIN,\n CONF_BIT_SENSORS,\n CONF_INPUTS,\n method_discovery=method_discovery,\n )", "def test_configurations_get(self):\n result = instance_info.dbaas.configurations.get(configuration_info.id)\n assert_equal(configuration_info.id, result.id)\n assert_equal(configuration_info.name, result.name)\n assert_equal(configuration_info.description, result.description)\n\n # check the result field types\n with TypeCheck(\"configuration\", result) as check:\n check.has_field(\"id\", str)\n check.has_field(\"name\", str)\n check.has_field(\"description\", str)\n check.has_field(\"values\", dict)\n check.has_field(\"created\", str)\n check.has_field(\"updated\", str)\n check.has_field(\"instance_count\", int)\n\n print(result.values)\n\n # check for valid timestamps\n assert_true(_is_valid_timestamp(result.created))\n assert_true(_is_valid_timestamp(result.updated))\n\n # check that created and updated timestamps differ, since\n # test_appending_to_existing_configuration should have changed the\n # updated timestamp\n if not CONFIG.fake_mode:\n 
assert_not_equal(result.created, result.updated)\n\n assert_equal(result.instance_count, 1)\n\n with CollectionCheck(\"configuration_values\", result.values) as check:\n # check each item has the correct type according to the rules\n for (item_key, item_val) in result.values.items():\n print(\"item_key: %s\" % item_key)\n print(\"item_val: %s\" % item_val)\n dbaas = instance_info.dbaas\n param = dbaas.configuration_parameters.get_parameter(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version,\n item_key)\n if param.type == 'integer':\n check.has_element(item_key, int)\n if param.type == 'string':\n check.has_element(item_key, str)\n if param.type == 'boolean':\n check.has_element(item_key, bool)\n\n # Test to make sure that another user is not able to GET this config\n reqs = Requirements(is_admin=False)\n test_auth_user = instance_info.user.auth_user\n other_user = CONFIG.users.find_user(reqs, black_list=[test_auth_user])\n other_user_tenant_id = other_user.tenant_id\n client_tenant_id = instance_info.user.tenant_id\n if other_user_tenant_id == client_tenant_id:\n other_user = CONFIG.users.find_user(\n reqs, black_list=[instance_info.user.auth_user,\n other_user])\n print(other_user)\n print(other_user.__dict__)\n other_client = create_dbaas_client(other_user)\n assert_raises(exceptions.NotFound, other_client.configurations.get,\n configuration_info.id)", "def test_configuration(self):\n self.assertEqual(self.Test.adapter_config['write'],\n { 'adapter': TestAdapter, 'foo': 'bar' })", "def test_list_available_servers(self, mock_config_file):\n responses.add(responses.GET, 'https://test2.gigantum.com/.well-known/discover.json',\n json={\"id\": 'another-server',\n \"name\": \"Another server\",\n \"base_url\": \"https://test2.gigantum.com/\",\n \"git_url\": \"https://test2.repo.gigantum.com/\",\n \"git_server_type\": \"gitlab\",\n \"hub_api_url\": \"https://test2.gigantum.com/api/v1/\",\n \"object_service_url\": \"https://test2.api.gigantum.com/object-v1/\",\n \"user_search_url\": \"https://user-search2.us-east-1.cloudsearch.amazonaws.com\",\n \"lfs_enabled\": True,\n \"auth_config_url\": \"https://test2.gigantum.com/.well-known/auth.json\"},\n status=200)\n\n responses.add(responses.GET, 'https://test2.gigantum.com/.well-known/auth.json',\n json={\"audience\": \"api.test.gigantum.com\",\n \"issuer\": \"https://test2-auth.gigantum.com\",\n \"signing_algorithm\": \"RS256\",\n \"public_key_url\": \"https://test2-auth.gigantum.com/.well-known/jwks.json\",\n \"login_url\": \"https://test2.gigantum.com/auth/login\",\n \"token_url\": \"https://test2.gigantum.com/auth/token\",\n \"logout_url\": \"https://test2.gigantum.com/auth/logout\",\n \"client_id\": \"Z6Wl854wqCjNY0D4uJx8SyPyySyfKmAy\",\n \"login_type\": \"auth0\"},\n status=200)\n\n config_instance, working_dir = mock_config_file\n\n servers = config_instance.list_available_servers()\n assert len(servers) == 1\n assert servers[0].id == 'test-gigantum-com'\n assert servers[0].name == \"Gigantum Hub Test\"\n assert servers[0].login_url == \"https://test.gigantum.com/auth/redirect?target=login\"\n assert servers[0].token_url == \"https://test.gigantum.com/auth/token\"\n assert servers[0].logout_url == \"https://test.gigantum.com/auth/redirect?target=logout\"\n\n config_instance.add_server('https://test2.gigantum.com/')\n\n servers = config_instance.list_available_servers()\n assert len(servers) == 2\n for s in servers:\n assert s[0] in ['test-gigantum-com', 'another-server']\n assert s[1] in [\"Gigantum Hub Test\", \"Another 
server\"]", "def test_failure_config(self):\n resource_conf = {\n \"enable_dns_support\": \"true\"\n }\n scan_result = check.scan_resource_conf(conf=resource_conf)\n self.assertEqual(CheckResult.FAILED, scan_result)", "def test_spectrum_section_config(tardis_config_verysimple):\n tardis_config_verysimple[\"spectrum\"][\"start\"] = Quantity(\"2500 angstrom\")\n tardis_config_verysimple[\"spectrum\"][\"stop\"] = Quantity(\"500 angstrom\")\n with pytest.raises(ValueError):\n conf = Configuration.from_config_dict(\n tardis_config_verysimple, validate=True, config_dirname=\"test\"\n )", "def test_get_config(self):\r\n config = self.profile.get_config('testing.conf', TestConfig, storage_args=['this_section'])\r\n self.assertIsInstance(config, TestConfig)\r\n self.assertIsNone(config.save())", "def check_configs(self):\n\n pass", "def __determine_config_type():", "def testGetConfigValue(self):\n test_reader = dependencies.DependencyDefinitionReader()\n\n file_object = io.StringIO(self._TEST_CONFIGURATION_DATA)\n config_parser = configparser.ConfigParser(interpolation=None)\n config_parser.read_file(file_object)\n\n configuration_value = test_reader._GetConfigValue(\n config_parser, 'dfdatetime', 'dpkg_name')\n self.assertEqual(configuration_value, 'python3-dfdatetime')\n\n with self.assertRaises(configparser.NoSectionError):\n test_reader._GetConfigValue(config_parser, 'bogus', 'dpkg_name')\n\n configuration_value = test_reader._GetConfigValue(\n config_parser, 'dfdatetime', 'bogus')\n self.assertIsNone(configuration_value)", "def config():\n data = \"\"\"[YESSSSMS]\nLOGIN = 03211234567\nPASSWD = MySecr3t\nDEFAULT_TO = +43664123123123\nMVNO = YESSS\n\"\"\"\n with mock.patch(\n \"configparser.open\",\n # \"builtins.open\",\n mock.mock_open(read_data=data),\n ):\n yield", "def test_get_value_success(self):\r\n name = 'option1'\r\n option = self.config.options[name]\r\n value = self.config.values[name]\r\n\r\n self.assertEqual(self.config.get_value(name, option), value)", "def mock_config():\n from .. 
import config\n\n _old_fs = os.getenv('FREESURFER_HOME')\n if not _old_fs:\n os.environ['FREESURFER_HOME'] = mkdtemp()\n\n filename = Path(pkgrf('fmriprep', 'data/tests/config.toml'))\n settings = loads(filename.read_text())\n for sectionname, configs in settings.items():\n if sectionname != 'environment':\n section = getattr(config, sectionname)\n section.load(configs, init=False)\n config.nipype.omp_nthreads = 1\n config.nipype.init()\n config.loggers.init()\n config.init_spaces()\n\n config.execution.work_dir = Path(mkdtemp())\n config.execution.bids_dir = Path(pkgrf('fmriprep', 'data/tests/ds000005')).absolute()\n config.execution.fmriprep_dir = Path(mkdtemp())\n config.execution.init()\n\n yield\n\n shutil.rmtree(config.execution.work_dir)\n shutil.rmtree(config.execution.fmriprep_dir)\n\n if not _old_fs:\n del os.environ[\"FREESURFER_HOME\"]", "def test_get_config_th(self):\n self.assertTrue(settings.TH_TRELLO)\n self.assertIn('consumer_key', settings.TH_TRELLO)\n self.assertIn('consumer_secret', settings.TH_TRELLO)", "def test_get_hyperflex_config_result_list(self):\n pass", "def test_config():\n if not os.path.exists(CONFIG_DIR):\n raise mupub.BadConfiguration('Configuration folder not found.')\n if not os.path.exists(_CONFIG_FNM):\n raise mupub.BadConfiguration('Configuration file not found.')\n if not os.path.exists(getDBPath()):\n raise mupub.BadConfiguration('Local database not found.')\n if len(CONFIG_DICT) == 0:\n raise mupub.BadConfiguration('Configuration was not loaded.')", "def test_refecth_auth_config(self, mock_config_file):\n responses.add(responses.GET, 'https://test.gigantum.com/.well-known/auth.json',\n json={\"audience\": \"api.test.gigantum.com\",\n \"issuer\": \"https://auth.gigantum.com/\",\n \"signing_algorithm\": \"RS256\",\n \"public_key_url\": \"https://test.gigantum.com/gigantum/auth/jwks.json\",\n \"login_url\": \"https://test.gigantum.com/auth/redirect?target=login\",\n \"token_url\": \"https://test.gigantum.com/auth/token\",\n \"logout_url\": \"https://test.gigantum.com/auth/redirect?target=logout\",\n \"client_id\": \"Z6Wl854wqCjNY0D4uJx8SyPyySyfKmAy\",\n \"login_type\": \"auth0\"},\n status=200)\n\n config_instance, working_dir = mock_config_file\n config_instance.set_current_server(\"test-gigantum-com\")\n\n # Make sure cache is loaded\n config_instance.get_server_configuration()\n auth_config = config_instance.get_auth_configuration()\n assert auth_config.public_key_url == \"https://auth.gigantum.com/.well-known/jwks.json\"\n\n # Refetch\n config_instance.refetch_auth_config()\n\n # Verify\n auth_config = config_instance.get_auth_configuration()\n assert auth_config.public_key_url == \"https://test.gigantum.com/gigantum/auth/jwks.json\"\n\n # Do it again because should be in redis now\n auth_config = config_instance.get_auth_configuration()\n assert auth_config.public_key_url == \"https://test.gigantum.com/gigantum/auth/jwks.json\"\n\n # Explicitly check redis\n data = config_instance._get_redis_client().hgetall(config_instance.AUTH_CONFIG_CACHE_KEY)\n assert data['public_key_url'] == \"https://test.gigantum.com/gigantum/auth/jwks.json\"\n\n # Explicity check persisted file\n file_data = config_instance._load_current_configuration()\n assert file_data['auth']['public_key_url'] == \"https://test.gigantum.com/gigantum/auth/jwks.json\"", "def test_config_must_exist(cls, values):\n configs = [c.config for c in values.get('configs')]\n for test in values.get('tests'):\n if test.config not in configs:\n raise ValueError(\n f\"Test '{test.test}' gave the 
config '{test.config}', but \"\n \"this config does not exist in the file \"\n f\"'{values.get('yaml')}'. Configs detected : {configs} \\n\")\n return values", "def test_multiconf(self):\n\n tcd0, tcd1 = TestConfDriver(), TestConfDriver()\n\n tcd0.confbypath['test0'] = configuration(\n category(\n 'test',\n Parameter('test0', value='0')\n )\n )\n tcd0.confbypath['test1'] = configuration(\n category(\n 'test',\n Parameter('test1', value='1')\n )\n )\n tcd1.confbypath['test1'] = configuration(\n category(\n 'test',\n Parameter('test2', svalue='=@test0/test.test0 + @test1/test.test1'),\n Parameter('test3', value=3)\n )\n )\n\n configurable = Configurable(drivers=[tcd0, tcd1])\n\n configurable.applyconfiguration(\n targets=[configurable], paths=['test0', 'test1']\n )\n\n self.assertEqual(configurable.test0, '0')\n self.assertEqual(configurable.test1, '1')\n self.assertEqual(configurable.test2, '01')\n self.assertEqual(configurable.test3, 3)", "def test_config_device_init_with_defaults(get_config, monkeypatch):\n notbase_config = {'not_presented': 1}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', notbase_config)\n cfg = get_config(DeviceConfig, base_config)\n\n assert isinstance(cfg, DeviceConfig), 'wrong class'\n assert cfg.data == notbase_config, 'bad config loaded'", "def test_using_cfg_config(line_sorted_checker, capsys):\n want = \"\"\"\nphmdoctest- project.md => .gendir-suite-cfg/test_project.py\nphmdoctest- doc/directive1.md => .gendir-suite-cfg/test_doc__directive1.py\nphmdoctest- doc/directive2.md => .gendir-suite-cfg/test_doc__directive2.py\nphmdoctest- doc/directive3.md => .gendir-suite-cfg/test_doc__directive3.py\nphmdoctest- doc/example1.md => .gendir-suite-cfg/test_doc__example1.py\nphmdoctest- doc/example2.md => .gendir-suite-cfg/test_doc__example2.py\nphmdoctest- doc/inline_example.md => .gendir-suite-cfg/test_doc__inline_example.py\nphmdoctest- tests/managenamespace.md => .gendir-suite-cfg/test_tests__managenamespace.py\nphmdoctest- tests/one_code_block.md => .gendir-suite-cfg/test_tests__one_code_block.py\nphmdoctest- tests/output_has_blank_lines.md => .gendir-suite-cfg/test_tests__output_has_blank_lines.py\nphmdoctest- tests/setup_only.md => .gendir-suite-cfg/test_tests__setup_only.py\nphmdoctest- tests/twentysix_session_blocks.md => .gendir-suite-cfg/test_tests__twentysix_session_blocks.py\nphmdoctest- tests/generate.cfg generated 12 pytest files\n\"\"\"\n phmdoctest.main.generate_using(config_file=Path(\"tests/generate.cfg\"))\n drop_newline = want.lstrip()\n line_sorted_checker(drop_newline, capsys.readouterr().out)", "def test_get_configuration(self, clean_mongo, test_case):\n self.logger.info(\"RUN: %s\", test_case[\"name\"])\n\n uuidv4 = str(uuid.uuid4())\n tenant, username, password = (\n \"test.mender.io-\" + uuidv4,\n \"some.user+\" + uuidv4 + \"@example.com\",\n \"secretsecret\",\n )\n tenant = create_org(tenant, username, password, \"enterprise\")\n update_tenant(tenant.id, addons=[\"configure\"])\n login(tenant.users[0], test_case[\"use_personal_access_token\"])\n\n admin_user = tenant.users[0]\n test_case[\"user\"][\"name\"] = test_case[\"user\"][\"name\"].replace(\"UUID\", uuidv4)\n if test_case[\"roles\"]:\n create_roles(tenant.users[0].token, test_case[\"roles\"])\n test_user = create_user(tid=tenant.id, **test_case[\"user\"])\n login(test_user, test_case[\"use_personal_access_token\"])\n\n # Initialize tenant's devices\n grouped_devices = setup_tenant_devices(tenant, test_case[\"device_groups\"])\n\n deviceconf_MGMT = 
ApiClient(deviceconfig.URL_MGMT)\n\n device_id = grouped_devices[test_case[\"view_group\"]][0].id\n\n # set the configuration using admin account\n rsp = deviceconf_MGMT.with_auth(admin_user.token).call(\n \"PUT\",\n deviceconfig.URL_MGMT_DEVICE_CONFIGURATION.format(id=device_id),\n body={\"foo\": \"bar\"},\n )\n assert rsp.status_code == 204, rsp.text\n\n # Attempt to get configuration\n rsp = deviceconf_MGMT.with_auth(test_user.token).call(\n \"GET\", deviceconfig.URL_MGMT_DEVICE_CONFIGURATION.format(id=device_id)\n )\n assert rsp.status_code == test_case[\"get_configuration_status_code\"], rsp.text\n self.logger.info(\"PASS: %s\" % test_case[\"name\"])", "def test_config(self, mocked_callable_loader, mocked_load_config):\n config_filename = 'aconfigfile'\n finder = importer.Finder(config_filename)\n mocked_load_config.assert_called_once_with(config_filename)\n\n module_config = finder.config.get('fake_package.fake_module')\n self.assertTrue(module_config is not None)\n self.assertTrue('callable' in module_config)\n self.assertTrue('config' in module_config)", "def mocked_config():\n mocked_config = mock.create_autospec(Config)\n mocked_config._reportportal_configured = True\n return mocked_config", "def testReadConfig(loggingMixin, configType, configTypeString):\n # This could be different than configType if we want to use a string.\n # We use a different object because we still want to use the standard config type later in the test.\n configTypeForReadingConfig = configType\n if configTypeString:\n configTypeForReadingConfig = configType.name\n (parameters, filesRead) = config.readConfig(configTypeForReadingConfig)\n\n filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"configTestFiles\", \"{}ConfigRef.yaml\".format(configType.name))\n\n # We need to treat whether the file exists with a bit of care.\n # NOTE: Since the parametization causes this to run mulitple times, some will pass and\n # and some will fail, even when creating the configuration files. This is fine.\n if os.path.exists(filename):\n # Access the expected values\n with open(filename, \"r\") as f:\n expected = yaml.load(f.read(), Loader = yaml.SafeLoader)\n else:\n # For making the reference\n with open(filename, \"w\") as f:\n yaml.dump(parameters, f)\n logger.warning(\"Creating configuration reference for {} module\".format(configType.name))\n # We don't want to go further - we're just creating the reference.\n assert False\n\n # Don't compare the full \"_users\" values because they will always be different due to differences in hashing\n paramUsers = parameters.pop(\"_users\", None)\n expectedUsers = expected.pop(\"_users\", None)\n # However, the beginning should match (same idea as in `testBcrypt`)\n lengthToCheck = 7\n # It won't always exist, so we need to check for it first.\n if paramUsers:\n for k, v in iteritems(paramUsers):\n assert v[:lengthToCheck] == expectedUsers[k][:lengthToCheck]\n\n # Apparently the order of these lists can vary between different systems. 
We don't care about the order\n # - just the values themselves - so we compare them as sets, which don't depend on order.\n paramTemplates = parameters.pop(\"availableRunPageTemplates\", None)\n expectedTemplates = expected.pop(\"availableRunPageTemplates\", None)\n # It won't always exist, so we need to check for it first.\n if paramTemplates:\n assert set(paramTemplates) == set(expectedTemplates)\n\n # Everything else should be identical.\n assert parameters == expected", "def test_all_configs_available():\n\n app_configs = application_services.get_configs()\n assert all(name in app_configs for name in ['TITLE', 'ENCODING', 'FLASK_LOG_LEVEL',\n 'SERVER_NAME', 'SERVER_HOST', 'SERVER_PORT',\n 'ENV', 'DEBUG', 'TESTING', 'UNIT_TESTING'])", "def test_get_with_default(self):\n self.assertEqual(self.config.get('basic','salutation'),None)\n self.assertEqual(self.config.get('basic','salutation','bonjour'),\n 'bonjour')", "def _get_test(self, config):\n expected_options = {'goodpassword', 'badpassword'}\n _warn_on_extra(set(config.options('test')) - expected_options -\n self.defaults, 'test section option(s)')\n\n get = partial(config.get, 'test')\n\n self.goodpassword = get('GOODPASSWORD')\n self.badpassword = get('BADPASSWORD')", "def test_config_list():\n client = TestClient()\n client.run('config list')\n assert \"Supported Conan *experimental* global.conf and [conf] properties:\" in client.out\n for key, description in BUILT_IN_CONFS.items():\n assert \"{}: {}\".format(key, description) in client.out", "def test_config_is_loaded(config):\n assert config[\"DEBUG\"] is False", "def test_init_from(config):\n\n config.init_from()\n config.init_from(file='../../config.cfg')", "def test_config_as_dict():\n c = core.Config(foo='bar')\n\n # check custom configuration\n assert c['foo'] == 'bar'\n \n # check len and iter behavior\n i = 0\n for k in c:\n i += 1\n assert len(c) == i\n assert 'datapath' in c._keys\n\n # check default get behavior\n assert c.get('doesNotExist') is None\n assert c.get('doesNotExists', 'foobar') == 'foobar'", "def testGetConfDict():\n\n conf = naiveConf.NaiveConf(exampleConfFname)\n confDict = conf.getConfDict()\n assert type(confDict) == dict\n assert confDict['x'] == conf.x\n assert confDict['y'] == conf.y\n assert confDict['L'] == conf.L", "def test_call_config(self):\n self.assertTrue(self.Foo._passed)", "def test_pkgutil(self):\n print(utilities.CONFIG_FILE)\n assert utilities.get_config('ROBINHOOD', 'oauth_endpoint')", "def config():", "def config():", "def test_invalid_config() -> None:\n config = {\"statsd\": {\"host1\": \"host1\"}}\n\n with pytest.raises(vol.Invalid):\n statsd.CONFIG_SCHEMA(None)\n with pytest.raises(vol.Invalid):\n statsd.CONFIG_SCHEMA(config)", "def test_expected_configurations_parameters(self):\n allowed_attrs = [\"configuration-parameters\"]\n instance_info.dbaas.configuration_parameters.parameters(\n instance_info.dbaas_datastore,\n instance_info.dbaas_datastore_version)\n resp, body = instance_info.dbaas.client.last_response\n attrcheck = AttrCheck()\n config_parameters_dict = json.loads(body.decode())\n attrcheck.contains_allowed_attrs(\n config_parameters_dict, allowed_attrs,\n msg=\"Configurations parameters\")\n # sanity check that a few options are in the list\n config_params_list = config_parameters_dict['configuration-parameters']\n config_param_keys = []\n for param in config_params_list:\n config_param_keys.append(param['name'])\n expected_configs = self.expected_default_datastore_configs()\n expected_config_params = 
expected_configs.get('parameters_list')\n # check for duplicate configuration parameters\n msg = \"check for duplicate configuration parameters\"\n assert_equal(len(config_param_keys), len(set(config_param_keys)), msg)\n for expected_config_item in expected_config_params:\n assert_true(expected_config_item in config_param_keys)", "def test_register_openapi_spec_yaml_2():\n app = App(__name__)\n spec_configs = [SpecConfig(**SPEC_CONFIG)]\n res = register_openapi(app=app, specs=spec_configs)\n assert isinstance(res, App)", "def test_configure():\n\n configs = DTO()\n configs.update(fake_name='fake_name', fake_id='fake_id',\n fake_number=33, fake_value='fake_value')\n application_services.configure(configs)\n\n app_configs = application_services.get_configs()\n assert all(name in app_configs for name in\n ['FAKE_NAME', 'FAKE_ID', 'FAKE_NUMBER', 'FAKE_VALUE'])\n\n assert not any(name in app_configs for name in\n ['fake_name', 'fake_id', 'fake_number', 'fake_value'])", "def test_expected_config(expectedconfig):\n expected = expectedconfig.read_text()\n config = CONFIGSDIR / expectedconfig.name\n\n assert dumpconfig(config) == expected", "def test_set_config_options(self):\n # set up config\n config.set_config_file(os.path.join(path_to_module, \"test_config.conf\"))\n config.setup()\n # Make sure id is initially set to what we expect\n self.assertEqual(config.read_config_option('client_id'), 'uploader')\n # Set and test to a new id\n config.set_config_options(client_id=\"new_id\")\n self.assertEqual(config.read_config_option('client_id'), \"new_id\")", "def test_config_read(get_config, write_config_fixture):\n test_dict = {'test': 'main'}\n cfg = get_config(Config, test_dict)\n write_config_fixture(test_dict, cfg.config_path)\n\n read = cfg.read()\n assert isinstance(read, dict), f'read return wrong type: {type(read)}'\n assert read == test_dict, 'failed to read config'", "def test_happy_generate_config(self):\n yaml_input = yaml.load(MOCK_YAML)\n yaml_with_dot_notation = common.dotdict(yaml_input)\n generate_config_return = subnets_template.generate_config(yaml_with_dot_notation)\n self.assertEquals(generate_config_return, EXPECTED_OUTPUT)" ]
[ "0.7582333", "0.67213416", "0.6621223", "0.66077214", "0.6536261", "0.65335846", "0.64342177", "0.6432759", "0.6411313", "0.6303535", "0.62789136", "0.6260329", "0.62516683", "0.6249136", "0.6229848", "0.6223742", "0.6223742", "0.62210363", "0.61939514", "0.617981", "0.61518675", "0.61431146", "0.6132865", "0.61304533", "0.61266756", "0.6122041", "0.6120953", "0.6092638", "0.6089856", "0.6081552", "0.6071167", "0.6061915", "0.6047363", "0.60379153", "0.601648", "0.6016168", "0.60155314", "0.59974164", "0.5993903", "0.59934545", "0.5992539", "0.5974639", "0.59598655", "0.5958588", "0.5954219", "0.5946473", "0.5939244", "0.59354", "0.5921617", "0.59154856", "0.59035754", "0.5902159", "0.5897341", "0.5891499", "0.58769107", "0.58698237", "0.58671546", "0.5867055", "0.5855529", "0.58508706", "0.58453906", "0.583886", "0.5836666", "0.5829158", "0.5815659", "0.580785", "0.5806911", "0.57834005", "0.5778455", "0.5771907", "0.57712436", "0.5764271", "0.5762722", "0.5749821", "0.5748353", "0.5739072", "0.5735378", "0.57348967", "0.57222474", "0.5719733", "0.5719505", "0.57185215", "0.57171243", "0.5694629", "0.5690132", "0.56887144", "0.5681308", "0.5681019", "0.5669881", "0.5669052", "0.56466323", "0.56466323", "0.5639198", "0.5632031", "0.5618308", "0.56182766", "0.5618087", "0.5607173", "0.56025285", "0.5601217" ]
0.8165602
0
Check that the given modifier name is a valid one. If not, raise an exception based on the violation.
def _isValidModifier(self, modifiers, modifierName):
    if Modifiers.ILLEGAL_MODIFIER_PATTER.search(modifierName):
        msg = ('Modifier named "{0}" in sheet {1} contains illegal characters. '
               'Supported characters are a to z, A to Z, 0 to 9 and underscore "_". '
               'Spaces are not allowed characters, use underscore instead. For example '
               '"some_mod".'
               ).format(modifierName, MODIFIER_LIST_SHEET_NAME)
        raise errors.UnsupportedCharacter(MODIFIER_LIST_SHEET_NAME, msg)

    if modifierName in map(lambda mod: mod.name, modifiers):
        msg = ('Modifier named "{0}" already exists in the sheet {1}. '
               'Modifier names must be unique. To fix remove or rename '
               'duplicates.'
               ).format(modifierName, MODIFIER_LIST_SHEET_NAME)
        raise errors.DuplicateError(MODIFIER_LIST_SHEET_NAME, msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validateName(name):\r\n if not name:\r\n raise IllegalName('Name can not be an empty string.')\r\n\r\n m = _NAME_RE.match(name)\r\n\r\n if m is None or m.group(0) != name:\r\n raise IllegalName('Name has to start with a letter followed by an '\r\n 'arbitrary number of alphanumeric characters or '\r\n 'underscores.')", "def validname(name):\r\n return len(name)>0 and (\r\n Context.__invalid_character.search(name) is None)", "def raiseNameError(text):\n pattern = re.compile(\"[a-zA-Z]\")\n if not pattern.match(text):\n raise Exception(\"Invalid Name Entered\")", "def name_valid(name):\n return name.isalpha()", "def _check_is_name_valid(self, name):\n if name in self.forbidden_names or name.endswith(\n self.forbidden_extensions) or self.__check_is_match_regex(name):\n return False\n return True", "def _validate_mod(self, mod: Modifier):\r\n return not mod.name in self.mods", "def test_valid_name_invalid():\n assert not valid_name(\"\")\n assert not valid_name(\"a\"*21)", "def _is_valid_varname(self, name):\n if name in RESERVED or re.match(r'^str([0-9]+|L)$', name): return False\n return True if VALID_NAME_RE.match(name) else False", "def invalid_name(name):\n if any(not item.isalpha() for item in str(name)):\n return True\n return False", "def _is_valid_varname(self, name):\n if name in RESERVED or re.match(r'^str[0-9]+$', name): return False\n return True if VALID_NAME_RE.match(name) else False", "def validate_name(name: str) -> None:\n\n # Disallow empty.\n if not name:\n raise CleanError('Feature set name cannot be empty.')\n\n # Require starting with a letter.\n if not name[0].isalpha():\n raise CleanError(\n f\"Invalid feature set name '{name}'\"\n ' - names must start with a letter.'\n )\n\n # Require only letters, numbers, and underscores.\n if not name.replace('_', '').isalnum():\n raise CleanError(\n f\"Invalid feature set name '{name}'\"\n ' - only letters, numbers, and underscores are allowed.'\n )\n\n # Require all lowercase.\n if not name.islower():\n raise CleanError(\n f\"Invalid feature set name '{name}'\"\n ' - only lowercase letters are allowed.'\n )\n\n # Disallow leading, trailing, or consecutive underscores.\n # (these will result in a '' in the split results which evals to False)\n if not all(name.split('_')):\n raise CleanError(\n f\"Invalid feature set name '{name}'\"\n ' - leading, trailing, and consecutive underscores are'\n ' not allowed.'\n )", "def validate_name(name:str) -> bool:\r\n return name.isalpha() and name.count(\" \") == 0 and len(name) >= 2", "def validateMemberName(n):\n try:\n if len(n) < 1:\n raise Exception('Name must be at least one byte in length')\n if len(n) > 255:\n raise Exception('Name exceeds maximum length of 255')\n if n[0].isdigit():\n raise Exception('Names may not begin with a digit')\n if mbr_re.search(n):\n raise Exception(\n 'Names contains a character outside the set [A-Za-z0-9_]')\n except Exception as e:\n raise MarshallingError(f'Invalid member name \"{n}\": {str(e)}')", "def _assert_valid_name(name, container):\n container.file.name_validation(container.directory, name)", "def validateNamePart(self, passed_name):\n ## Declaring a Flag to control a while loop\n name_ok = False\n ## While loop to have user retry their input if they enter incorrectly\n while not name_ok:\n if passed_name.isalpha():\n name_ok = True\n return True\n\n else:\n print(\"You have entered an invalid character. 
Please try again.\")\n return False", "def is_valid(name):\n return bool(name)", "def test_nonreserved_name(self):\n try:\n field_name_validator('_identifier')\n except ValidationError:\n self.fail('Field name raised ValidationError unexpectedly')", "def _check_name(self):\n\t\tpass", "def test_valid_name_valid():\n assert valid_name(\"1\")\n assert valid_name(\"a\"*20)", "def check_name(self, name):\n status, msg = utils.validate_name(name, \"36\", \"storageview name\")\n if not status:\n LOG.error(msg)\n self.module.fail_json(msg=msg)\n else:\n LOG.info(msg)", "def _validate_content_name(content_name: str, performative: str) -> Tuple[bool, str]:\n # check content name's format\n if not _is_valid_regex(CONTENT_NAME_REGEX_PATTERN, content_name):\n return (\n False,\n \"Invalid name for content '{}' of performative '{}'. Content names must match the following regular expression: {} \".format(\n content_name, performative, CONTENT_NAME_REGEX_PATTERN\n ),\n )\n\n # check content name is not a reserved name\n if _is_reserved_name(content_name):\n return (\n False,\n \"Invalid name for content '{}' of performative '{}'. This name is reserved.\".format(\n content_name,\n performative,\n ),\n )\n\n return (\n True,\n \"Content name '{}' of performative '{}' is valid.\".format(\n content_name, performative\n ),\n )", "def name_error(name):\n\n if len(name) > MAX_NAME_LENGHT:\n raise InputError(description=\"Name cannot be more than 20 characters long\")", "def _validate_rule_target_name(name: str) -> None:\n if not name:\n raise common_exceptions.RuleTargetValidationError(\n \"A `name` field must be supplied.\"\n )", "def verify_name(name):\n try:\n if name.index(' '):\n return False\n except ValueError:\n return True", "def validate_team_name(name):\n if not re.match('^[A-Za-z0-9_]*$', name):\n print('INVALID NAME. LETTERS, NUMBERS AND UNDERSCORES ONLY')\n return False\n elif len(name) > 10:\n print('INVALID NAME. 10 CHARACTERS MAX')\n return False\n elif len(name) == 0:\n print('INVALID NAME. NOT LONG ENOUGH')\n else:\n return True", "def validate_interval_name(name):\n msg = 'invalid interval name \"{}\"'.format(name)\n if name[0] not in ['+', '-']:\n raise ValueError(msg)\n if name[1] not in ['d', 'm', 'P', 'M', 'A']:\n raise ValueError(msg)\n try:\n int(name[2:])\n except ValueError:\n raise ValueError(msg)", "def validate_custom_name(self, name):\n if not re.match( r'(/?[a-zA-Z_][a-zA-Z0-9_]*)+$', name):\n raise ValueError('Invalid name for node (%s)' % name)\n return", "def nameIsValid(self, name):\n self.notify.debug('nameIsValid')\n if (name in self.usedNames):\n return OTPLocalizer.ToonAlreadyExists % (name)\n\n problem = NameCheck.checkName(name, font=self.nameEntry.getFont())\n if problem:\n return problem\n\n # name has passed local checks\n return None", "def _validate_param(name, value):\n\n # First things first -- check that we have a legal parameter name.\n try:\n validator = _legal_params[name]\n except KeyError:\n raise ViewVCException(\"An illegal parameter name was provided.\", \"400 Bad Request\")\n\n # Is there a validator? Is it a regex or a function? Validate if\n # we can, returning without incident on valid input.\n if validator is None:\n return\n elif hasattr(validator, \"match\"):\n if validator.match(value):\n return\n else:\n if validator(value):\n return\n\n # If we get here, the input value isn't valid.\n raise ViewVCException(\n 'An illegal value was provided for the \"%s\" parameter.' 
% (name), \"400 Bad Request\"\n )", "def validated_name(cls, name):\n if (name[:5] == 'hive-'\n and name[5] in ['1', '2', '3']\n and re.match(r'^hive-[123]\\d{4,6}$', name)):\n return name\n return None", "def validate_names(name):\n return isinstance(name, str) and not re.search(r'[\\s]', name)", "def isEditName(id):\n for char in id:\n if re.compile('[0-9]+').match(char[0]) == None:\n print NameError(\"'%s' is not valid name. \\n Id should be numeric\" % (name))\n return -1\n return 0", "def test_name_validation(self, attr):\n kwargs = {'kind': POSITIONAL_ONLY, attr: 3}\n with pytest.raises(TypeError) as excinfo:\n FParameter(**kwargs)\n assert excinfo.value.args[0] == \\\n '{} must be a str, not a {}'.format(attr, 3)", "def _check_key_name(cls, name):\n return (isinstance(name, basestring) and\n re.match('^[A-Za-z][A-Za-z0-9_]*$', name) and\n not hasattr(cls, name))", "def test_reserved_name(self):\n with self.assertRaises(ValidationError):\n field_name_validator('_id')", "def assert_valid_attribute(self, name):\n if name.startswith('_'):\n return\n self.assert_known_field(name)", "def is_valid_attribute_name(self, name):\n try:\n self.validate_attribute_name(name)\n return True\n except etal.LabelsSchemaError:\n return False", "def validate_name(name, units=None, strict=False):\n\n if not units:\n units = TILE_SIZES.keys()\n else:\n if isinstance(units, str):\n units = [units]\n\n for unit in units:\n if unit not in TILE_SIZES.keys():\n raise ValueError(\"{0} is not a valid kvadratnet unit.\".format(unit))\n\n if strict:\n begin, end = \"^\", \"$\"\n else:\n begin, end = \"\", \"\"\n\n for unit in units:\n expr = \"{begin}{expr}{end}\".format(begin=begin, expr=REGEX[unit], end=end)\n if re.match(expr, name):\n return True\n\n return False", "def check_name(name):\n name = sanitize_name(name)\n for letter in name:\n if letter not in all_letters:\n # print(f\"Bad letter = {letter}\")\n return False\n role = extract_role(name)\n # remove group\n name = name.replace(f' - {role}', '')\n try:\n parts = name.split(' ')\n firstname = parts[0].title()\n if firstname[0] not in letters:\n return False\n for letter in firstname[1:]:\n if letter not in LETTERS:\n return False\n familynames = parts[1:]\n for familyname in familynames:\n if familyname[0] not in letters:\n return False\n for letter in familyname[1:]:\n if letter not in LETTERS:\n return False\n return True\n except:\n return False", "def isValidPart(name):\n\tfor n in name_forms:\n\t\tif re.match(n, name.lower()) is not None:\n\t\t\treturn True\n\treturn False", "def legal_name(name, is_param_name=False):\n if name.startswith('_'):\n return False\n\n if name in ('self',):\n return False\n\n if keyword.iskeyword(name):\n return False\n\n regex = r'^[a-zA-Z][a-zA-Z0-9_]*$' if is_param_name else (\n r'^[a-zA-Z][.\\w-]*$')\n return bool(re.match(regex, name))", "def check_valid_key_name(name):\n if type(name) not in [str]:\n return False\n bad_chars = [\"*\", \".\", \"&&&&\"]\n for k in bad_chars:\n if k in name:\n return False\n return True", "def _validate_name(self, key, name):\n \n name = Project._condition_name(name)\n \n return name", "def verify_naming(self, reserved):\n for w in reserved:\n if w in self.decisions:\n raise ParseError('Duplicate variable/block name \"{}\"'.format(w))", "def _validate_performatives(performative: str) -> Tuple[bool, str]:\n # check performative is not a reserved name\n if _is_reserved_name(performative):\n return (\n False,\n \"Invalid name for performative '{}'. 
This name is reserved.\".format(\n performative,\n ),\n )\n\n # check performative's format\n if not _is_valid_regex(PERFORMATIVE_REGEX_PATTERN, performative):\n return (\n False,\n \"Invalid name for performative '{}'. Performative names must match the following regular expression: {} \".format(\n performative, PERFORMATIVE_REGEX_PATTERN\n ),\n )\n\n return True, \"Performative '{}' is valid.\".format(performative)", "def validateName(name):\n if not name:\n # This happens when the name is an existing directory\n raise BadCommand('Please give the name of a layer.')\n # 'setup' is a valid controller name, but when paster controller is ran\n # from the root directory of a project, importing setup will import the\n # project's setup.py causing a sys.exit(). Blame relative imports\n if name != 'setup' and can_import(name):\n raise BadCommand(\n \"\\n\\nA module named '%s' is already present in your \"\n \"PYTHON_PATH.\\nChoosing a conflicting name will likely cause \"\n \"import problems in\\nyour controller at some point. It's \"\n \"suggested that you choose an\\nalternate name, and if you'd \"\n \"like that name to be accessible as\\n'%s', add a route \"\n \"to your projects config/routing.py file similar\\nto:\\n\"\n \" map.connect('%s', controller='my_%s')\" \\\n % (name, name, name, name))\n return True", "def CHECK_NAME(name):\n if WORDPAT.match(name):\n return name\n return None", "def name_check(f_name):\r\n if len(f_name) == 0:\r\n print('The first name must be filled in.')\r\n if len(f_name) < 2:\r\n print(f_name + ' is not a valid name. Itis too short.')", "def _check_name(\n self,\n node_type: str,\n name: str,\n node: nodes.NodeNG,\n confidence: interfaces.Confidence = interfaces.HIGH,\n disallowed_check_only: bool = False,\n ) -> None:\n\n def _should_exempt_from_invalid_name(node: nodes.NodeNG) -> bool:\n if node_type == \"variable\":\n inferred = utils.safe_infer(node)\n if isinstance(inferred, nodes.ClassDef):\n return True\n return False\n\n if self._name_allowed_by_regex(name=name):\n return\n if self._name_disallowed_by_regex(name=name):\n self.linter.stats.increase_bad_name(node_type, 1)\n self.add_message(\n \"disallowed-name\", node=node, args=name, confidence=interfaces.HIGH\n )\n return\n regexp = self._name_regexps[node_type]\n match = regexp.match(name)\n\n if _is_multi_naming_match(match, node_type, confidence):\n name_group = self._find_name_group(node_type)\n bad_name_group = self._bad_names.setdefault(name_group, {})\n # Ignored because this is checked by the if statement\n warnings = bad_name_group.setdefault(match.lastgroup, []) # type: ignore[union-attr, arg-type]\n warnings.append((node, node_type, name, confidence))\n\n if (\n match is None\n and not disallowed_check_only\n and not _should_exempt_from_invalid_name(node)\n ):\n self._raise_name_warning(None, node, node_type, name, confidence)\n\n # Check TypeVar names for variance suffixes\n if node_type == \"typevar\":\n self._check_typevar(name, node)", "def _is_file_valid(name: str) -> bool:\n return not name.startswith(\".\")", "def validate(self, name):\n return name in self.dict", "def valid_param(name, param, min_length, max_length, regex):\n\n if not StringValidator.is_valid_type(param):\n logging.error(f\"{name} is an invalid type - expecting string\")\n return False\n\n string_validator = StringValidator(\n param, min_length=min_length, max_length=max_length, regex=regex\n )\n\n if not string_validator.is_valid_length():\n logging.error(f\"{name} is invalid length {param}\")\n return False\n\n if 
not string_validator.valid_chars_only():\n logging.error(f\"{param} the param for {name} contains invalid characters.\")\n return False\n\n return True", "def is_valid(key):\n return key[0:2] == \"MR\" and key[2:].isdigit() and len(key) in [9, 10]", "def check_schema_name(name: str):\n if not is_valid_schema_name(name):\n raise ValidationError(\"Invalid string used for the schema name.\")", "def get_name_validation_error(name):\n return '' if name else accounts.REQUIRED_FIELD_NAME_MSG", "def validate_name(self, name):\n import re\n\n if not re.findall(\"^[\\w',]+$\", name):\n self.msg(\"That category name contains invalid characters.\")\n return False\n return True", "def test_invalid_optimizer_name_raises_error(self):\n\n self.assertRaises(InvalidVariableNameError,\n choose_optimizer, 'asd', None)", "def check_name(name, allow_services=False):", "def _validate(self, s: str):\n if not s.isidentifier():\n raise ValueError(('Invalid Django project name \"{}\": '\n 'must be a valid Python identifier').format(s))", "def _validate(self, s: str):\n if not s.isidentifier():\n raise ValueError(('Invalid Django project name \"{}\": '\n 'must be a valid Python identifier').format(s))", "def test_raises_useful_exception(self):\n exp = Expression(r'inalid (\\d]', {}, [], lambda x: x)\n with self.assertRaises(exp.InvalidPattern):\n assert not exp.pattern", "def IsVPCNameValid(vpc):\n if len(vpc) < 1 or len(vpc) > 63:\n return False\n return bool(re.match('^[a-z]$|^[a-z][a-z0-9-]*[a-z0-9]$', vpc))", "def validate_freezable_name(name):\n\n if re.search(\"[./:]+\", name) is None:\n return name\n else:\n raise FreezableNameInvalidError(f'Invalid Freezable Name: \"{name}\"')", "def validName(varname):\r\n if (len(varname[0])>32):\r\n return False\r\n if not(varname[0][0].isalpha()):\r\n return False \r\n for ch in varname[0][1:]:\r\n if not(ch.isalpha() or ch.isdigit() or ch=='_'):\r\n return False\r\n \r\n return True", "def verify_name_syntax(sv, name, here, argtext, last):\r\n if name.find(Equal)!=-1: # \"=\" is not allowed in names\r\n print(\"\\n\", Err_equal_in_name, \"\\n\", name) # *** Illegal character in name: \"+ Equal +\" *** \r\n raise ReferenceError\r\n\r\n if not name or here==0: # name may not start with a bracket\r\n print(\"\\n\", Err_empty_name) # *** Syntax error: empty name *** \r\n print(name)\r\n if num>2: # common source of empty name error\r\n print(Help_continuation+Mline+\"' ):\") # you may have meant (with continuation character '\"+Mline):\r\n print(lines[num-3].strip(Space)+Col, Mline, Crlf, name) # suggested correction\r\n raise ReferenceError\r\n\r\n if argtext: # name is a function or a dict\r\n fun=name[:here]\r\n if fun in Internal_Functions: \r\n print(\"\\n\", Err_redef_internal_func) # *** Error: You cannot define an internal function *** \r\n print(fun, \"in\", fun+Obr+argtext+Cbr)\r\n raise ReferenceError\r\n \r\n if name[last:]: # name must end with closing bracket after args\r\n print(\"\\n\", Err_text_after_args) # *** Syntax error: text found after arguments *** \r\n print(name)\r\n raise ReferenceError", "def checkValidClusterName(self):\n p = re.compile('^[a-zA-Z0-9][a-zA-Z0-9_.\\-]*[a-zA-Z0-9]$')\n if len(self.clusterName) < 2 or len(self.clusterName) > 64 or not p.match(self.clusterName):\n raise RuntimeError(\"Invalid cluster name (%s).\"\n \" It must be between 2 and 64 characters and contain only alpha-numeric\"\n \" characters, hyphens, underscores, and periods. 
It must start and\"\n \" end only with alpha-numeric characters.\" % self.clusterName)", "def check_character(char, name, parameters):\r\n if char in name:\r\n raise NameError('Invalid character in the variable name: ' + name)\r\n\r\n # Make sure people don't include # within the name of parameters\r\n for item in parameters.keys():\r\n if char in item:\r\n raise NameError('Invalid character in the variable parameters: ' + item)", "def isModifier(*args):\n return _libsbml.SBO_isModifier(*args)", "def validate_pitch_name(name):\n msg = 'invalid pitch name \"{}\"'.format(name)\n if name[0] not in ['A', 'B', 'C', 'D', 'E', 'F', 'G']:\n raise ValueError(msg)\n if (name.count('#') > 2) or (name.count('b')) > 2:\n raise ValueError('accepts a maximum of two accidentals')\n try:\n int(name.replace('#', '').replace('b', '')[1:])\n except ValueError:\n raise ValueError(msg)", "def isValid(self):\n if(not self.name or len(self.name) == 0):\n return False\n return True", "def test_invalid_module_names(self):\n self.assertRaises(ValueError, Module, '')\n self.assertRaises(ValueError, Module, 'names-with-dashes')\n self.assertRaises(ValueError, Module, 'names with spaces')\n self.assertRaises(ValueError, Module, 'names.with,punctuations!')\n self.assertRaises(ValueError, Module, '4names_starting_with_numbers')\n self.assertRaises(ValueError, Module, 'names.with.reserved.keywords')", "def check_name(self, node):\n assert \"name\" in node, \"Package node does not contain attribute 'node'\"\n assert len(node[\"name\"]) >= 1, \"Expecting at least one 'name' value\"\n # TODO: add more thorough checks", "def test_asterisk(self):\n with self.assertRaises(ValidationError):\n field_name_validator('logstash*')", "def _name_check(self, name, *args, chk_dict=None):\n if name is not None and len(name) > 0:\n lst = list(args)\n lst.append(name)\n if self._key_check(lst, chk_dict=chk_dict):\n result = EnvironmentDict._EXISTS\n else:\n result = EnvironmentDict._VALID\n else:\n result = EnvironmentDict._INVALID\n raise ValueError(f'Invalid name: {name}')\n return result", "def test_badname_down(self):\n msg = \"The radiation flux in UV downward has the wrong name\"\n with self.assertRaisesRegex(ValueError, msg):\n calculate_uv_index(self.cube_down_badname)", "def valid_routine_name(routine):\n\treturn re.match('^[a-z_]([a-z0-9_]*)', routine) is not None", "def test_valid(self):\n template = '{0} just right {1}'\n value_count = 2\n try:\n validate_str_substitution(template, value_count)\n except ValidationError:\n self.fail('Name raised ValidationError unexpectedly')", "def name_line_edit_changed(self, text):\n if re.findall(r\"[^a-zA-Z0-9\\-_ ]+\", text):\n self.name_line_edit.set_invalid(\"Invalid character\")\n else:\n if text == \"\":\n self.name_line_edit.set_invalid(\"Enter a name\")\n else:\n self.name_line_edit.set_valid()", "def is_invalid_proj_exp_runnames(name):\n return re.search(f'[^{settings.ALLOWED_PROJEXPRUN_CHARS}]', name)", "def test_underscore(self):\n try:\n db_name_validator('logstash_')\n except ValidationError:\n self.fail('Name raised ValidationError unexpectedly')", "def handle_modifier(self, mod):\n self.modifier += mod\n if not self.modifier.isdigit():\n self.modifier = str()", "def check_imported_name(self, name, field, sourcepath):\n if len(name) > 80:\n raise NameFormatError(\"ERROR: %s: %s name too long: %s\"\n % (sourcepath, field, name))", "def is_valid_package_module_name(name):\n if \".\" in name:\n for part in name.split(\".\"):\n if not is_valid_package_module_name(part):\n return 
False\n elif len(name):\n if name[0] not in _first_letter_for_valid_name:\n return False\n\n if len(set(name).difference(_char_set_for_valid_name)):\n return False\n else:\n return False\n return True", "def test_is_valid_kubernetes_resource_name_invalid_input():\n # test length violations\n assert not is_valid_kubernetes_resource_name(name=None) # Too short\n assert not is_valid_kubernetes_resource_name(name=\"\") # Too short\n assert not is_valid_kubernetes_resource_name(name=\"a\" * 254) # Too long\n # test first character violations (not alphanum or lowercase)\n assert not is_valid_kubernetes_resource_name(name=\"-a\")\n assert not is_valid_kubernetes_resource_name(name=\".b\")\n assert not is_valid_kubernetes_resource_name(name=\" c\")\n assert not is_valid_kubernetes_resource_name(name=\"Dave\")\n # test last character violations (not alphanum or lowercase)\n assert not is_valid_kubernetes_resource_name(name=\"a-\")\n assert not is_valid_kubernetes_resource_name(name=\"b.\")\n assert not is_valid_kubernetes_resource_name(name=\"c \")\n assert not is_valid_kubernetes_resource_name(name=\"sw33T\")\n # test middle characters violations\n assert not is_valid_kubernetes_resource_name(name=\"aBBa\")\n assert not is_valid_kubernetes_resource_name(name=\"b b\")", "def _validate_name(name):\r\n\tif HOST_NAME != name and len(name) > 0 and ZOOM_PHRASES[0] not in name and name not in WAITING_ROOM:\r\n\t\treturn True\r\n\treturn False", "def isAddName(name):\t\n if lib.essentials.isAlphanumeric(name) != 0:\n\tprint \" '%s' is not valid name. \\n Vadapter-name should be an alphanumeric.\" % (name)\n #output.completeOutputError(lib.errorhandler.InvalidArgumentCount(descape = \" '%s' is not valid name. \\n Vadapter-name should be an alphanumeric.\" % (name))) \n return -1\n \n if lib.essentials.isStartNumeric(name) != 0:\n\tprint \"'%s' is not valid name. \\n Vadapter name should not start with an digit\"% (name)\n\t#output.completeOutputError(lib.errorhandler.InvalidArgumentCount(descape = \"'%s' is not valid name. \\n Vadapter name should not start with an digit\"% (name)))\n return -1\n\n if lib.essentials.isContainSpecial(name) != 0:\n\tprint \"'%s' is not valid name. \\n Vadapter name should not contain special characher\" % (name)\n\t#output.completeOutputError(InvalidArgumentCount(descape = \"'%s' is not valid name. \\n Vadapter name should not contain special characher\" % (name)))\n return -1\n\n# if lib.db.db.ifExistsInDatabase(name) == 0:\n#\tprint NameError(\"'%s' is not valid name. 
\\n Already Exists\" % (name))\n#\treturn -1\n \n return 0", "def validate_strand(self, name, source, **kwargs):\n return self.validate({name: source}, **kwargs)[name]", "def validate_attribute_name(self, name):\n if not self.has_attribute(name):\n raise AttributeContainerSchemaError(\n \"Attribute '%s' is not allowed by the schema\" % name\n )", "def validate_identifier(self, identifier):\n pass", "def test_validate_input_rejection_invalid_symbol(self):\n with nose.assert_raises(exceptions.RejectionError):\n self.dtm1.validate_input('02')", "def test_place_name_exception(self):\n with pytest.raises(Exception):\n assert self.test_exception() != \"Mogn\"", "def validate_vsan_policy_name(policy_name, vmdk_path):\n if not vsan_info.is_on_vsan(vmdk_path):\n raise ValidationError('Cannot use a VSAN policy on a non-VSAN datastore')\n\n if not vsan_policy.policy_exists(policy_name):\n raise ValidationError('Policy {0} does not exist'.format(policy_name))", "def _is_valid_input(self, parameter_name):\n raise NotImplementedError()", "def is_valid_instance(instance):\n return re.match(r'^[a-z0-9\\-_]+$', instance)", "def test_hyphen(self):\n try:\n db_name_validator('logstash-')\n except ValidationError:\n self.fail('Name raised ValidationError unexpectedly')", "def test_password_strength_validator_missing_letter(self):\n with self.assertRaises(ValidationError):\n validate_password_strength('1234567')", "def isValidDataTypeName(name: unicode) -> bool:\n ...", "def validate_name_input(name):\n if all(letter.isalpha() or letter.isspace()\n for letter in name) and len(name) != 0:\n clear()\n return True\n\n else:\n clear()\n print('** Please enter a name of alphabetic characters and spaces **')\n return False", "def isValidName(theString, minimum, maximum) :\n\n return theString.isalpha() == True \\\n and len(theString) >= minimum \\\n and len(theString) <= maximum", "def _raise_format_error(self, name: str, format_str: str, source_format: str):\n\n raise ValueError(f\"The '{ name }' should be { format_str }, rather than { source_format }\")" ]
[ "0.64032656", "0.63721097", "0.6336271", "0.62860745", "0.62444806", "0.6232126", "0.6111689", "0.6085224", "0.60028106", "0.59533495", "0.5930594", "0.59233236", "0.5870704", "0.58153677", "0.5802187", "0.5795572", "0.57821715", "0.5776781", "0.5740856", "0.5735548", "0.5728841", "0.5715866", "0.5700054", "0.5685522", "0.5649832", "0.56189436", "0.56132185", "0.560684", "0.5598782", "0.55804724", "0.55688334", "0.5562792", "0.55611885", "0.554998", "0.553805", "0.55358976", "0.55341405", "0.55258185", "0.55037564", "0.5500599", "0.54880744", "0.5453016", "0.54483026", "0.5447312", "0.5444422", "0.5442026", "0.5428317", "0.5404777", "0.5379588", "0.5375193", "0.5357124", "0.5354986", "0.53445756", "0.5342666", "0.5340513", "0.533993", "0.5329447", "0.53275263", "0.5327375", "0.5327375", "0.53208494", "0.5313017", "0.5297436", "0.52963793", "0.5283782", "0.52808064", "0.52747655", "0.5252615", "0.5252402", "0.52492964", "0.52462804", "0.5241655", "0.52372664", "0.52302486", "0.5228353", "0.5222067", "0.5215953", "0.5208271", "0.52002573", "0.51894397", "0.51877177", "0.51792157", "0.5175168", "0.5157949", "0.51572335", "0.51528955", "0.51465315", "0.51458454", "0.5144783", "0.51124275", "0.5111905", "0.51048654", "0.5097444", "0.50968754", "0.5093921", "0.50931877", "0.5089703", "0.508801", "0.50841165", "0.5069795" ]
0.78125316
0
Supported functions for the user in boolean equations.
def _evalContext(self):
    def xor(*args):
        return sum(args) == 1

    def neg(result):
        return not result

    context = {
        'xor': xor,
        'neg': neg
    }
    return context
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def boolean_func(experiment):", "def true(func):\n return MultipleChoice(_text_from_func(func), Answer('True'), Answer('False'), is_code=True)", "def __bool__(self):\n raise RuntimeError(\"Cannot evaluate CrypTensors to boolean values\")", "def main():\n var_num = int(input(\"Please, enter the num of variables:\"))\n print(\"Do you want to enter the function's \"\n \"values manually(0) or use random(1)?\")\n logs = int(input(\"Please, choose the variant 0 or 1: \"))\n if not logs:\n func_value = input(\"Please, enter the {} function values through \"\n \"a space:\".format(2 ** var_num)).split()\n else:\n func_value = [str(choice(('0', '1'))) for _ in range(2 ** var_num)]\n\n bool_func = BoolFunction(var_num, func_value)\n print((var_num + 1) * '------')\n print(bool_func)\n print((var_num + 1) * '------')\n print(\"Conjunctive normal form:\")\n print(bool_func.str_cnf())", "def test_for_bool(self, parse_input_mocked_metadata):\n bb = parse_input_mocked_metadata(\n \"for bool b in [True, False]\\n\\tUnaryGate(b, 0) | 0\"\n )\n assert np.all(\n bb._forvar[\"b\"] == np.array([True, False])\n )", "def __bool__(self):\r\n raise TypeError('cannot use secure type in Boolean expressions')", "def _op(\n x: Union[bool, dts.Boolean, tps.BooleanValue],\n y: Union[bool, dts.Boolean, tps.BooleanValue],\n ) -> T:", "def on_true(self) -> global___Expression:", "def boolean(\n function: Callable[..., celpy.celtypes.Value]) -> Callable[..., celpy.celtypes.BoolType]:\n @wraps(function)\n def bool_function(a: celpy.celtypes.Value, b: celpy.celtypes.Value) -> celpy.celtypes.BoolType:\n result = function(a, b)\n if result == NotImplemented:\n return cast(celpy.celtypes.BoolType, result)\n return celpy.celtypes.BoolType(bool(result))\n return bool_function", "def check_supported_features(self):", "def bool(x) -> bool:\n pass", "def evaluateBoolean(compiled_expression):", "def give_me_a_boolean():\n return True\n pass", "def visit_BoolOp(self, node: ast.BoolOp) -> ast.AST:\n # 1. wrap each operand with a lambda function\n operands = []\n for operand in node.values:\n o = self.visit(operand)\n if self.is_proposition_factory(o):\n # if the operand is already an temporal requirement factory, keep it\n operands.append(self.visit(o))\n continue\n # if the operand is not an temporal requirement factory, make it an AP\n closure = self._create_atomic_proposition_factory(o)\n operands.append(closure)\n\n # 2. 
create a function call and pass operands\n boolOpToFunctionName = {\n ast.Or: \"PropositionOr\",\n ast.And: \"PropositionAnd\",\n }\n funcId = boolOpToFunctionName.get(type(node.op))\n newNode = ast.Call(\n func=ast.Name(id=funcId, ctx=ast.Load()),\n # pass a list of operands as the first argument\n args=[ast.copy_location(ast.List(elts=operands, ctx=ast.Load()), node)],\n keywords=[],\n )\n return ast.copy_location(newNode, node)", "def is_equation(self): \n return False", "def Bool(arg):\n return arg.lower() in ('y', 'true', 't', '1')", "def true(symbol):\n return True", "def isTrue(*args, **kwargs)->None:\n pass", "def builtin(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"builtin\")", "def false(func):\n return MultipleChoice(_text_from_func(func), Answer('False'), Answer('True'), is_code=True)", "def Fus(self, *args):\n return _BRepAlgo.BRepAlgo_BooleanOperations_Fus(self, *args)", "def xpathBooleanFunction(self, nargs):\n libxml2mod.xmlXPathBooleanFunction(self._o, nargs)", "def boolean_function(bool_variable):\n\tif bool_variable:\n\t\treturn \"The boolean variable is True\"\n\telse:\n\t\treturn \"The boolean variable is False\"", "def test04_boolean_operator(self):\n\n import _cppyy\n number = _cppyy.gbl.number\n\n n = number(20)\n assert n\n\n n = number(0)\n assert not n", "async def test_input_boolean(client, input_boolean_entities) -> None:\n body = await generate_latest_metrics(client)\n\n assert (\n 'input_boolean_state{domain=\"input_boolean\",'\n 'entity=\"input_boolean.test\",'\n 'friendly_name=\"Test\"} 1.0' in body\n )\n\n assert (\n 'input_boolean_state{domain=\"input_boolean\",'\n 'entity=\"input_boolean.helper\",'\n 'friendly_name=\"Helper\"} 0.0' in body\n )", "def test_bool_field():", "def builtin(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"builtin\")", "def bool_op(\n self,\n opstring: str,\n precedence: int = 0,\n python_impl: Optional[Callable[..., Any]] = None,\n ) -> Callable[[Any], Operators]:\n return self.op(\n opstring,\n precedence=precedence,\n is_comparison=True,\n python_impl=python_impl,\n )", "def booleanize(text):\n ltext = text.lower()\n if ltext == 'true':\n booleanized = True\n elif ltext == 'false':\n booleanized = False\n else:\n raise ValueError('A monk asked: Is \"{}\" true or false.'.format(text))\n return booleanized", "def all_true(rules, func, args=()):\n if isinstance(func, six.string_types):\n func_name = func\n else:\n func_name = func.__name__\n return all([getattr(r, func_name)(*args) for r in rules])", "def iff(bool,trueValue,falseValue):\n if bool:\n return trueValue\n else:\n return falseValue", "def bool(a):\n # Booleans need to be converted to integers for Theano\n if cf.use_theano and isinstance(a, (builtins.bool, np.bool_)):\n return np.int8(a)\n elif cf.use_theano or is_theano_object(a):\n return a\n else:\n return builtins.bool(a)", "def supports(self, x):\n return True", "def test_evaluate_boolean_literal_expression(self):\n value = self.evaluate_common(\"true\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Boolean, \"Expected Boolean\")\n self.assertTrue(value.value is True, \"Expected True\")\n value = self.evaluate_common(\"false\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Boolean, \"Expected Boolean\")\n self.assertTrue(value.value is False, \"Expected False\")", "def __bool__(self):\n return bool(self.exp)", "def pl_true(exp, model={}):\n op, args = exp.op, exp.args\n if exp == TRUE:\n return True\n elif exp == FALSE:\n return False\n elif is_prop_symbol(op):\n 
return model.get(exp)\n elif op == '~':\n p = pl_true(args[0], model)\n if p is None: return None\n else: return not p\n elif op == '|':\n result = False\n for arg in args:\n p = pl_true(arg, model)\n if p is True: return True\n if p is None: result = None\n return result\n elif op == '&':\n result = True\n for arg in args:\n p = pl_true(arg, model)\n if p is False: return False\n if p is None: result = None\n return result\n p, q = args\n if op == '>>':\n return pl_true(~p | q, model)\n elif op == '<<':\n return pl_true(p | ~q, model)\n pt = pl_true(p, model)\n if pt is None: return None\n qt = pl_true(q, model)\n if qt is None: return None\n if op == '<=>':\n return pt == qt\n elif op == '^':\n return pt != qt\n else:\n raise ValueError, \"illegal operator in logic expression\" + str(exp)", "def __nonzero__(self):\n raise RuntimeError(\"Cannot evaluate CrypTensors to boolean values\")", "def implies(x: any, y: Union[Callable[[], bool], any]) -> bool:\r\n if python_helpers.is_function(y):\r\n if not bool(x):\r\n return True\r\n return bool(y())\r\n else:\r\n return (not(bool(x))) or (bool(y))", "def __bool__(x):\n if x.value == 1:\n return True\n elif x.value == -1:\n return False\n else:\n raise ValueError('cannot determine boolean value of Unknown')", "def is_equation(self):\n return True", "def is_equation(self):\n return True", "def __bool__(self):\n raise RuntimeError(\"Cannot evaluate BinarySharedTensors to boolean values\")", "def all_boolean_reductions(request):\n return request.param", "def get_bool2(self):\n pass", "def interpret_boolean(input):\n if not input:\n return False\n\n input = input.lower().strip() if isinstance(input, basestring) else input\n\n return input in _trues", "def boolean(A, B, operation, precision = 1e-4, num_divisions = [1, 1],\n max_points = 4000, layer = 0):\n D = Device('boolean')\n\n A_polys = []\n B_polys = []\n if type(A) is not list: A = [A]\n if type(B) is not list: B = [B]\n for e in A:\n if isinstance(e, Device): A_polys += e.get_polygons()\n elif isinstance(e, DeviceReference): A_polys += e.get_polygons()\n for e in B:\n if isinstance(e, Device): B_polys += e.get_polygons()\n elif isinstance(e, DeviceReference): B_polys += e.get_polygons()\n\n gds_layer, gds_datatype = _parse_layer(layer)\n\n operation = operation.lower().replace(' ', '')\n if operation == 'a-b':\n operation = 'not'\n elif operation == 'b-a':\n operation = 'not'\n A_polys, B_polys = B_polys, A_polys\n elif operation == 'a+b':\n operation = 'or'\n elif operation not in ['not', 'and', 'or', 'xor', 'a-b', 'b-a', 'a+b']:\n raise ValueError(\"[PHIDL] phidl.geometry.boolean() `operation` \"\n \"parameter not recognized, must be one of the \"\n \"following: 'not', 'and', 'or', 'xor', 'A-B', \"\n \"'B-A', 'A+B'\")\n\n # Check for trivial solutions\n if (len(A_polys) == 0) or (len(B_polys) == 0):\n if (operation == 'not'):\n if len(A_polys) == 0: p = None\n elif len(B_polys) == 0: p = A_polys\n elif (operation == 'and'):\n p = None\n elif (operation == 'or') or (operation == 'xor'):\n if (len(A_polys) == 0) and (len(B_polys) == 0): p = None\n elif len(A_polys) == 0: p = B_polys\n elif len(B_polys) == 0: p = A_polys\n else:\n # If no trivial solutions, run boolean operation either in parallel or\n # straight\n if all(np.array(num_divisions) == np.array([1,1])):\n p = gdspy.boolean(operand1 = A_polys, operand2 = B_polys,\n operation = operation, precision = precision,\n max_points = max_points, layer = gds_layer,\n datatype = gds_datatype)\n else:\n p = 
_boolean_polygons_parallel(polygons_A = A_polys,\n polygons_B = B_polys,\n num_divisions = num_divisions,\n operation = operation,\n precision = precision)\n\n if p is not None:\n polygons = D.add_polygon(p, layer = layer)\n [polygon.fracture(max_points = max_points, precision = precision)\n for polygon in polygons]\n return D", "def test_convert_logical():", "def test_human_readable_boolean_false():\n # TODO: add a test case that follows the provided example", "def is_equation(self):\n return False", "def _handle_boolean(\n *, artifacts: types.ColumnArtifacts # pylint: disable=unused-argument\n) -> Boolean:\n return Boolean", "def _operators_conductor(operator_name, _bool=None):\n func = getattr(Series, operator_name)\n if _bool is None:\n # return bool series.\n _pre, _post = bool, bool\n else:\n # return ints.\n _pre, _post = int, int\n\n @wraps(func)\n def operator_method(self, other=None):\n if other is None:\n # for unary such as pos, neg, invert\n def not_(df: dF):\n return func(df.pipe(self.copy().pop())).apply(_post)\n\n return not_\n\n # if not isinstance(other, Condition):\n # raise TypeError(\"only conditions can add, got %r\" % type(other))\n\n def comb(df: dF) -> Series:\n return func(df.pipe(self).apply(_pre), df.pipe(other).apply(_pre)).apply(_post)\n\n return comb\n\n return operator_method", "def set_boolean(x):\n\n if x:\n return \"True\"\n else:\n return \"False\"", "def Shape(self, *args):\n return _BRepAlgo.BRepAlgo_BooleanOperations_Shape(self, *args)", "def _true(*args):\n # pylint:disable=unused-argument\n return True", "def is_operator(formula):\n return is_binary_operator(formula) or isinstance(formula, Not)", "def boolean(val):\n\tif val == \"True\" or val == \"1\":\n\t\treturn True\n\telse:\n\t\treturn False", "def truth(text):\n lowered = str(text).lower()\n if lowered in frozenset(['y', 'yes', 'true']):\n return True\n elif lowered in frozenset(['n', 'no', 'false']):\n return False\n else:\n raise Error('Invalid truth value: %r' % text)", "def f_supports(self, data):\n return True", "def bool_validator_advice(validator_args):\n \n return \" {True, False}\"", "def testBooleanOps(root):\n\n letters = bitmap(\"vck.tif\")\n v1 = letters.view(root, \"vck\")\n\n cross = bitmap(letters.size())\n xmax, ymax = cross.size()\n r = ymax*1.0/xmax\n for x in range(xmax):\n cross.set(x, x*r)\n cross.set(x, x*r+1)\n cross.set(x, x*r-1)\n cross.set(x, ymax-x*r)\n cross.set(x, ymax-x*r+1)\n cross.set(x, ymax-x*r-1)\n v2 = cross.view(root, \"cross\")\n\n xorResult = XOR(letters, cross)\n v3 = xorResult.view(root, \"vck XOR cross\")\n\n orResult = OR(letters, cross)\n v4 = orResult.view(root, \"vck OR cross\")\n\n andResult = AND(letters, cross)\n v5 = andResult.view(root, \"vck AND cross\")\n\n notResult = NOT(letters)\n v6 = notResult.view(root, \"NOT vck\")\n\n return v1, v2, v3, v4, v5, v6", "def bool_option (arg: Any) -> bool:\n return True", "def all_bool_strs():\n fns = [lambda x: x, lambda x: x.upper(), lambda x: x.capitalize()]\n return sorted({fn(x) for fn in fns for x in BOOL_TRUE_STRS | BOOL_FALSE_STRS})", "def is_builtin(fn) -> bool:\n return getattr(fn, TRITON_BUILTIN, False)", "def __bool__(self):\n raise ValueError(\"bool() not permitted\")", "def check_for_bool(check):", "def check_bool_type(rep):\n rep = rep.lower()\n if rep in (\"true\", \"t\", \"yes\", \"y\"):\n return True\n elif rep in (\"false\", \"f\", \"no\", \"n\"):\n return False\n else:\n print(\"Enter a boolean type (True, T, False, F) !\")\n sys.exit(2)", "def get_kernel_supports(self):\n 
corresponding_simu = self._corresponding_simu()\n get_support = np.vectorize(lambda kernel: kernel.get_plot_support())\n return get_support(corresponding_simu.kernels)", "def boolean(\n A: Component,\n B: Component,\n operation: str,\n precision: float = 1e-4,\n num_divisions: Optional[int] = None,\n max_points: int = 4000,\n layer: ListConfig = 0,\n) -> Component:\n num_divisions = num_divisions or [1, 1]\n c = pg.boolean(\n A=A,\n B=B,\n operation=operation,\n precision=precision,\n num_divisions=num_divisions,\n max_points=max_points,\n layer=layer,\n )\n return import_phidl_component(component=c)", "def is_binary_operator(formula):\n return isinstance(formula, And) or isinstance(formula, Or) \\\n or isinstance(formula, If) or isinstance(formula, Iff)", "def f1(a, b): \n if a == False and b == True:\n return True\n else:\n return False", "def convertToBoolean(boolean: bool) -> bool:\n ...", "def bool_ops(self, ctx: Context) -> Iterator[AnnotatedExpression]:\n for left, right in combinations(ctx.expressions_by_type(bool), 2):\n yield AnnotatedExpression(\n ast.BoolOp(op=ast.And(), values=[left.expr, right.expr]),\n TypeAnnotation(bool),\n )\n yield AnnotatedExpression(\n ast.BoolOp(op=ast.Or(), values=[left.expr, right.expr]),\n TypeAnnotation(bool),\n )", "def infer_bool(input_value):\n # Boolean\n if isinstance(input_value, bool):\n return input_value\n\n # Integer\n if isinstance(input_value, int):\n return bool(input_value)\n\n # String\n if isinstance(input_value, str):\n if 'Y' in input_value.upper():\n return True\n else:\n return False\n\n # None\n if input_value is None:\n return False", "def test_bool(self, env: yaenv.Env):\n _val = env.bool('BOOL_VAR')\n assert not _val and type(_val) == bool\n _val = env.bool('INT_VAR')\n assert _val and type(_val) == bool\n _val = env.bool('MISSING', True)\n assert _val and type(_val) == bool\n with pytest.raises(yaenv.EnvError) as err:\n _ = env.bool('FLOAT_VAR')\n assert 'Invalid boolean' in str(err.value)\n assert env.bool('MISSING') is None", "def evaluate(self, *args, **kwargs) -> Union[str, int, float, bool]:\n return True", "def can_mi():\n pass", "def on_false(self) -> global___Expression:", "def bool_check(*args, func=None):\n func = func or inspect.stack()[2][3]\n for var in args:\n if not isinstance(var, bool):\n name = type(var).__name__\n raise BoolError(\n 'Function {} expected bool, {} got instead.'.format(func, name))", "def make_bool(value):\n def make_value():\n return verify.Term(verify.BOOLEAN, value)\n return make_value", "def getBool(string):\n return (True)", "def boolean(s):\r\n ss = str(s).lower()\r\n if ss in TRUTHY_STRINGS:\r\n return True\r\n elif ss in FALSY_STRINGS:\r\n return False\r\n else:\r\n raise ValueError(\"not a valid boolean value: \" + repr(s))", "def functionWithArg(arg):\n return bool(arg)", "def visit_true(self) -> T:", "def isEnabled(self):", "def evalBoolean(tree):\n # check if children the children is a \"or\" or a \"and\" tokken\n if (tree.children[0].data == \"or\"):\n return evalBoolean(tree.children[0].children[0]) or evalBoolean(tree.children[0].children[1])\n if (tree.children[0].data) == \"and\":\n return evalBoolean(tree.children[0].children[0]) and evalBoolean(tree.children[0].children[1])\n \n # set var1\n if(tree.children[0].data == \"integer\"):\n var1 = evalInteger(tree.children[0])\n elif(tree.children[0].data == \"variable\"):\n var1 = getValue(tree.children[0].children[0].value)\n\n # set var2\n if(tree.children[2].data == \"integer\"):\n var2 = evalInteger(tree.children[2])\n 
elif(tree.children[2].data == \"variable\"):\n var2 = getValue(tree.children[2].children[0].value)\n\n if(tree.children[1].children[0].data == \"greater\"):\n return var1 > var2\n if(tree.children[1].children[0].data == \"less\"):\n return var1 < var2\n if(tree.children[1].children[0].data == \"equals\"):\n return var1 == var2\n if(tree.children[1].children[0].data == \"nequal\"):\n return var1 != var2\n\n print(\"ERROR : UNEXPECTED TOKKEN\")\n return False", "def add_standard_builtins(engine, b=None, s=None, sp=None):\n\n # SPECIAL CASES NEED TO BE IN ORDER\n engine.add_builtin('true', 0, b(_builtin_true)) # -1\n engine.add_builtin('fail', 0, b(_builtin_fail)) # -2\n engine.add_builtin('false', 0, b(_builtin_fail)) # -3\n\n engine.add_builtin('=', 2, s(_builtin_eq)) # -4\n engine.add_builtin('\\=', 2, b(_builtin_neq)) # -5\n\n engine.add_builtin('findall', 3, sp(_builtin_findall)) # -6\n engine.add_builtin('all', 3, sp(_builtin_all)) # -7\n engine.add_builtin('all_or_none', 3, sp(_builtin_all_or_none)) # -8\n\n engine.add_builtin('==', 2, b(_builtin_same))\n engine.add_builtin('\\==', 2, b(_builtin_notsame))\n\n engine.add_builtin('is', 2, s(_builtin_is))\n\n engine.add_builtin('>', 2, b(_builtin_gt))\n engine.add_builtin('<', 2, b(_builtin_lt))\n engine.add_builtin('=<', 2, b(_builtin_le))\n engine.add_builtin('>=', 2, b(_builtin_ge))\n engine.add_builtin('=\\=', 2, b(_builtin_val_neq))\n engine.add_builtin('=:=', 2, b(_builtin_val_eq))\n\n engine.add_builtin('var', 1, b(_builtin_var))\n engine.add_builtin('atom', 1, b(_builtin_atom))\n engine.add_builtin('atomic', 1, b(_builtin_atomic))\n engine.add_builtin('compound', 1, b(_builtin_compound))\n engine.add_builtin('float', 1, b(_builtin_float))\n engine.add_builtin('rational', 1, b(_builtin_rational))\n engine.add_builtin('integer', 1, b(_builtin_integer))\n engine.add_builtin('nonvar', 1, b(_builtin_nonvar))\n engine.add_builtin('number', 1, b(_builtin_number))\n engine.add_builtin('simple', 1, b(_builtin_simple))\n engine.add_builtin('callable', 1, b(_builtin_callable))\n engine.add_builtin('dbreference', 1, b(_builtin_dbreference))\n engine.add_builtin('primitive', 1, b(_builtin_primitive))\n engine.add_builtin('ground', 1, b(_builtin_ground))\n engine.add_builtin('is_list', 1, b(_builtin_is_list))\n\n engine.add_builtin('=..', 2, s(_builtin_split_call))\n engine.add_builtin('arg', 3, s(_builtin_arg))\n engine.add_builtin('functor', 3, s(_builtin_functor))\n\n engine.add_builtin('@>', 2, b(_builtin_struct_gt))\n engine.add_builtin('@<', 2, b(_builtin_struct_lt))\n engine.add_builtin('@>=', 2, b(_builtin_struct_ge))\n engine.add_builtin('@=<', 2, b(_builtin_struct_le))\n engine.add_builtin('compare', 3, s(_builtin_compare))\n\n engine.add_builtin('length', 2, s(_builtin_length))\n # engine.add_builtin('call_external', 2, s(_builtin_call_external))\n\n engine.add_builtin('sort', 2, s(_builtin_sort))\n engine.add_builtin('between', 3, s(_builtin_between))\n engine.add_builtin('succ', 2, s(_builtin_succ))\n engine.add_builtin('plus', 3, s(_builtin_plus))\n\n engine.add_builtin('consult', 1, b(_builtin_consult))\n engine.add_builtin('.', 2, b(_builtin_consult_as_list))\n # engine.add_builtin('load_external', 1, b(_builtin_load_external))\n engine.add_builtin('unknown', 1, b(_builtin_unknown))\n\n engine.add_builtin('use_module', 1, b(_builtin_use_module))\n engine.add_builtin('use_module', 2, b(_builtin_use_module2))\n engine.add_builtin('module', 2, b(_builtin_module))\n\n engine.add_builtin('once', 1, _builtin_call)\n 
engine.add_builtin('call', 1, _builtin_call)\n engine.add_builtin('call_nc', 1, _builtin_call_nc)\n engine.add_builtin('try_call', 1, _builtin_try_call)\n for i in range(2, 10):\n engine.add_builtin('call', i, _builtin_calln)\n engine.add_builtin('call_nc', i, _builtin_calln_nc)\n engine.add_builtin('try_call', i, _builtin_try_calln)\n\n engine.add_builtin('subquery', 2, s(_builtin_subquery))\n engine.add_builtin('subquery', 3, s(_builtin_subquery))\n\n engine.add_builtin('sample_uniform1', 3, sp(_builtin_sample_uniform))\n\n for i in range(1, 10):\n engine.add_builtin('debugprint', i, b(_builtin_debugprint))\n\n for i in range(1, 10):\n engine.add_builtin('write', i, b(_builtin_write))\n\n for i in range(1, 10):\n engine.add_builtin('writenl', i, b(_builtin_writenl))\n engine.add_builtin('writeln', i, b(_builtin_writenl))\n\n for i in range(1, 10):\n engine.add_builtin('error', i, b(_builtin_error))\n\n engine.add_builtin('nl', 0, b(_builtin_nl))\n engine.add_builtin('cmd_args', 1, s(_builtin_cmdargs))\n engine.add_builtin('atom_number', 2, s(_builtin_atom_number))\n engine.add_builtin('nocache', 2, b(_builtin_nocache))\n\n engine.add_builtin('numbervars', 2, s(_builtin_numbervars_0))\n engine.add_builtin('numbervars', 3, s(_builtin_numbervars))\n engine.add_builtin('varnumbers', 2, s(_builtin_varnumbers))\n\n engine.add_builtin('subsumes_term', 2, b(_builtin_subsumes_term))\n engine.add_builtin('subsumes_chk', 2, b(_builtin_subsumes_term))\n\n engine.add_builtin('possible', 1, s(_builtin_possible))\n engine.add_builtin('clause', 2, s(_builtin_clause))\n engine.add_builtin('clause', 3, s(_builtin_clause3))\n\n engine.add_builtin('create_scope', 2, s(_builtin_create_scope))\n\n engine.add_builtin('subquery_in_scope', 3, s(_builtin_subquery_in_scope))\n engine.add_builtin('subquery_in_scope', 4, s(_builtin_subquery_in_scope))\n\n engine.add_builtin('call_in_scope', 2, _builtin_call_in_scope)\n for i in range(2, 10):\n engine.add_builtin('call_in_scope', i + 1, _builtin_calln_in_scope)\n\n engine.add_builtin('find_scope', 2, s(_builtin_find_scope))\n\n builtin.add_builtins(engine, b, s, sp)", "def _always_true(*args, **kwargs):\n return True", "def logic(self):\r\n raise NotImplementedError", "def isSetMath(self):\n return _libsbml.FunctionDefinition_isSetMath(self)", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def _xplane_boolean(self, is_true: bool):\n\t\txp_bool = 0x00000000\n\t\tif is_true:\n\t\t\txp_bool = 0x3F800000\n\t\treturn 'i', xp_bool", "def check_fees(prod_info):\n if prod_info == True:\n\n return (True)\n\n else:\n\n return(False)", "def _op_easy(self, op, reg_list, param_list=None): # pylint: disable-msg=invalid-name\n\n has_op = hasattr(self.circuit, op)\n\n if has_op:\n if param_list:\n # DEBUG\n # print(\"********** op {} param_list {} reg_list {}\".format(op, param_list, reg_list)) # pylint: disable-msg=line-too-long\n # END-DEBUG\n getattr(self.circuit, op)(*param_list, *reg_list)\n else:\n getattr(self.circuit, op)(*reg_list)\n\n return has_op", "def eqs_and_deriv(self, _):\n pass", "def get_boolean(self, df):\n if not self.value:\n return False\n elif not self.par:\n return ()\n if self.variable_type == 'interval':\n return self._interval_boolean(df)\n elif self.variable_type == 'list':\n return self._list_boolean(df)", "def isbuiltin(object):\r\n return isinstance(object, 
types.BuiltinFunctionType)", "def prompt_bool_input(prompt_name: str, get_user_input: GetInputFunc) -> bool:\n answer_map = {\"yes\": 1, \"no\": 0, \"y\": 1, \"n\": 0}\n try:\n answer = str(get_user_input(f\"{prompt_name} Type in yes or no:\"))\n return bool(answer_map[answer])\n except (ValueError, IndexError) as e:\n raise InvalidInput(str(e))", "def Common(self, *args):\n return _BRepAlgo.BRepAlgo_BooleanOperations_Common(self, *args)" ]
[ "0.6549295", "0.6121206", "0.59864044", "0.59644145", "0.5946533", "0.5924393", "0.5879584", "0.58733624", "0.58189994", "0.58100283", "0.58097804", "0.57936394", "0.5781556", "0.5762648", "0.57621485", "0.57520574", "0.5727584", "0.57086086", "0.56952477", "0.56867784", "0.5673919", "0.5672529", "0.5664898", "0.5621593", "0.55968577", "0.5575816", "0.5561638", "0.5560661", "0.5559144", "0.5546125", "0.5546107", "0.552258", "0.55219936", "0.5493805", "0.54937655", "0.54933465", "0.5491274", "0.54792815", "0.54694074", "0.5464968", "0.5464968", "0.5459495", "0.5453904", "0.545146", "0.54471624", "0.54337114", "0.5431293", "0.5430999", "0.5428259", "0.54234624", "0.54197145", "0.54147625", "0.5405757", "0.53971446", "0.53949165", "0.5387991", "0.5379651", "0.53748876", "0.5374109", "0.5371241", "0.5370874", "0.5365329", "0.53608245", "0.5357576", "0.53536284", "0.5339709", "0.5323563", "0.5309169", "0.5291405", "0.52901876", "0.5288082", "0.52802217", "0.52782905", "0.5270514", "0.5267073", "0.5264739", "0.5262755", "0.52627474", "0.52612615", "0.52611196", "0.52359486", "0.52273154", "0.5214554", "0.5200942", "0.51973546", "0.519076", "0.51869863", "0.518468", "0.51837224", "0.51752573", "0.51752573", "0.51752573", "0.51752573", "0.51748663", "0.5173628", "0.5160426", "0.5160018", "0.51582235", "0.5155571", "0.51440257", "0.5123524" ]
0.0
-1
The process_song_data function extracts song data in JSON file format from an AWS S3 bucket (input_data), transforms the data into the specified tables, and then loads the tables into a designated AWS S3 bucket (output_data) in parquet file format.
def process_song_data(spark, input_data, output_data):
    # get filepath to song data file
    song_data = os.path.join(input_data, 'song_data/*/*/*/*.json')

    # specify schema for increased performance and control
    song_schema = StructType([
        StructField("artist_id", StringType()),
        StructField("artist_latitude", DoubleType()),
        StructField("artist_location", StringType()),
        StructField("artist_longitude", StringType()),
        StructField("artist_name", StringType()),
        StructField("duration", DoubleType()),
        StructField("num_songs", IntegerType()),
        StructField("song_id", StringType()),
        StructField("title", StringType()),
        StructField("year", IntegerType()),
    ])

    # read song data file
    dfs = spark.read.json(song_data, schema=song_schema)

    # create temporary view of table in order to run SQL queries
    dfs.createOrReplaceTempView("song_table")

    # extract columns to create songs table
    dim_songs = spark.sql("""
        SELECT song_id, title, artist_id, year, duration
        FROM song_table
        WHERE song_id IS NOT NULL
    """)

    # write songs table to parquet files partitioned by year and artist
    dim_songs.write.mode('overwrite').partitionBy("year", "artist_id").parquet(output_data + "songs")

    # extract columns to create artists table
    dim_artists = spark.sql("""
        SELECT DISTINCT artist_id,
                        artist_name AS name,
                        artist_location AS location,
                        artist_latitude AS latitude,
                        artist_longitude AS longitude
        FROM song_table
        WHERE artist_id IS NOT NULL
    """)

    # write artists table to parquet files
    dim_artists.write.mode('overwrite').parquet(output_data + "artists")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_song_data(spark, input_data, output_data):\n \n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n \n # read song data file\n df = spark.read.json(song_data).dropDuplicates()\n\n # extract columns to create songs table\n global songs_table\n songs_table = df.select(['song_id', 'title', 'artist_id', 'year', 'duration'])\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy(\"year\", \"artist_id\").parquet(\"output_data + SongTable.parquet\")\n\n # extract columns to create artists table\n artists_table = df.select(['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude'])\n \n # write artists table to parquet files\n artists_table.write.parquet(output_data + \"ArtistTable.parquet\")", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to all json file of song_data in S3 bucket\n song_data = input_data + \"song_data/*/*/*/*.json\"\n \n # read song data file\n df = spark.read.json(song_data)\n \n # print out the schema in tree format\n print(\"---------- Print out the schema of song dataset in tree format: ----------\")\n df.printSchema()\n\n # extract columns to create songs table\n # songs table attributes: song_id, title, artist_id, year, duration\n songs_table = df.select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\")\n # show first 10 rows in songs_table\n print(\" ---------- Show first 10 rows of songs table ----------\")\n songs_table.show(10)\n \n # write songs table to parquet files partitioned by year and artist\n out_path_songs = os.path.join(output_data, \"songs_table.parquet\")\n if path.exists(out_path_songs):\n songs_table.write.parquet(path = out_path_songs, \n partitionBy = (\"year\", \"artist_id\"),\n mode = \"overwrite\")\n else:\n songs_table.write.parquet(path = out_path_songs, \n partitionBy = (\"year\", \"artist_id\"),\n mode = \"append\")\n \n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_songs_parquet = spark.read.parquet(\"songs_table.parquet\")\n print(\" ---------- Show first 10 rows of songs table parquet file ----------\")\n df_songs_parquet.show(10)\n\n # extract columns to create artists table\n # artists table attributes: artist_id, name, location, lattitude, longitude\n artists_table = df.select(\"artist_id\", \"artist_name\", \"artist_location\", \n \"artist_latitude\", \"artist_longitude\")\n # show first 10 rows in artists_table\n print(\" ---------- Show first 10 rows of artists table ----------\")\n artists_table.show(10)\n \n # write artists table to parquet files\n out_path_artists = os.path.join(output_data, \"artists_table.parquet\")\n if path.exists(out_path_artists):\n artists_table.write.parquet(path = out_path_artists, \n mode = \"overwrite\")\n else:\n artists_table.write.parquet(path = out_path_artists, \n mode = \"append\")\n \n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_artists_parquet = spark.read.parquet(\"artists_table.parquet\")\n print(\" ---------- Show first 10 rows of artists table parquet file ----------\")\n df_artists_parquet.show(10)", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = os.path.join(input_data, \"song-data/*/*/*/*.json\")\n\n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select('song_id', 'title', 'artist_id', 'year', 'duration')\n\n # 
write songs table to parquet files partitioned by year and artist_id\n songs_table.write.partitionBy('year', 'artist_id').parquet(output_data + 'songs', mode='overwrite')\n\n # extract columns to create artists table\n artists_table = df.select('artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude')\n\n # write artists table to parquet files\n artists_table.write.parquet(output_data + 'artists', mode='overwrite')", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n song_data = input_data + \"song-data/*/*/*/*.json\"\n\n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\")\n \n # write songs table to parquet files partitioned by year and artist\n file_name = output_data + \"songs.parquet\"\n songs_table.write.partitionBy([\"year\", \"artist_id\"]).parquet(file_name)\n\n # extract columns to create artists table\n artists_table = df.select(\"artist_id\", \"artist_name\", \"artist_longitude\", \"artist_latitude\", \"artist_location\")\n \n # write artists table to parquet files\n file_name = output_data + \"artists.parquet\"\n artists_table.write.parquet(file_name)", "def process_song_data(\n spark: SparkSession,\n input_data: str,\n output_data: str\n) -> None:\n # get filepath to song data file\n song_data = input_data + 'song_data/*/*/*/'\n\n songdata_schema = StructType([\n StructField(\"song_id\", StringType(), True),\n StructField(\"title\", StringType(), True),\n StructField(\"year\", StringType(), True),\n StructField(\"duration\", DoubleType(), True),\n StructField(\"artist_id\", StringType(), True),\n StructField(\"artist_name\", StringType(), True),\n StructField(\"artist_location\", StringType(), True),\n StructField(\"artist_latitude\", DoubleType(), True),\n StructField(\"artist_longitude\", DoubleType(), True),\n ])\n # read song data file\n df = spark.read.json(song_data, schema=songdata_schema)\n\n # extract columns to create songs table\n songs_table = df.select(\n 'song_id', 'title', 'artist_id', 'year', 'duration') \\\n .dropDuplicates()\n songs_table.createOrReplaceTempView('songs')\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id') \\\n .parquet(join(output_data, 'songs/songs.parquet'), 'overwrite')\n\n # # extract columns to create artists table\n artists_table = df.select(\n 'artist_id',\n 'artist_name',\n 'artist_location',\n 'artist_latitude',\n 'artist_longitude',\n ).withColumnRenamed('artist_name', 'name') \\\n .withColumnRenamed('artist_location', 'location') \\\n .withColumnRenamed('artist_latitude', 'latitude') \\\n .withColumnRenamed('artist_longitude', 'longitude') \\\n .dropDuplicates()\n artists_table.createOrReplaceTempView('artists')\n # # write artists table to parquet files\n artists_table.write.parquet(\n join(output_data, 'artists/artists.parquet'),\n 'overwrite'\n )", "def process_song_data(spark, input_data, output_data):\n \n print(\"Read song data\")\n df_song = spark.read.json(input_data+\"song_data/*/*/*/*.json\", schema=build_song_schema())\n \n # extract columns to create songs table\n songs_table = df_song[['song_id', 'title', 'artist_id', 'year', 'duration']].drop_duplicates()\n\n \n print(\"Write...\")\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.save(path=output_data+'song_table',\n format='parquet',\n 
partitionBy=['year', 'artist_id'],\n mode='overwrite' )\n\n # extract columns to create artists table\n artists_table = df_song[['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude']].drop_duplicates()\n\n print(\"Write...\")\n # write artists table to parquet files\n artists_table.write.save(path=output_data+'artists_table',\n format='parquet',\n mode='overwrite' )", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = f'{input_data}/song_data/*/*/*/*.json'\n \n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select([\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\"]).where(df[\"song_id\"].isNotNull())\n \n # write songs table to parquet files partitioned by year and artist\n song_data_out = f'{output_data}/songs_table/songs_table.parquet'\n songs_table.write.mode('overwrite').partitionBy('year','artist_id').parquet(song_data_out)\n\n # extract columns to create artists table\n artists_table = df.select([\"artist_id\", \"artist_name\", \"artist_location\", \"artist_latitude\", \"artist_longitude\"]).where(df[\"artist_id\"].isNotNull())\n \n # write artists table to parquet files\n artists_data_out = f'{output_data}/artists_table/artists_table.parquet'\n artists_table.write.mode('overwrite').parquet(artists_data_out)", "def process_song_data(spark, song_input_data, output_data):\n # get filepath to song data file\n song_data = song_input_data\n \n # read song data file\n print(\"Processing JSON song data...\")\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n print(\"Extracting columns for song table...\")\n df.createOrReplaceTempView(\"songs_table\")\n songs_table = spark.sql(\"\"\"\n SELECT song_id, title, artist_id, year, duration\n FROM songs_table\n ORDER BY song_id\n \"\"\") \n \n print(\"Song table sample:\")\n songs_table.show(5, truncate=False)\n \n # write songs table to parquet files partitioned by year and artist\n print(\"Writing songs table to parquet files...\")\n songs_table.write.mode(\"overwrite\").partitionBy(\"year\", \"artist_id\")\\\n .parquet(output_data + \"songs_table.parquet\")\n \n # extract columns to create artists table\n print(\"Extracting columns for artists table...\")\n df.createOrReplaceTempView(\"artists_table\")\n artists_table = spark.sql(\"\"\"\n SELECT artist_id,\n artist_name AS name,\n artist_location AS location,\n artist_latitude AS latitude,\n artist_longitude AS longitude\n FROM artists_table\n ORDER BY artist_id DESC\n \"\"\") \n\n print(\"Artists table sample:\")\n artists_table.show(5, truncate=False)\n \n # write artists table to parquet files\n print(\"Writing artists table to parquet files...\")\n songs_table.write.mode(\"overwrite\").parquet(output_data + \"artists_table.parquet\")", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n song_data = input_data + \"song_data/A/A/B/*.json\"\n \n # read song data file\n song_data_schema = StructType([\n StructField(\"artist_id\", StringType(), False),\n StructField(\"artist_latitude\", StringType(), True),\n StructField(\"artist_longitude\", StringType(), True),\n StructField(\"artist_location\", StringType(), True),\n StructField(\"artist_name\", StringType(), False),\n StructField(\"song_id\", StringType(), False),\n StructField(\"title\", StringType(), False),\n StructField(\"duration\", DoubleType(), False),\n StructField(\"year\", IntegerType(), 
False)\n ])\n \n print(\"Start read s3 song data\") \n df = spark.read.json(song_data, schema=song_data_schema)\n \n # extract columns to create songs table\n print(\"Start create songs table\")\n songs_table = df.select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\")\n \n # write songs table to parquet files partitioned by year and artist\n print(\"Start write songs table to parquet\")\n songs_table.write.parquet(output_data + \"songs_table.parquet\", mode=\"overwrite\", partitionBy=[\"year\", \"artist_id\"])\n\n # extract columns to create artists table\n # needs to distinct artist (can duplicate)\n print(\"Start create artists table\")\n artists_table = df.select(\"artist_id\", \"artist_name\", \"artist_location\", \n \"artist_latitude\", \"artist_longitude\").distinct()\n \n # write artists table to parquet files\n print(\"Start write artists table to parquet\")\n artists_table.write.parquet(output_data + \"artists_table.parquet\", mode=\"overwrite\")", "def process_song_data(spark, input_data, output_data):\n # define json structure\n songdata_schema = StructType([\n StructField(\"song_id\", StringType(), True),\n StructField(\"year\", StringType(), True),\n StructField(\"duration\", DoubleType(), True),\n StructField(\"artist_id\", StringType(), True),\n StructField(\"artist_name\", StringType(), True),\n StructField(\"artist_location\", StringType(), True),\n StructField(\"artist_latitude\", DoubleType(), True),\n StructField(\"artist_longitude\", DoubleType(), True),\n ])\n \n # get filepath to song data file\n song_data = os.path.join(input_data, \"song_data/*/*/*/*.json\")\n \n # read song data file\n df = spark.read.json(song_data, schema=songdata_schema)\n\n # extract columns to create songs table\n songs_table = df.select('song_id', 'artist_id', 'year', 'duration')\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id').parquet(output_data + \"songs\")\n\n # extract columns to create artists table\n artists_table = df.select('artist_id', 'artist_name', 'artist_location', 'artist_latitude',\n 'artist_longitude')\n \n # write artists table to parquet files\n artists_table.write.parquet(output_data + \"artists\")", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n \n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.filter(df.song_id != '') \\\n .select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\") \\\n .dropDuplicates()\n \n # output filepath to songs table file\n songs_table_path = output_data + \"songs_table.parquet\"\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy(\"year\", \"artist_id\").mode(\"overwrite\") \\\n .parquet(songs_table_path)\n\n # extract columns to create artists table\n artists_table = df.filter(df.artist_id != '').select(\"artist_id\",\n \"artist_name\", \"artist_location\", \"artist_latitude\", \"artist_longitude\") \\\n .dropDuplicates()\n \n # output filepath to artists table file\n artists_table_path = output_data + \"artists_table.parquet\"\n \n # write artists table to parquet files\n artists_table.write.mode(\"overwrite\") \\\n .parquet(artists_table_path)", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to song data file\n song_data = os.path.join( input_data, \"song_data/*/*/*/*.json\")\n \n # SONG 
TABLE\n # read song data file\n df = spark.read.json(song_data)\n \n # extract columns to create songs table\n songs_table = df.select('song_id', 'title', 'artist_id',\n 'year', 'duration').dropDuplicates(['song_id'])\n \n print( \"HERE songs_table sample:\\n\")\n songs_table.show(5)\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id').parquet(os.path.join(output_data, 'songs/songs.parquet'), 'overwrite')\n \n # ARTISTS TABLE\n # extract columns to create artists table\n artists_table = df.select(\"artist_id\",\"artist_name\",\"artist_location\",\"artist_latitude\",\"artist_longitude\").dropDuplicates(['artist_id'])\n \n print( \"HERE artists_table sample:\\n\")\n artists_table.show(5)\n # write artists table to parquet files\n artists_table.write.parquet(output_data + \"artists/\", mode=\"overwrite\")", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n song_data = input_data + 'song_data/A/A/A/*.json'\n\n # read song data file\n df = spark.read.json(song_data)\n\n # write song data to temp view song_data_table\n df.createOrReplaceTempView(\"song_data_table\")\n\n # extract columns to create songs table\n songs_table = df.select('song_id', 'title', 'artist_id', 'year', 'duration').drop_duplicates()\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy(\"year\", \"artist_id\").format(\"parquet\").mode(\"overwrite\").save(\n output_data + 'songs/song_data.parquet')\n\n # extract columns to create artists table\n artists_table = df.select('artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude') \\\n .drop_duplicates()\n\n # write artists table to parquet files\n artists_table.write.format(\"parquet\").mode(\"overwrite\").save(output_data + 'artists/artist_data.parquet')", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = \"{}/song_data/*/*/*/*\".format(input_data)\n \n # define schema used to import song json data into our data frame\n songSchema = R([\n Fld(\"num_songs\", Int()),\n Fld(\"artist_id\", Str()),\n Fld(\"artist_latitude\", Dbl()),\n Fld(\"artist_longitude\", Dbl()),\n Fld(\"artist_location\", Str()),\n Fld(\"artist_name\", Str()),\n Fld(\"song_id\", Str()),\n Fld(\"title\", Str()),\n Fld(\"duration\", Dbl()),\n Fld(\"year\", Int())\n\n ])\n \n # read song data file\n df = spark.read.format(\"json\").load(song_data, schema=songSchema)\n\n # register the temp view from our dataframe df_song\n df.createOrReplaceTempView(\"df_song\")\n \n # extract columns to create songs table\n songs_table = spark.sql(\"\"\"\n SELECT DISTINCT song_id, \n title,\n artist_id, \n artist_name,\n year, \n duration FROM df_song\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id').parquet('{}/dim_song.pq'.format(output_data))\n\n # extract columns to create artists table\n artists_table = spark.sql(\"\"\"\n SELECT DISTINCT artist_id,\n artist_name,\n artist_location,\n artist_latitude,\n artist_longitude\n FROM df_song\n \"\"\")\n \n # write artists table to parquet files\n artists_table.write.parquet('{}/dim_artist.pq'.format(output_data))", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = os.path.join(input_data, 'song_data', '*', '*', '*')\n \n # read song data file\n df = spark.read.json(song_data)\n print(f\"Loaded 
song_data from {input_data}\")\n df.createOrReplaceTempView(\"song_data\")\n \n # extract columns to create songs table\n songs_table = spark.sql(\"\"\"\n SELECT song_id, title, artist_id, year, duration\n FROM song_data\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n songs_table_path = os.path.join(output_data, \"songs_table.parquet\")\n (songs_table.\n write.\n mode(\"overwrite\").\n partitionBy(\"year\", \"artist_id\").\n parquet(songs_table_path))\n print(f\"Stored song table on {songs_table_path}\")\n # extract columns to create artists table\n artists_table = spark.sql(\"\"\"\n SELECT \n DISTINCT(artist_id) AS artist_id, \n artist_name AS name, \n artist_location AS location, \n artist_latitude AS latitude, \n artist_longitude AS longitude\n FROM song_data\n \"\"\")\n \n # write artists table to parquet files\n artists_table_path = os.path.join(output_data, \"artists_table.parquet\")\n (artists_table.\n write.\n mode(\"overwrite\").\n parquet(artists_table_path))\n print(f\"Stored artists table at {artists_table_path}\")", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = input_data + 'song_data/*/*/*/*.json'\n \n # read song data file\n df = spark.read.json(song_data)\n \n # create view for songs table\n df.createOrReplaceTempView(\"songs\") \n \n \n # extract columns to create songs table. Adding Distinct and Not null to song_id as it is the primary key\n songs_table = spark.sql(\"\"\"\n SELECT DISTINCT song_id, \n title,\n artist_id,\n year,\n duration\n FROM songs\n WHERE song_id IS NOT NULL\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.mode('overwrite').partitionBy(\"year\", \"artist_id\").parquet(output_data+'songs_table/')\n\n # create view for artists table\n df.createOrReplaceTempView(\"artists\") \n \n # extract columns to create artists table, Adding Distinct and Not null to artist_id as it is the primary key\n artists_table = spark.sql(\"\"\"\n SELECT DISTINCT artist_id, \n artist_name,\n artist_location,\n artist_latitude,\n artist_longitude\n FROM artists\n WHERE artist_id IS NOT NULL\n \"\"\")\n \n # write artists table to parquet files\n artists_table.write.mode('overwrite').parquet(output_data+'artists_table/')", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to song data file\n song_data = input_data + \"/song_data/A/A/A/*.json\"\n \n # read song data file\n df = spark.read.json(song_data)\n #df.show(5)\n \n # extract columns to create songs table\n songs_table = df.select(\"song_id\",\"title\", \"artist_id\",\"year\", \"duration\")\n songs_table = songs_table.dropDuplicates()\n songs_table.createOrReplaceTempView(\"temp_songs_table\")\n # write songs table to parquet files partitioned by year and artist\n songs_table = df.write.partitionBy(\"year\",\"artist_id\")\\\n .mode(\"overwrite\").parquet(output_data + \"/songs_table\")\n print(\"Songs Table Successfully created\")\n \n # extract columns to create artists table\n artists_table = df.select(\"artist_id\",\"artist_name\", \\\n \"artist_location\",\"artist_latitude\", \"artist_longitude\")\n artists_table = artists_table.dropDuplicates()\n artists_table.createOrReplaceTempView(\"temp_artists_table\")\n \n # write artists table to parquet files\n artists_table = df.write.mode(\"overwrite\").parquet(output_data + \"/artists_table\")\n print(\"Artists Table Successfully created\")", "def process_song_data(spark, input_data, 
output_data):\n\n # get filepath to song data file\n input_data = input_data+\"/song-data\"\n print(input_data)\n \n \"\"\"\n for x in os.walk(input_data):\n for y in glob.glob(os.path.join(x[0], '*.json')):\n song_data.append(y)\n \"\"\"\n\n df = spark.read.json(input_data)\n df.createOrReplaceTempView(\"Staging_Song_Data\")\n song_Data_DF = spark.sql(\"select * from Staging_Song_Data\")\n\n # extract columns to create songs table\n songs_query = \" SELECT song_id, title, artist_id, year, duration \" \\\n \" FROM Staging_Song_Data\"\n songs_table = spark.sql(songs_query)\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.mode('overwrite').partitionBy(\"year\",\"artist_id\").parquet(path=output_data+\"songs\")\n\n # extract columns to create artists table\n artists_query=\"select artist_id, artist_name as name, \" \\\n \" artist_location as location, \" \\\n \" artist_latitude as latitude, \" \\\n \" artist_longitude as longitude \" \\\n \"from Staging_Song_Data \"\n artists_table =spark.sql(artists_query)\n artists_table = artists_table.dropDuplicates(['artist_id'])\n\n # write artists table to parquet files\n artists_table.write.mode('overwrite').parquet(path=output_data+\"artists\")", "def process_song_data(spark, input_data, output_data, mode=\"overwrite\"):\n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n\n # read song data file\n print(\"reading song logs from {}\".format(song_data))\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n song_fields = ['song_id', 'title', 'artist_id', 'year', 'duration']\n songs_table = df.select(song_fields)\n songs_table = songs_table.withColumn('year', F.col('year').cast(IntegerType()))\n songs_table = songs_table.withColumn('duration', F.col('duration').cast(DoubleType()))\n\n # write songs table to parquet files partitioned by year and artist\n song_path = output_data + 'star_schema/song_table/'\n print(\"Writing Song Table to {}\".format(song_path))\n songs_table.write \\\n .mode(mode) \\\n .partitionBy('year', 'artist_id') \\\n .parquet(song_path)\n\n # extract columns to create artists table\n artist_fields = [\n 'artist_id', 'artist_name',\n 'artist_location', 'artist_latitude', 'artist_longitude'\n ]\n artists_table = df.select(artist_fields)\n artists_table = artists_table.withColumnRenamed(\n 'artist_name', 'name'\n )\n artists_table = artists_table.withColumnRenamed(\n 'artist_location', 'location'\n )\n artists_table = artists_table.withColumn(\n 'latitude',\n F.col('artist_latitude').cast(DoubleType())\n )\n artists_table = artists_table.withColumn(\n 'longitude',\n F.col('artist_longitude').cast(DoubleType())\n )\n artist_col_names = [\n 'artist_id', 'name', 'location', 'latitude', 'longitude'\n ]\n artists_table = artists_table.select(artist_col_names)\n\n # write artists table to parquet files\n artists_path = output_data + 'star_schema/artist_table'\n print(\"Writing Artist Table to {}\".format(artists_path))\n artists_table.write \\\n .mode(mode) \\\n .partitionBy('artist_id') \\\n .parquet(artists_path)", "def process_song_file(cur, filepath):\n # open song file\n\n inputData = pd.read_json(filepath, lines=True)\n song_df = pd.DataFrame(data=inputData)\n song_df.head()\n \n\n # insert song record\n song_data = song_df[['song_id', 'title', 'artist_id','year','duration']].values\n for i, row in song_df.iterrows():\n cur.execute(song_table_insert, song_data[i])\n \n \n # insert artist record\n \n artist_data = 
song_df[['artist_id', 'artist_name', 'artist_location','artist_latitude','artist_longitude']].values\n for i, row in song_df.iterrows():\n cur.execute(artist_table_insert, artist_data[i])", "def process_song_file(cur, filepath):\n df = pd.read_json(filepath, typ='series')\n\n columns = ['song_id', 'title', 'artist_id', 'year', 'duration']\n song_data = df[[*columns]]\n cur.execute(song_table_insert, song_data)\n\n columns = ['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude']\n artist_data = df[[*columns]]\n cur.execute(artist_table_insert, artist_data)", "def process_song_file(cur, filepath):\n\n df = pd.read_json(filepath, lines=True)\n\n song_data = df[['song_id', 'title',\n 'artist_id', 'year', 'duration']].values[0]\n cur.execute(song_table_insert, song_data)\n\n artist_data = df[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0]\n cur.execute(artist_table_insert, artist_data)", "def main():\n import sys\n n = len(sys.argv)\n if n != 3:\n raise ValueError(\"Please specify an input s3 bin and output s3 bin\")\n spark = create_spark_session()\n input_data = sys.argv[1]\n output_data = sys.argv[2]\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log_data/*.json\"\n\n # read log data file\n df = spark.read.json(\"data/log_data/*.json\").dropDuplicates()\n \n # filter by NextSong actions for song plays\n df = df.filter('page = \"NextSong\"')\n\n # extract columns for users table \n artists_table = df.select(['userId', 'firstName', 'lastName', 'gender', 'level']) \n \n # write users table to parquet files\n artists_table.write.parquet(output_data + \"UserTable.parquet\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf( lambda x: datetime.fromtimestamp(x / 1000).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn(\"time_stamp\", get_time_stamp(dfLog.ts))\n \n # extract columns to create time table\n time_table = dfL.select('ts', hour('time_stamp').alias('hour'), dayofmonth('time_stamp').alias('day'), weekofyear('time_stamp').alias('week')\n , month('time_stamp').alias('month'), year('time_stamp').alias('year'), date_format('time_stamp', 'EEEE').alias('weekday'))\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").parquet(output_data + \"SongTable.parquet\")\n\n # read in song data to use for songplays table\n song_df = songs_table\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = song_df.join(dfLog).where((artist_and_song.title == dfLog.song) & \n (artist_and_song.artist_name == dfLog.artist) & \n (artist_and_song.duration == dfLog.length)).select('ts', 'userid', 'level', \\\n 'song_id', 'artist_id','sessionid', \\\n 'location', 'useragent')\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.join(time_table.select('ts', 'year', 'month')).where(songplays_table.ts == time_table.ts).write.partitionBy(\"year\", \"month\").parquet(output_data + \"songplaysTable.parquet\")", "def process_song_file(cur, filepath):\n df = pd.read_json(filepath, lines=True)\n \n # insert artist record\n artist_data = df[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0].tolist()\n cur.execute(artist_table_insert, 
artist_data)\n \n # insert song record\n song_data = df[['song_id', 'title', 'artist_id',\n 'year', 'duration']].values[0].tolist()\n cur.execute(song_table_insert, song_data)", "def process_song_file(cur, filepath):\r\n\r\n \"\"\" open song file\r\n drop duplicates\r\n set NAs to Zero \"\"\"\r\n df = pd.read_json(filepath, lines=True)\r\n df.drop_duplicates(subset=['song_id','artist_id'], keep = 'first')\r\n df['artist_latitude'] = df['artist_latitude'].fillna(0)\r\n df['artist_longitude'] = df['artist_longitude'].fillna(0)\r\n\r\n\r\n \"\"\" Extract columns for dataframe for song table\r\n drop duplicates before performing insert\r\n convert dataframe to a list for insert \"\"\"\r\n\r\n song_data = (df[['song_id','title','artist_id','year','duration']])\r\n song_data.drop_duplicates(subset='song_id',keep ='first',inplace = True)\r\n song_data = (song_data.values).tolist()\r\n song_data = song_data[0]\r\n # insert song record\r\n cur.execute(song_table_insert,song_data)\r\n\r\n \"\"\" Extract columns for dataframe for artist table,\r\n drop duplicates before performing insert\r\n convert dataframe to a list for insert \"\"\"\r\n\r\n artist_data = (df[['artist_id','artist_name','artist_location','artist_latitude','artist_longitude']])\r\n artist_data.drop_duplicates(subset='artist_id',keep ='first',inplace = True)\r\n artist_data = (artist_data.values).tolist()\r\n artist_data = artist_data[0]\r\n # insert artist record\r\n cur.execute(artist_table_insert, artist_data)", "def process_song_file(cur, filepath):\n \n # open song file\n \n df = pd.read_json(filepath,lines=True)\n \n # insert song record\n song_data = df[['song_id', 'title', 'artist_id','year',\n 'duration']].values[0].tolist()\n cur.execute(song_table_insert, song_data)\n \n # insert artist record\n artist_data = df[['artist_id','artist_name',\n 'artist_location', 'artist_latitude',\n 'artist_longitude']].values[0].tolist()\n cur.execute(artist_table_insert, artist_data)", "def process_song_file(cur, filepath):\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(df[[\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\"]].values[0])\n try:\n cur.execute(song_table_insert, song_data)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in songs table\")\n print(e)\n\n # insert artist record\n artist_data = list(df[[\"artist_id\", \"artist_name\", \"artist_location\", \"artist_latitude\", \"artist_longitude\"]].values[0])\n try:\n cur.execute(artist_table_insert, artist_data)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in artists table\")\n print(e)", "def process_song_file(cur, filepath: str) -> None:\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert song record\n for song_record in df[\n [\n \"song_id\",\n \"title\",\n \"artist_id\",\n \"year\",\n \"duration\",\n ]\n ].values:\n cur.execute(sql_queries.song_table_insert, song_record)\n\n # insert artist record\n for artist_record in df[\n [\n \"artist_id\",\n \"artist_name\",\n \"artist_location\",\n \"artist_latitude\",\n \"artist_longitude\",\n ]\n ].values:\n cur.execute(sql_queries.artist_table_insert, artist_record)", "def process_song_file(cur, filepath):\n\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(\n df[['song_id', 'artist_id', 'title', 'year', 'duration']].values[0])\n cur.execute(song_table_insert, song_data)\n\n # insert artist record\n artist_data = list(df[['artist_id', 
'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0])\n cur.execute(artist_table_insert, artist_data)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = f'{input_data}/log_data/*.json'\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df.filter(df.page =='NextSong')\n\n # extract columns for users table \n user_table = df.select([\"userId\", \"firstname\", \"lastname\", \"gender\", \"level\"]).where(df[\"userId\"].isNotNull())\n \n # write users table to parquet files\n user_data_out = f'{output_data}/user_table/user_table.parquet'\n user_table.write.mode('overwrite').parquet(user_data_out)\n\n # create timestamp column from original timestamp column\n get_timestamp = F.udf(lambda x: datetime.fromtimestamp( (x/1000.0) ), T.TimestampType()) \n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\n \n # extract columns to create time table\n time_table = df.select(['timestamp']).dropDuplicates()\n time_table = time_table.withColumn(\"hour\", hour(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"day\", dayofyear(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"week\", weekofyear(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"month\", month(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"year\", year(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"weekday\", dayofweek(time_table[\"timestamp\"]))\n\n \n # write time table to parquet files partitioned by year and month\n time_data_out = f'{output_data}/time_table/time_table.parquet'\n time_table.write.mode('overwrite').partitionBy('year','month').parquet(time_data_out)\n\n # read in song data to use for songplays table\n song_data = f'{input_data}/song_data/*/*/*/*.json'\n sdf = spark.read.json(song_data)\n sdf.createOrReplaceTempView(\"song_df_table\")\n \n # Adding month and year column to log data read and preparing log data table\n df = df.withColumn(\"month\", month(df[\"timestamp\"]))\n df = df.withColumn(\"year\", year(df[\"timestamp\"]))\n df.createOrReplaceTempView(\"log_df_table\")\n \n # extract columns from joined song and log datasets to create songplays table \n songplays_table = spark.sql(\"\"\"\n SELECT ldf.timestamp as start_time,\n ldf.userid as user_id,\n ldf.level,\n sdf.song_id,\n sdf.artist_id,\n ldf.sessionid as session_id,\n ldf.location,\n ldf.useragent as user_agent,\n ldf.month,\n ldf.year\n FROM log_df_table ldf\n JOIN song_df_table sdf\n ON (ldf.song = sdf.title) AND (ldf.artist = sdf.artist_name) AND (ldf.length = sdf.duration)\n WHERE ldf.page = 'NextSong' and ldf.userid is not null\n \"\"\")\n \n # adding the songplay_id column\n window = Window.orderBy(F.col('start_time'))\n songplays_table = songplays_table.withColumn('songplay_id', F.row_number().over(window))\n songplays_table.select('songplay_id', 'start_time', 'user_id', 'level', 'song_id', 'artist_id', 'session_id', 'location', 'user_agent', 'month', 'year').show()\n\n # write songplays table to parquet files partitioned by year and month\n songplays_data_out = f'{output_data}/songplays_table/songplays_table.parquet'\n songplays_table.write.mode('overwrite').partitionBy('year','month').parquet(songplays_data_out)", "def main():\n spark = create_spark_session()\n\n input_data = \"s3a://udacitydenanodegree2020/\"\n output_data = \"s3a://udacitydenanodegree2020/output/\"\n\n process_song_data(spark, input_data, output_data) \n 
process_log_data(spark, input_data, output_data)", "def process_song_file(cur, filepath):\r\n\r\n\r\n\r\n\r\n df=pd.read_json(filepath,lines=True)\r\n for j,row in df.iterrows():\r\n n, artist_id, artist_latitude, artist_longitude, artist_location, artist_name, song_id, title, duration, year =row\r\n cur.execute(song_table_insert,[song_id,title,artist_id,year,duration])\r\n\r\n cur.execute(artist_table_insert, [artist_id, artist_name, artist_location,artist_latitude,artist_longitude])", "def process_song_file(cur, filepath):\n # open song file\n data_frame = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(data_frame[['song_id', 'title', 'artist_id', 'year', 'duration']].values[0])\n cur.execute(song_table_insert, song_data)\n\n # insert artist record\n artist_data = list(\n data_frame[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0])\n cur.execute(artist_table_insert, artist_data)", "def process_song_file(cur, filepath):\n # open song file\n df = pd.read_json(filepath,lines=True)\n\n # insert song record\n __insert_song_data(cur, df)\n \n # insert artist record\n __insert_artist_data(cur, df)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log_data/*/*/*.json\"\n\n # read log data file\n df = df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df = df.filter(df.page == 'NextSong')\n\n # extract columns for users table \n users_table = df.filter(df.userId != '').selectExpr(\"userId as user_id\",\n \"firstName as first_name\", \"lastName as last_name\", \"gender\", \"level\") \\\n .dropDuplicates()\n \n # output filepath to users table file\n users_table_path = output_data + \"users_table.parquet\"\n \n # write users table to parquet files\n users_table.write.mode(\"overwrite\") \\\n .parquet(users_table_path)\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp(x/1000).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn(\"start_time\", get_timestamp(df.ts))\n \n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: datetime.fromtimestamp(x/1000).strftime('%Y-%m-%d'))\n df = df.withColumn(\"datetime\", get_datetime(df.ts))\n \n # extract columns to create time table\n time_table = df.select(\"start_time\", hour(\"start_time\").alias(\"hour\"), dayofmonth(\"datetime\").alias(\"day\"),\n weekofyear(\"datetime\").alias(\"week\"), month(\"datetime\").alias(\"month\"),\n year(\"datetime\").alias(\"year\"), dayofweek(\"datetime\").alias(\"weekday\")).dropDuplicates()\n \n # output filepath to time table\n time_table_path = output_data + \"time_table.parquet\"\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").mode(\"overwrite\") \\\n .parquet(time_table_path)\n \n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n\n # read in song data to use for songplays table\n song_df = spark.read.json(song_data)\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = df.join(song_df, (df.song == song_df.title) & (df.length == song_df.duration) & \n (df.artist == song_df.artist_name), how='left').dropDuplicates()\n songplays_table = songplays_table.withColumn(\"id\", monotonically_increasing_id())\n windowSpec = Window.orderBy(\"id\")\n songplays_table.withColumn(\"songplay_id\", 
row_number().over(windowSpec))\n songplays_table = songplays_table.selectExpr(\"songplay_id\", \"start_time\", \n \"userId as user_id\", \"level\", \"song_id\", \"artist_id\", \"sessionId as session_id\",\n \"location\", \"userAgent as user_agent\", \"year(start_time) as year\",\n \"month(start_time) as month\")\n\n # output filepath to songplays table\n songplays_table_path = output_data + \"songplays_table.parquet\"\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy(\"year\", \"month\").mode(\"overwrite\") \\\n .parquet(songplays_table_path)", "def process_log_data(spark, input_data, output_data):\n\n # get filepath to log data file\n log_data = input_data + \"log_data/2018/11/*.json\"\n\n # read log data file\n log_data_schema = StructType([\n StructField(\"artist\", StringType(), True),\n StructField(\"auth\", StringType(), False),\n StructField(\"firstName\", StringType(), True),\n StructField(\"gender\", StringType(), True),\n StructField(\"itemInSession\", IntegerType(), False),\n StructField(\"lastName\", StringType(), True),\n StructField(\"length\", DoubleType(), True),\n StructField(\"level\", StringType(), False),\n StructField(\"location\", StringType(), True),\n StructField(\"method\", StringType(), False),\n StructField(\"page\", StringType(), False),\n StructField(\"registration\", DoubleType(), True),\n StructField(\"sessionId\", IntegerType(), False),\n StructField(\"song\", StringType(), True),\n StructField(\"status\", IntegerType(), False),\n StructField(\"ts\", DoubleType(), False),\n StructField(\"userAgent\", StringType(), True),\n StructField(\"userId\", StringType(), True)\n ])\n print(\"Start read s3 log data\") \n df = spark.read.json(log_data, schema=log_data_schema)\n \n # filter by actions for song plays\n print(\"Start filter for song plays\") \n df = df.filter(col(\"page\") == \"NextSong\")\n\n # extract columns for users table\n print(\"Start extract columns for users table\") \n users_table = df.filter((col(\"userID\") != \"\") & (col(\"userID\").isNotNull())).select(\n \"userID\", \"firstName\", \"lastName\", \"gender\", \"level\")\n \n # write users table to parquet files\n print(\"Start write users table to parquet files\") \n users_table.write.parquet(output_data + \"users_table.parquet\", mode=\"overwrite\")\n\n # create timestamp column from original timestamp column\n print(\"Start create timestamp column from original timestamp column\") \n get_timestamp = udf(\n lambda x: x/1000,\n DoubleType()\n )\n df = df.withColumn(\"start_timestamp\", get_timestamp(\"ts\")) \n \n # create datetime column from original timestamp column\n print(\"Start create datetime column from original timestamp column\")\n get_datetime = udf(\n lambda x: datetime.fromtimestamp(x / 1000).replace(microsecond=0),\n TimestampType()\n )\n df = df.withColumn(\"start_datetime\", get_datetime(\"ts\")) \n \n # extract columns to create time table\n print(\"Start extract columns to create time table\")\n time_table = df.withColumn(\n \"hour\", hour(\"start_datetime\")).withColumn(\n \"day\", dayofmonth(\"start_datetime\")).withColumn(\n \"week\", weekofyear(\"start_datetime\")).withColumn(\n \"month\", month(\"start_datetime\")).withColumn(\n \"year\", year(\"start_datetime\")).withColumn(\n \"weekday\", dayofweek(\"start_datetime\")).select(\n \"start_datetime\", \"hour\", \"day\", \"week\", \"month\", \"year\", \"weekday\").distinct()\n \n # write time table to parquet files partitioned by year and month\n print(\"Start 
write time table to parquet files partitioned by year and month\")\n time_table.write.parquet(output_data + \"time_table.parquet\", mode=\"overwrite\", partitionBy=[\"year\", \"month\"])\n\n # read in song data to use for songplays table\n print(\"Start read in song data to use for songplays table\")\n song_df = spark.read.parquet(output_data + \"songs_table.parquet\")\n\n # extract columns from joined song and log datasets to create songplays table \n print(\"Start extract columns from joined song and log datasets to create songplays table \")\n artists_table = spark.read.parquet(output_data + \"artists_table.parquet\")\n songs = song_df.join(artists_table, \"artist_id\", \"full\").select(\n \"song_id\", \"title\", \"artist_id\", \"name\", \"duration\")\n \n songplays_table = df.join(songs, [df.song == songs.title, df.artist == songs.name, df.length == songs.duration], \"left\")\n \n songplays_table = songplays_table.join(time_table, \"start_datetime\", \"left\").select(\n \"start_datetime\", \"userId\", \"level\", \"song_id\", \"artist_id\", \"sessionId\",\n \"location\", \"userAgent\", \"year\", \"month\").withColumn(\"songplay_id\", monotonically_increasing_id())\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.parquet(output_data + \"songplays_table.parquet\", mode=\"overwrite\", partitionBy=[\"year\", \"month\"])", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://dend-bucket-cpm/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend\"\n output_data = \"s3a://vivek1bucket\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def process_song_file(cursor, filepath):\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert artist record\n artist_columns = ['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude']\n artist_data = df[artist_columns].values[0].tolist()\n cursor.execute(artist_table_insert, artist_data)\n\n # insert song record\n song_columns = ['song_id', 'title', 'artist_id', 'year', 'duration']\n song_data = df[song_columns].values[0].tolist()\n cursor.execute(song_table_insert, song_data)", "def process_log_data(spark, input_data, output_data):\n\n print(\"Read log data\")\n # read log data file\n df_log_data = spark.read.json(input_data + \"log-data/*/*/*.json\")\n\n # filter by actions for song plays\n df_log_data = df_log_data[df_log_data['page']=='NextSong']\n\n # extract columns for users table \n users_table = df_log_data[['userId', 'firstName', 'lastName', 'gender', 'level']].drop_duplicates()\n\n \n print(\"Write...\")\n # write users table to parquet files\n users_table.write.save(path=output_data + 'users_table',\n format='parquet',\n mode='overwrite'\n )\n\n df_log_data = df_log_data.withColumn('timestamp', F.from_unixtime(df_log_data['ts']/1000))\\\n .withColumn('hour', F.hour(F.col('timestamp')))\\\n .withColumn('day', F.dayofmonth(F.col('timestamp')))\\\n .withColumn('month', F.month(F.col('timestamp')))\\\n .withColumn('year', F.year(F.col('timestamp')))\\\n .withColumn('weekofyear', F.weekofyear(F.col('timestamp')))\\\n .withColumn('dayofweek', F.dayofweek(F.col('timestamp')))\n\n # extract columns to create time table\n time_table = 
df_log_data[['timestamp','hour','day','month','year','weekofyear','dayofweek',]].drop_duplicates()\n\n print(\"Write...\")\n # write time table to parquet files partitioned by year and month\n time_table.write.save(path=output_data + 'time_table',\n format='parquet',\n mode='overwrite',\n partitionBy=['year','month'] )\n\n # read in song data to use for songplays table\n df_song = spark.read.json(input_data + \"song_data/*/*/*/*.json\", schema=build_song_schema())\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = df_log_data.join(df_song, \n on = (df_song['title'] == df_log_data['song']) & \\\n (df_song['artist_name'] == df_log_data['artist']) & \\\n (df_song['duration'] == df_log_data['length']) \n )\n\n print(\"Write...\")\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.save(path=output_data + 'songplays_table',\n format='parquet',\n mode='overwrite',\n partitionBy=['year','month'] )", "def process_log_data(spark, input_data, output_data):\n\n # get filepath to log data file\n input_data = input_data+\"/log-data\"\n\n \"\"\"\n log_data=[]\n for x in os.walk(input_data):\n for y in glob.glob(os.path.join(x[0], '*.json')):\n log_data.append(y)\n \"\"\"\n \n # read log data file\n df = spark.read.json(input_data)\n\n # filter by actions for song plays\n df=df.filter(col(\"page\")=='NextSong').withColumn(\"new_ts\", df[\"ts\"].cast(IntegerType())).drop(\"ts\").withColumnRenamed(\"new_ts\", \"ts\")\n df.createOrReplaceTempView(\"staging_log_data\")\n\n # extract columns for users table\n user_query = \" SELECT userid, firstName, lastName, gender, level \" \\\n \" FROM staging_log_data \"\n users_table = spark.sql(user_query)\n \n # write users table to parquet files\n users_table.write.mode('overwrite').parquet(path=output_data+\"users\")\n\n df=df.filter(df['ts'].isNotNull())\n time_table= df.select(from_unixtime(df['ts']/1000).alias('start_time'))\n time_table=time_table.select(time_table['start_time'], \\\n hour(time_table['start_time']).alias(\"hour\"), \\\n dayofmonth(time_table['start_time']).alias(\"day\"), \\\n weekofyear(time_table['start_time']).alias(\"week\"), \\\n month(time_table['start_time']).alias(\"month\"), \\\n year(time_table['start_time']).alias(\"year\"), \\\n date_format(time_table['start_time'],'E').alias(\"DOW\"))\n\n # write time table to parquet files partitioned by year and month\n time_table.write.mode('overwrite').partitionBy('year','month').parquet(path=output_data + \"time\")\n\n # read in song data to use for songplays table\n songplay_query=\" Select DISTINCT monotonically_increasing_id() as songplay_id, \" \\\n \" from_unixtime(ld.ts/1000) as start_time , \" \\\n \" ld.userid as user_id, \" \\\n \" ld.level as level,\"\\\n \" sd.song_id as song_id,\" \\\n \" sd.artist_id as artist_id,\" \\\n \" ld.sessionid as session_id, \" \\\n \" ld.location as location, \" \\\n \" ld.useragent as user_agent, \" \\\n \" t.year as year, \" \\\n \" t.month as month \" \\\n \" from staging_log_data ld, Staging_Song_Data sd, time t\" \\\n \" Where ld.artist = sd.artist_name\" \\\n \" and ld.song = sd.title \" \\\n \" and from_unixtime(ld.ts/1000) = t.start_time \" \n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = spark.sql(songplay_query)\n #songplays_table = spark.sql(songplay_query).drop_duplicates('start_time','user_id','level','song_id','artist_id','location','user_agent')\n\n songplays_table.show()\n \n # write 
songplays table to parquet files partitioned by year and month\n songplays_table.write.mode('overwrite').partitionBy('year','month').parquet(path=output_data + \"songplays\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = os.path.join(input_data, 'log_data/*/*/*.json')\n\n # read log data file\n dfl = spark.read.json(log_data)\n \n # filter by actions for song plays\n dfl = dfl.filter(dfl.page == \"NextSong\")\n \n #create temporary view in order to run SQL queries\n dfl.createOrReplaceTempView(\"log_table\")\n\n # extract columns for users table \n dim_users = spark.sql(\"\"\"\n SELECT DISTINCT userId AS user_id,\n firstName AS first_name,\n lastName AS last_name,\n gender,\n level\n FROM log_table\n WHERE userId IS NOT NULL\n \"\"\")\n \n # write users table to parquet files\n dim_users.write.mode('overwrite').parquet(output_data+\"users\")\n\n # create timestamp column from original timestamp column\n #get_timestamp = udf()\n #df = \n \n # create datetime column from original timestamp column\n #get_datetime = udf()\n #df = \n \n #Convert ts field to timestamp\n time_convert = spark.sql(\"\"\"\n SELECT to_timestamp(ts/1000) as start_times\n FROM log_table\n WHERE ts IS NOT NULL\n \"\"\")\n \n #create temporary view of time_table to run SQL queries\n time_convert.createOrReplaceTempView(\"time_table\")\n \n # extract columns to create time table\n dim_time = spark.sql(\"\"\"\n SELECT start_times as start_time,\n hour(start_times) as hour,\n dayofmonth(start_times) as day,\n weekofyear(start_times) as week,\n month(start_times) as month,\n year(start_times) as year,\n dayofweek(start_times) as weekday\n FROM time_table\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n dim_time.write.mode('overwrite').partitionBy(\"year\", \"month\").parquet(output_data+\"time\")\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet(output_data+'songs')\n\n # extract columns from joined song and log datasets to create songplays table \n fact_songplays = spark.sql(\"\"\"\n SELECT monotonically_increasing_id() as songplay_id,\n to_timestamp(lt.ts/1000) as start_time,\n month(to_timestamp(lt.ts/1000)) as month,\n year(to_timestamp(lt.ts/1000)) as year,\n lt.userId as user_id,\n lt.level as level,\n st.song_id as song_id,\n st.artist_id as artist_id,\n lt.sessionId as session_id,\n lt.location as location,\n lt.userAgent as user_agent\n FROM log_table lt\n JOIN song_table st ON lt.song = st.title AND lt.artist = st.artist_name\n \"\"\")\n\n # write songplays table to parquet files partitioned by year and month\n fact_songplays.write.mode('overwrite').partitionBy(\"year\", \"month\").parquet(output_data+\"songplays\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + 'log_data/*/*/*.json'\n\n # read log data file\n df = df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df[df.page == 'NextSong']\n\n # extract columns for users table\n users_table = df.select('userId', 'firstName', 'lastName', 'gender', 'level').drop_duplicates()\n\n # write users table to parquet files\n users_table.write.format(\"parquet\").mode(\"overwrite\").save(output_data + 'users/user_data.parquet')\n\n # create timestamp column from original timestamp column\n df = df.withColumn('timestamp', F.to_timestamp(df.ts / 1000))\n\n # create datetime column from original timestamp column\n df = df.withColumn('datetime', 
F.to_date(df.timestamp))\n\n # create uniqueId column for log_data\n df = df.withColumn('uniqueId', monotonically_increasing_id())\n\n # write log data to temp view log_data_table\n df.createOrReplaceTempView(\"log_data_table\")\n\n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n select DISTINCT\n timestamp\n , datetime AS start_time\n , hour(timestamp) AS hour\n , day(timestamp) AS day\n , weekofyear(timestamp) AS week\n , month(timestamp) AS month\n , year(timestamp) AS year\n , weekday(timestamp) AS weekday\n from log_data_table\n \"\"\")\n\n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").format(\"parquet\").mode(\"overwrite\").save(\n output_data + 'time/time_data.parquet')\n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = songplay_table = spark.sql(\"\"\"\n SELECT DISTINCT\n stg.uniqueId AS songplay_id,\n stg.ts AS start_time,\n month(stg.timestamp) AS month,\n year(stg.timestamp) AS year,\n stg.userId,\n stg.level,\n stg2.song_id,\n stg2.artist_id,\n stg.sessionId,\n stg.location,\n stg.userAgent\n FROM log_data_table stg\n LEFT JOIN song_data_table stg2\n ON stg.artist = stg2.artist_name\n AND stg.song = stg2.title\n AND stg.length = stg2.duration\n WHERE stg.userId IS NOT NULL\n \"\"\")\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy(\"year\", \"month\").format(\"parquet\").mode(\"overwrite\").save(\n output_data + 'songplays/songplays_data.parquet')", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://udacity-data-lake/output/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def process_log_data(spark, input_data, output_data):\n # define json structure\n logdata_schema = StructType([\n StructField(\"artist\", StringType(), True),\n StructField(\"auth\", StringType(), True),\n StructField(\"firstName\", StringType(), True),\n StructField(\"gender\", StringType(), True),\n StructField(\"itemInSession\", LongType(), True),\n StructField(\"lastName\", StringType(), True),\n StructField(\"length\", DoubleType(), True),\n StructField(\"level\", StringType(), True),\n StructField(\"location\", StringType(), True),\n StructField(\"method\", StringType(), True),\n StructField(\"page\", StringType(), True),\n StructField(\"registration\", DoubleType(), True),\n StructField(\"sessionId\", LongType(), True),\n StructField(\"song\", StringType(), True),\n StructField(\"status\", LongType(), True),\n StructField(\"ts\", LongType(), True),\n StructField(\"userAgent\", StringType(), True),\n StructField(\"userId\", StringType(), True),\n ])\n # get filepath to log data file\n log_data = os.path.join(input_data, 'log_data/*/*/*.json')\n\n # read log data file\n df = spark.read.json(log_data, schema=logdata_schema)\n\n # filter by actions for song plays\n df = df.filter(col(\"page\") == 'NextSong')\n\n # extract columns for users table \n users_table = df.select(col(\"userId\").alias(\"user_id\"),col(\"firstName\").alias(\"first_name\"),\n col(\"lastName\").alias(\"last_name\"),\"gender\",\"level\")\n \n # write users table to parquet files\n users_table.write.parquet(output_data + \"users\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp((x/1000)), TimestampType())\n df = df.withColumn(\"timestamp\", 
get_timestamp(df.ts))\n \n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: datetime.fromtimestamp(x/1000).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn(\"datetime\", get_datetime(df.ts))\n \n # extract columns to create time table\n time_table = df.select(col(\"timestamp\").alias(\"start_time\"),\n hour(col(\"timestamp\")).alias(\"hour\"),\n dayofmonth(col(\"timestamp\")).alias(\"day\"), \n weekofyear(col(\"timestamp\")).alias(\"week\"), \n month(col(\"timestamp\")).alias(\"month\"),\n year(col(\"timestamp\")).alias(\"year\"))\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").parquet(output_data + \"time\")\n\n # read in song data to use for songplays table\n song_data = os.path.join(input_data, \"song_data/A/A/A/TRAAAAK128F9318786.json\")\n song_df = spark.read.json(song_data)\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = song_df.join(df, \n song_df.artist_name==df.artist).withColumn(\"songplay_id\", \n monotonically_increasing_id()).withColumn(\"start_time\", get_datetime(df.ts)).select(\"songplay_id\",\n \"start_time\", \n col(\"userId\").alias(\"user_id\"),\n \"level\",\n \"song_id\",\n \"artist_id\",\n col(\"sessionId\").alias(\"session_id\"),\n col(\"artist_location\").alias(\"location\"),\n \"userAgent\",\n month(col(\"start_time\")).alias(\"month\"),\n year(col(\"start_time\")).alias(\"year\"))\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy(\"year\", \"month\").parquet(output_data + \"songplays\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log-data/*/*/*.json\"\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n # rename the columns as per requirements\n df = df.filter(\"page='NextSong'\")\\\n .withColumnRenamed(\"userId\", \"user_id\")\\\n .withColumnRenamed(\"firstName\", \"first_name\")\\\n .withColumnRenamed(\"lastName\", \"last_name\")\\\n .withColumnRenamed(\"sessionId\", \"session_id\")\\\n .withColumnRenamed(\"userAgent\", \"user_agent\")\n\n # extract columns for users table \n users_table = df.select(\"user_id\", \"first_name\", \"last_name\", \"gender\", \"level\")\n \n # write users table to parquet files\n file_name = output_data + \"users.parquet\"\n users_table.write.parquet(file_name)\n\n # create timestamp column from original timestamp column\n get_timestamp = F.udf(lambda x: int(int(x)/1000))\n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\\\n .withColumn(\"datetime\", F.from_unixtime(\"timestamp\", \"MM-dd-yyyy HH:mm:ss\"))\\\n .withColumn(\"start_time\", F.to_timestamp(\"datetime\", \"MM-dd-yyyy HH:mm:ss\"))\\\n .withColumn(\"month\", F.month(\"start_time\"))\\\n .withColumn(\"year\", F.year(\"start_time\"))\\\n .withColumn(\"week\", F.weekofyear(\"start_time\"))\\\n .withColumn(\"day\", F.dayofmonth(\"start_time\"))\\\n .withColumn(\"weekday\", F.dayofweek(\"start_time\"))\\\n .withColumn(\"hour\", F.hour(\"start_time\"))\n \n # extract columns to create time table\n time_table = df.select(\"start_time\", \"month\", \"year\", \"week\", \"day\", \"weekday\", \"hour\")\n \n # write time table to parquet files partitioned by year and month\n file_name = output_data + \"time.parquet\"\n time_table.write.partitionBy([\"year\", \"month\"]).parquet(file_name)\n\n # read in song data to use for 
songplays table\n file_name = output_data + \"songs.parquet\"\n songs_df = spark.read.parquet(file_name)\n\n # Create views to perform sql query\n songs_df.createOrReplaceTempView(\"songs_data\")\n df.createOrReplaceTempView(\"logs_data\")\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = spark.sql(\"\"\"\n SELECT DISTINCT start_time, user_id, level, song_id, artist_id,\n session_id, location, user_agent, logs.year, month\n FROM logs_data as logs\n LEFT OUTER JOIN songs_data as songs\n ON logs.song = songs.title\n AND logs.length = songs.duration\n \"\"\")\n\n # Create a column songplays_id and assign it values using monotonically_increasing_id method\n songplays_table = songplays_table.withColumn(\"songplays_id\", F.monotonically_increasing_id())\n\n # write songplays table to parquet files partitioned by year and month\n file_name = output_data + \"songplays.parquet\"\n songplays_table.write.partitionBy([\"year\", \"month\"]).parquet(file_name)", "def process_log_data(spark, song_input_data, log_input_data, output_data):\n # get filepath to log data file\n log_data = log_input_data\n\n # read log data file\n print(\"Reading in the log data...\")\n log_df = spark.read.json(log_data)\n \n # filter by actions for song plays\n print(\"Filtering by actions for song plays...\")\n log_df = log_df.filter(log_df.page == 'NextSong')\n\n # extract columns for users table \n print(\"Extracting columns for users table...\")\n log_df.createOrReplaceTempView(\"users_table\")\n users_table = spark.sql(\"\"\"\n SELECT DISTINCT userId AS user_id,\n firstName AS first_name,\n lastName AS last_name,\n gender,\n level\n FROM users_table\n ORDER BY last_name\n \"\"\")\n \n print(\"Users table sample:\")\n users_table.show(5, truncate=False)\n \n # write users table to parquet files\n print(\"Writing users table to parquet files...\")\n users_table.write.mode(\"overwrite\").parquet(output_data + \"users_table.parquet\")\n\n # create timestamp column from original timestamp column\n print(\"Creating timestamp column...\")\n @udf(pt.TimestampType())\n def get_timestamp (ts):\n return datetime.fromtimestamp(ts / 1000.0)\n\n log_df = log_df.withColumn(\"timestamp\", get_timestamp(\"ts\"))\n log_df.show(5)\n \n # create datetime column from original timestamp column\n print(\"Creating datetime column...\")\n @udf(pt.StringType())\n def get_datetime(ts):\n return datetime.fromtimestamp(ts / 1000.0).strftime('%Y-%m-%d %H:%M:%S')\n\n log_df = log_df.withColumn(\"datetime\", get_datetime(\"ts\"))\n log_df.show(5)\n \n # extract columns to create time table\n print(\"Extracting columns for time table...\")\n log_df.createOrReplaceTempView(\"time_table\")\n time_table = spark.sql(\"\"\"\n SELECT DISTINCT datetime AS start_time,\n hour(timestamp) AS hour,\n day(timestamp) AS day,\n weekofyear(timestamp) AS week,\n month(timestamp) AS month,\n year(timestamp) AS year,\n dayofweek(timestamp) AS weekday\n FROM time_table\n ORDER BY start_time\n \"\"\")\n \n print(\"Users table sample:\")\n time_table.show(5, truncate=False)\n \n # write time table to parquet files partitioned by year and month\n print(\"Writing time table to parquet files...\")\n time_table.write.mode(\"overwrite\").partitionBy(\"year\", \"month\")\\\n .parquet(output_data + \"time_table.parquet\")\n\n # read in song data to use for songplays table\n song_df = spark.read.json(song_input_data)\n\n print(\"Joining log_data and song_data...\")\n joined_df = log_df.join(song_df, (log_df.artist == 
song_df.artist_name) & (log_df.song == song_df.title))\n\n # extract columns from joined song and log datasets to create songplays table \n print(\"Extracting columns from joined DF...\")\n joined_df = joined_df.withColumn(\"songplay_id\", monotonically_increasing_id())\n joined_df.createOrReplaceTempView(\"songplays_table\")\n songplays_table = spark.sql(\"\"\"\n SELECT songplay_id,\n timestamp AS start_time,\n month(timestamp) AS month,\n year(timestamp) AS year,\n userId AS user_id,\n level,\n song_id,\n artist_id,\n sessionId AS session_id,\n location,\n userAgent AS user_agent\n FROM songplays_table\n ORDER BY (user_id, session_id)\n \"\"\")\n\n print(\"Song plays table sample:\")\n songplays_table.show(5, truncate=False)\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.mode(\"overwrite\").partitionBy(\"year\", \"month\") \\\n .parquet(output_data + \"songplays_table.parquet\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n # For working in the workspace: log_data = os.path.join(input_data, \"log-data/*.json\")\n log_data = os.path.join(input_data, \"log-data/*/*/*.json\")\n\n # read log data file\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.filter(df.page == 'NextSong')\n\n # rename the columns in df\n df = (df.withColumnRenamed('userId', 'user_id')\n .withColumnRenamed('firstName', 'first_name')\n .withColumnRenamed('lastName', 'last_name')\n .withColumnRenamed('itemInSession', 'item_in_session')\n .withColumnRenamed('sessionId', 'session_id')\n .withColumnRenamed('userAgent', 'user_agent'))\n\n # extract columns for users table\n users_table = df.select('user_id', 'first_name', 'last_name', 'gender', 'level').distinct()\n\n # write users table to parquet files\n users_table.write.parquet(output_data + 'users', mode='overwrite')\n\n # create timestamp column from original timestamp column\n # default type is string for UDFs, so we need to switch that by specifying the correct type\n get_timestamp = udf(lambda x: datetime.fromtimestamp(x/1000.0), T.TimestampType())\n df = df.withColumn('start_time', get_timestamp(df.ts))\n\n # extract columns to create time table\n time_table = df.select('start_time',\n hour(col('start_time')).alias('hour'),\n dayofmonth(col('start_time')).alias('day'),\n weekofyear(col('start_time')).alias('week'),\n month(col('start_time')).alias('month'),\n year(col('start_time')).alias('year'),\n date_format(col('start_time'), 'EEEE').alias('weekday'))\n\n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy('year', 'month').parquet(output_data + 'time', mode='overwrite')\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet(output_data + 'songs/year=*/artist_id=*/*.parquet')\n artist_df = spark.read.parquet(output_data + 'artists/*.parquet')\n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = df.join(song_df, (df.song == song_df.title) & (df.length == song_df.duration)).join(artist_df, df.artist == artist_df.artist_name).join(time_table, ['start_time'])\n\n # create the songplay_id column\n songplays_table = songplays_table.withColumn('songplay_id', monotonically_increasing_id())\n\n # select the columns of interest\n songplays_table = songplays_table.select('songplay_id', 'start_time', 'user_id', 'level', 'song_id', 'artist_id', 'session_id', 'location', 'user_agent', 'year', 'month')\n\n # write songplays 
table to parquet files partitioned by year and month (I think this is a copy paste error because year and month aren't listed as required cols)\n songplays_table.write.partitionBy('year', 'month').parquet(output_data + 'songplays', mode='overwrite')", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = os.path.join(input_data, 'log_data', '*', '*', '*')\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df.filter(col(\"page\") == \"NextSong\")\n df.createOrReplaceTempView(\"songplays\")\n\n # extract columns for users table \n users_table = spark.sql(\"\"\"\n SELECT \n DISTINCT(userId) AS user_id, \n firstName AS first_name, \n lastName AS last_name, \n gender, \n level\n FROM songplays\n \"\"\")\n \n # write users table to parquet files\n users_table_path = os.path.join(output_data, \"users_table.parquet\")\n (users_table.\n write.\n mode(\"overwrite\").\n parquet(users_table_path))\n print(f\"Stored users table at {users_table_path}\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: pd.Timestamp(x, unit = \"ms\"), TimestampType())\n df = df.withColumn(\"timestamp\", get_timestamp(\"ts\"))\n \n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: pd.Timestamp(x, unit = \"ms\"), TimestampType())\n df = df.withColumn(\"datetime\", get_datetime(\"ts\"))\n df.createOrReplaceTempView(\"log_table\")\n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n SELECT \n DISTINCT(timestamp) AS start_time, \n HOUR(timestamp) AS hour,\n day(timestamp) AS day,\n weekofyear(timestamp) AS week,\n month(timestamp) AS month,\n year(timestamp) AS year,\n dayofweek(timestamp) AS weekday\n FROM log_table\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n time_table_path = os.path.join(output_data, \"time_table.parquet\")\n (time_table.\n write.\n mode(\"overwrite\").\n partitionBy(\"year\", \"month\").\n parquet(time_table_path))\n print(f\"Stored time table at {time_table_path}\")\n\n # read in song data to use for songplays table\n song_df_path = os.path.join(input_data, \"song_data\", \"*\", \"*\", \"*\")\n song_df = spark.read.json(song_df_path).alias(\"song_df\")\n df = df.alias(\"df\")\n \n joined_df = df.join(\n song_df, \n col('df.artist') == col('song_df.artist_name'), \n 'inner',\n )\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = joined_df.select(\n col(\"timestamp\").alias(\"start_time\"),\n col(\"userId\").alias(\"user_id\"),\n col(\"level\").alias(\"level\"),\n col(\"song_id\").alias(\"song_id\"),\n col(\"artist_id\").alias(\"artist_id\"),\n col(\"sessionId\").alias(\"session_id\"),\n col(\"location\").alias(\"location\"),\n col(\"userAgent\").alias(\"user_agent\")\n ).withColumn('songplay_id', monotonically_increasing_id())\n \n # Add year and month to enable partitioning\n songplays_table = (songplays_table.\n withColumn('year', year(songplays_table.start_time)).\n withColumn('month', month(songplays_table.start_time)))\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table_path= os.path.join(output_data, \"songplays_table.parquet\")\n (songplays_table.\n write.\n mode(\"overwrite\").\n partitionBy(\"year\", \"month\").\n parquet(songplays_table_path))\n print(f\"Stored songplays table at {songplays_table_path}\")", "def process_log_data(\n spark: SparkSession,\n input_data: 
str,\n output_data: str\n) -> None:\n # get filepath to log data file\n log_data = input_data + 'log-data/*'\n\n # read log data file\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n actions_df = df.filter(df.page == 'NextSong') \\\n .select('ts', 'userId', 'level', 'song', 'artist',\n 'sessionId', 'location', 'userAgent')\n\n # extract columns for users table\n users_table = df.select(\n 'userId',\n 'firstName',\n 'lastName',\n 'gender',\n 'level'\n ).dropDuplicates()\n\n users_table.createOrReplaceTempView('users')\n # write users table to parquet files\n users_table.write.parquet(\n join(output_data, 'users/users.parquet'),\n 'overwrite'\n )\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: str(int(int(x) / 1000)))\n actions_df = actions_df.withColumn(\n 'timestamp',\n get_timestamp(actions_df.ts)\n )\n\n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: str(datetime.fromtimestamp(int(x) / 1000)))\n actions_df = actions_df.withColumn('datetime', get_datetime(actions_df.ts))\n\n # extract columns to create time table\n time_table = actions_df.select('datetime') \\\n .withColumn('start_time', actions_df.datetime) \\\n .withColumn('hour', hour('datetime')) \\\n .withColumn('day', dayofmonth('datetime')) \\\n .withColumn('week', weekofyear('datetime')) \\\n .withColumn('month', month('datetime')) \\\n .withColumn('year', year('datetime')) \\\n .withColumn('weekday', dayofweek('datetime')).dropDuplicates()\n\n # write time table to parquet files partitioned by year and month\n time_table.createOrReplaceTempView('time')\n time_table.write.partitionBy(\n 'year', 'month') \\\n .parquet(join(output_data, 'time/time.parquet'), 'overwrite')\n # read in song data to use for songplays table\n song_df = spark.read.json(input_data + 'song_data/*/*/*/')\n joined_df = actions_df.join(\n song_df,\n (actions_df.artist == song_df.artist_name),\n 'inner'\n )\n # extract columns from joined\n # song and log datasets to create songplays table\n songplays_table = joined_df.select(\n actions_df.datetime.alias('start_time'),\n actions_df.userId.alias('user_id'),\n actions_df.level.alias('level'),\n song_df.song_id.alias('song_id'),\n song_df.artist_id.alias('artist_id'),\n actions_df.sessionId.alias('session_id'),\n actions_df.location.alias('location'),\n actions_df.userAgent.alias('user_agent'),\n year(actions_df.datetime).alias('year'),\n month(actions_df.datetime).alias('month')) \\\n .withColumn('songplay_id', monotonically_increasing_id())\n\n songplays_table.createOrReplaceTempView('songplays')\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.\\\n partitionBy('year', 'month').\\\n parquet(join(output_data, 'songplays/songplays.parquet'), 'overwrite')", "def process_data():\n for message in get_messages_from_sqs():\n try:\n message_content = json.loads(message.body)\n input_file = urllib.unquote_plus(message_content\n ['Records'][0]['s3']['object']\n ['key']).encode('utf-8')\n s3.download_file(input_bucket_name, input_file, input_file)\n output_file = os.path.join(output_dir, os.path.splitext(input_file)[0]+'.csv')\n parse_patient_data(input_file, output_file)\n upload_data(output_file)\n cleanup_files(input_file, output_file)\n except:\n message.change_visibility(VisibilityTimeout=0)\n continue\n else:\n message.delete()", "def process_log_data(spark, input_data, output_data, mode=\"overwrite\"):\n # get filepath to log data file\n log_data = input_data + 
\"log_data/*.json\"\n # read log data file\n print(\"reading event logs from {}\".format(log_data))\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.filter(F.col('page') == 'NextSong')\n df = df.withColumn(\n 'userId', F.col('userId').cast(LongType())\n )\n df = df.withColumn(\n 'registration',\n (F.round(F.col('registration')/1000)).cast(TimestampType())\n )\n df = df.withColumn(\n 'ts',\n (F.round(F.col('ts')/1000)).cast(TimestampType())\n )\n\n # extract columns for users table\n users_table = df.selectExpr(\n 'userId AS user_id',\n 'firstName AS first_name',\n 'lastName AS last_name',\n 'gender AS gender') \\\n .dropDuplicates(['user_id'])\n\n # write users table to parquet files\n users_path = output_data + 'star_schema/user_table'\n print(\"Writing Users Table to {}\".format(users_path))\n users_table.write \\\n .mode(mode) \\\n .partitionBy('user_id') \\\n .parquet(users_path)\n\n # extract columns to create time table\n time_table = df.selectExpr('ts AS start_time') \\\n .dropDuplicates() \\\n .orderBy('start_time', ascending=True) \\\n .withColumn('hour', F.hour('start_time')) \\\n .withColumn('day', F.dayofmonth('start_time')) \\\n .withColumn('week', F.weekofyear('start_time')) \\\n .withColumn('month', F.month('start_time')) \\\n .withColumn('year', F.year('start_time')) \\\n .withColumn('weekday', F.dayofweek('start_time'))\n\n # write time table to parquet files partitioned by year and month\n times_path = output_data + 'star_schema/time_table'\n print(\"Writing Time Table to {}\".format(times_path))\n time_table.write \\\n .mode(mode) \\\n .partitionBy('year', 'month', 'day') \\\n .parquet(times_path)\n\n # read in song data to use for songplays table)\n songs_path = output_data + 'star_schema/song_table'\n print(\"Reading Songs table for join query from {}\".format(songs_path))\n song_table = spark.read.parquet(songs_path)\n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = df.selectExpr(\n 'ts as start_time',\n 'userId as user_id',\n 'level',\n 'song', # join to song_id from songs_df\n # artist_id # join from songs df\n 'sessionId as session_id',\n 'location',\n 'userAgent as user_agent'\n )\n songplays_table = songplays_table \\\n .withColumn('songplay_id', F.monotonically_increasing_id()) \\\n .withColumn('songplay_year', F.year('start_time')) \\\n .withColumn('month', F.month('start_time'))\n songplays_table = songplays_table.join(\n song_table,\n song_table.title == songplays_table.song, how='left'\n ).select([\n 'songplay_id', 'start_time', 'songplay_year', 'month',\n 'user_id', 'level', 'song_id', 'artist_id',\n 'session_id', 'location', 'user_agent'\n ]).withColumnRenamed('songplay_year', 'year')\n\n # write songplays table to parquet files partitioned by year and month\n songplays_path = output_data + 'star_schema/songplay_table'\n print(\"Writing Songplays Table to {}\".format(songplays_path))\n songplays_table.write \\\n .mode(mode) \\\n .partitionBy('year', 'month') \\\n .parquet(songplays_path)", "def main() -> None:\n ROOT_DIR = dirname(abspath(__file__))\n spark = create_spark_session()\n input_data = 's3a://udacity-dend/'\n output_data = ROOT_DIR + '/data/'\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://udacity-nanodegree-data-engineer/\"\n \n process_song_data(spark, input_data, output_data) \n 
process_log_data(spark, input_data, output_data)", "def process_song_file(cur, filepath):\n # open song file\n df = get_file_df(filepath)\n\n # insert song record\n song_data = songs_data = [df.loc[0].song_id, df.loc[0].title, df.loc[0].artist_id, int(df.loc[0].year), int(df.loc[0].duration)]\n cur.execute(song_table_insert, song_data)\n \n # insert artist record\n artist_data = [df.loc[0].artist_id, df.loc[0].artist_name, df.loc[0].artist_location, df.loc[0].artist_latitude, df.loc[0].artist_longitude] \n\n cur.execute(artist_table_insert, artist_data)", "def process_log_data(spark, input_data, output_data):\n \n # get filepath to log data file\n log_data = input_data + 'log_data/*.json'\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df.filter(df.page == 'NextSong')\n \n # create view for users table\n df.createOrReplaceTempView(\"users\") \n\n # extract columns for users table, Adding Distinct and Not null to user_id as it is the primary key\n users_table = spark.sql(\"\"\"\n SELECT DISTINCT userId as user_id, \n firstName as first_name,\n lastName as last_name,\n gender as gender,\n level as level\n FROM users\n WHERE userId IS NOT NULL\n \"\"\")\n \n # write users table to parquet files\n users_table.write.mode('overwrite').parquet(output_data+'users_table/') \n \n # create view for time table\n df.createOrReplaceTempView(\"time\") \n \n # extract columns to create time table, Adding Not null to ts as it is the primary key, its very unlikely that ts will be \n # same for two rows as its time in miliseconds. Much easier and striaghtforward to extract day, hour, week etc. from timestamp\n # through SQL then converting timestamp in Dataframe to datetime and then performing extraction \n time_table = spark.sql(\"\"\"\n SELECT \n timestamp_data.start_time_prev as start_time,\n hour(timestamp_data.start_time_prev) as hour,\n dayofmonth(timestamp_data.start_time_prev) as day,\n weekofyear(timestamp_data.start_time_prev) as week,\n month(timestamp_data.start_time_prev) as month,\n year(timestamp_data.start_time_prev) as year,\n dayofweek(timestamp_data.start_time_prev) as weekday\n FROM\n (SELECT to_timestamp(ts/1000) as start_time_prev\n FROM time\n WHERE ts IS NOT NULL\n ) timestamp_data\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n time_table.write.mode('overwrite').partitionBy(\"year\", \"month\").parquet(output_data+'time_table/') \n \n # get filepath to song data file\n song_data = input_data + 'song_data/*/*/*/*.json'\n \n # read song data file\n df = spark.read.json(song_data)\n \n # create view for song data\n df.createOrReplaceTempView(\"song_data\")\n \n # create view for songplays table \n df.createOrReplaceTempView(\"songplays\")", "def analyze(sc, input_path, output_path):\n context = LogsJobContext(sc)\n # get filepath to log data file\n log_data = \"{}/log_data/*/*/*.json\".format(input_path)\n # read log data file\n df = sc.read.json(log_data)\n df.cache()\n # filter by actions for song plays\n df = df.filter(df.page == 'NextSong')\n df.cache()\n\n # extract columns for users table\n users = df.selectExpr('userId as user_id', 'firstName as first_name', 'lastName as last_name', 'gender',\n 'level').dropDuplicates()\n # write users table to parquet files\n users.write.parquet(\"{}/users.parquet\".format(output_path), mode='overwrite')\n\n start_times_datetime = df.dropDuplicates().selectExpr(\"cast((from_unixtime(ts/1000.0)) as timestamp) as start_time\")\n 
start_times_datetime.printSchema()\n time = start_times_datetime.selectExpr([\"start_time\", \"hour(start_time) as hour\", \"day(start_time) as day\",\n \"weekofyear(start_time) as week\", \"month(start_time) as month\",\n \"year(start_time) as year\",\n \"dayofweek(start_time) as weekday\"\n ])\n\n # write time table to parquet files partitioned by year and month\n time.write.partitionBy(['year', 'month']).parquet(\"{}/times.parquet\".format(output_path), mode=\"overwrite\")\n\n # read in song data to use for songplays table\n song_df = sc.read.parquet(\"{}/songs.parquet\".format(output_path))\n\n # extract columns from joined song and log datasets to create songplays table\n song_df.createOrReplaceTempView('songs')\n df.createOrReplaceTempView('logs')\n songplays_table = sc.sql('select \\\n monotonically_increasing_id() as songplay_id, \\\n from_unixtime(l.ts/1000) as start_time, \\\n userId as user_id,\\\n l.level,\\\n s.song_id,\\\n s.artist_id,\\\n l.sessionId as session_id,\\\n l.location,\\\n l.userAgent as user_agent\\\n from \\\n logs l \\\n left join songs s on l.song = s.title')\n\n songplays_table_for_partitioning = songplays_table\\\n .withColumn('year', F.year(songplays_table.start_time))\\\n .withColumn('month', F.month(songplays_table.start_time))\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table_for_partitioning.write.partitionBy(['year', 'month']) \\\n .parquet(\"{}/songplays.parquet\".format(output_path), mode='overwrite')", "def process_input_data(input_data_path):\n if os.path.isdir(input_data_path):\n input_data_glob = glob.glob(input_data_path + \"/*.csv\")\n else:\n if is_gcs_path(input_data_path):\n # Download the input to a local\n with tempfile.NamedTemporaryFile() as hf:\n input_data = hf.name\n\n logging.info(\"Copying %s to %s\", input_data_path, input_data)\n input_data_gcs_bucket, input_data_gcs_path = split_gcs_uri(\n input_data_path)\n\n logging.info(\"Download bucket %s object %s.\", input_data_gcs_bucket,\n input_data_gcs_path)\n bucket = storage.Bucket(storage.Client(), input_data_gcs_bucket)\n storage.Blob(input_data_gcs_path, bucket).download_to_filename(\n input_data)\n else:\n input_data = input_data_path\n\n ext = os.path.splitext(input_data)[-1]\n if ext.lower() == '.zip':\n zip_ref = zipfile.ZipFile(input_data, 'r')\n zip_ref.extractall('.')\n zip_ref.close()\n # TODO: Hardcoding the file in the Archive to use is brittle.\n # We should probably just require the input to be a CSV file.:\n csv_file = 'stackoverflow-questions.csv'\n else:\n csv_file = input_data\n\n input_data_glob = glob.glob(csv_file)\n\n return input_data_glob", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"data/analytics\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def main():\n spark = create_spark_session()\n\n input_data = config['STORAGE']['INPUT_DATA']\n output_data = config['STORAGE']['OUTPUT_DATA']\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def main():\n # Initiate Spark Session\n spark = create_spark_session()\n \n # Data files\n # Root Data Path\n # Uncomment below line for AWS S3\n #input_data = \"s3a://udacity-dend\"\n # Uncomment below line for local files\n input_data = \"data\"\n\n # Warehouse\n # Root WH\n # Uncomment below line for AWS S3\n #output_data = \"s3a://jerryespn-project-out\"\n # Uncomment below line for local files\n 
output_data = \"spark-warehouse\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def song(song_id):\n return process_input(song_id) #jsonify(recomendations)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = \"{}/log_data/*/*/*\".format(input_data)\n\n # read log data file\n df = spark.read.format(\"json\").load(log_data)\n \n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp( (x/1000.0) ), T.TimestampType()) \n df = df.withColumn(\"listening_datetime\", get_timestamp(df.ts))\n \n # Using the same udf to convert epoch ts into timestamp already. \n #get_datetime = udf()\n #df = \n \n # filter by actions for song plays\n df = df.where(\"page == 'NextSong'\")\n \n # register dataframe df_log into the temp view\n df.createOrReplaceTempView(\"df_log\")\n # extract columns for users table \n users_table = spark.sql(\"\"\"\n select DISTINCT userId,\n firstName,\n lastName,\n gender,\n level\n FROM df_log as e\n \"\"\")\n\n \n # write users table to parquet files\n users_table.write.parquet('{}/dim_user.pq'.format(output_data))\n\n \n \n \n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n SELECT DISTINCT listening_datetime as t_start_time,\n hour(listening_datetime) as t_hourofday,\n day(listening_datetime) as t_daynuminmonth,\n weekofyear(listening_datetime) as t_weeknuminyear,\n month(listening_datetime) as t_monthnuminyear,\n year(listening_datetime) as t_yearnuminyear,\n dayofweek(listening_datetime) as t_daynuminweek\n\n\n FROM df_log as s\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy('t_yearnuminyear', 't_monthnuminyear').parquet('{}/dim_time.pq'.format(output_data))\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet('data/analytics/dim_song.pq')\n \n # registering temp view for dim_time and df_song in order to join in the songplay \n song_df.createOrReplaceTempView(\"df_song\")\n time_table.createOrReplaceTempView(\"dim_time\")\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = spark.sql(\"\"\"\n SELECT DISTINCT e.listening_datetime AS t_start_time,\n userId as u_user_id,\n level as u_level,\n song_id as s_song_id,\n artist_id as a_artist_id,\n sessionId as sp_session_id,\n location as sp_location,\n userAgent as sp_user_agent,\n t.t_yearnuminyear,\n t.t_monthnuminyear\n FROM df_log as e\n JOIN df_song s\n ON (e.artist = s.artist_name)\n JOIN dim_time t\n on t.t_start_time = e.listening_datetime\n \n \"\"\")\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy('t_yearnuminyear', 't_monthnuminyear').parquet('{}/songplays.pq'.format(output_data))", "def process_log_data(spark, input_data, output_data):\n \n # get filepath to log data file\n log_data = os.path.join(input_data,\"log_data/*/*/*.json\")\n\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n #df = \n\n # extract columns for users table \n users_table = df['userId', 'firstName', 'lastName', 'gender', 'level'].dropDuplicates()\n \n # write users table to parquet files\n users_table.write.parquet(os.path.join(output_data, 'users.parquet'), 'overwrite')\n print(\"--- users.parquet completed ---\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda 
x: datetime.fromtimestamp( (x/1000.0) ), T.TimestampType())\n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: datetime.fromtimestamp(int(int(x)/1000)))\n get_hour = udf(lambda x: x.hour, T.IntegerType()) \n get_day = udf(lambda x: x.day, T.IntegerType()) \n get_week = udf(lambda x: x.isocalendar()[1], T.IntegerType()) \n get_month = udf(lambda x: x.month, T.IntegerType()) \n get_year = udf(lambda x: x.year, T.IntegerType()) \n get_weekday = udf(lambda x: x.weekday(), T.IntegerType()) \n\n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\n df = df.withColumn('start_time', get_datetime(df.ts))\n df = df.withColumn(\"hour\", get_hour(df.timestamp))\n df = df.withColumn(\"day\", get_day(df.timestamp))\n df = df.withColumn(\"week\", get_week(df.timestamp))\n df = df.withColumn(\"month\", get_month(df.timestamp))\n df = df.withColumn(\"year\", get_year(df.timestamp))\n df = df.withColumn(\"weekday\", get_weekday(df.timestamp))\n \n \n # extract columns to create time table\n time_columns = ['start_time', 'hour', 'day', 'week', 'month', 'year', 'weekday'] \n \n # write time table to parquet files partitioned by year and month\n time_table = df[time_columns]\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy('year', 'month').parquet(os.path.join(output_data, 'time.parquet'), 'overwrite')\n print(\"--- time.parquet completed ---\")\n \n # read in song data to use for songplays table\n df_songs = spark.read.parquet(os.path.join(output_data, 'songs.parquet'))\n \n df_songplays = df_songs.join(df, (df_songs.title == df.song)).where(df.page == 'NextSong').orderBy(df.timestamp)\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = df_songplays['timestamp', 'userId', 'level', 'song_id', 'artist_id', 'sessionId', 'location', 'userAgent']\n songplays_table.select(monotonically_increasing_id().alias('songplay_id')).collect()\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table\\\n .withColumn(\"year\", get_year(songplays_table.timestamp))\\\n .withColumn(\"month\", get_month(songplays_table.timestamp))\\\n .write\\\n .partitionBy('year', 'month')\\\n .parquet(os.path.join(output_data, 'songplays.parquet'), 'overwrite')\n \n print(\"--- songplays.parquet completed ---\")\n print(\"*** process_log_data completed ***\\n\\nEND\")", "def main():\n spark = create_spark_session()\n\n # Used for local testing - commented out\n # input_data = \"./data/\"\n # output_data = \"./data/\"\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://allen-lesson4-datalake-bucket/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)\n spark.stop()", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log_data/*/*/*.json\"\n\n # read log data file\n df = spark.read.json(log_data)\n \n # print out the schema in tree format\n print(\"---------- Print out the schema of log dataset in tree format: ----------\")\n df.printSchema()\n \n # filter by actions for song plays\n df = df.filter(df.page == \"NextSong\")\n\n # extract columns for users table \n # users attributes: user_id, first_name, last_name, gender, level\n users_table = df.select(\"userId\", \"firstName\", \"lastName\", \"gender\", \"level\").distinct()\n \n # show first 10 rows in users table\n print(\" ---------- Show first 10 rows of users table ----------\")\n 
users_table.show(10)\n \n # write users table to parquet files\n output_data_users = os.path.join(output_data_users, \"users_table.parquet\")\n if path.exists(output_data_users):\n users_table.write.parquet(path = output_data_users, \n mode = \"overwrite\")\n else:\n users_table.write.parquet(path = output_data_users, \n mode = \"append\")\n \n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_users_parquet = spark.read.parquet(\"users_table.parquet\")\n print(\" ---------- Show first 10 rows of users table parquet file ----------\")\n df_users_parquet.show(10)\n\n # create datetime column from original timestamp column\n # divide timestamp by 1000 to convert from milliseconds to seconds\n get_datetime = udf(lambda x: datetime.fromtimestamp(x / 1000), TimestampType())\n df = df.withColumn(\"start_time\", get_datetime(df.ts))\n \n # time table attributes: start_time, hour, day, week, month, year, weekday\n get_hour = udf(lambda x: x.hour) \n df = df.withColumn(\"hour\", get_hour(df.start_time)) # create hour column\n \n get_day = udf(lambda x: x.day)\n df = df.withColumn(\"day\", get_day(df.start_time)) # create day column\n \n get_week = udf(lambda x: x.isocalendar()[1])\n df = df.withColumn(\"week\", get_week(df.start_time)) # create week number column\n \n get_month = udf(lambda x: x.month)\n df = df.withColumn(\"month\", get_month(df.start_time)) # create month column\n \n get_year = udf(lambda x: x.year)\n df = df.withColumn(\"year\", get_year(df.start_time)) # create year column\n \n get_weekday = udf(lambda x: x.weekday())\n df = df.withColumn(\"weekday\", get_weekday(df.start_time)) # create weekday column\n \n # extract columns to create time table\n time_table = df.select(df.columns[-7:])\n \n # show first 10 rows of time table \n print(\" ---------- Show first 10 rows of time table ----------\")\n time_table.show(10)\n\n # write time table to parquet files partitioned by year and month\n out_path_time = os.path.join(output_data, \"time_table.parquet\")\n if path.exists(out_path_time):\n time_table.write.parquet(path = out_path_time, \n partitionBy = (\"year\", \"month\"),\n mode = \"overwrite\")\n else:\n time_table.write.parquet(path = out_path_time, \n partitionBy = (\"year\", \"month\"),\n mode = \"append\")\n\n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_time_parquet = spark.read.parquet(\"time_table.parquet\")\n print(\" ---------- Show first 10 rows of time table parquet file ----------\")\n df_time_parquet.show(10)\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet(\"songs_table.parquet\")\n \n # inner join df with song_df by song's name\n cond = [df.song == song_df.title]\n df_join = df.join(song_df, cond, \"inner\")\n \n # extract columns from joined song and log datasets to create songplays table \n # songplays attributes: songplay_id, start_time, user_id, level, song_id, \n # artist_id, session_id, location, user_agent\n songplays_table = df_join.select(\"start_time\", \"userId\", \"level\", \"song_id\", \n \"artist_id\", \"sessionId\", \"location\", \n \"userAgent\").distinct()\n \n # create songplay_id column with auto_increment\n songplays_table.withColumn(\"songplay_id\", monotonically_increasing_id())\n \n # show first 10 rows of songplays table \n print(\" ---------- Show first 10 rows of songplays table ----------\")\n songplays_table.show(10)\n \n # append year and month column into songplays_table\n songplays_table = 
songplays_table.withColumn(\"year\", get_year(df.start_time))\n songplays_table = songplays_table.withColumn(\"month\", get_month(df.start_time))\n \n # write songplays table to parquet files partitioned by year and month\n out_path_songplays = os.path.join(output_data, \"songplays_table.parquet\")\n if path.exists(out_path_songplays):\n songplays_table.write.parquet(path = out_path_songplays, \n partitionBy = (\"year\", \"month\"),\n mode = \"overwrite\")\n else:\n songplays_table.write.parquet(path = out_path_songplays, \n partitionBy = (\"year\", \"month\"),\n mode = \"append\")", "def main(transcribe_bucket_name, mp3_bucket_name):\n\n s3 = boto3.resource('s3')\n for bucket in s3.buckets.all():\n if bucket.name == transcribe_bucket_name:\n for key in bucket.objects.all():\n if key.key.endswith('.json'):\n r = {}\n # Get reference number\n reference = basename(key.key).replace('.json', '')\n r['ref'] = reference\n # Get URL\n location = boto3.client('s3') \\\n .get_bucket_location(\n Bucket=mp3_bucket_name)['LocationConstraint']\n base_url = join('https://s3-%s.amazonaws.com' % location,\n mp3_bucket_name)\n url = join(base_url, key.key.replace('.json', '.mp3'))\n r['url'] = url\n # Download json file\n try:\n s3.Bucket(transcribe_bucket_name) \\\n .download_file(key.key, key.key)\n except Exception as exception:\n return 1\n # Get text\n with open(key.key, 'r') as f:\n data = json.load(f)\n text = data['results']['transcripts'][0]['transcript']\n r['text'] = text\n # Get sentiment\n sentiment = get_sentiment(text)\n r['sentiment'] = sentiment\n # Check promotion\n promo = check_promo(text)\n r['promo'] = promo\n # Save to Gooogle Sheets\n values = [r['ref'], r['text'], r['promo'], r['sentiment'],\n r['url']]\n append_row(values)\n # Remove tmp json file from local machine\n remove(key.key)", "def preprocess(input_folder, output_folder, T, skip, overwrite=False):\n original_labels = ['songID', 'time', 'A_t', 'A#_t', 'B_t', 'C_t', 'C#_t', 'D_t', 'D#_t', 'E_t', 'F_t', 'F#_t',\n 'G_t', 'G#_t']\n input_file_paths = sorted([os.path.join(input_folder, p) for p in os.listdir(input_folder) if p.startswith('chroma-nnls')])[-10:-9]\n print(input_file_paths)\n # input_file_paths = _create_file_paths(input_folder)\n for f in input_file_paths:\n logging.info(\"Working on file {}\".format(f))\n data = pd.read_csv(f, header=None, names=original_labels)\n data['songID'] = data['songID'].apply(_take_id) # take just the ID of the song\n data['songID'] = data['songID'].fillna(method='ffill') # repeat the ID for all rows\n for s in set(data['songID']):\n path_output = os.path.join(output_folder, 'chroma-nnls_' + s + '.csv')\n if not overwrite and os.path.isfile(path_output):\n logging.info(\"Output file {} already exists. 
Skipping songID {}\".format(path_output, s))\n continue\n logging.info(\"Working on songID {}\".format(s))\n df = data.loc[data['songID'] == s] # select one song at a time not to use too much memory\n df = _create_datapoints_for_dnn(df, T, skip) # add the desired columns\n df.to_csv(path_output, header=False, index=False) # write the df in a file\n return", "def main():\n \n conn = psycopg2.connect(\"host=127.0.0.1 dbname=sparkifydb user=student password=student\")\n \n cur = conn.cursor()\n process_data(cur, conn, filepath='data/song_data',\n func=process_song_file) \n \n process_data(cur, conn, filepath='data/log_data',\n func=process_log_file)\n \n conn.close()", "def process_log_file(cur, filepath):\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df['page']=='NextSong']\n\n # convert timestamp column to datetime\n t = pd.to_datetime(df['ts'], unit='ms')\n \n # insert time data records\n #timestamp, hour, day, week of year, month, year, and weekday\n hour = t.dt.hour\n day = t.dt.day\n weekofyear = t.dt.weekofyear\n month = t.dt.month\n year = t.dt.year\n weekday = t.dt.weekday\n\n time_data = [df['ts'], hour, day, weekofyear, month, year, weekday]\n column_labels = ['timestamp', 'hour', 'day', 'week of year', 'month', 'year', 'weekday']\n time_df = pd.DataFrame.from_dict(dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n try:\n cur.execute(time_table_insert, list(row))\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in time table in row number : {}\".format(i))\n print(e)\n\n # load user table\n user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n # insert user records\n for i, row in user_df.iterrows():\n try:\n cur.execute(user_table_insert, row)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in users table in row number : {}\".format(i))\n print(e)\n\n # insert songplay records\n for index, row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n try:\n cur.execute(song_select, (row.song, row.artist, row.length))\n except psycopg2.Error as e:\n print(\"Error: Unable to execute song_select query to join songs and artists table in row number : {}\".format(index))\n print(e)\n results = cur.fetchone()\n \n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n #timestamp, user ID, level, song ID, artist ID, session ID, location, and user agent\n songplay_data = (row.ts, row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent)\n try:\n cur.execute(songplay_table_insert, songplay_data)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in songplays table in row number : {}\".format(i))\n print(e)", "def main():\n conn = psycopg2.connect(\"host=127.0.0.1 dbname=sparkifydb user=student password=student\")\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', func=process_song_file)\n print('song file processing is complete')\n process_data(cur, conn, filepath='data/log_data', func=process_log_file)\n print('log file processing is complete')\n conn.close()", "def process_data() -> None:\n yesterday_date = std.get_yesterday()\n s3_working_bucket = \"stwit-working-bucket\"\n s3_processed_bucket = \"stwit-processed-bucket\"\n\n df_tweets = process_tweets(s3_working_bucket, yesterday_date)\n df_users = process_users(s3_working_bucket, yesterday_date)\n df_stocks = process_stocks(s3_working_bucket, 
yesterday_date)\n\n logging.debug(\"Calling filter_tweets function.\")\n df_tweets, df_users = stp.filter_tweets(df_tweets, df_users)\n\n logging.debug(\"Calling remove_duplicate_tweets function.\")\n df_tweets, df_users = stp.remove_duplicate_tweets(df_tweets, df_users)\n\n logging.debug(\"Calling export_twitter_csv function with df_tweets data.\")\n stdm.export_csv(df_tweets, s3_processed_bucket, yesterday_date, \"twitter\", \"tweets\")\n\n logging.debug(\"Calling export_twitter_csv function with df_users data.\")\n stdm.export_csv(df_users, s3_processed_bucket, yesterday_date, \"twitter\", \"users\")\n\n logging.debug(\"Calling export_stocks_csv function.\")\n stdm.export_csv(df_stocks, s3_processed_bucket, yesterday_date, \"stocks\")", "def process_data(cur, conn, filepath, func):\r\n # get all files matching extension from directory\r\n all_files = []\r\n for root, dirs, files in os.walk(filepath):\r\n files = glob.glob(os.path.join(root, '*.json'))\r\n for f in files:\r\n all_files.append(os.path.abspath(f))\r\n\r\n\r\n\r\n # iterate over files and process\r\n for datafile in all_files:\r\n func(cur, datafile) ######### de function zy procces song file bta5od l filepath w currsor\r\n conn.commit()\r\n\r\n return all_files", "def process_log_file(cur, filepath):\n \n # open log file\n \n df = pd.read_json(filepath, lines = True)\n \n # filter by NextSong action\n df = df[df['page']=='NextSong']\n # convert timestamp column to datetime\n t = pd.to_datetime(df.ts, unit='ms')\n df.ts = t\n \n # insert time data records\n time_data = [t, t.dt.hour, t.dt.day, t.dt.weekofyear,\n t.dt.month, t.dt.year, t.dt.weekday]\n \n # column_labels = ['timestamp','Hour', \n # 'Day','Month','Year''Weekday']'\n column_labels = ['timestamp','hour','day','weekofyear','month','year','weekday']\n time_df = pd.DataFrame(dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n \n # load user table\n user_df = df[['userId','firstName', \n 'lastName','gender','level']]\n\n # insert user records\n for i, row in user_df.iterrows(): \n cur.execute(user_table_insert, row)\n \n # insert songplay records\n for index, row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist,\n row.length))\n results = cur.fetchone()\n \n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = (index, row.ts, row.userId, row.level,\n songid, artistid, row.sessionId, \n row.location, row.userAgent)\n \n \n cur.execute(songplay_table_insert, songplay_data)", "def process_log_file(cur, filepath):\r\n\r\n # open log file\r\n df = pd.read_json(filepath, lines=True)\r\n df2 = pd.read_json(filepath, lines=True)\r\n\r\n # filter by NextSong action for missing data\r\n df2 = df2[df2['page']=='NextSong']\r\n\r\n\r\n\r\n # insert missing records into Song and Artist Table\r\n for i, row in df2.iterrows():\r\n cur.execute(artist_table_insert, (row.artist + str(i), row.artist, row.location, 0, 0))\r\n for i, row in df2.iterrows():\r\n cur.execute(song_table_insert, (row.song + str(i), row.song, row.artist + str(i), 0, row.length))\r\n\r\n # filter by NextSong action\r\n df = df[df['page']=='NextSong']\r\n\r\n # convert timestamp column to datetime\r\n t = pd.to_datetime(df['ts'], unit='ms')\r\n\r\n # extract time data from timestamp\r\n time_data = {'start_time': t,'hour': pd.Series(t).dt.hour, 'day':pd.Series(t).dt.day,\r\n 'month': 
pd.Series(t).dt.month, 'year': pd.Series(t).dt.year,\r\n 'weekday': pd.Series(t).dt.dayofweek}\r\n #column_labels = []\r\n # insert time data records\r\n time_df = pd.DataFrame(time_data)\r\n\r\n for i, row in time_df.iterrows():\r\n cur.execute(time_table_insert, list(row))\r\n\r\n # load user table\r\n user_df = df[['userId', 'firstName','lastName','gender','level']]\r\n user_df.drop_duplicates(subset='userId',keep ='first',inplace = True)\r\n\r\n # insert user records\r\n for i, row in user_df.iterrows():\r\n cur.execute(user_table_insert, row)\r\n\r\n # insert songplay records\r\n for index, row in df.iterrows():\r\n\r\n # get songid and artistid from song and artist tables\r\n print(cur.mogrify(song_select, (row.song, row.artist, row.length)))\r\n cur.execute(song_select, (row.song, row.artist, row.length))\r\n results = cur.fetchone()\r\n\r\n if results:\r\n songid, artistid = results[0],results[1]\r\n else:\r\n songid, artistid = \"None\" + str(index), \"None\" + str(index)\r\n\r\n # insert songplay record\r\n songplay_data = (df[['ts', 'userId', 'level', 'sessionId','location','userAgent' ]])\r\n songplay_data['ts'] = pd.to_datetime(df['ts'], unit='ms')\r\n cur.execute(songplay_table_insert, (index, row.ts, row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent))\r\n #conn.commit()\r", "def processSong(self, songID, outputFunction=None, completeFunc=None):\n if \"/\" in songID:\n raise AssertionError(\"processSong cannot handle URLs, only youtube video ids\")\n \n videoDir = DatabaseHandler.getVideoFolder()\n infoFile = os.path.join(videoDir, songID+\".info.json\")\n \n exit_code, text = self.downloadSong(songID, outputFolder = videoDir, outputFunction = outputFunction)\n if exit_code != 0: # If not successful, don't continue\n if callable(completeFunc):\n completeFunc(songID, False)\n return False\n \n with open(infoFile) as file:\n DatabaseHandler.addSongFromDict(json.load(file))\n os.remove(infoFile)\n \n DatabaseHandler.setDownloaded(songID)\n if callable(completeFunc):\n completeFunc(songID, True)\n return True", "def process_log_data(spark, input_data, output_data):\n\n # get filepath to log data file\n log_data = os.path.join( input_data, \"log-data/*/*/*.json\")\n\n # read log data file\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.filter(df.page == \"NextSong\")\n \n # USERS TABLE\n # extract columns for users table\n users_table = df.select(\"userId\",\"firstName\",\"lastName\",\"gender\",\"level\").dropDuplicates(['userId'])\n \n print( \"HERE users_table sample:\\n\")\n users_table.show(5)\n # write users table to parquet files\n users_table.write.parquet(os.path.join(output_data, \"users/\") , mode=\"overwrite\")\n\n # TIME TABLE\n # create timestamp column from original timestamp column\n get_start_time = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%Y-%m-%d %H:%M:%S'))\n get_hour = udf(lambda x: datetime.fromtimestamp(x / 1000.0).hour)\n get_day = udf(lambda x: datetime.fromtimestamp(x / 1000.0).day)\n get_week = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%W'))\n get_month = udf(lambda x: datetime.fromtimestamp(x / 1000.0).month)\n get_year = udf(lambda x: datetime.fromtimestamp(x / 1000.0).year)\n get_weekday = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%A'))\n\n df = df.withColumn('start_time', get_start_time(df['ts']))\n df = df.withColumn('hour', get_hour(df['ts']))\n df = df.withColumn('day', get_day(df['ts']))\n df = df.withColumn('week', 
get_week(df['ts']))\n df = df.withColumn('month', get_month(df['ts']))\n df = df.withColumn('year', get_year(df['ts']))\n df = df.withColumn('week_day', get_weekday(df['ts'])).dropDuplicates(['start_time'])\n\n df.createOrReplaceTempView(\"time_table\")\n \n time_columns = ['start_time', 'hour', 'day', 'week', 'month', 'year', 'week_day']\n\n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n SELECT start_time, hour, day, week, month, year, week_day\n FROM time_table\n \"\"\").toDF(*time_columns)\n \n print( \"HERE time_table sample:\\n\")\n time_table.show(5)\n # write time table to parquet files partitioned by year and month\n time_table.write.parquet(os.path.join(output_data, \"time_table/\"), mode='overwrite', partitionBy=[\"year\",\"month\"])\n\n # SONGPLAYS TABLE\n # add monotonically increasing id column\n df = df.withColumn('songplay_id', functions.monotonically_increasing_id())\n df.createOrReplaceTempView(\"songplays_table\")\n\n # song df\n song_data = os.path.join( input_data, \"song_data/*/*/*/*.json\")\n song_df = spark.read.json(song_data).dropDuplicates()\n song_df.createOrReplaceTempView(\"songs_table\")\n\n song_columns = ['songplay_id', 'start_time', 'userId', 'level', 'sessionId', 'location', 'userAgent', 'year', 'month',\n 'length', 'song_id', 'artist_id', 'title', 'artist_name', 'duration']\n\n # extract columns to create time table\n songplays_table = spark.sql(\n \"\"\"\n SELECT sp.songplay_id, sp.start_time, sp.userId, sp.level, sp.sessionId, sp.location, sp.userAgent, sp.year, \n sp.month, sp.length, s.song_id, s.artist_id, s.title, s.artist_name, s.duration\n FROM songplays_table AS sp \n JOIN songs_table AS s ON sp.song = s.title AND sp.artist = s.artist_name AND sp.length = s.duration\n \"\"\").toDF(*song_columns)\n \n print( \"HERE songplays_table sample:\\n\")\n songplays_table.show(5)\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.parquet(os.path.join(output_data, \"songplays/\"), mode=\"overwrite\", partitionBy=[\"year\",\"month\"])", "def __insert_song_data(cur, df):\n song_data = (\n df.song_id.values[0],\n df.title.values[0],\n df.artist_id.values[0],\n (df.year.values[0]).item(),\n (df.duration.values[0]).item()\n )\n cur.execute(song_table_insert, song_data)", "def preprocess_files(file_path):\n # checking your current working directory\n cur_dir = os.getcwd()\n\n # Get your current folder and sub folder event data\n data_dir = os.path.join(cur_dir, 'event_data')\n\n # Create a for loop to create a list of files and collect each\n # file_path\n file_path_list = []\n for root, dirs, files in os.walk(data_dir):\n # join the file path and roots with the subdirectories using\n # glob\n file_path_list = glob.glob(os.path.join(root, '*'))\n\n full_data_rows_list = []\n\n # for every file_path in the file path list collect records\n for f in file_path_list:\n\n # reading csv file\n with open(f, 'r', encoding='utf8', newline='') as csvfile:\n\n # creating a csv reader object\n csvreader = csv.reader(csvfile)\n next(csvreader)\n\n # extracting each data row one by one and append it\n for line in csvreader:\n full_data_rows_list.append(line)\n\n csv.register_dialect('myDialect', quoting=csv.QUOTE_ALL,\n skipinitialspace=True)\n\n # create one file with all the records\n with open(file_path, 'w', encoding='utf8',\n newline='') as f:\n writer = csv.writer(f, dialect='myDialect')\n writer.writerow(\n ['artist', 'firstName', 'gender', 'itemInSession',\n 'lastName', 'length', 'level', 'location', 
'sessionId',\n 'song', 'userId'])\n for row in full_data_rows_list:\n if row[0] == '':\n continue\n writer.writerow((row[0], row[2], row[3], row[4], row[5],\n row[6], row[7], row[8], row[12], row[13],\n row[16]))", "def process_log_file(cur, filepath):\n\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df['page'] == 'NextSong']\n\n # convert timestamp column to datetime\n t = pd.to_datetime(df['ts'], unit='ms')\n\n # insert time data records\n time_data = (t, t.dt.hour, t.dt.day,\n t.dt.month, t.dt.year, t.dt.day_name())\n column_labels = ('start_time', 'hour', 'day', 'month', 'year', 'weekday')\n time_df = pd.DataFrame(dict(zip(column_labels, time_data)))\n\n # bulk insert time_df into Postgres\n # create a csv like object\n output = io.StringIO()\n time_df.to_csv(output, sep='\\t', index=False, header=False)\n # move the pointer to start of the file\n output.seek(0)\n # creating a temp table to handle conflict due to duplicate insert\n # ref: https://stackoverflow.com/questions/48019381/how-postgresql-copy-to-stdin-with-csv-do-on-conflic-do-update\n cur.execute(time_tmp_table)\n # copy data from csv to temp table\n cur.copy_from(output, 'tmp_table')\n # merge temp table with main table\n cur.execute(time_table_bulk_insert)\n\n # load user table\n user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n # create a csv like object\n output = io.StringIO()\n for index, row in df.iterrows():\n\n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n # insert songplay record\n songplay_data = (t[index], row.userId, row.level, songid,\n artistid, row.sessionId, row.location, row.userAgent)\n # write to csv like object\n output.write('\\t'.join(map(clean_csv_value, songplay_data)) + '\\n')\n # move the pointer to start of the csv like object\n output.seek(0)\n # columns to insert (songplay_id is a serial insert)\n columns = ['start_time', 'user_id', 'level', 'song_id',\n 'artist_id', 'session_id', 'location', 'user_agent']\n # copy data to songplays table\n cur.copy_from(output, 'songplays', columns=columns)", "def preprocess(self):\r\n file_name = os.path.join(self.raw_path, \"amazon-amazon-instant-video.json.gz\")\r\n print(f\"file_name: {file_name}\")\r\n if not os.path.exists(file_name):\r\n self.download()\r\n\r\n # parse json data\r\n data = self.get_data_frame_from_gzip_file(file_name)\r\n\r\n # rename columns\r\n data = data.rename(\r\n columns={\r\n \"reviewerID\": DEFAULT_USER_COL,\r\n \"asin\": DEFAULT_ITEM_COL,\r\n \"overall\": DEFAULT_RATING_COL,\r\n \"unixReviewTime\": DEFAULT_TIMESTAMP_COL,\r\n }\r\n )\r\n\r\n # select necessary columns\r\n data = pd.DataFrame(\r\n data,\r\n columns=[\r\n DEFAULT_USER_COL,\r\n DEFAULT_ITEM_COL,\r\n DEFAULT_RATING_COL,\r\n DEFAULT_TIMESTAMP_COL,\r\n ],\r\n )\r\n\r\n self.save_dataframe_as_npz(\r\n data,\r\n os.path.join(self.processed_path, f\"{self.dataset_name}_interaction.npz\"),\r\n )", "def create_song_data_table():\n connection = connection_to_db()\n cursor = connection.cursor()\n\n cursor.execute(\n \"CREATE TABLE IF NOT EXISTS song_data \"\n \"(id serial PRIMARY KEY, \"\n \"user_id INTEGER NOT NULL, \"\n \"author_song VARCHAR DEFAULT 'empty', \"\n \"title_song VARCHAR 
DEFAULT 'empty');\"\n )\n\n connection.commit()", "def process_city_data(spark, input_data, output_data):\n\n # get filepath to city data file\n city_data = os.path.join(input_data, 'city_data/*.csv')\n\n # read city data files\n df = spark.read.csv(city_data, header=True, sep=';')\n df = df.drop_duplicates(subset=['City', 'State'])\n print('city_count = ', df.count())\n\n # extract columns to create city table\n city_table = df.select(\n F.monotonically_increasing_id().alias('city_id'),\n F.col('City').alias('city_name'),\n F.col('State Code').alias('state'),\n F.col('State Code').alias('state_code'),\n F.col('Total Population').alias('total_population')\n )\n\n city_table.write.parquet(os.path.join(output_data, 'cities'), 'overwrite')", "def process_log_file(cur, filepath):\n df = pd.read_json(filepath, lines=True)\n df = df.loc[df.page == 'NextSong', :]\n\n # insert time data records\n df['ts'] = pd.to_datetime(df.ts, unit='ms')\n t = df['ts']\n time_data = {\n 'start_time': t,\n 'hour': t.dt.hour,\n 'day': t.dt.day,\n 'week': t.dt.week,\n 'month': t.dt.month,\n 'year': t.dt.year,\n 'weekday': t.dt.weekday\n }\n time_df = pd.DataFrame(data=time_data)\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n # insert user records\n user_df = df.loc[:, ['userId', 'firstName', 'lastName', 'gender', 'level']]\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n for index, row in df.iterrows():\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n songplay_data = {\n 'start_time': row.ts,\n 'user_id': row.userId,\n 'level': row.level,\n 'song_id': songid,\n 'artist_id': artistid,\n 'session_id': row.sessionId,\n 'location': row.location,\n 'user_agent': row.userAgent\n }\n cur.execute(songplay_table_insert, list(songplay_data.values()))", "def process_data(fileprefix=DEFAULT_FILE_PREFIX):\n\n # TODO wow this is uggo code\n FILE_PREFIX = fileprefix\n\n MAX_SAMP=1500\n\n # Get data from file\n tf_record_file_names = [join(FILE_PREFIX, f) for f in listdir(FILE_PREFIX) if isfile(join(FILE_PREFIX, f)) and 'tfrecord' in f]\n assert len(tf_record_file_names) > 0\n\n dataset_it = iter(tf.data.TFRecordDataset(tf_record_file_names, compression_type='').take(MAX_SAMP))\n\n # Run the computation !\n with tqdm_joblib(tqdm(desc=\"My calculation\", total=MAX_SAMP)) as progress_bar:\n results = Parallel(n_jobs=-1)(\n delayed(_process_single)(data) for data in dataset_it\n )", "def process_log_file(cursor, filepath):\n\n def get_timestamp_data(df):\n # convert timestamp column to datetime\n timestamp = pd.to_datetime(df['ts'], unit='ms')\n\n return (df['ts'].values,\n timestamp.dt.hour.values,\n timestamp.dt.day.values,\n timestamp.dt.week.values,\n timestamp.dt.month.values,\n timestamp.dt.year.values,\n timestamp.dt.weekday.values)\n\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df['page'] == 'NextSong']\n\n # insert time data records\n time_data = get_timestamp_data(df)\n column_labels = ('timestamp', 'hour', 'day', 'week', 'month', 'year', 'weekday')\n time_df = pd.DataFrame(data=dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n cursor.execute(time_table_insert, list(row))\n\n # load user table\n user_columns = ['userId', 'firstName', 'lastName', 'gender', 'level']\n user_df = df[user_columns]\n\n # insert user records\n 
for i, row in user_df.iterrows():\n cursor.execute(user_table_insert, row)\n\n # insert songplay records\n for index, row in df.iterrows():\n\n # get song_id and artist_id from song and artist tables\n cursor.execute(song_select, (row.song, row.artist, row.length))\n results = cursor.fetchone()\n\n if results:\n song_id, artist_id = results\n else:\n song_id, artist_id = None, None\n\n # insert songplay record\n songplay_data = (\n row['ts'], row['userId'], row['level'], song_id, artist_id, row['sessionId'], row['location'],\n row['userAgent'])\n cursor.execute(songplay_table_insert, songplay_data)", "def _create_input_data(self):\n SCHEMA = parse_table_schema_from_json(\n '{\"fields\": [{\"name\": \"data\", \"type\": \"BYTES\"}]}')\n\n def format_record(record):\n # Since Synthetic Source returns data as a dictionary, we should skip one\n # of the part\n import base64\n return {'data': base64.b64encode(record[1])}\n\n with TestPipeline() as p:\n ( # pylint: disable=expression-not-assigned\n p\n | 'Produce rows' >> Read(\n SyntheticSource(self.parse_synthetic_source_options()))\n | 'Format' >> Map(format_record)\n | 'Write to BigQuery' >> WriteToBigQuery(\n dataset=self.input_dataset,\n table=self.input_table,\n schema=SCHEMA,\n create_disposition=BigQueryDisposition.CREATE_IF_NEEDED,\n write_disposition=BigQueryDisposition.WRITE_EMPTY))", "def main():\n conn = psycopg2.connect('host=127.0.0.1 dbname=sparkifydb user=student password=student')\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', function=process_song_file)\n process_data(cur, conn, filepath='data/log_data', function=process_log_file)\n\n conn.close()", "def __insert_songplay_data(cur, df):\n # for each songplay event, described by a row in the dataframe\n for index, row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = (pd.to_datetime(row.ts, unit='ms'), row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent)\n cur.execute(songplay_table_insert, songplay_data)", "def extract_data():\n args = arguments()\n\n if args.list is not None:\n songs = utility.get_songs(args.list)\n logger.debug(str(songs))\n if len(songs) != 0:\n logger.info(\"Downloading songs in {}\".format(args.list))\n for song_name in songs:\n logger.debug(song_name)\n args.SONG_NAME = [song_name]\n main(args)\n else:\n logger.info(\"{}: is empty\".format(args.list))\n elif args.SONG_NAME and yt.is_playlist(args.SONG_NAME[0]):\n logger.info(\"Youtube playlist passed...extracting!\")\n songs, playlist_name = yt.get_playlist(\n args.SONG_NAME[0],\n args.proxy,\n args.pl_start,\n args.pl_end,\n args.pl_items\n )\n\n # Check if data is actually returned\n if songs is None:\n logger.error(\"Couldn't extract playlist data!\")\n\n logger.info(\"Playlist: {}\".format(playlist_name))\n logger.info(\"{} songs found\".format(len(songs)))\n\n # Iterate and work on the data.\n url_base = \"https://www.youtube.com/watch?v=\"\n for song in songs:\n args.url = url_base + song[\"url\"]\n\n # Try to pass the title as well, if it's not there\n # that will be handled by ytmdl\n try:\n args.SONG_NAME = [stringutils.remove_yt_words(song[\"title\"])]\n except KeyError:\n pass\n\n main(args)\n else:\n main(args)", "def lambda_handler(event, context):\r\n print(\"Function triggered\")\r\n 
if 'local' == environ.get('APP_STAGE'):\r\n dynamodb = boto3.resource('dynamodb', endpoint_url='http://localhost:8000')\r\n table = dynamodb.Table(\"audiobooksDB\")\r\n else:\r\n dynamodb = boto3.resource('dynamodb')\r\n table = dynamodb.Table(environ[\"TABLE_NAME\"])\r\n s3 = boto3.client('s3')\r\n \r\n s3FileName = event['Records'][0]['s3']['object']['key'].replace(\"+\", \" \")\r\n bucketName = event['Records'][0]['s3']['bucket']['name']\r\n # Download file from the S3 bucket\r\n try:\r\n book = s3.get_object(Bucket=bucketName, Key=s3FileName)\r\n print(\"Loading file from S3 bucket\")\r\n bookContent = book[\"Body\"].read().decode(\"utf-8\", errors=\"ignore\").split(\"------ END METADATA --------\")\r\n metadata = json.loads(bookContent[0])\r\n bookContent = bookContent[1]\r\n # Polly accepts 100,000 chars at a time. We make chunks of 99990 because we put the part 1 maker in\r\n bookContent = [bookContent[i:i+99990] for i in range(0, len(bookContent), 99990)]\r\n except Exception as e:\r\n print(\"Error while downloading file \" + s3FileName + \"from the S3 bucket \" + bucketName)\r\n raise\r\n # Add part marker to book\r\n if len(bookContent) > 1:\r\n count = 0\r\n for chunk in bookContent:\r\n chunk += \"Part \" + str(count)\r\n hasShortPart = False\r\n audioURLs = []\r\n pollyClient = boto3.client('polly')\r\n for chunk in bookContent:\r\n try:\r\n chunk = convert_text_to_ssml(chunk)\r\n print(\"Asking Polly to record the current chunk\")\r\n response = pollyClient.start_speech_synthesis_task(\r\n Engine='standard',\r\n LanguageCode='en-GB',\r\n OutputFormat='mp3',\r\n OutputS3BucketName=environ['AUDIO_S3_BUCKET'],\r\n Text=chunk,\r\n TextType='ssml',\r\n VoiceId='Brian',\r\n SnsTopicArn=environ[\"SNS_TOPIC\"],\r\n )\r\n\r\n audioURLs.append(response[\"SynthesisTask\"][\"OutputUri\"].split(\"amazonaws.com/\")[-1])\r\n if len(chunk) <= 2000:\r\n hasShortPart = True\r\n print(response)\r\n print(\"Polly was successfully asked to to record the current chunk\")\r\n except Exception as e:\r\n print(\"Error parsing chunk or requesting Polly to say it\")\r\n raise\r\n try:\r\n randomString = ''.join([random.choice(string.ascii_letters \r\n + string.digits) for n in range(32)]) \r\n audiobook = {\r\n \"id\": randomString,\r\n \"bookName\": metadata[\"bookName\"],\r\n \"imageURL\": metadata[\"imageURL\"],\r\n \"authorName\":metadata[\"authorName\"],\r\n \"genres\": metadata[\"genres\"],\r\n \"audioURLs\": audioURLs,\r\n \"description\": metadata[\"description\"],\r\n \"hidden\": False,\r\n \"hasShortPart\": hasShortPart,\r\n \"addedAt\": Decimal(datetime.now().timestamp())\r\n }\r\n response = table.put_item(\r\n Item=audiobook\r\n )\r\n except Exception as e:\r\n print(\"Exception inserting into database\")\r\n print(audiobook)\r\n print(response)\r\n raise\r\n return {\r\n \"statusCode\": 200,\r\n \"body\": json.dumps({\r\n \"message\": audioURLs\r\n }),\r\n }", "def process_log_file(cur, filepath):\n # open log file\n data_frame = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n data_frame = data_frame[data_frame.page == 'NextSong']\n\n # convert timestamp column to datetime\n time_value = pd.to_datetime(data_frame['ts'])\n\n # insert time data records\n time_data = (time_value, time_value.dt.year, time_value.dt.month,\n time_value.dt.isocalendar().week, time_value.dt.dayofweek,\n time_value.dt.day, time_value.dt.hour)\n\n column_labels = ('timestamp', 'year', 'month', 'week', 'weekday', 'day', 'hour')\n\n time_df = pd.DataFrame.from_dict(dict(zip(column_labels, 
time_data)))\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n # load user table\n user_df = data_frame[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n for index, row in data_frame.iterrows():\n\n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = row[['ts', 'userId', 'level', 'sessionId', 'location', 'userAgent']]. \\\n append(pd.Series([songid,\n artistid],\n index=[\n 'songid',\n 'artistid']))\n songplay_data['ts'] = pd.to_datetime(songplay_data['ts'])\n\n cur.execute(songplay_table_insert, songplay_data)", "def stream_to_s3(task_name,data,header,timestamp_suffix,**kwargs):\n if kwargs:\n file = '_'.join([task_name,timestamp_suffix,str(kwargs['batch_id'])])+'.csv'\n else:\n file = '_'.join([task_name,timestamp_suffix])+'.csv'\n\n f = io.StringIO()\n with smart_open.open('s3://{bucket_name}/{task_name}/{file}'.format(bucket_name=bucket_name,task_name=task_name,file=file), 'w') as fout:\n logger.info('Streaming file contents to S3')\n _writer = csv.writer(fout)\n _writer.writerow(header)\n fout.write(f.getvalue())\n \n for row in data:\n f.seek(0)\n f.truncate(0)\n _writer.writerow(row)\n fout.write(f.getvalue())\n \n f.close()\n logger.info('Complete')\n\n return file", "def main():\n\n conn = psycopg2.connect(\n \"host=127.0.0.1 dbname=sparkifydb user=student password=student\")\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', func=process_song_file)\n process_data(cur, conn, filepath='data/log_data', func=process_log_file)\n\n conn.close()", "def main():\n conn = psycopg2.connect(\"host=127.0.0.1 dbname=hallo user=hallo password=hallo\")\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', func=process_song_file)\n process_data(cur, conn, filepath='data/log_data', func=process_log_file)\n\n conn.close()", "def process_log_file(cur, filepath: str) -> None:\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df.page == \"NextSong\"].copy()\n\n # convert timestamp column to datetime\n df[\"ts\"] = pd.to_datetime(df[\"ts\"], unit=\"ms\")\n\n # insert time data records\n column_labels = [\"hour\", \"day\", \"week\", \"month\", \"year\", \"weekday\"]\n\n for column_label in column_labels:\n if column_label == \"week\":\n # FutureWarning: Series.dt.weekofyear and Series.dt.week have been\n # deprecated. 
Please use Series.dt.isocalendar().week instead.\n df[column_label] = df[\"ts\"].dt.isocalendar().week\n else:\n df[column_label] = getattr(df[\"ts\"].dt, column_label)\n\n time_df = pd.DataFrame()\n time_df[\"start_time\"] = df[\"ts\"]\n time_df = pd.concat([time_df, df[column_labels]], axis=1)\n time_df.head()\n\n for i, row in time_df.iterrows():\n cur.execute(sql_queries.time_table_insert, list(row))\n\n # load user table\n user_df = df[[\"userId\", \"firstName\", \"lastName\", \"gender\", \"level\"]]\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(sql_queries.user_table_insert, row)\n\n # insert songplay records\n for _, row in df.iterrows():\n\n # get songid and artistid from song and artist tables\n cur.execute(sql_queries.song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = (\n row.ts,\n row.userId,\n row.level,\n songid,\n artistid,\n row.sessionId,\n row.location,\n row.userAgent,\n )\n cur.execute(sql_queries.songplay_table_insert, songplay_data)", "def process_data(dataset='school'):\n\n assert isinstance(dataset, str)\n assert dataset == 'school' or dataset == 'industry'\n\n if dataset == 'school':\n # get the directory where all the raw files live\n folder_name = os.path.join('raw_data')\n\n # create a connection to a database where all our processed data live\n # for details please see `SQLite.py`\n # db = SQLite(os.path.join('processed_data','database.db'))\n db = SQLite(os.path.join('processed_data','school.db'))\n else:\n folder_name = os.path.join('industry_data')\n db = SQLite(os.path.join('processed_data','industry.db'))\n\n\n # calling the above generator, we work with the every raw data file in the folder\n for i in get_raw_data_filenames_in_txt_format(folder_name):\n\n # calling the word_freq function in another file.\n # this function gives back two dictionaries including\n # (1) single words and their frequencies\n # (2) bigram phrases and their frequencies\n # for more detail please see `word_freq.py`\n logger.info(f'Currently working on: {i}')\n # print(\"[process data] Currently working on :\", i)\n dict_single, dict_bigram = word_freq(\n os.path.join(folder_name, i))\n\n # passing the two dictionaries into our database\n table_name = i.split('.txt')[0]\n table_name_single = table_name + '_single'\n table_name_bigram = table_name + '_bigram'\n\n # creating corresponding table and insert the dictionary into the table\n # err is an error code for debugging purposes, for details, please see\n # `SQLite.py`\n db.create_table(table_name_single)\n db.insert_dict(table_name_single, dict_single)\n\n db.create_table(table_name_bigram)\n db.insert_dict(table_name_bigram, dict_bigram)", "def process_data(data):\n # Table Name from Json file\n table_name = data['table_name']\n\n # No of Column\n column_count = data['column_count']\n\n # No of Row\n row_count = data['row_count']\n\n # Table columns schema from Json file\n column_properties = data['column_properties']\n\n # Get the row row_properties\n row_properties = data['row_properties']\n return table_name, column_count, row_count, column_properties, row_properties", "def all_wav_to_mp3(self):\n for each_file, artist in self.past_songs_db_data:\n self.convert_wav_to_mp3(each_file)" ]
[ "0.82500315", "0.82351744", "0.82207996", "0.82134485", "0.82060736", "0.8200832", "0.81907237", "0.81895614", "0.81846154", "0.8184577", "0.81579006", "0.8133783", "0.81304675", "0.8083736", "0.80220705", "0.8002966", "0.79769325", "0.7895738", "0.7668537", "0.6478855", "0.6466095", "0.6460363", "0.6406198", "0.6388116", "0.6316773", "0.63007665", "0.62811023", "0.62648106", "0.6235584", "0.6219051", "0.62101483", "0.62056553", "0.6194549", "0.6185481", "0.6166725", "0.6164329", "0.6074136", "0.6056915", "0.60432565", "0.60040194", "0.59753144", "0.59534454", "0.59165794", "0.591516", "0.588609", "0.58775556", "0.5869201", "0.5840461", "0.581269", "0.57944405", "0.57473457", "0.56682503", "0.56640655", "0.5662697", "0.5634863", "0.5622473", "0.5622005", "0.55993646", "0.5570152", "0.55372024", "0.5476762", "0.5388005", "0.53854746", "0.5374867", "0.5363728", "0.53356457", "0.53204083", "0.5246748", "0.52365", "0.5229799", "0.51431847", "0.51329046", "0.51318645", "0.51306856", "0.5125418", "0.51195115", "0.510017", "0.5079516", "0.50633246", "0.5063213", "0.506204", "0.50300246", "0.50276995", "0.50219506", "0.5019737", "0.5009135", "0.49910656", "0.49560532", "0.49527416", "0.49409166", "0.49262616", "0.48975143", "0.48886633", "0.4832723", "0.48190826", "0.4818552", "0.48121607", "0.4792843", "0.47709194", "0.47685155" ]
0.804698
14
The process_log_data function extracts log data in JSON file format from an AWS S3 bucket (input_data), transforms the data into the specified tables, and then loads the tables back into a designated AWS S3 bucket (output_data) in Parquet file format.
def process_log_data(spark, input_data, output_data): # get filepath to log data file log_data = os.path.join(input_data, 'log_data/*/*/*.json') # read log data file dfl = spark.read.json(log_data) # filter by actions for song plays dfl = dfl.filter(dfl.page == "NextSong") #create temporary view in order to run SQL queries dfl.createOrReplaceTempView("log_table") # extract columns for users table dim_users = spark.sql(""" SELECT DISTINCT userId AS user_id, firstName AS first_name, lastName AS last_name, gender, level FROM log_table WHERE userId IS NOT NULL """) # write users table to parquet files dim_users.write.mode('overwrite').parquet(output_data+"users") # create timestamp column from original timestamp column #get_timestamp = udf() #df = # create datetime column from original timestamp column #get_datetime = udf() #df = #Convert ts field to timestamp time_convert = spark.sql(""" SELECT to_timestamp(ts/1000) as start_times FROM log_table WHERE ts IS NOT NULL """) #create temporary view of time_table to run SQL queries time_convert.createOrReplaceTempView("time_table") # extract columns to create time table dim_time = spark.sql(""" SELECT start_times as start_time, hour(start_times) as hour, dayofmonth(start_times) as day, weekofyear(start_times) as week, month(start_times) as month, year(start_times) as year, dayofweek(start_times) as weekday FROM time_table """) # write time table to parquet files partitioned by year and month dim_time.write.mode('overwrite').partitionBy("year", "month").parquet(output_data+"time") # read in song data to use for songplays table song_df = spark.read.parquet(output_data+'songs') # extract columns from joined song and log datasets to create songplays table fact_songplays = spark.sql(""" SELECT monotonically_increasing_id() as songplay_id, to_timestamp(lt.ts/1000) as start_time, month(to_timestamp(lt.ts/1000)) as month, year(to_timestamp(lt.ts/1000)) as year, lt.userId as user_id, lt.level as level, st.song_id as song_id, st.artist_id as artist_id, lt.sessionId as session_id, lt.location as location, lt.userAgent as user_agent FROM log_table lt JOIN song_table st ON lt.song = st.title AND lt.artist = st.artist_name """) # write songplays table to parquet files partitioned by year and month fact_songplays.write.mode('overwrite').partitionBy("year", "month").parquet(output_data+"songplays")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_log_data(spark, input_data, output_data):\n\n # get filepath to log data file\n log_data = input_data + \"log_data/2018/11/*.json\"\n\n # read log data file\n log_data_schema = StructType([\n StructField(\"artist\", StringType(), True),\n StructField(\"auth\", StringType(), False),\n StructField(\"firstName\", StringType(), True),\n StructField(\"gender\", StringType(), True),\n StructField(\"itemInSession\", IntegerType(), False),\n StructField(\"lastName\", StringType(), True),\n StructField(\"length\", DoubleType(), True),\n StructField(\"level\", StringType(), False),\n StructField(\"location\", StringType(), True),\n StructField(\"method\", StringType(), False),\n StructField(\"page\", StringType(), False),\n StructField(\"registration\", DoubleType(), True),\n StructField(\"sessionId\", IntegerType(), False),\n StructField(\"song\", StringType(), True),\n StructField(\"status\", IntegerType(), False),\n StructField(\"ts\", DoubleType(), False),\n StructField(\"userAgent\", StringType(), True),\n StructField(\"userId\", StringType(), True)\n ])\n print(\"Start read s3 log data\") \n df = spark.read.json(log_data, schema=log_data_schema)\n \n # filter by actions for song plays\n print(\"Start filter for song plays\") \n df = df.filter(col(\"page\") == \"NextSong\")\n\n # extract columns for users table\n print(\"Start extract columns for users table\") \n users_table = df.filter((col(\"userID\") != \"\") & (col(\"userID\").isNotNull())).select(\n \"userID\", \"firstName\", \"lastName\", \"gender\", \"level\")\n \n # write users table to parquet files\n print(\"Start write users table to parquet files\") \n users_table.write.parquet(output_data + \"users_table.parquet\", mode=\"overwrite\")\n\n # create timestamp column from original timestamp column\n print(\"Start create timestamp column from original timestamp column\") \n get_timestamp = udf(\n lambda x: x/1000,\n DoubleType()\n )\n df = df.withColumn(\"start_timestamp\", get_timestamp(\"ts\")) \n \n # create datetime column from original timestamp column\n print(\"Start create datetime column from original timestamp column\")\n get_datetime = udf(\n lambda x: datetime.fromtimestamp(x / 1000).replace(microsecond=0),\n TimestampType()\n )\n df = df.withColumn(\"start_datetime\", get_datetime(\"ts\")) \n \n # extract columns to create time table\n print(\"Start extract columns to create time table\")\n time_table = df.withColumn(\n \"hour\", hour(\"start_datetime\")).withColumn(\n \"day\", dayofmonth(\"start_datetime\")).withColumn(\n \"week\", weekofyear(\"start_datetime\")).withColumn(\n \"month\", month(\"start_datetime\")).withColumn(\n \"year\", year(\"start_datetime\")).withColumn(\n \"weekday\", dayofweek(\"start_datetime\")).select(\n \"start_datetime\", \"hour\", \"day\", \"week\", \"month\", \"year\", \"weekday\").distinct()\n \n # write time table to parquet files partitioned by year and month\n print(\"Start write time table to parquet files partitioned by year and month\")\n time_table.write.parquet(output_data + \"time_table.parquet\", mode=\"overwrite\", partitionBy=[\"year\", \"month\"])\n\n # read in song data to use for songplays table\n print(\"Start read in song data to use for songplays table\")\n song_df = spark.read.parquet(output_data + \"songs_table.parquet\")\n\n # extract columns from joined song and log datasets to create songplays table \n print(\"Start extract columns from joined song and log datasets to create songplays table \")\n artists_table = spark.read.parquet(output_data + 
\"artists_table.parquet\")\n songs = song_df.join(artists_table, \"artist_id\", \"full\").select(\n \"song_id\", \"title\", \"artist_id\", \"name\", \"duration\")\n \n songplays_table = df.join(songs, [df.song == songs.title, df.artist == songs.name, df.length == songs.duration], \"left\")\n \n songplays_table = songplays_table.join(time_table, \"start_datetime\", \"left\").select(\n \"start_datetime\", \"userId\", \"level\", \"song_id\", \"artist_id\", \"sessionId\",\n \"location\", \"userAgent\", \"year\", \"month\").withColumn(\"songplay_id\", monotonically_increasing_id())\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.parquet(output_data + \"songplays_table.parquet\", mode=\"overwrite\", partitionBy=[\"year\", \"month\"])", "def process_log_data(\n spark: SparkSession,\n input_data: str,\n output_data: str\n) -> None:\n # get filepath to log data file\n log_data = input_data + 'log-data/*'\n\n # read log data file\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n actions_df = df.filter(df.page == 'NextSong') \\\n .select('ts', 'userId', 'level', 'song', 'artist',\n 'sessionId', 'location', 'userAgent')\n\n # extract columns for users table\n users_table = df.select(\n 'userId',\n 'firstName',\n 'lastName',\n 'gender',\n 'level'\n ).dropDuplicates()\n\n users_table.createOrReplaceTempView('users')\n # write users table to parquet files\n users_table.write.parquet(\n join(output_data, 'users/users.parquet'),\n 'overwrite'\n )\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: str(int(int(x) / 1000)))\n actions_df = actions_df.withColumn(\n 'timestamp',\n get_timestamp(actions_df.ts)\n )\n\n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: str(datetime.fromtimestamp(int(x) / 1000)))\n actions_df = actions_df.withColumn('datetime', get_datetime(actions_df.ts))\n\n # extract columns to create time table\n time_table = actions_df.select('datetime') \\\n .withColumn('start_time', actions_df.datetime) \\\n .withColumn('hour', hour('datetime')) \\\n .withColumn('day', dayofmonth('datetime')) \\\n .withColumn('week', weekofyear('datetime')) \\\n .withColumn('month', month('datetime')) \\\n .withColumn('year', year('datetime')) \\\n .withColumn('weekday', dayofweek('datetime')).dropDuplicates()\n\n # write time table to parquet files partitioned by year and month\n time_table.createOrReplaceTempView('time')\n time_table.write.partitionBy(\n 'year', 'month') \\\n .parquet(join(output_data, 'time/time.parquet'), 'overwrite')\n # read in song data to use for songplays table\n song_df = spark.read.json(input_data + 'song_data/*/*/*/')\n joined_df = actions_df.join(\n song_df,\n (actions_df.artist == song_df.artist_name),\n 'inner'\n )\n # extract columns from joined\n # song and log datasets to create songplays table\n songplays_table = joined_df.select(\n actions_df.datetime.alias('start_time'),\n actions_df.userId.alias('user_id'),\n actions_df.level.alias('level'),\n song_df.song_id.alias('song_id'),\n song_df.artist_id.alias('artist_id'),\n actions_df.sessionId.alias('session_id'),\n actions_df.location.alias('location'),\n actions_df.userAgent.alias('user_agent'),\n year(actions_df.datetime).alias('year'),\n month(actions_df.datetime).alias('month')) \\\n .withColumn('songplay_id', monotonically_increasing_id())\n\n songplays_table.createOrReplaceTempView('songplays')\n # write songplays table to parquet files partitioned by year and 
month\n songplays_table.write.\\\n partitionBy('year', 'month').\\\n parquet(join(output_data, 'songplays/songplays.parquet'), 'overwrite')", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log_data/*/*/*.json\"\n\n # read log data file\n df = df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df = df.filter(df.page == 'NextSong')\n\n # extract columns for users table \n users_table = df.filter(df.userId != '').selectExpr(\"userId as user_id\",\n \"firstName as first_name\", \"lastName as last_name\", \"gender\", \"level\") \\\n .dropDuplicates()\n \n # output filepath to users table file\n users_table_path = output_data + \"users_table.parquet\"\n \n # write users table to parquet files\n users_table.write.mode(\"overwrite\") \\\n .parquet(users_table_path)\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp(x/1000).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn(\"start_time\", get_timestamp(df.ts))\n \n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: datetime.fromtimestamp(x/1000).strftime('%Y-%m-%d'))\n df = df.withColumn(\"datetime\", get_datetime(df.ts))\n \n # extract columns to create time table\n time_table = df.select(\"start_time\", hour(\"start_time\").alias(\"hour\"), dayofmonth(\"datetime\").alias(\"day\"),\n weekofyear(\"datetime\").alias(\"week\"), month(\"datetime\").alias(\"month\"),\n year(\"datetime\").alias(\"year\"), dayofweek(\"datetime\").alias(\"weekday\")).dropDuplicates()\n \n # output filepath to time table\n time_table_path = output_data + \"time_table.parquet\"\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").mode(\"overwrite\") \\\n .parquet(time_table_path)\n \n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n\n # read in song data to use for songplays table\n song_df = spark.read.json(song_data)\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = df.join(song_df, (df.song == song_df.title) & (df.length == song_df.duration) & \n (df.artist == song_df.artist_name), how='left').dropDuplicates()\n songplays_table = songplays_table.withColumn(\"id\", monotonically_increasing_id())\n windowSpec = Window.orderBy(\"id\")\n songplays_table.withColumn(\"songplay_id\", row_number().over(windowSpec))\n songplays_table = songplays_table.selectExpr(\"songplay_id\", \"start_time\", \n \"userId as user_id\", \"level\", \"song_id\", \"artist_id\", \"sessionId as session_id\",\n \"location\", \"userAgent as user_agent\", \"year(start_time) as year\",\n \"month(start_time) as month\")\n\n # output filepath to songplays table\n songplays_table_path = output_data + \"songplays_table.parquet\"\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy(\"year\", \"month\").mode(\"overwrite\") \\\n .parquet(songplays_table_path)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log-data/*/*/*.json\"\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n # rename the columns as per requirements\n df = df.filter(\"page='NextSong'\")\\\n .withColumnRenamed(\"userId\", \"user_id\")\\\n .withColumnRenamed(\"firstName\", \"first_name\")\\\n 
.withColumnRenamed(\"lastName\", \"last_name\")\\\n .withColumnRenamed(\"sessionId\", \"session_id\")\\\n .withColumnRenamed(\"userAgent\", \"user_agent\")\n\n # extract columns for users table \n users_table = df.select(\"user_id\", \"first_name\", \"last_name\", \"gender\", \"level\")\n \n # write users table to parquet files\n file_name = output_data + \"users.parquet\"\n users_table.write.parquet(file_name)\n\n # create timestamp column from original timestamp column\n get_timestamp = F.udf(lambda x: int(int(x)/1000))\n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\\\n .withColumn(\"datetime\", F.from_unixtime(\"timestamp\", \"MM-dd-yyyy HH:mm:ss\"))\\\n .withColumn(\"start_time\", F.to_timestamp(\"datetime\", \"MM-dd-yyyy HH:mm:ss\"))\\\n .withColumn(\"month\", F.month(\"start_time\"))\\\n .withColumn(\"year\", F.year(\"start_time\"))\\\n .withColumn(\"week\", F.weekofyear(\"start_time\"))\\\n .withColumn(\"day\", F.dayofmonth(\"start_time\"))\\\n .withColumn(\"weekday\", F.dayofweek(\"start_time\"))\\\n .withColumn(\"hour\", F.hour(\"start_time\"))\n \n # extract columns to create time table\n time_table = df.select(\"start_time\", \"month\", \"year\", \"week\", \"day\", \"weekday\", \"hour\")\n \n # write time table to parquet files partitioned by year and month\n file_name = output_data + \"time.parquet\"\n time_table.write.partitionBy([\"year\", \"month\"]).parquet(file_name)\n\n # read in song data to use for songplays table\n file_name = output_data + \"songs.parquet\"\n songs_df = spark.read.parquet(file_name)\n\n # Create views to perform sql query\n songs_df.createOrReplaceTempView(\"songs_data\")\n df.createOrReplaceTempView(\"logs_data\")\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = spark.sql(\"\"\"\n SELECT DISTINCT start_time, user_id, level, song_id, artist_id,\n session_id, location, user_agent, logs.year, month\n FROM logs_data as logs\n LEFT OUTER JOIN songs_data as songs\n ON logs.song = songs.title\n AND logs.length = songs.duration\n \"\"\")\n\n # Create a column songplays_id and assign it values using monotonically_increasing_id method\n songplays_table = songplays_table.withColumn(\"songplays_id\", F.monotonically_increasing_id())\n\n # write songplays table to parquet files partitioned by year and month\n file_name = output_data + \"songplays.parquet\"\n songplays_table.write.partitionBy([\"year\", \"month\"]).parquet(file_name)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = f'{input_data}/log_data/*.json'\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df.filter(df.page =='NextSong')\n\n # extract columns for users table \n user_table = df.select([\"userId\", \"firstname\", \"lastname\", \"gender\", \"level\"]).where(df[\"userId\"].isNotNull())\n \n # write users table to parquet files\n user_data_out = f'{output_data}/user_table/user_table.parquet'\n user_table.write.mode('overwrite').parquet(user_data_out)\n\n # create timestamp column from original timestamp column\n get_timestamp = F.udf(lambda x: datetime.fromtimestamp( (x/1000.0) ), T.TimestampType()) \n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\n \n # extract columns to create time table\n time_table = df.select(['timestamp']).dropDuplicates()\n time_table = time_table.withColumn(\"hour\", hour(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"day\", dayofyear(time_table[\"timestamp\"]))\n 
time_table = time_table.withColumn(\"week\", weekofyear(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"month\", month(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"year\", year(time_table[\"timestamp\"]))\n time_table = time_table.withColumn(\"weekday\", dayofweek(time_table[\"timestamp\"]))\n\n \n # write time table to parquet files partitioned by year and month\n time_data_out = f'{output_data}/time_table/time_table.parquet'\n time_table.write.mode('overwrite').partitionBy('year','month').parquet(time_data_out)\n\n # read in song data to use for songplays table\n song_data = f'{input_data}/song_data/*/*/*/*.json'\n sdf = spark.read.json(song_data)\n sdf.createOrReplaceTempView(\"song_df_table\")\n \n # Adding month and year column to log data read and preparing log data table\n df = df.withColumn(\"month\", month(df[\"timestamp\"]))\n df = df.withColumn(\"year\", year(df[\"timestamp\"]))\n df.createOrReplaceTempView(\"log_df_table\")\n \n # extract columns from joined song and log datasets to create songplays table \n songplays_table = spark.sql(\"\"\"\n SELECT ldf.timestamp as start_time,\n ldf.userid as user_id,\n ldf.level,\n sdf.song_id,\n sdf.artist_id,\n ldf.sessionid as session_id,\n ldf.location,\n ldf.useragent as user_agent,\n ldf.month,\n ldf.year\n FROM log_df_table ldf\n JOIN song_df_table sdf\n ON (ldf.song = sdf.title) AND (ldf.artist = sdf.artist_name) AND (ldf.length = sdf.duration)\n WHERE ldf.page = 'NextSong' and ldf.userid is not null\n \"\"\")\n \n # adding the songplay_id column\n window = Window.orderBy(F.col('start_time'))\n songplays_table = songplays_table.withColumn('songplay_id', F.row_number().over(window))\n songplays_table.select('songplay_id', 'start_time', 'user_id', 'level', 'song_id', 'artist_id', 'session_id', 'location', 'user_agent', 'month', 'year').show()\n\n # write songplays table to parquet files partitioned by year and month\n songplays_data_out = f'{output_data}/songplays_table/songplays_table.parquet'\n songplays_table.write.mode('overwrite').partitionBy('year','month').parquet(songplays_data_out)", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log_data/*.json\"\n\n # read log data file\n df = spark.read.json(\"data/log_data/*.json\").dropDuplicates()\n \n # filter by NextSong actions for song plays\n df = df.filter('page = \"NextSong\"')\n\n # extract columns for users table \n artists_table = df.select(['userId', 'firstName', 'lastName', 'gender', 'level']) \n \n # write users table to parquet files\n artists_table.write.parquet(output_data + \"UserTable.parquet\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf( lambda x: datetime.fromtimestamp(x / 1000).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn(\"time_stamp\", get_time_stamp(dfLog.ts))\n \n # extract columns to create time table\n time_table = dfL.select('ts', hour('time_stamp').alias('hour'), dayofmonth('time_stamp').alias('day'), weekofyear('time_stamp').alias('week')\n , month('time_stamp').alias('month'), year('time_stamp').alias('year'), date_format('time_stamp', 'EEEE').alias('weekday'))\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").parquet(output_data + \"SongTable.parquet\")\n\n # read in song data to use for songplays table\n song_df = songs_table\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = 
song_df.join(dfLog).where((artist_and_song.title == dfLog.song) & \n (artist_and_song.artist_name == dfLog.artist) & \n (artist_and_song.duration == dfLog.length)).select('ts', 'userid', 'level', \\\n 'song_id', 'artist_id','sessionid', \\\n 'location', 'useragent')\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.join(time_table.select('ts', 'year', 'month')).where(songplays_table.ts == time_table.ts).write.partitionBy(\"year\", \"month\").parquet(output_data + \"songplaysTable.parquet\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = os.path.join(input_data, 'log_data', '*', '*', '*')\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df.filter(col(\"page\") == \"NextSong\")\n df.createOrReplaceTempView(\"songplays\")\n\n # extract columns for users table \n users_table = spark.sql(\"\"\"\n SELECT \n DISTINCT(userId) AS user_id, \n firstName AS first_name, \n lastName AS last_name, \n gender, \n level\n FROM songplays\n \"\"\")\n \n # write users table to parquet files\n users_table_path = os.path.join(output_data, \"users_table.parquet\")\n (users_table.\n write.\n mode(\"overwrite\").\n parquet(users_table_path))\n print(f\"Stored users table at {users_table_path}\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: pd.Timestamp(x, unit = \"ms\"), TimestampType())\n df = df.withColumn(\"timestamp\", get_timestamp(\"ts\"))\n \n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: pd.Timestamp(x, unit = \"ms\"), TimestampType())\n df = df.withColumn(\"datetime\", get_datetime(\"ts\"))\n df.createOrReplaceTempView(\"log_table\")\n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n SELECT \n DISTINCT(timestamp) AS start_time, \n HOUR(timestamp) AS hour,\n day(timestamp) AS day,\n weekofyear(timestamp) AS week,\n month(timestamp) AS month,\n year(timestamp) AS year,\n dayofweek(timestamp) AS weekday\n FROM log_table\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n time_table_path = os.path.join(output_data, \"time_table.parquet\")\n (time_table.\n write.\n mode(\"overwrite\").\n partitionBy(\"year\", \"month\").\n parquet(time_table_path))\n print(f\"Stored time table at {time_table_path}\")\n\n # read in song data to use for songplays table\n song_df_path = os.path.join(input_data, \"song_data\", \"*\", \"*\", \"*\")\n song_df = spark.read.json(song_df_path).alias(\"song_df\")\n df = df.alias(\"df\")\n \n joined_df = df.join(\n song_df, \n col('df.artist') == col('song_df.artist_name'), \n 'inner',\n )\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = joined_df.select(\n col(\"timestamp\").alias(\"start_time\"),\n col(\"userId\").alias(\"user_id\"),\n col(\"level\").alias(\"level\"),\n col(\"song_id\").alias(\"song_id\"),\n col(\"artist_id\").alias(\"artist_id\"),\n col(\"sessionId\").alias(\"session_id\"),\n col(\"location\").alias(\"location\"),\n col(\"userAgent\").alias(\"user_agent\")\n ).withColumn('songplay_id', monotonically_increasing_id())\n \n # Add year and month to enable partitioning\n songplays_table = (songplays_table.\n withColumn('year', year(songplays_table.start_time)).\n withColumn('month', month(songplays_table.start_time)))\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table_path= 
os.path.join(output_data, \"songplays_table.parquet\")\n (songplays_table.\n write.\n mode(\"overwrite\").\n partitionBy(\"year\", \"month\").\n parquet(songplays_table_path))\n print(f\"Stored songplays table at {songplays_table_path}\")", "def process_log_data(spark, input_data, output_data, mode=\"overwrite\"):\n # get filepath to log data file\n log_data = input_data + \"log_data/*.json\"\n # read log data file\n print(\"reading event logs from {}\".format(log_data))\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.filter(F.col('page') == 'NextSong')\n df = df.withColumn(\n 'userId', F.col('userId').cast(LongType())\n )\n df = df.withColumn(\n 'registration',\n (F.round(F.col('registration')/1000)).cast(TimestampType())\n )\n df = df.withColumn(\n 'ts',\n (F.round(F.col('ts')/1000)).cast(TimestampType())\n )\n\n # extract columns for users table\n users_table = df.selectExpr(\n 'userId AS user_id',\n 'firstName AS first_name',\n 'lastName AS last_name',\n 'gender AS gender') \\\n .dropDuplicates(['user_id'])\n\n # write users table to parquet files\n users_path = output_data + 'star_schema/user_table'\n print(\"Writing Users Table to {}\".format(users_path))\n users_table.write \\\n .mode(mode) \\\n .partitionBy('user_id') \\\n .parquet(users_path)\n\n # extract columns to create time table\n time_table = df.selectExpr('ts AS start_time') \\\n .dropDuplicates() \\\n .orderBy('start_time', ascending=True) \\\n .withColumn('hour', F.hour('start_time')) \\\n .withColumn('day', F.dayofmonth('start_time')) \\\n .withColumn('week', F.weekofyear('start_time')) \\\n .withColumn('month', F.month('start_time')) \\\n .withColumn('year', F.year('start_time')) \\\n .withColumn('weekday', F.dayofweek('start_time'))\n\n # write time table to parquet files partitioned by year and month\n times_path = output_data + 'star_schema/time_table'\n print(\"Writing Time Table to {}\".format(times_path))\n time_table.write \\\n .mode(mode) \\\n .partitionBy('year', 'month', 'day') \\\n .parquet(times_path)\n\n # read in song data to use for songplays table)\n songs_path = output_data + 'star_schema/song_table'\n print(\"Reading Songs table for join query from {}\".format(songs_path))\n song_table = spark.read.parquet(songs_path)\n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = df.selectExpr(\n 'ts as start_time',\n 'userId as user_id',\n 'level',\n 'song', # join to song_id from songs_df\n # artist_id # join from songs df\n 'sessionId as session_id',\n 'location',\n 'userAgent as user_agent'\n )\n songplays_table = songplays_table \\\n .withColumn('songplay_id', F.monotonically_increasing_id()) \\\n .withColumn('songplay_year', F.year('start_time')) \\\n .withColumn('month', F.month('start_time'))\n songplays_table = songplays_table.join(\n song_table,\n song_table.title == songplays_table.song, how='left'\n ).select([\n 'songplay_id', 'start_time', 'songplay_year', 'month',\n 'user_id', 'level', 'song_id', 'artist_id',\n 'session_id', 'location', 'user_agent'\n ]).withColumnRenamed('songplay_year', 'year')\n\n # write songplays table to parquet files partitioned by year and month\n songplays_path = output_data + 'star_schema/songplay_table'\n print(\"Writing Songplays Table to {}\".format(songplays_path))\n songplays_table.write \\\n .mode(mode) \\\n .partitionBy('year', 'month') \\\n .parquet(songplays_path)", "def process_log_data(spark, input_data, output_data):\n # define json structure\n logdata_schema = StructType([\n 
StructField(\"artist\", StringType(), True),\n StructField(\"auth\", StringType(), True),\n StructField(\"firstName\", StringType(), True),\n StructField(\"gender\", StringType(), True),\n StructField(\"itemInSession\", LongType(), True),\n StructField(\"lastName\", StringType(), True),\n StructField(\"length\", DoubleType(), True),\n StructField(\"level\", StringType(), True),\n StructField(\"location\", StringType(), True),\n StructField(\"method\", StringType(), True),\n StructField(\"page\", StringType(), True),\n StructField(\"registration\", DoubleType(), True),\n StructField(\"sessionId\", LongType(), True),\n StructField(\"song\", StringType(), True),\n StructField(\"status\", LongType(), True),\n StructField(\"ts\", LongType(), True),\n StructField(\"userAgent\", StringType(), True),\n StructField(\"userId\", StringType(), True),\n ])\n # get filepath to log data file\n log_data = os.path.join(input_data, 'log_data/*/*/*.json')\n\n # read log data file\n df = spark.read.json(log_data, schema=logdata_schema)\n\n # filter by actions for song plays\n df = df.filter(col(\"page\") == 'NextSong')\n\n # extract columns for users table \n users_table = df.select(col(\"userId\").alias(\"user_id\"),col(\"firstName\").alias(\"first_name\"),\n col(\"lastName\").alias(\"last_name\"),\"gender\",\"level\")\n \n # write users table to parquet files\n users_table.write.parquet(output_data + \"users\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp((x/1000)), TimestampType())\n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\n \n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: datetime.fromtimestamp(x/1000).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn(\"datetime\", get_datetime(df.ts))\n \n # extract columns to create time table\n time_table = df.select(col(\"timestamp\").alias(\"start_time\"),\n hour(col(\"timestamp\")).alias(\"hour\"),\n dayofmonth(col(\"timestamp\")).alias(\"day\"), \n weekofyear(col(\"timestamp\")).alias(\"week\"), \n month(col(\"timestamp\")).alias(\"month\"),\n year(col(\"timestamp\")).alias(\"year\"))\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").parquet(output_data + \"time\")\n\n # read in song data to use for songplays table\n song_data = os.path.join(input_data, \"song_data/A/A/A/TRAAAAK128F9318786.json\")\n song_df = spark.read.json(song_data)\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = song_df.join(df, \n song_df.artist_name==df.artist).withColumn(\"songplay_id\", \n monotonically_increasing_id()).withColumn(\"start_time\", get_datetime(df.ts)).select(\"songplay_id\",\n \"start_time\", \n col(\"userId\").alias(\"user_id\"),\n \"level\",\n \"song_id\",\n \"artist_id\",\n col(\"sessionId\").alias(\"session_id\"),\n col(\"artist_location\").alias(\"location\"),\n \"userAgent\",\n month(col(\"start_time\")).alias(\"month\"),\n year(col(\"start_time\")).alias(\"year\"))\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy(\"year\", \"month\").parquet(output_data + \"songplays\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n # For working in the workspace: log_data = os.path.join(input_data, \"log-data/*.json\")\n log_data = os.path.join(input_data, \"log-data/*/*/*.json\")\n\n # read log data file\n df = 
spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.filter(df.page == 'NextSong')\n\n # rename the columns in df\n df = (df.withColumnRenamed('userId', 'user_id')\n .withColumnRenamed('firstName', 'first_name')\n .withColumnRenamed('lastName', 'last_name')\n .withColumnRenamed('itemInSession', 'item_in_session')\n .withColumnRenamed('sessionId', 'session_id')\n .withColumnRenamed('userAgent', 'user_agent'))\n\n # extract columns for users table\n users_table = df.select('user_id', 'first_name', 'last_name', 'gender', 'level').distinct()\n\n # write users table to parquet files\n users_table.write.parquet(output_data + 'users', mode='overwrite')\n\n # create timestamp column from original timestamp column\n # default type is string for UDFs, so we need to switch that by specifying the correct type\n get_timestamp = udf(lambda x: datetime.fromtimestamp(x/1000.0), T.TimestampType())\n df = df.withColumn('start_time', get_timestamp(df.ts))\n\n # extract columns to create time table\n time_table = df.select('start_time',\n hour(col('start_time')).alias('hour'),\n dayofmonth(col('start_time')).alias('day'),\n weekofyear(col('start_time')).alias('week'),\n month(col('start_time')).alias('month'),\n year(col('start_time')).alias('year'),\n date_format(col('start_time'), 'EEEE').alias('weekday'))\n\n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy('year', 'month').parquet(output_data + 'time', mode='overwrite')\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet(output_data + 'songs/year=*/artist_id=*/*.parquet')\n artist_df = spark.read.parquet(output_data + 'artists/*.parquet')\n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = df.join(song_df, (df.song == song_df.title) & (df.length == song_df.duration)).join(artist_df, df.artist == artist_df.artist_name).join(time_table, ['start_time'])\n\n # create the songplay_id column\n songplays_table = songplays_table.withColumn('songplay_id', monotonically_increasing_id())\n\n # select the columns of interest\n songplays_table = songplays_table.select('songplay_id', 'start_time', 'user_id', 'level', 'song_id', 'artist_id', 'session_id', 'location', 'user_agent', 'year', 'month')\n\n # write songplays table to parquet files partitioned by year and month (I think this is a copy paste error because year and month aren't listed as required cols)\n songplays_table.write.partitionBy('year', 'month').parquet(output_data + 'songplays', mode='overwrite')", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + 'log_data/*/*/*.json'\n\n # read log data file\n df = df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df[df.page == 'NextSong']\n\n # extract columns for users table\n users_table = df.select('userId', 'firstName', 'lastName', 'gender', 'level').drop_duplicates()\n\n # write users table to parquet files\n users_table.write.format(\"parquet\").mode(\"overwrite\").save(output_data + 'users/user_data.parquet')\n\n # create timestamp column from original timestamp column\n df = df.withColumn('timestamp', F.to_timestamp(df.ts / 1000))\n\n # create datetime column from original timestamp column\n df = df.withColumn('datetime', F.to_date(df.timestamp))\n\n # create uniqueId column for log_data\n df = df.withColumn('uniqueId', monotonically_increasing_id())\n\n # write log data to temp view log_data_table\n 
df.createOrReplaceTempView(\"log_data_table\")\n\n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n select DISTINCT\n timestamp\n , datetime AS start_time\n , hour(timestamp) AS hour\n , day(timestamp) AS day\n , weekofyear(timestamp) AS week\n , month(timestamp) AS month\n , year(timestamp) AS year\n , weekday(timestamp) AS weekday\n from log_data_table\n \"\"\")\n\n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy(\"year\", \"month\").format(\"parquet\").mode(\"overwrite\").save(\n output_data + 'time/time_data.parquet')\n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = songplay_table = spark.sql(\"\"\"\n SELECT DISTINCT\n stg.uniqueId AS songplay_id,\n stg.ts AS start_time,\n month(stg.timestamp) AS month,\n year(stg.timestamp) AS year,\n stg.userId,\n stg.level,\n stg2.song_id,\n stg2.artist_id,\n stg.sessionId,\n stg.location,\n stg.userAgent\n FROM log_data_table stg\n LEFT JOIN song_data_table stg2\n ON stg.artist = stg2.artist_name\n AND stg.song = stg2.title\n AND stg.length = stg2.duration\n WHERE stg.userId IS NOT NULL\n \"\"\")\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy(\"year\", \"month\").format(\"parquet\").mode(\"overwrite\").save(\n output_data + 'songplays/songplays_data.parquet')", "def process_log_data(spark, input_data, output_data):\n\n print(\"Read log data\")\n # read log data file\n df_log_data = spark.read.json(input_data + \"log-data/*/*/*.json\")\n\n # filter by actions for song plays\n df_log_data = df_log_data[df_log_data['page']=='NextSong']\n\n # extract columns for users table \n users_table = df_log_data[['userId', 'firstName', 'lastName', 'gender', 'level']].drop_duplicates()\n\n \n print(\"Write...\")\n # write users table to parquet files\n users_table.write.save(path=output_data + 'users_table',\n format='parquet',\n mode='overwrite'\n )\n\n df_log_data = df_log_data.withColumn('timestamp', F.from_unixtime(df_log_data['ts']/1000))\\\n .withColumn('hour', F.hour(F.col('timestamp')))\\\n .withColumn('day', F.dayofmonth(F.col('timestamp')))\\\n .withColumn('month', F.month(F.col('timestamp')))\\\n .withColumn('year', F.year(F.col('timestamp')))\\\n .withColumn('weekofyear', F.weekofyear(F.col('timestamp')))\\\n .withColumn('dayofweek', F.dayofweek(F.col('timestamp')))\n\n # extract columns to create time table\n time_table = df_log_data[['timestamp','hour','day','month','year','weekofyear','dayofweek',]].drop_duplicates()\n\n print(\"Write...\")\n # write time table to parquet files partitioned by year and month\n time_table.write.save(path=output_data + 'time_table',\n format='parquet',\n mode='overwrite',\n partitionBy=['year','month'] )\n\n # read in song data to use for songplays table\n df_song = spark.read.json(input_data + \"song_data/*/*/*/*.json\", schema=build_song_schema())\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = df_log_data.join(df_song, \n on = (df_song['title'] == df_log_data['song']) & \\\n (df_song['artist_name'] == df_log_data['artist']) & \\\n (df_song['duration'] == df_log_data['length']) \n )\n\n print(\"Write...\")\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.save(path=output_data + 'songplays_table',\n format='parquet',\n mode='overwrite',\n partitionBy=['year','month'] )", "def process_log_data(spark, input_data, output_data):\n \n 
# get filepath to log data file\n log_data = input_data + 'log_data/*.json'\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n df = df.filter(df.page == 'NextSong')\n \n # create view for users table\n df.createOrReplaceTempView(\"users\") \n\n # extract columns for users table, Adding Distinct and Not null to user_id as it is the primary key\n users_table = spark.sql(\"\"\"\n SELECT DISTINCT userId as user_id, \n firstName as first_name,\n lastName as last_name,\n gender as gender,\n level as level\n FROM users\n WHERE userId IS NOT NULL\n \"\"\")\n \n # write users table to parquet files\n users_table.write.mode('overwrite').parquet(output_data+'users_table/') \n \n # create view for time table\n df.createOrReplaceTempView(\"time\") \n \n # extract columns to create time table, Adding Not null to ts as it is the primary key, its very unlikely that ts will be \n # same for two rows as its time in miliseconds. Much easier and striaghtforward to extract day, hour, week etc. from timestamp\n # through SQL then converting timestamp in Dataframe to datetime and then performing extraction \n time_table = spark.sql(\"\"\"\n SELECT \n timestamp_data.start_time_prev as start_time,\n hour(timestamp_data.start_time_prev) as hour,\n dayofmonth(timestamp_data.start_time_prev) as day,\n weekofyear(timestamp_data.start_time_prev) as week,\n month(timestamp_data.start_time_prev) as month,\n year(timestamp_data.start_time_prev) as year,\n dayofweek(timestamp_data.start_time_prev) as weekday\n FROM\n (SELECT to_timestamp(ts/1000) as start_time_prev\n FROM time\n WHERE ts IS NOT NULL\n ) timestamp_data\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n time_table.write.mode('overwrite').partitionBy(\"year\", \"month\").parquet(output_data+'time_table/') \n \n # get filepath to song data file\n song_data = input_data + 'song_data/*/*/*/*.json'\n \n # read song data file\n df = spark.read.json(song_data)\n \n # create view for song data\n df.createOrReplaceTempView(\"song_data\")\n \n # create view for songplays table \n df.createOrReplaceTempView(\"songplays\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = input_data + \"log_data/*/*/*.json\"\n\n # read log data file\n df = spark.read.json(log_data)\n \n # print out the schema in tree format\n print(\"---------- Print out the schema of log dataset in tree format: ----------\")\n df.printSchema()\n \n # filter by actions for song plays\n df = df.filter(df.page == \"NextSong\")\n\n # extract columns for users table \n # users attributes: user_id, first_name, last_name, gender, level\n users_table = df.select(\"userId\", \"firstName\", \"lastName\", \"gender\", \"level\").distinct()\n \n # show first 10 rows in users table\n print(\" ---------- Show first 10 rows of users table ----------\")\n users_table.show(10)\n \n # write users table to parquet files\n output_data_users = os.path.join(output_data_users, \"users_table.parquet\")\n if path.exists(output_data_users):\n users_table.write.parquet(path = output_data_users, \n mode = \"overwrite\")\n else:\n users_table.write.parquet(path = output_data_users, \n mode = \"append\")\n \n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_users_parquet = spark.read.parquet(\"users_table.parquet\")\n print(\" ---------- Show first 10 rows of users table parquet file ----------\")\n df_users_parquet.show(10)\n\n # create datetime column from 
original timestamp column\n # divide timestamp by 1000 to convert from milliseconds to seconds\n get_datetime = udf(lambda x: datetime.fromtimestamp(x / 1000), TimestampType())\n df = df.withColumn(\"start_time\", get_datetime(df.ts))\n \n # time table attributes: start_time, hour, day, week, month, year, weekday\n get_hour = udf(lambda x: x.hour) \n df = df.withColumn(\"hour\", get_hour(df.start_time)) # create hour column\n \n get_day = udf(lambda x: x.day)\n df = df.withColumn(\"day\", get_day(df.start_time)) # create day column\n \n get_week = udf(lambda x: x.isocalendar()[1])\n df = df.withColumn(\"week\", get_week(df.start_time)) # create week number column\n \n get_month = udf(lambda x: x.month)\n df = df.withColumn(\"month\", get_month(df.start_time)) # create month column\n \n get_year = udf(lambda x: x.year)\n df = df.withColumn(\"year\", get_year(df.start_time)) # create year column\n \n get_weekday = udf(lambda x: x.weekday())\n df = df.withColumn(\"weekday\", get_weekday(df.start_time)) # create weekday column\n \n # extract columns to create time table\n time_table = df.select(df.columns[-7:])\n \n # show first 10 rows of time table \n print(\" ---------- Show first 10 rows of time table ----------\")\n time_table.show(10)\n\n # write time table to parquet files partitioned by year and month\n out_path_time = os.path.join(output_data, \"time_table.parquet\")\n if path.exists(out_path_time):\n time_table.write.parquet(path = out_path_time, \n partitionBy = (\"year\", \"month\"),\n mode = \"overwrite\")\n else:\n time_table.write.parquet(path = out_path_time, \n partitionBy = (\"year\", \"month\"),\n mode = \"append\")\n\n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_time_parquet = spark.read.parquet(\"time_table.parquet\")\n print(\" ---------- Show first 10 rows of time table parquet file ----------\")\n df_time_parquet.show(10)\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet(\"songs_table.parquet\")\n \n # inner join df with song_df by song's name\n cond = [df.song == song_df.title]\n df_join = df.join(song_df, cond, \"inner\")\n \n # extract columns from joined song and log datasets to create songplays table \n # songplays attributes: songplay_id, start_time, user_id, level, song_id, \n # artist_id, session_id, location, user_agent\n songplays_table = df_join.select(\"start_time\", \"userId\", \"level\", \"song_id\", \n \"artist_id\", \"sessionId\", \"location\", \n \"userAgent\").distinct()\n \n # create songplay_id column with auto_increment\n songplays_table.withColumn(\"songplay_id\", monotonically_increasing_id())\n \n # show first 10 rows of songplays table \n print(\" ---------- Show first 10 rows of songplays table ----------\")\n songplays_table.show(10)\n \n # append year and month column into songplays_table\n songplays_table = songplays_table.withColumn(\"year\", get_year(df.start_time))\n songplays_table = songplays_table.withColumn(\"month\", get_month(df.start_time))\n \n # write songplays table to parquet files partitioned by year and month\n out_path_songplays = os.path.join(output_data, \"songplays_table.parquet\")\n if path.exists(out_path_songplays):\n songplays_table.write.parquet(path = out_path_songplays, \n partitionBy = (\"year\", \"month\"),\n mode = \"overwrite\")\n else:\n songplays_table.write.parquet(path = out_path_songplays, \n partitionBy = (\"year\", \"month\"),\n mode = \"append\")", "def process_log_data(spark, input_data, output_data):\n\n # get filepath to log 
data file\n input_data = input_data+\"/log-data\"\n\n \"\"\"\n log_data=[]\n for x in os.walk(input_data):\n for y in glob.glob(os.path.join(x[0], '*.json')):\n log_data.append(y)\n \"\"\"\n \n # read log data file\n df = spark.read.json(input_data)\n\n # filter by actions for song plays\n df=df.filter(col(\"page\")=='NextSong').withColumn(\"new_ts\", df[\"ts\"].cast(IntegerType())).drop(\"ts\").withColumnRenamed(\"new_ts\", \"ts\")\n df.createOrReplaceTempView(\"staging_log_data\")\n\n # extract columns for users table\n user_query = \" SELECT userid, firstName, lastName, gender, level \" \\\n \" FROM staging_log_data \"\n users_table = spark.sql(user_query)\n \n # write users table to parquet files\n users_table.write.mode('overwrite').parquet(path=output_data+\"users\")\n\n df=df.filter(df['ts'].isNotNull())\n time_table= df.select(from_unixtime(df['ts']/1000).alias('start_time'))\n time_table=time_table.select(time_table['start_time'], \\\n hour(time_table['start_time']).alias(\"hour\"), \\\n dayofmonth(time_table['start_time']).alias(\"day\"), \\\n weekofyear(time_table['start_time']).alias(\"week\"), \\\n month(time_table['start_time']).alias(\"month\"), \\\n year(time_table['start_time']).alias(\"year\"), \\\n date_format(time_table['start_time'],'E').alias(\"DOW\"))\n\n # write time table to parquet files partitioned by year and month\n time_table.write.mode('overwrite').partitionBy('year','month').parquet(path=output_data + \"time\")\n\n # read in song data to use for songplays table\n songplay_query=\" Select DISTINCT monotonically_increasing_id() as songplay_id, \" \\\n \" from_unixtime(ld.ts/1000) as start_time , \" \\\n \" ld.userid as user_id, \" \\\n \" ld.level as level,\"\\\n \" sd.song_id as song_id,\" \\\n \" sd.artist_id as artist_id,\" \\\n \" ld.sessionid as session_id, \" \\\n \" ld.location as location, \" \\\n \" ld.useragent as user_agent, \" \\\n \" t.year as year, \" \\\n \" t.month as month \" \\\n \" from staging_log_data ld, Staging_Song_Data sd, time t\" \\\n \" Where ld.artist = sd.artist_name\" \\\n \" and ld.song = sd.title \" \\\n \" and from_unixtime(ld.ts/1000) = t.start_time \" \n\n # extract columns from joined song and log datasets to create songplays table\n songplays_table = spark.sql(songplay_query)\n #songplays_table = spark.sql(songplay_query).drop_duplicates('start_time','user_id','level','song_id','artist_id','location','user_agent')\n\n songplays_table.show()\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.mode('overwrite').partitionBy('year','month').parquet(path=output_data + \"songplays\")", "def process_log_data(spark, song_input_data, log_input_data, output_data):\n # get filepath to log data file\n log_data = log_input_data\n\n # read log data file\n print(\"Reading in the log data...\")\n log_df = spark.read.json(log_data)\n \n # filter by actions for song plays\n print(\"Filtering by actions for song plays...\")\n log_df = log_df.filter(log_df.page == 'NextSong')\n\n # extract columns for users table \n print(\"Extracting columns for users table...\")\n log_df.createOrReplaceTempView(\"users_table\")\n users_table = spark.sql(\"\"\"\n SELECT DISTINCT userId AS user_id,\n firstName AS first_name,\n lastName AS last_name,\n gender,\n level\n FROM users_table\n ORDER BY last_name\n \"\"\")\n \n print(\"Users table sample:\")\n users_table.show(5, truncate=False)\n \n # write users table to parquet files\n print(\"Writing users table to parquet files...\")\n 
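Most entries build the users table with a plain distinct() or dropDuplicates(), which keeps an arbitrary row when the same user appears with both free and paid `level` values. Below is a hedged sketch of keeping the most recent row per user instead, via a window ordered by `ts`; the helper name and the latest-level policy are assumptions rather than something the entries themselves implement.

    from pyspark.sql import DataFrame, Window, functions as F

    def extract_users(log_df: DataFrame) -> DataFrame:
        # Rank each user's events by recency and keep only the newest one,
        # so `level` reflects the latest subscription status.
        latest_first = Window.partitionBy("userId").orderBy(F.col("ts").desc())
        return (
            log_df.withColumn("rn", F.row_number().over(latest_first))
            .filter(F.col("rn") == 1)
            .select(
                F.col("userId").alias("user_id"),
                F.col("firstName").alias("first_name"),
                F.col("lastName").alias("last_name"),
                "gender",
                "level",
            )
        )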
users_table.write.mode(\"overwrite\").parquet(output_data + \"users_table.parquet\")\n\n # create timestamp column from original timestamp column\n print(\"Creating timestamp column...\")\n @udf(pt.TimestampType())\n def get_timestamp (ts):\n return datetime.fromtimestamp(ts / 1000.0)\n\n log_df = log_df.withColumn(\"timestamp\", get_timestamp(\"ts\"))\n log_df.show(5)\n \n # create datetime column from original timestamp column\n print(\"Creating datetime column...\")\n @udf(pt.StringType())\n def get_datetime(ts):\n return datetime.fromtimestamp(ts / 1000.0).strftime('%Y-%m-%d %H:%M:%S')\n\n log_df = log_df.withColumn(\"datetime\", get_datetime(\"ts\"))\n log_df.show(5)\n \n # extract columns to create time table\n print(\"Extracting columns for time table...\")\n log_df.createOrReplaceTempView(\"time_table\")\n time_table = spark.sql(\"\"\"\n SELECT DISTINCT datetime AS start_time,\n hour(timestamp) AS hour,\n day(timestamp) AS day,\n weekofyear(timestamp) AS week,\n month(timestamp) AS month,\n year(timestamp) AS year,\n dayofweek(timestamp) AS weekday\n FROM time_table\n ORDER BY start_time\n \"\"\")\n \n print(\"Users table sample:\")\n time_table.show(5, truncate=False)\n \n # write time table to parquet files partitioned by year and month\n print(\"Writing time table to parquet files...\")\n time_table.write.mode(\"overwrite\").partitionBy(\"year\", \"month\")\\\n .parquet(output_data + \"time_table.parquet\")\n\n # read in song data to use for songplays table\n song_df = spark.read.json(song_input_data)\n\n print(\"Joining log_data and song_data...\")\n joined_df = log_df.join(song_df, (log_df.artist == song_df.artist_name) & (log_df.song == song_df.title))\n\n # extract columns from joined song and log datasets to create songplays table \n print(\"Extracting columns from joined DF...\")\n joined_df = joined_df.withColumn(\"songplay_id\", monotonically_increasing_id())\n joined_df.createOrReplaceTempView(\"songplays_table\")\n songplays_table = spark.sql(\"\"\"\n SELECT songplay_id,\n timestamp AS start_time,\n month(timestamp) AS month,\n year(timestamp) AS year,\n userId AS user_id,\n level,\n song_id,\n artist_id,\n sessionId AS session_id,\n location,\n userAgent AS user_agent\n FROM songplays_table\n ORDER BY (user_id, session_id)\n \"\"\")\n\n print(\"Song plays table sample:\")\n songplays_table.show(5, truncate=False)\n \n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.mode(\"overwrite\").partitionBy(\"year\", \"month\") \\\n .parquet(output_data + \"songplays_table.parquet\")", "def process_log_data(spark, input_data, output_data):\n # get filepath to log data file\n log_data = \"{}/log_data/*/*/*\".format(input_data)\n\n # read log data file\n df = spark.read.format(\"json\").load(log_data)\n \n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp( (x/1000.0) ), T.TimestampType()) \n df = df.withColumn(\"listening_datetime\", get_timestamp(df.ts))\n \n # Using the same udf to convert epoch ts into timestamp already. 
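Where a typed UDF is preferred over built-in functions, the decorator form used by a couple of the entries looks roughly as follows; `get_timestamp`, `get_datetime` and `add_event_time` are illustrative names, and the local-timezone behaviour of datetime.fromtimestamp matches the snippets.

    from datetime import datetime
    from pyspark.sql import functions as F, types as T

    @F.udf(T.TimestampType())
    def get_timestamp(ms):
        # Millisecond epoch -> native timestamp column.
        return datetime.fromtimestamp(ms / 1000.0)

    @F.udf(T.StringType())
    def get_datetime(ms):
        # Millisecond epoch -> formatted string, as some entries store it.
        return datetime.fromtimestamp(ms / 1000.0).strftime("%Y-%m-%d %H:%M:%S")

    def add_event_time(log_df):
        return (
            log_df.withColumn("timestamp", get_timestamp("ts"))
                  .withColumn("datetime", get_datetime("ts"))
        )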
\n #get_datetime = udf()\n #df = \n \n # filter by actions for song plays\n df = df.where(\"page == 'NextSong'\")\n \n # register dataframe df_log into the temp view\n df.createOrReplaceTempView(\"df_log\")\n # extract columns for users table \n users_table = spark.sql(\"\"\"\n select DISTINCT userId,\n firstName,\n lastName,\n gender,\n level\n FROM df_log as e\n \"\"\")\n\n \n # write users table to parquet files\n users_table.write.parquet('{}/dim_user.pq'.format(output_data))\n\n \n \n \n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n SELECT DISTINCT listening_datetime as t_start_time,\n hour(listening_datetime) as t_hourofday,\n day(listening_datetime) as t_daynuminmonth,\n weekofyear(listening_datetime) as t_weeknuminyear,\n month(listening_datetime) as t_monthnuminyear,\n year(listening_datetime) as t_yearnuminyear,\n dayofweek(listening_datetime) as t_daynuminweek\n\n\n FROM df_log as s\n \"\"\")\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy('t_yearnuminyear', 't_monthnuminyear').parquet('{}/dim_time.pq'.format(output_data))\n\n # read in song data to use for songplays table\n song_df = spark.read.parquet('data/analytics/dim_song.pq')\n \n # registering temp view for dim_time and df_song in order to join in the songplay \n song_df.createOrReplaceTempView(\"df_song\")\n time_table.createOrReplaceTempView(\"dim_time\")\n\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = spark.sql(\"\"\"\n SELECT DISTINCT e.listening_datetime AS t_start_time,\n userId as u_user_id,\n level as u_level,\n song_id as s_song_id,\n artist_id as a_artist_id,\n sessionId as sp_session_id,\n location as sp_location,\n userAgent as sp_user_agent,\n t.t_yearnuminyear,\n t.t_monthnuminyear\n FROM df_log as e\n JOIN df_song s\n ON (e.artist = s.artist_name)\n JOIN dim_time t\n on t.t_start_time = e.listening_datetime\n \n \"\"\")\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.partitionBy('t_yearnuminyear', 't_monthnuminyear').parquet('{}/songplays.pq'.format(output_data))", "def main(input_filepath, output_filepath, data_type):\n a = jsonCSV(input_filepath, os.path.join(output_filepath, data_type+'.csv'))\n column_names = a.get_superset_column_names()\n a.read_write(column_names)\n\n logger = logging.getLogger(__name__)\n logger.info('transform log files into csv')", "def process_log_data(spark, input_data, output_data):\n\n # get filepath to log data file\n log_data = os.path.join( input_data, \"log-data/*/*/*.json\")\n\n # read log data file\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.filter(df.page == \"NextSong\")\n \n # USERS TABLE\n # extract columns for users table\n users_table = df.select(\"userId\",\"firstName\",\"lastName\",\"gender\",\"level\").dropDuplicates(['userId'])\n \n print( \"HERE users_table sample:\\n\")\n users_table.show(5)\n # write users table to parquet files\n users_table.write.parquet(os.path.join(output_data, \"users/\") , mode=\"overwrite\")\n\n # TIME TABLE\n # create timestamp column from original timestamp column\n get_start_time = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%Y-%m-%d %H:%M:%S'))\n get_hour = udf(lambda x: datetime.fromtimestamp(x / 1000.0).hour)\n get_day = udf(lambda x: datetime.fromtimestamp(x / 1000.0).day)\n get_week = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%W'))\n get_month = udf(lambda x: 
datetime.fromtimestamp(x / 1000.0).month)\n get_year = udf(lambda x: datetime.fromtimestamp(x / 1000.0).year)\n get_weekday = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%A'))\n\n df = df.withColumn('start_time', get_start_time(df['ts']))\n df = df.withColumn('hour', get_hour(df['ts']))\n df = df.withColumn('day', get_day(df['ts']))\n df = df.withColumn('week', get_week(df['ts']))\n df = df.withColumn('month', get_month(df['ts']))\n df = df.withColumn('year', get_year(df['ts']))\n df = df.withColumn('week_day', get_weekday(df['ts'])).dropDuplicates(['start_time'])\n\n df.createOrReplaceTempView(\"time_table\")\n \n time_columns = ['start_time', 'hour', 'day', 'week', 'month', 'year', 'week_day']\n\n # extract columns to create time table\n time_table = spark.sql(\"\"\"\n SELECT start_time, hour, day, week, month, year, week_day\n FROM time_table\n \"\"\").toDF(*time_columns)\n \n print( \"HERE time_table sample:\\n\")\n time_table.show(5)\n # write time table to parquet files partitioned by year and month\n time_table.write.parquet(os.path.join(output_data, \"time_table/\"), mode='overwrite', partitionBy=[\"year\",\"month\"])\n\n # SONGPLAYS TABLE\n # add monotonically increasing id column\n df = df.withColumn('songplay_id', functions.monotonically_increasing_id())\n df.createOrReplaceTempView(\"songplays_table\")\n\n # song df\n song_data = os.path.join( input_data, \"song_data/*/*/*/*.json\")\n song_df = spark.read.json(song_data).dropDuplicates()\n song_df.createOrReplaceTempView(\"songs_table\")\n\n song_columns = ['songplay_id', 'start_time', 'userId', 'level', 'sessionId', 'location', 'userAgent', 'year', 'month',\n 'length', 'song_id', 'artist_id', 'title', 'artist_name', 'duration']\n\n # extract columns to create time table\n songplays_table = spark.sql(\n \"\"\"\n SELECT sp.songplay_id, sp.start_time, sp.userId, sp.level, sp.sessionId, sp.location, sp.userAgent, sp.year, \n sp.month, sp.length, s.song_id, s.artist_id, s.title, s.artist_name, s.duration\n FROM songplays_table AS sp \n JOIN songs_table AS s ON sp.song = s.title AND sp.artist = s.artist_name AND sp.length = s.duration\n \"\"\").toDF(*song_columns)\n \n print( \"HERE songplays_table sample:\\n\")\n songplays_table.show(5)\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.parquet(os.path.join(output_data, \"songplays/\"), mode=\"overwrite\", partitionBy=[\"year\",\"month\"])", "def process_log_data(spark, input_data, output_data):\n \n # get filepath to log data file\n log_data = os.path.join(input_data,\"log_data/*/*/*.json\")\n\n\n # read log data file\n df = spark.read.json(log_data)\n \n # filter by actions for song plays\n #df = \n\n # extract columns for users table \n users_table = df['userId', 'firstName', 'lastName', 'gender', 'level'].dropDuplicates()\n \n # write users table to parquet files\n users_table.write.parquet(os.path.join(output_data, 'users.parquet'), 'overwrite')\n print(\"--- users.parquet completed ---\")\n\n # create timestamp column from original timestamp column\n get_timestamp = udf(lambda x: datetime.fromtimestamp( (x/1000.0) ), T.TimestampType())\n # create datetime column from original timestamp column\n get_datetime = udf(lambda x: datetime.fromtimestamp(int(int(x)/1000)))\n get_hour = udf(lambda x: x.hour, T.IntegerType()) \n get_day = udf(lambda x: x.day, T.IntegerType()) \n get_week = udf(lambda x: x.isocalendar()[1], T.IntegerType()) \n get_month = udf(lambda x: x.month, T.IntegerType()) \n get_year = udf(lambda x: 
x.year, T.IntegerType()) \n get_weekday = udf(lambda x: x.weekday(), T.IntegerType()) \n\n df = df.withColumn(\"timestamp\", get_timestamp(df.ts))\n df = df.withColumn('start_time', get_datetime(df.ts))\n df = df.withColumn(\"hour\", get_hour(df.timestamp))\n df = df.withColumn(\"day\", get_day(df.timestamp))\n df = df.withColumn(\"week\", get_week(df.timestamp))\n df = df.withColumn(\"month\", get_month(df.timestamp))\n df = df.withColumn(\"year\", get_year(df.timestamp))\n df = df.withColumn(\"weekday\", get_weekday(df.timestamp))\n \n \n # extract columns to create time table\n time_columns = ['start_time', 'hour', 'day', 'week', 'month', 'year', 'weekday'] \n \n # write time table to parquet files partitioned by year and month\n time_table = df[time_columns]\n \n # write time table to parquet files partitioned by year and month\n time_table.write.partitionBy('year', 'month').parquet(os.path.join(output_data, 'time.parquet'), 'overwrite')\n print(\"--- time.parquet completed ---\")\n \n # read in song data to use for songplays table\n df_songs = spark.read.parquet(os.path.join(output_data, 'songs.parquet'))\n \n df_songplays = df_songs.join(df, (df_songs.title == df.song)).where(df.page == 'NextSong').orderBy(df.timestamp)\n # extract columns from joined song and log datasets to create songplays table \n songplays_table = df_songplays['timestamp', 'userId', 'level', 'song_id', 'artist_id', 'sessionId', 'location', 'userAgent']\n songplays_table.select(monotonically_increasing_id().alias('songplay_id')).collect()\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table\\\n .withColumn(\"year\", get_year(songplays_table.timestamp))\\\n .withColumn(\"month\", get_month(songplays_table.timestamp))\\\n .write\\\n .partitionBy('year', 'month')\\\n .parquet(os.path.join(output_data, 'songplays.parquet'), 'overwrite')\n \n print(\"--- songplays.parquet completed ---\")\n print(\"*** process_log_data completed ***\\n\\nEND\")", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to all json file of song_data in S3 bucket\n song_data = input_data + \"song_data/*/*/*/*.json\"\n \n # read song data file\n df = spark.read.json(song_data)\n \n # print out the schema in tree format\n print(\"---------- Print out the schema of song dataset in tree format: ----------\")\n df.printSchema()\n\n # extract columns to create songs table\n # songs table attributes: song_id, title, artist_id, year, duration\n songs_table = df.select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\")\n # show first 10 rows in songs_table\n print(\" ---------- Show first 10 rows of songs table ----------\")\n songs_table.show(10)\n \n # write songs table to parquet files partitioned by year and artist\n out_path_songs = os.path.join(output_data, \"songs_table.parquet\")\n if path.exists(out_path_songs):\n songs_table.write.parquet(path = out_path_songs, \n partitionBy = (\"year\", \"artist_id\"),\n mode = \"overwrite\")\n else:\n songs_table.write.parquet(path = out_path_songs, \n partitionBy = (\"year\", \"artist_id\"),\n mode = \"append\")\n \n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_songs_parquet = spark.read.parquet(\"songs_table.parquet\")\n print(\" ---------- Show first 10 rows of songs table parquet file ----------\")\n df_songs_parquet.show(10)\n\n # extract columns to create artists table\n # artists table attributes: artist_id, name, location, lattitude, longitude\n artists_table = 
df.select(\"artist_id\", \"artist_name\", \"artist_location\", \n \"artist_latitude\", \"artist_longitude\")\n # show first 10 rows in artists_table\n print(\" ---------- Show first 10 rows of artists table ----------\")\n artists_table.show(10)\n \n # write artists table to parquet files\n out_path_artists = os.path.join(output_data, \"artists_table.parquet\")\n if path.exists(out_path_artists):\n artists_table.write.parquet(path = out_path_artists, \n mode = \"overwrite\")\n else:\n artists_table.write.parquet(path = out_path_artists, \n mode = \"append\")\n \n # read parquet file and check the first 10 rows of partitioned parquet dataframes\n df_artists_parquet = spark.read.parquet(\"artists_table.parquet\")\n print(\" ---------- Show first 10 rows of artists table parquet file ----------\")\n df_artists_parquet.show(10)", "def process_data():\n for message in get_messages_from_sqs():\n try:\n message_content = json.loads(message.body)\n input_file = urllib.unquote_plus(message_content\n ['Records'][0]['s3']['object']\n ['key']).encode('utf-8')\n s3.download_file(input_bucket_name, input_file, input_file)\n output_file = os.path.join(output_dir, os.path.splitext(input_file)[0]+'.csv')\n parse_patient_data(input_file, output_file)\n upload_data(output_file)\n cleanup_files(input_file, output_file)\n except:\n message.change_visibility(VisibilityTimeout=0)\n continue\n else:\n message.delete()", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n song_data = input_data + \"song_data/A/A/B/*.json\"\n \n # read song data file\n song_data_schema = StructType([\n StructField(\"artist_id\", StringType(), False),\n StructField(\"artist_latitude\", StringType(), True),\n StructField(\"artist_longitude\", StringType(), True),\n StructField(\"artist_location\", StringType(), True),\n StructField(\"artist_name\", StringType(), False),\n StructField(\"song_id\", StringType(), False),\n StructField(\"title\", StringType(), False),\n StructField(\"duration\", DoubleType(), False),\n StructField(\"year\", IntegerType(), False)\n ])\n \n print(\"Start read s3 song data\") \n df = spark.read.json(song_data, schema=song_data_schema)\n \n # extract columns to create songs table\n print(\"Start create songs table\")\n songs_table = df.select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\")\n \n # write songs table to parquet files partitioned by year and artist\n print(\"Start write songs table to parquet\")\n songs_table.write.parquet(output_data + \"songs_table.parquet\", mode=\"overwrite\", partitionBy=[\"year\", \"artist_id\"])\n\n # extract columns to create artists table\n # needs to distinct artist (can duplicate)\n print(\"Start create artists table\")\n artists_table = df.select(\"artist_id\", \"artist_name\", \"artist_location\", \n \"artist_latitude\", \"artist_longitude\").distinct()\n \n # write artists table to parquet files\n print(\"Start write artists table to parquet\")\n artists_table.write.parquet(output_data + \"artists_table.parquet\", mode=\"overwrite\")", "def process_data() -> None:\n yesterday_date = std.get_yesterday()\n s3_working_bucket = \"stwit-working-bucket\"\n s3_processed_bucket = \"stwit-processed-bucket\"\n\n df_tweets = process_tweets(s3_working_bucket, yesterday_date)\n df_users = process_users(s3_working_bucket, yesterday_date)\n df_stocks = process_stocks(s3_working_bucket, yesterday_date)\n\n logging.debug(\"Calling filter_tweets function.\")\n df_tweets, df_users = stp.filter_tweets(df_tweets, df_users)\n\n 
logging.debug(\"Calling remove_duplicate_tweets function.\")\n df_tweets, df_users = stp.remove_duplicate_tweets(df_tweets, df_users)\n\n logging.debug(\"Calling export_twitter_csv function with df_tweets data.\")\n stdm.export_csv(df_tweets, s3_processed_bucket, yesterday_date, \"twitter\", \"tweets\")\n\n logging.debug(\"Calling export_twitter_csv function with df_users data.\")\n stdm.export_csv(df_users, s3_processed_bucket, yesterday_date, \"twitter\", \"users\")\n\n logging.debug(\"Calling export_stocks_csv function.\")\n stdm.export_csv(df_stocks, s3_processed_bucket, yesterday_date, \"stocks\")", "def process_song_data(spark, input_data, output_data):\n \n print(\"Read song data\")\n df_song = spark.read.json(input_data+\"song_data/*/*/*/*.json\", schema=build_song_schema())\n \n # extract columns to create songs table\n songs_table = df_song[['song_id', 'title', 'artist_id', 'year', 'duration']].drop_duplicates()\n\n \n print(\"Write...\")\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.save(path=output_data+'song_table',\n format='parquet',\n partitionBy=['year', 'artist_id'],\n mode='overwrite' )\n\n # extract columns to create artists table\n artists_table = df_song[['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude']].drop_duplicates()\n\n print(\"Write...\")\n # write artists table to parquet files\n artists_table.write.save(path=output_data+'artists_table',\n format='parquet',\n mode='overwrite' )", "def process_log_file(cur, filepath):\n # open log file\n datalog = pd.read_json(filepath, lines=True)\n\n df = pd.DataFrame(data=datalog)\n df.head()\n\n # convert timestamp column to datetime\n t = pd.to_datetime(df['ts'], unit='ms')\n \n # insert time data records\n time_log = []\n column_labels = ('start time','hour','day','week of year','month','year','weekday')\n index = 0\n for timestamp in t:\n time_data = (t[index],t.dt.hour[index],t.dt.day[index],t.dt.week[index],t.dt.month[index],t.dt.year[index],t.dt.weekday[index])\n time_log.append(time_data)\n index = index + 1\n \n time_df = pd.DataFrame.from_dict(time_log)\n #print(time_df)\n time_df.head()\n\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n \n\n # load user table\n user_df_data = df[['userId', 'firstName', 'lastName','gender','level']].values\n user_df = pd.DataFrame.from_dict(user_df_data)\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n \n # insert songplay records\n for index, row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n \n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = (pd.to_datetime(row.ts, unit='ms'),row.userId,row.level,songid,artistid,row.sessionId,row.location,row.userAgent)\n cur.execute(songplay_table_insert, songplay_data)", "def process_song_data(\n spark: SparkSession,\n input_data: str,\n output_data: str\n) -> None:\n # get filepath to song data file\n song_data = input_data + 'song_data/*/*/*/'\n\n songdata_schema = StructType([\n StructField(\"song_id\", StringType(), True),\n StructField(\"title\", StringType(), True),\n StructField(\"year\", StringType(), True),\n StructField(\"duration\", DoubleType(), True),\n StructField(\"artist_id\", StringType(), True),\n StructField(\"artist_name\", StringType(), True),\n 
StructField(\"artist_location\", StringType(), True),\n StructField(\"artist_latitude\", DoubleType(), True),\n StructField(\"artist_longitude\", DoubleType(), True),\n ])\n # read song data file\n df = spark.read.json(song_data, schema=songdata_schema)\n\n # extract columns to create songs table\n songs_table = df.select(\n 'song_id', 'title', 'artist_id', 'year', 'duration') \\\n .dropDuplicates()\n songs_table.createOrReplaceTempView('songs')\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id') \\\n .parquet(join(output_data, 'songs/songs.parquet'), 'overwrite')\n\n # # extract columns to create artists table\n artists_table = df.select(\n 'artist_id',\n 'artist_name',\n 'artist_location',\n 'artist_latitude',\n 'artist_longitude',\n ).withColumnRenamed('artist_name', 'name') \\\n .withColumnRenamed('artist_location', 'location') \\\n .withColumnRenamed('artist_latitude', 'latitude') \\\n .withColumnRenamed('artist_longitude', 'longitude') \\\n .dropDuplicates()\n artists_table.createOrReplaceTempView('artists')\n # # write artists table to parquet files\n artists_table.write.parquet(\n join(output_data, 'artists/artists.parquet'),\n 'overwrite'\n )", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n song_data = input_data + 'song_data/A/A/A/*.json'\n\n # read song data file\n df = spark.read.json(song_data)\n\n # write song data to temp view song_data_table\n df.createOrReplaceTempView(\"song_data_table\")\n\n # extract columns to create songs table\n songs_table = df.select('song_id', 'title', 'artist_id', 'year', 'duration').drop_duplicates()\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy(\"year\", \"artist_id\").format(\"parquet\").mode(\"overwrite\").save(\n output_data + 'songs/song_data.parquet')\n\n # extract columns to create artists table\n artists_table = df.select('artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude') \\\n .drop_duplicates()\n\n # write artists table to parquet files\n artists_table.write.format(\"parquet\").mode(\"overwrite\").save(output_data + 'artists/artist_data.parquet')", "def process_song_data(spark, input_data, output_data):\n # define json structure\n songdata_schema = StructType([\n StructField(\"song_id\", StringType(), True),\n StructField(\"year\", StringType(), True),\n StructField(\"duration\", DoubleType(), True),\n StructField(\"artist_id\", StringType(), True),\n StructField(\"artist_name\", StringType(), True),\n StructField(\"artist_location\", StringType(), True),\n StructField(\"artist_latitude\", DoubleType(), True),\n StructField(\"artist_longitude\", DoubleType(), True),\n ])\n \n # get filepath to song data file\n song_data = os.path.join(input_data, \"song_data/*/*/*/*.json\")\n \n # read song data file\n df = spark.read.json(song_data, schema=songdata_schema)\n\n # extract columns to create songs table\n songs_table = df.select('song_id', 'artist_id', 'year', 'duration')\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id').parquet(output_data + \"songs\")\n\n # extract columns to create artists table\n artists_table = df.select('artist_id', 'artist_name', 'artist_location', 'artist_latitude',\n 'artist_longitude')\n \n # write artists table to parquet files\n artists_table.write.parquet(output_data + \"artists\")", "def process_app_log(event, context):\n 
pubsub_message = base64.b64decode(event[\"data\"]).decode(\"utf-8\")\n\n log_data = json.loads(pubsub_message)\n print(log_data)\n payload = None\n try:\n if \"protoPayload\" in log_data:\n # If there is a protoPayload, we assume it's an entry from the audit log\n protoPayload = log_data[\"protoPayload\"]\n payload = protoPayload[\"operation\"].copy()\n payload[\"methodName\"] = log_data[\"methodName\"]\n payload[\"timestamp\"] = log_data[\"timestamp\"]\n\n elif \"jsonPayload\" in log_data:\n # Assuming the log entry has the fields we need, we just pass it over\n payload = log_data[\"jsonPayload\"]\n\n if payload:\n time_difference = store_data(payload)\n if time_difference:\n send_metric(time_difference, payload[\"methodName\"])\n except Exception as e:\n print(e)", "def process_log_file(cur, filepath):\n\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df['page'] == 'NextSong']\n\n # convert timestamp column to datetime\n t = pd.to_datetime(df['ts'], unit='ms')\n\n # insert time data records\n time_data = (t, t.dt.hour, t.dt.day,\n t.dt.month, t.dt.year, t.dt.day_name())\n column_labels = ('start_time', 'hour', 'day', 'month', 'year', 'weekday')\n time_df = pd.DataFrame(dict(zip(column_labels, time_data)))\n\n # bulk insert time_df into Postgres\n # create a csv like object\n output = io.StringIO()\n time_df.to_csv(output, sep='\\t', index=False, header=False)\n # move the pointer to start of the file\n output.seek(0)\n # creating a temp table to handle conflict due to duplicate insert\n # ref: https://stackoverflow.com/questions/48019381/how-postgresql-copy-to-stdin-with-csv-do-on-conflic-do-update\n cur.execute(time_tmp_table)\n # copy data from csv to temp table\n cur.copy_from(output, 'tmp_table')\n # merge temp table with main table\n cur.execute(time_table_bulk_insert)\n\n # load user table\n user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n # create a csv like object\n output = io.StringIO()\n for index, row in df.iterrows():\n\n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n # insert songplay record\n songplay_data = (t[index], row.userId, row.level, songid,\n artistid, row.sessionId, row.location, row.userAgent)\n # write to csv like object\n output.write('\\t'.join(map(clean_csv_value, songplay_data)) + '\\n')\n # move the pointer to start of the csv like object\n output.seek(0)\n # columns to insert (songplay_id is a serial insert)\n columns = ['start_time', 'user_id', 'level', 'song_id',\n 'artist_id', 'session_id', 'location', 'user_agent']\n # copy data to songplays table\n cur.copy_from(output, 'songplays', columns=columns)", "def analyze(sc, input_path, output_path):\n context = LogsJobContext(sc)\n # get filepath to log data file\n log_data = \"{}/log_data/*/*/*.json\".format(input_path)\n # read log data file\n df = sc.read.json(log_data)\n df.cache()\n # filter by actions for song plays\n df = df.filter(df.page == 'NextSong')\n df.cache()\n\n # extract columns for users table\n users = df.selectExpr('userId as user_id', 'firstName as first_name', 'lastName as last_name', 'gender',\n 'level').dropDuplicates()\n # write users table to parquet files\n 
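The songplays fact table in these entries comes from matching the filtered log events against the song data. Below is a compact DataFrame-API sketch of that join on title, artist name and duration, with a surrogate key from monotonically_increasing_id() and a write partitioned by year and month; `log_df`, `song_df` and `output_path` are assumed inputs (NextSong events and raw song records) rather than names taken from any single entry.

    from pyspark.sql import DataFrame, functions as F

    def build_songplays(log_df: DataFrame, song_df: DataFrame, output_path: str) -> None:
        # Match each NextSong event to its song on title, artist and duration,
        # keeping events with no match (left join), as several entries do.
        joined = log_df.join(
            song_df,
            (log_df.song == song_df.title)
            & (log_df.artist == song_df.artist_name)
            & (log_df.length == song_df.duration),
            "left",
        )
        songplays = (
            joined.select(
                F.monotonically_increasing_id().alias("songplay_id"),
                (F.col("ts") / 1000).cast("timestamp").alias("start_time"),
                F.col("userId").alias("user_id"),
                "level",
                "song_id",
                "artist_id",
                F.col("sessionId").alias("session_id"),
                "location",
                F.col("userAgent").alias("user_agent"),
            )
            .withColumn("year", F.year("start_time"))
            .withColumn("month", F.month("start_time"))
        )
        songplays.write.mode("overwrite").partitionBy("year", "month").parquet(output_path)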
users.write.parquet(\"{}/users.parquet\".format(output_path), mode='overwrite')\n\n start_times_datetime = df.dropDuplicates().selectExpr(\"cast((from_unixtime(ts/1000.0)) as timestamp) as start_time\")\n start_times_datetime.printSchema()\n time = start_times_datetime.selectExpr([\"start_time\", \"hour(start_time) as hour\", \"day(start_time) as day\",\n \"weekofyear(start_time) as week\", \"month(start_time) as month\",\n \"year(start_time) as year\",\n \"dayofweek(start_time) as weekday\"\n ])\n\n # write time table to parquet files partitioned by year and month\n time.write.partitionBy(['year', 'month']).parquet(\"{}/times.parquet\".format(output_path), mode=\"overwrite\")\n\n # read in song data to use for songplays table\n song_df = sc.read.parquet(\"{}/songs.parquet\".format(output_path))\n\n # extract columns from joined song and log datasets to create songplays table\n song_df.createOrReplaceTempView('songs')\n df.createOrReplaceTempView('logs')\n songplays_table = sc.sql('select \\\n monotonically_increasing_id() as songplay_id, \\\n from_unixtime(l.ts/1000) as start_time, \\\n userId as user_id,\\\n l.level,\\\n s.song_id,\\\n s.artist_id,\\\n l.sessionId as session_id,\\\n l.location,\\\n l.userAgent as user_agent\\\n from \\\n logs l \\\n left join songs s on l.song = s.title')\n\n songplays_table_for_partitioning = songplays_table\\\n .withColumn('year', F.year(songplays_table.start_time))\\\n .withColumn('month', F.month(songplays_table.start_time))\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table_for_partitioning.write.partitionBy(['year', 'month']) \\\n .parquet(\"{}/songplays.parquet\".format(output_path), mode='overwrite')", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n \n # read song data file\n df = spark.read.json(song_data).dropDuplicates()\n\n # extract columns to create songs table\n global songs_table\n songs_table = df.select(['song_id', 'title', 'artist_id', 'year', 'duration'])\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy(\"year\", \"artist_id\").parquet(\"output_data + SongTable.parquet\")\n\n # extract columns to create artists table\n artists_table = df.select(['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude'])\n \n # write artists table to parquet files\n artists_table.write.parquet(output_data + \"ArtistTable.parquet\")", "def lambda_handler(event, context):\n\n for record in event['Records']:\n\n bucket = record['s3']['bucket']['name']\n key = unquote_plus(record['s3']['object']['key'])\n\n str_value = s3_utils.download_file_as_string(bucket, key)\n data = json.loads(str_value)\n\n normalized_data = {\n 'meta': {\n 'table': 'parcels',\n 'column_names': [\n 'dataset',\n 'as_of',\n 'apn',\n 'objectid',\n 'city',\n 'x_coordinate',\n 'y_coordinate',\n 'area',\n 'length'\n ]\n }\n }\n\n rows = []\n\n dataset = data['meta']['dataset']\n as_of = data['meta']['datetime']\n\n for r in data['results']:\n\n attr = r['attributes']\n\n temp_dict = {\n 'dataset': dataset,\n 'as_of': as_of,\n 'apn': attr.get('APN_SPACE'),\n 'objectid': attr.get('OBJECTID'),\n 'city': attr.get('CITY'),\n 'x_coordinate': attr.get('X'),\n 'y_coordinate': attr.get('Y'),\n 'area': attr.get('Shape.STArea()'),\n 'length': attr.get('Shape.STLength()')\n }\n\n rows.append(temp_dict)\n\n normalized_data['rows'] = rows\n \n bucket = 'gis-data-normalized'\n 
file_name = 'normalized_' + key\n s3_utils.upload_json_as_file(normalized_data, bucket, file_name)", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = f'{input_data}/song_data/*/*/*/*.json'\n \n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select([\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\"]).where(df[\"song_id\"].isNotNull())\n \n # write songs table to parquet files partitioned by year and artist\n song_data_out = f'{output_data}/songs_table/songs_table.parquet'\n songs_table.write.mode('overwrite').partitionBy('year','artist_id').parquet(song_data_out)\n\n # extract columns to create artists table\n artists_table = df.select([\"artist_id\", \"artist_name\", \"artist_location\", \"artist_latitude\", \"artist_longitude\"]).where(df[\"artist_id\"].isNotNull())\n \n # write artists table to parquet files\n artists_data_out = f'{output_data}/artists_table/artists_table.parquet'\n artists_table.write.mode('overwrite').parquet(artists_data_out)", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n song_data = input_data + \"song-data/*/*/*/*.json\"\n\n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\")\n \n # write songs table to parquet files partitioned by year and artist\n file_name = output_data + \"songs.parquet\"\n songs_table.write.partitionBy([\"year\", \"artist_id\"]).parquet(file_name)\n\n # extract columns to create artists table\n artists_table = df.select(\"artist_id\", \"artist_name\", \"artist_longitude\", \"artist_latitude\", \"artist_location\")\n \n # write artists table to parquet files\n file_name = output_data + \"artists.parquet\"\n artists_table.write.parquet(file_name)", "def process_log_file(cur, filepath):\n \n # open log file\n df = pd.read_json(filepath,lines=True)\n\n # filter by NextSong action - i.e. 
get only listening music events from the logs\n df = df[(df.page == \"NextSong\")]\n\n # insert time records\n __insert_time_data(cur, df)\n \n # insert user records\n __insert_user_data(cur, df)\n \n # insert songplay records\n __insert_songplay_data(cur, df)\n \n # erase dataframe\n df = df.iloc[0:0]", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to song data file\n song_data = input_data + \"/song_data/A/A/A/*.json\"\n \n # read song data file\n df = spark.read.json(song_data)\n #df.show(5)\n \n # extract columns to create songs table\n songs_table = df.select(\"song_id\",\"title\", \"artist_id\",\"year\", \"duration\")\n songs_table = songs_table.dropDuplicates()\n songs_table.createOrReplaceTempView(\"temp_songs_table\")\n # write songs table to parquet files partitioned by year and artist\n songs_table = df.write.partitionBy(\"year\",\"artist_id\")\\\n .mode(\"overwrite\").parquet(output_data + \"/songs_table\")\n print(\"Songs Table Successfully created\")\n \n # extract columns to create artists table\n artists_table = df.select(\"artist_id\",\"artist_name\", \\\n \"artist_location\",\"artist_latitude\", \"artist_longitude\")\n artists_table = artists_table.dropDuplicates()\n artists_table.createOrReplaceTempView(\"temp_artists_table\")\n \n # write artists table to parquet files\n artists_table = df.write.mode(\"overwrite\").parquet(output_data + \"/artists_table\")\n print(\"Artists Table Successfully created\")", "def process_log_file(cur, filepath):\n \n # open log file\n \n df = pd.read_json(filepath, lines = True)\n \n # filter by NextSong action\n df = df[df['page']=='NextSong']\n # convert timestamp column to datetime\n t = pd.to_datetime(df.ts, unit='ms')\n df.ts = t\n \n # insert time data records\n time_data = [t, t.dt.hour, t.dt.day, t.dt.weekofyear,\n t.dt.month, t.dt.year, t.dt.weekday]\n \n # column_labels = ['timestamp','Hour', \n # 'Day','Month','Year''Weekday']'\n column_labels = ['timestamp','hour','day','weekofyear','month','year','weekday']\n time_df = pd.DataFrame(dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n \n # load user table\n user_df = df[['userId','firstName', \n 'lastName','gender','level']]\n\n # insert user records\n for i, row in user_df.iterrows(): \n cur.execute(user_table_insert, row)\n \n # insert songplay records\n for index, row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist,\n row.length))\n results = cur.fetchone()\n \n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = (index, row.ts, row.userId, row.level,\n songid, artistid, row.sessionId, \n row.location, row.userAgent)\n \n \n cur.execute(songplay_table_insert, songplay_data)", "def processData(\n filePath: str,\n path: str,\n fileSchema: Dict,\n pkey: str,\n definingCol: str,\n hashKeyCols: List[str],\n hashDataCols: List[str],\n partitionCol: str = \"date_loaded\",\n) -> Dict:\n try:\n schema = generateSchema(fileSchema)\n logger.info(\"Loading data from input file\")\n df = (\n spark.read.format(\"csv\")\n .option(\"header\", True)\n .schema(schema)\n .load(filePath)\n )\n count = df.count()\n windowBy = Window.partitionBy(f.col(pkey)).orderBy(f.col(definingCol))\n logger.info(\"Generating the required Audit Columns\")\n df = (\n df.withColumn(\n \"hashKey\",\n f.sha2(\n f.concat_ws(\n \"|\", *map(lambda key_cols: 
f.col(key_cols), hashKeyCols)\n ),\n 256,\n ),\n )\n .withColumn(\n \"hashData\",\n f.sha2(\n f.concat_ws(\n \"|\", *map(lambda key_cols: f.col(key_cols), hashDataCols)\n ),\n 256,\n ),\n )\n .withColumn(\"startDate\", f.col(definingCol))\n .withColumn(\"endDate\", f.lead(f.col(definingCol)).over(windowBy))\n .withColumn(\n str(partitionCol), f.lit(datetime.datetime.now().strftime(\"%Y-%m-%d\"))\n )\n )\n if not DeltaTable.isDeltaTable(spark, path):\n logger.info(\"Performing the initial load\")\n status = performFirstLoad(df, path, partitionCol)\n else:\n logger.info(\"Performing the incremental load\")\n status = performDeltaLoad(df, path, pkey)\n return {**status, **{\"count\": count}}\n except Exception as e:\n logger.error(\n \"Issue while processing the {0} error : {1}\".format(filePath, str(e)[:100])\n )\n return {\"status\": \"Failed\", \"message\": str(e)[:100], \"count\": 0}\n # raise DLoaderException(\"Failed while processing the data :{0}\".format(e))", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n \n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.filter(df.song_id != '') \\\n .select(\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\") \\\n .dropDuplicates()\n \n # output filepath to songs table file\n songs_table_path = output_data + \"songs_table.parquet\"\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy(\"year\", \"artist_id\").mode(\"overwrite\") \\\n .parquet(songs_table_path)\n\n # extract columns to create artists table\n artists_table = df.filter(df.artist_id != '').select(\"artist_id\",\n \"artist_name\", \"artist_location\", \"artist_latitude\", \"artist_longitude\") \\\n .dropDuplicates()\n \n # output filepath to artists table file\n artists_table_path = output_data + \"artists_table.parquet\"\n \n # write artists table to parquet files\n artists_table.write.mode(\"overwrite\") \\\n .parquet(artists_table_path)", "def process_log_file(cur, filepath):\r\n\r\n # open log file\r\n df = pd.read_json(filepath, lines=True)\r\n df2 = pd.read_json(filepath, lines=True)\r\n\r\n # filter by NextSong action for missing data\r\n df2 = df2[df2['page']=='NextSong']\r\n\r\n\r\n\r\n # insert missing records into Song and Artist Table\r\n for i, row in df2.iterrows():\r\n cur.execute(artist_table_insert, (row.artist + str(i), row.artist, row.location, 0, 0))\r\n for i, row in df2.iterrows():\r\n cur.execute(song_table_insert, (row.song + str(i), row.song, row.artist + str(i), 0, row.length))\r\n\r\n # filter by NextSong action\r\n df = df[df['page']=='NextSong']\r\n\r\n # convert timestamp column to datetime\r\n t = pd.to_datetime(df['ts'], unit='ms')\r\n\r\n # extract time data from timestamp\r\n time_data = {'start_time': t,'hour': pd.Series(t).dt.hour, 'day':pd.Series(t).dt.day,\r\n 'month': pd.Series(t).dt.month, 'year': pd.Series(t).dt.year,\r\n 'weekday': pd.Series(t).dt.dayofweek}\r\n #column_labels = []\r\n # insert time data records\r\n time_df = pd.DataFrame(time_data)\r\n\r\n for i, row in time_df.iterrows():\r\n cur.execute(time_table_insert, list(row))\r\n\r\n # load user table\r\n user_df = df[['userId', 'firstName','lastName','gender','level']]\r\n user_df.drop_duplicates(subset='userId',keep ='first',inplace = True)\r\n\r\n # insert user records\r\n for i, row in user_df.iterrows():\r\n cur.execute(user_table_insert, row)\r\n\r\n # 
insert songplay records\r\n for index, row in df.iterrows():\r\n\r\n # get songid and artistid from song and artist tables\r\n print(cur.mogrify(song_select, (row.song, row.artist, row.length)))\r\n cur.execute(song_select, (row.song, row.artist, row.length))\r\n results = cur.fetchone()\r\n\r\n if results:\r\n songid, artistid = results[0],results[1]\r\n else:\r\n songid, artistid = \"None\" + str(index), \"None\" + str(index)\r\n\r\n # insert songplay record\r\n songplay_data = (df[['ts', 'userId', 'level', 'sessionId','location','userAgent' ]])\r\n songplay_data['ts'] = pd.to_datetime(df['ts'], unit='ms')\r\n cur.execute(songplay_table_insert, (index, row.ts, row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent))\r\n #conn.commit()\r", "def process_song_data(spark, input_data, output_data, mode=\"overwrite\"):\n # get filepath to song data file\n song_data = input_data + \"song_data/*/*/*/*.json\"\n\n # read song data file\n print(\"reading song logs from {}\".format(song_data))\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n song_fields = ['song_id', 'title', 'artist_id', 'year', 'duration']\n songs_table = df.select(song_fields)\n songs_table = songs_table.withColumn('year', F.col('year').cast(IntegerType()))\n songs_table = songs_table.withColumn('duration', F.col('duration').cast(DoubleType()))\n\n # write songs table to parquet files partitioned by year and artist\n song_path = output_data + 'star_schema/song_table/'\n print(\"Writing Song Table to {}\".format(song_path))\n songs_table.write \\\n .mode(mode) \\\n .partitionBy('year', 'artist_id') \\\n .parquet(song_path)\n\n # extract columns to create artists table\n artist_fields = [\n 'artist_id', 'artist_name',\n 'artist_location', 'artist_latitude', 'artist_longitude'\n ]\n artists_table = df.select(artist_fields)\n artists_table = artists_table.withColumnRenamed(\n 'artist_name', 'name'\n )\n artists_table = artists_table.withColumnRenamed(\n 'artist_location', 'location'\n )\n artists_table = artists_table.withColumn(\n 'latitude',\n F.col('artist_latitude').cast(DoubleType())\n )\n artists_table = artists_table.withColumn(\n 'longitude',\n F.col('artist_longitude').cast(DoubleType())\n )\n artist_col_names = [\n 'artist_id', 'name', 'location', 'latitude', 'longitude'\n ]\n artists_table = artists_table.select(artist_col_names)\n\n # write artists table to parquet files\n artists_path = output_data + 'star_schema/artist_table'\n print(\"Writing Artist Table to {}\".format(artists_path))\n artists_table.write \\\n .mode(mode) \\\n .partitionBy('artist_id') \\\n .parquet(artists_path)", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = os.path.join(input_data, 'song_data', '*', '*', '*')\n \n # read song data file\n df = spark.read.json(song_data)\n print(f\"Loaded song_data from {input_data}\")\n df.createOrReplaceTempView(\"song_data\")\n \n # extract columns to create songs table\n songs_table = spark.sql(\"\"\"\n SELECT song_id, title, artist_id, year, duration\n FROM song_data\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n songs_table_path = os.path.join(output_data, \"songs_table.parquet\")\n (songs_table.\n write.\n mode(\"overwrite\").\n partitionBy(\"year\", \"artist_id\").\n parquet(songs_table_path))\n print(f\"Stored song table on {songs_table_path}\")\n # extract columns to create artists table\n artists_table = spark.sql(\"\"\"\n SELECT \n DISTINCT(artist_id) 
AS artist_id, \n artist_name AS name, \n artist_location AS location, \n artist_latitude AS latitude, \n artist_longitude AS longitude\n FROM song_data\n \"\"\")\n \n # write artists table to parquet files\n artists_table_path = os.path.join(output_data, \"artists_table.parquet\")\n (artists_table.\n write.\n mode(\"overwrite\").\n parquet(artists_table_path))\n print(f\"Stored artists table at {artists_table_path}\")", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = os.path.join(input_data, \"song-data/*/*/*/*.json\")\n\n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select('song_id', 'title', 'artist_id', 'year', 'duration')\n\n # write songs table to parquet files partitioned by year and artist_id\n songs_table.write.partitionBy('year', 'artist_id').parquet(output_data + 'songs', mode='overwrite')\n\n # extract columns to create artists table\n artists_table = df.select('artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude')\n\n # write artists table to parquet files\n artists_table.write.parquet(output_data + 'artists', mode='overwrite')", "def process_input_data(input_data_path):\n if os.path.isdir(input_data_path):\n input_data_glob = glob.glob(input_data_path + \"/*.csv\")\n else:\n if is_gcs_path(input_data_path):\n # Download the input to a local\n with tempfile.NamedTemporaryFile() as hf:\n input_data = hf.name\n\n logging.info(\"Copying %s to %s\", input_data_path, input_data)\n input_data_gcs_bucket, input_data_gcs_path = split_gcs_uri(\n input_data_path)\n\n logging.info(\"Download bucket %s object %s.\", input_data_gcs_bucket,\n input_data_gcs_path)\n bucket = storage.Bucket(storage.Client(), input_data_gcs_bucket)\n storage.Blob(input_data_gcs_path, bucket).download_to_filename(\n input_data)\n else:\n input_data = input_data_path\n\n ext = os.path.splitext(input_data)[-1]\n if ext.lower() == '.zip':\n zip_ref = zipfile.ZipFile(input_data, 'r')\n zip_ref.extractall('.')\n zip_ref.close()\n # TODO: Hardcoding the file in the Archive to use is brittle.\n # We should probably just require the input to be a CSV file.:\n csv_file = 'stackoverflow-questions.csv'\n else:\n csv_file = input_data\n\n input_data_glob = glob.glob(csv_file)\n\n return input_data_glob", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = input_data + 'song_data/*/*/*/*.json'\n \n # read song data file\n df = spark.read.json(song_data)\n \n # create view for songs table\n df.createOrReplaceTempView(\"songs\") \n \n \n # extract columns to create songs table. 
Adding Distinct and Not null to song_id as it is the primary key\n songs_table = spark.sql(\"\"\"\n SELECT DISTINCT song_id, \n title,\n artist_id,\n year,\n duration\n FROM songs\n WHERE song_id IS NOT NULL\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.mode('overwrite').partitionBy(\"year\", \"artist_id\").parquet(output_data+'songs_table/')\n\n # create view for artists table\n df.createOrReplaceTempView(\"artists\") \n \n # extract columns to create artists table, Adding Distinct and Not null to artist_id as it is the primary key\n artists_table = spark.sql(\"\"\"\n SELECT DISTINCT artist_id, \n artist_name,\n artist_location,\n artist_latitude,\n artist_longitude\n FROM artists\n WHERE artist_id IS NOT NULL\n \"\"\")\n \n # write artists table to parquet files\n artists_table.write.mode('overwrite').parquet(output_data+'artists_table/')", "def process_song_data(spark, song_input_data, output_data):\n # get filepath to song data file\n song_data = song_input_data\n \n # read song data file\n print(\"Processing JSON song data...\")\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n print(\"Extracting columns for song table...\")\n df.createOrReplaceTempView(\"songs_table\")\n songs_table = spark.sql(\"\"\"\n SELECT song_id, title, artist_id, year, duration\n FROM songs_table\n ORDER BY song_id\n \"\"\") \n \n print(\"Song table sample:\")\n songs_table.show(5, truncate=False)\n \n # write songs table to parquet files partitioned by year and artist\n print(\"Writing songs table to parquet files...\")\n songs_table.write.mode(\"overwrite\").partitionBy(\"year\", \"artist_id\")\\\n .parquet(output_data + \"songs_table.parquet\")\n \n # extract columns to create artists table\n print(\"Extracting columns for artists table...\")\n df.createOrReplaceTempView(\"artists_table\")\n artists_table = spark.sql(\"\"\"\n SELECT artist_id,\n artist_name AS name,\n artist_location AS location,\n artist_latitude AS latitude,\n artist_longitude AS longitude\n FROM artists_table\n ORDER BY artist_id DESC\n \"\"\") \n\n print(\"Artists table sample:\")\n artists_table.show(5, truncate=False)\n \n # write artists table to parquet files\n print(\"Writing artists table to parquet files...\")\n songs_table.write.mode(\"overwrite\").parquet(output_data + \"artists_table.parquet\")", "def _process_task_log(self):\n directory = self._executor.log_dir\n if os.path.exists(directory):\n for root, _dirs, files in os.walk(directory):\n for name in files:\n filepath = os.path.join(root, name)\n object_name = str(self._task.project_id) + \"/\" + self._task.node_id + \"/log/\" + name\n if not self._s3.client.upload_file(self._s3.bucket, object_name, filepath):\n log.error(\"Error uploading file to S3\")", "def process_song_data(spark, input_data, output_data):\n \n # get filepath to song data file\n song_data = os.path.join( input_data, \"song_data/*/*/*/*.json\")\n \n # SONG TABLE\n # read song data file\n df = spark.read.json(song_data)\n \n # extract columns to create songs table\n songs_table = df.select('song_id', 'title', 'artist_id',\n 'year', 'duration').dropDuplicates(['song_id'])\n \n print( \"HERE songs_table sample:\\n\")\n songs_table.show(5)\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id').parquet(os.path.join(output_data, 'songs/songs.parquet'), 'overwrite')\n \n # ARTISTS TABLE\n # extract columns to create artists table\n artists_table = 
df.select(\"artist_id\",\"artist_name\",\"artist_location\",\"artist_latitude\",\"artist_longitude\").dropDuplicates(['artist_id'])\n \n print( \"HERE artists_table sample:\\n\")\n artists_table.show(5)\n # write artists table to parquet files\n artists_table.write.parquet(output_data + \"artists/\", mode=\"overwrite\")", "def process_song_data(spark, input_data, output_data):\n\n # get filepath to song data file\n input_data = input_data+\"/song-data\"\n print(input_data)\n \n \"\"\"\n for x in os.walk(input_data):\n for y in glob.glob(os.path.join(x[0], '*.json')):\n song_data.append(y)\n \"\"\"\n\n df = spark.read.json(input_data)\n df.createOrReplaceTempView(\"Staging_Song_Data\")\n song_Data_DF = spark.sql(\"select * from Staging_Song_Data\")\n\n # extract columns to create songs table\n songs_query = \" SELECT song_id, title, artist_id, year, duration \" \\\n \" FROM Staging_Song_Data\"\n songs_table = spark.sql(songs_query)\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.mode('overwrite').partitionBy(\"year\",\"artist_id\").parquet(path=output_data+\"songs\")\n\n # extract columns to create artists table\n artists_query=\"select artist_id, artist_name as name, \" \\\n \" artist_location as location, \" \\\n \" artist_latitude as latitude, \" \\\n \" artist_longitude as longitude \" \\\n \"from Staging_Song_Data \"\n artists_table =spark.sql(artists_query)\n artists_table = artists_table.dropDuplicates(['artist_id'])\n\n # write artists table to parquet files\n artists_table.write.mode('overwrite').parquet(path=output_data+\"artists\")", "def process_log_file(cur, filepath):\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df['page']=='NextSong']\n\n # convert timestamp column to datetime\n t = pd.to_datetime(df['ts'], unit='ms')\n \n # insert time data records\n #timestamp, hour, day, week of year, month, year, and weekday\n hour = t.dt.hour\n day = t.dt.day\n weekofyear = t.dt.weekofyear\n month = t.dt.month\n year = t.dt.year\n weekday = t.dt.weekday\n\n time_data = [df['ts'], hour, day, weekofyear, month, year, weekday]\n column_labels = ['timestamp', 'hour', 'day', 'week of year', 'month', 'year', 'weekday']\n time_df = pd.DataFrame.from_dict(dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n try:\n cur.execute(time_table_insert, list(row))\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in time table in row number : {}\".format(i))\n print(e)\n\n # load user table\n user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n # insert user records\n for i, row in user_df.iterrows():\n try:\n cur.execute(user_table_insert, row)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in users table in row number : {}\".format(i))\n print(e)\n\n # insert songplay records\n for index, row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n try:\n cur.execute(song_select, (row.song, row.artist, row.length))\n except psycopg2.Error as e:\n print(\"Error: Unable to execute song_select query to join songs and artists table in row number : {}\".format(index))\n print(e)\n results = cur.fetchone()\n \n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n #timestamp, user ID, level, song ID, artist ID, session ID, location, and user agent\n songplay_data = (row.ts, row.userId, row.level, songid, artistid, row.sessionId, row.location, 
row.userAgent)\n try:\n cur.execute(songplay_table_insert, songplay_data)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in songplays table in row number : {}\".format(i))\n print(e)", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = os.path.join(input_data, 'song_data/*/*/*/*.json')\n \n #specify schema for increased performance and control\n song_schema = StructType([\n StructField(\"artist_id\", StringType()),\n StructField(\"artist_latitude\", DoubleType()),\n StructField(\"artist_location\", StringType()),\n StructField(\"artist_longitude\", StringType()),\n StructField(\"artist_name\", StringType()),\n StructField(\"duration\", DoubleType()),\n StructField(\"num_songs\", IntegerType()),\n StructField(\"song_id\", StringType()),\n StructField(\"title\", StringType()),\n StructField(\"year\", IntegerType()),\n ])\n \n # read song data file\n dfs = spark.read.json(song_data, schema=song_schema)\n \n # create temporary view of table in order to run SQL queries\n dfs.createOrReplaceTempView(\"song_table\")\n\n # extract columns to create songs table\n dim_songs = spark.sql(\"\"\"\n SELECT song_id,\n title,\n artist_id,\n year,\n duration\n FROM song_table\n WHERE song_id IS NOT NULL\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n dim_songs.write.mode('overwrite').partitionBy(\"year\", \"artist_id\").parquet(output_data+\"songs\")\n\n # extract columns to create artists table\n dim_artists = spark.sql(\"\"\"\n SELECT DISTINCT artist_id,\n artist_name AS name,\n artist_location AS location,\n artist_latitude AS latitude,\n artist_longitude AS longitude\n FROM song_table\n WHERE artist_id IS NOT NULL\n \"\"\")\n \n # write artists table to parquet files\n dim_artists.write.mode('overwrite').parquet(output_data+\"artists\")", "def _create_input_data(self):\n SCHEMA = parse_table_schema_from_json(\n '{\"fields\": [{\"name\": \"data\", \"type\": \"BYTES\"}]}')\n\n def format_record(record):\n # Since Synthetic Source returns data as a dictionary, we should skip one\n # of the part\n import base64\n return {'data': base64.b64encode(record[1])}\n\n with TestPipeline() as p:\n ( # pylint: disable=expression-not-assigned\n p\n | 'Produce rows' >> Read(\n SyntheticSource(self.parse_synthetic_source_options()))\n | 'Format' >> Map(format_record)\n | 'Write to BigQuery' >> WriteToBigQuery(\n dataset=self.input_dataset,\n table=self.input_table,\n schema=SCHEMA,\n create_disposition=BigQueryDisposition.CREATE_IF_NEEDED,\n write_disposition=BigQueryDisposition.WRITE_EMPTY))", "def main():\n import sys\n n = len(sys.argv)\n if n != 3:\n raise ValueError(\"Please specify an input s3 bin and output s3 bin\")\n spark = create_spark_session()\n input_data = sys.argv[1]\n output_data = sys.argv[2]\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def lambda_handler(event, context):\n \n filename = None\n fobj = None\n\n try:\n \n filename = 'dlq' + '-' + datetime.datetime.now().strftime(\"%s\")\n fobj = open('/tmp/'+filename, 'w')\n logger.debug('S3 client set up.')\n\n for record in event['Records']:\n fobj.write(json.dumps(record['body']))\n fobj.write(\"\\n\")\n \n except Exception as ex:\n logger.error('Exception in executing ingestion to S3: {}'.format(ex))\n send_sns_alert(str(ex))\n raise\n\n else:\n \n #Saves file to S3\n fobj.close()\n load_data_s3(filename)\n\n return {\n 'statusCode': 200,\n 'body': json.dumps('Success!')\n 
}\n\n finally:\n\n # S3 - close temp object\n fobj.close()", "def process_song_data(spark, input_data, output_data):\n # get filepath to song data file\n song_data = \"{}/song_data/*/*/*/*\".format(input_data)\n \n # define schema used to import song json data into our data frame\n songSchema = R([\n Fld(\"num_songs\", Int()),\n Fld(\"artist_id\", Str()),\n Fld(\"artist_latitude\", Dbl()),\n Fld(\"artist_longitude\", Dbl()),\n Fld(\"artist_location\", Str()),\n Fld(\"artist_name\", Str()),\n Fld(\"song_id\", Str()),\n Fld(\"title\", Str()),\n Fld(\"duration\", Dbl()),\n Fld(\"year\", Int())\n\n ])\n \n # read song data file\n df = spark.read.format(\"json\").load(song_data, schema=songSchema)\n\n # register the temp view from our dataframe df_song\n df.createOrReplaceTempView(\"df_song\")\n \n # extract columns to create songs table\n songs_table = spark.sql(\"\"\"\n SELECT DISTINCT song_id, \n title,\n artist_id, \n artist_name,\n year, \n duration FROM df_song\n \"\"\")\n \n # write songs table to parquet files partitioned by year and artist\n songs_table.write.partitionBy('year', 'artist_id').parquet('{}/dim_song.pq'.format(output_data))\n\n # extract columns to create artists table\n artists_table = spark.sql(\"\"\"\n SELECT DISTINCT artist_id,\n artist_name,\n artist_location,\n artist_latitude,\n artist_longitude\n FROM df_song\n \"\"\")\n \n # write artists table to parquet files\n artists_table.write.parquet('{}/dim_artist.pq'.format(output_data))", "def process_city_data(spark, input_data, output_data):\n\n # get filepath to city data file\n city_data = os.path.join(input_data, 'city_data/*.csv')\n\n # read city data files\n df = spark.read.csv(city_data, header=True, sep=';')\n df = df.drop_duplicates(subset=['City', 'State'])\n print('city_count = ', df.count())\n\n # extract columns to create city table\n city_table = df.select(\n F.monotonically_increasing_id().alias('city_id'),\n F.col('City').alias('city_name'),\n F.col('State Code').alias('state'),\n F.col('State Code').alias('state_code'),\n F.col('Total Population').alias('total_population')\n )\n\n city_table.write.parquet(os.path.join(output_data, 'cities'), 'overwrite')", "def process_log_file(cur, filepath):\n # open log file\n data_frame = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n data_frame = data_frame[data_frame.page == 'NextSong']\n\n # convert timestamp column to datetime\n time_value = pd.to_datetime(data_frame['ts'])\n\n # insert time data records\n time_data = (time_value, time_value.dt.year, time_value.dt.month,\n time_value.dt.isocalendar().week, time_value.dt.dayofweek,\n time_value.dt.day, time_value.dt.hour)\n\n column_labels = ('timestamp', 'year', 'month', 'week', 'weekday', 'day', 'hour')\n\n time_df = pd.DataFrame.from_dict(dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n # load user table\n user_df = data_frame[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n for index, row in data_frame.iterrows():\n\n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = row[['ts', 'userId', 'level', 'sessionId', 'location', 'userAgent']]. 
\\\n append(pd.Series([songid,\n artistid],\n index=[\n 'songid',\n 'artistid']))\n songplay_data['ts'] = pd.to_datetime(songplay_data['ts'])\n\n cur.execute(songplay_table_insert, songplay_data)", "def process_log_file(cur, filepath):\n df = pd.read_json(filepath, lines=True)\n df = df.loc[df.page == 'NextSong', :]\n\n # insert time data records\n df['ts'] = pd.to_datetime(df.ts, unit='ms')\n t = df['ts']\n time_data = {\n 'start_time': t,\n 'hour': t.dt.hour,\n 'day': t.dt.day,\n 'week': t.dt.week,\n 'month': t.dt.month,\n 'year': t.dt.year,\n 'weekday': t.dt.weekday\n }\n time_df = pd.DataFrame(data=time_data)\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n # insert user records\n user_df = df.loc[:, ['userId', 'firstName', 'lastName', 'gender', 'level']]\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n for index, row in df.iterrows():\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n songplay_data = {\n 'start_time': row.ts,\n 'user_id': row.userId,\n 'level': row.level,\n 'song_id': songid,\n 'artist_id': artistid,\n 'session_id': row.sessionId,\n 'location': row.location,\n 'user_agent': row.userAgent\n }\n cur.execute(songplay_table_insert, list(songplay_data.values()))", "def process_log_file(cursor, filepath):\n\n def get_timestamp_data(df):\n # convert timestamp column to datetime\n timestamp = pd.to_datetime(df['ts'], unit='ms')\n\n return (df['ts'].values,\n timestamp.dt.hour.values,\n timestamp.dt.day.values,\n timestamp.dt.week.values,\n timestamp.dt.month.values,\n timestamp.dt.year.values,\n timestamp.dt.weekday.values)\n\n # open log file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df['page'] == 'NextSong']\n\n # insert time data records\n time_data = get_timestamp_data(df)\n column_labels = ('timestamp', 'hour', 'day', 'week', 'month', 'year', 'weekday')\n time_df = pd.DataFrame(data=dict(zip(column_labels, time_data)))\n\n for i, row in time_df.iterrows():\n cursor.execute(time_table_insert, list(row))\n\n # load user table\n user_columns = ['userId', 'firstName', 'lastName', 'gender', 'level']\n user_df = df[user_columns]\n\n # insert user records\n for i, row in user_df.iterrows():\n cursor.execute(user_table_insert, row)\n\n # insert songplay records\n for index, row in df.iterrows():\n\n # get song_id and artist_id from song and artist tables\n cursor.execute(song_select, (row.song, row.artist, row.length))\n results = cursor.fetchone()\n\n if results:\n song_id, artist_id = results\n else:\n song_id, artist_id = None, None\n\n # insert songplay record\n songplay_data = (\n row['ts'], row['userId'], row['level'], song_id, artist_id, row['sessionId'], row['location'],\n row['userAgent'])\n cursor.execute(songplay_table_insert, songplay_data)", "def s3_process(self, payload, classifier):\n s3_file_lines = StreamPreParsers.pre_parse_s3(payload.raw_record)\n for line in s3_file_lines:\n data = line.rstrip()\n payload.refresh_record(data)\n self.process_alerts(classifier, payload, data)", "def main():\n spark = create_spark_session()\n\n input_data = \"s3a://udacitydenanodegree2020/\"\n output_data = \"s3a://udacitydenanodegree2020/output/\"\n\n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def process_log_file(cur, filepath: str) -> None:\n # open log 
file\n df = pd.read_json(filepath, lines=True)\n\n # filter by NextSong action\n df = df[df.page == \"NextSong\"].copy()\n\n # convert timestamp column to datetime\n df[\"ts\"] = pd.to_datetime(df[\"ts\"], unit=\"ms\")\n\n # insert time data records\n column_labels = [\"hour\", \"day\", \"week\", \"month\", \"year\", \"weekday\"]\n\n for column_label in column_labels:\n if column_label == \"week\":\n # FutureWarning: Series.dt.weekofyear and Series.dt.week have been\n # deprecated. Please use Series.dt.isocalendar().week instead.\n df[column_label] = df[\"ts\"].dt.isocalendar().week\n else:\n df[column_label] = getattr(df[\"ts\"].dt, column_label)\n\n time_df = pd.DataFrame()\n time_df[\"start_time\"] = df[\"ts\"]\n time_df = pd.concat([time_df, df[column_labels]], axis=1)\n time_df.head()\n\n for i, row in time_df.iterrows():\n cur.execute(sql_queries.time_table_insert, list(row))\n\n # load user table\n user_df = df[[\"userId\", \"firstName\", \"lastName\", \"gender\", \"level\"]]\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(sql_queries.user_table_insert, row)\n\n # insert songplay records\n for _, row in df.iterrows():\n\n # get songid and artistid from song and artist tables\n cur.execute(sql_queries.song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n songplay_data = (\n row.ts,\n row.userId,\n row.level,\n songid,\n artistid,\n row.sessionId,\n row.location,\n row.userAgent,\n )\n cur.execute(sql_queries.songplay_table_insert, songplay_data)", "def load_raw_to_bq(event, context):\n\n import os\n\n\n print(f\"Processing .....\")\n\n file = event\n project = os.environ.get('ENV_PROJECT')\n dataset = os.environ.get('ENV_DATASET')\n bucket = file.get(\"bucket\")\n tableCsv = file.get(\"name\")\n tableDestList = tableCsv.split(\".\")\n tableDest = tableDestList[0]\n table_id = f'{project}.{dataset}.{tableDest}'\n\n from Configuration import Configuration\n\n Configuration(tableCsv,bucket,table_id)\n\n\n print(f\"End Process.\")", "def _ProcessLog(self, log_processor, logfile): # pylint: disable=R0201\n for line in open(os.path.join(self.data_directory, logfile)):\n log_processor.ProcessLine(line)", "def get_data(self):\n s3 = boto3.resource('s3')\n for obj in s3.Bucket(self.bucket_name).objects.all():\n output=str(obj.get()['Body'].read())\n ip=re.findall( r'[0-9]+(?:\\.[0-9]+){3}', output)[0] \n date=output.split(' [')[-1].split(':')[0] \n date=datetime.datetime.strptime(date, \"%d/%b/%Y\")\n if int(self.check_table(ip, date)) == 0: \n self.write_to_table(ip, date)", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://dend-bucket-cpm/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def process_log_file(cur, filepath):\n df = pd.read_json(filepath, lines=True)\n\n df = df[df['page'] == 'NextSong'].astype({'ts':'datetime64[ms]'})\n\n t = pd.Series(df['ts'], index=df.index)\n\n time_data = [[d, d.hour, d.day, d.week, d.month, d.year, d.weekday()] for d in t]\n column_labels= ['ts', 'hour', 'day', 'week', 'month', 'year', 'weekday']\n time_df = pd.DataFrame(data=time_data, columns=column_labels)\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n columns = ['userId', 'firstName', 'lastName', 'gender', 'level']\n user_df = df[[*columns]]\n user_df = 
user_df[user_df.firstName.notnull()]\n\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n for index, row in df.iterrows():\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n songplay_data = (row.ts, row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent)\n cur.execute(songplay_table_insert, songplay_data)", "def main(event, context):\n r = requests.get(yql_url)\n\n if r.status_code == 200:\n content = r.content\n t = datetime.now()\n filename = 'iran_' + t.strftime('%Y-%m-%dT%H-%M-%S') + '.json'\n put_data_in_s3(content, filename)\n\n data_dict = json.loads(content)\n data = data_dict.get('list')\n\n # start inserting data into timeseries database\n write_wind_data_to_csv(data)\n conn = psycopg2.connect(dbname='climateTSDB', host=DB_HOST_NAME,\n user=USERNAME, password=PASS)\n cur = conn.cursor()\n f = open('/tmp/wind_data.csv', 'r')\n cur.copy_from(f, 'windts', sep=',', columns=['speed', 'degree',\n 'utc_time', 'latitude', 'longitude'])\n conn.commit()\n try:\n os.remove('/tmp/wind_data.csv')\n except OSError:\n pass\n cur.close()\n\n write_temperature_data_to_csv(data)\n conn = psycopg2.connect(dbname='climateTSDB', host=DB_HOST_NAME,\n user=USERNAME, password=PASS)\n cur = conn.cursor()\n f = open('/tmp/temperature_data.csv', 'r')\n cur.copy_from(f, 'temperaturets', sep=',', columns=['temperature'\n ,'utc_time', 'latitude', 'longitude'])\n conn.commit()\n try:\n os.remove('/tmp/temperature_data.csv')\n except OSError:\n pass\n cur.close()\n\n write_pressure_data_to_csv(data)\n conn = psycopg2.connect(dbname='climateTSDB', host=DB_HOST_NAME,\n user=USERNAME, password=PASS)\n cur = conn.cursor()\n f = open('/tmp/pressure_data.csv', 'r')\n cur.copy_from(f, 'pressurets', sep=',', columns=['pressure'\n ,'utc_time', 'latitude', 'longitude'])\n conn.commit()\n try:\n os.remove('/tmp/pressure_data.csv')\n except OSError:\n pass\n cur.close()\n\n write_humidity_data_to_csv(data)\n conn = psycopg2.connect(dbname='climateTSDB', host=DB_HOST_NAME,\n user=USERNAME, password=PASS)\n cur = conn.cursor()\n f = open('/tmp/humidity_data.csv', 'r')\n cur.copy_from(f, 'humidityts', sep=',', columns=['humidity'\n ,'utc_time', 'latitude', 'longitude'])\n conn.commit()\n try:\n os.remove('/tmp/humidity_data.csv')\n except OSError:\n pass\n cur.close()\n\n write_condition_data_to_csv(data)\n conn = psycopg2.connect(dbname='climateTSDB', host=DB_HOST_NAME,\n user=USERNAME, password=PASS)\n cur = conn.cursor()\n f = open('/tmp/condition_data.csv', 'r')\n cur.copy_from(f, 'conditionts', sep=',', columns=['condition'\n ,'utc_time', 'latitude', 'longitude'])\n conn.commit()\n try:\n os.remove('/tmp/condition_data.csv')\n except OSError:\n pass\n cur.close()", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend\"\n output_data = \"s3a://vivek1bucket\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def process_log_file(cur, filepath):\r\n df=pd.read_json(filepath,lines=True)\r\n df2=df\r\n df=df[df['page']=='NextSong']\r\n ser=pd.to_datetime(df['ts'],unit='ms')\r\n times=[]\r\n for i in ser:\r\n times.append([i,i.hour,i.day,i.week,i.month,i.year,i.day_name()])\r\n for i in times:\r\n cur.execute(time_table_insert,i)\r\n df=df[['userId','firstName','lastName','gender','level']]\r\n for i,row in df.iterrows():\r\n 
cur.execute(users_table_insert,list(row))\r\n for i, row in df2.iterrows():\r\n cur.execute(song_select, (row.song, row.artist, row.length))\r\n res = cur.fetchone()\r\n if res:\r\n song_id, artist_id = res\r\n else:\r\n song_id, artist_id = None, None\r\n\r\n songplay_data = (\r\n i, pd.to_datetime(row.ts, unit='ms'),int(row.userId), row.level, song_id, artist_id, row.sessionId,\r\n row.location, row.userAgent)\r\n cur.execute(songplays_table_insert, songplay_data)", "def main(input_filepath, output_filepath):\n productsDict = dataToDict(input_filepath)\n productsList = dictToCSV(productsDict)\n toCSV(productsList, output_filepath)\n\n logger = logging.getLogger(__name__)\n logger.info('making final data set from raw data')", "def process_log_file(cur, filepath):\n\n df = pd.read_json(filepath, lines=True)\n\n df = df[df[\"page\"] == \"NextSong\"]\n\n t = df['ts'] = pd.to_datetime(df['ts'], unit='ms')\n\n accessor = t.dt\n time_data = (t, accessor.hour, accessor.day, accessor.week,\n accessor.month, accessor.year, accessor.weekday)\n\n time_df = pd.DataFrame.from_dict({\n \"timestamp\": t,\n \"hour\": accessor.hour,\n \"day\": accessor.day,\n \"week\": accessor.week,\n \"month\": accessor.month,\n \"year\": accessor.year,\n \"weekday\": accessor.weekday\n })\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']]\n\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n for index, row in df.iterrows():\n\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n\n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n start_time = row[\"ts\"]\n user_id = row[\"userId\"]\n level = row[\"level\"]\n song_id = songid\n artist_id = artistid\n session_id = row['sessionId']\n location = row['location']\n user_agent = row['userAgent']\n\n songplay_data = (start_time, user_id, level, song_id, artist_id, session_id,\n location, user_agent)\n cur.execute(songplay_table_insert, songplay_data)", "def handle_event(event, context):\n\n try:\n print(\"Received event: \" + json.dumps(event, indent=2))\n\n # grab resources section of event, get task execution ids\n task_execution_arns = event['resources']\n\n # now fetch the input filter info from each task_detail, fire off jobs\n new_files_to_process = []\n for task_execution_arn in task_execution_arns:\n # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/datasync.html#DataSync.Client.describe_task_execution\n response = datasync_client.describe_task_execution(TaskExecutionArn=task_execution_arn)\n print(\"Task execution details: \" + str(response))\n # this will be the location of the data in configured s3 bucket:\n # 'Includes': [\n # {\n # 'FilterType': 'SIMPLE_PATTERN',\n # 'Value': 'string'\n # },\n # ]\n if len(response['Includes']) > 0:\n file = response['Includes'][0]['Value']\n # files typically start with leading '/', strip that leading '/'\n print(\"Got filename:\" + file)\n if file.startswith('/', 0):\n new_files_to_process.append(file.lstrip('/'))\n else:\n new_files_to_process.append(file)\n else:\n print(\"Response didn't contain Includes files...\")\n\n if len(new_files_to_process) == 0:\n print('No files were parsed from input...exiting')\n return\n\n for new_file_to_process in new_files_to_process:\n state_machine_arn = os.environ['STATE_MACHINE_ARN']\n payload = {\"ObjectName\": new_file_to_process}\n json_payload = 
json.dumps(payload)\n print('Starting bcl2fastq with payload %s' % json_payload)\n #\n response = step_client.start_execution(stateMachineArn=state_machine_arn, input=json_payload)\n print(response)\n\n except Exception as e:\n print(e)\n print('Error handling event. %s' % e)\n raise e", "def lambda_handler(event, context): # pylint: disable=too-many-locals,too-many-branches,too-many-statements\r\n try: # pylint: disable=too-many-nested-blocks\r\n print(\"Execution started!\")\r\n #print(\"Event: \",event)\r\n # Bucket name and Full path for file - where file will be uploded\r\n source_bucket_name = event[\"detail\"][\"requestParameters\"][\"bucketName\"]\r\n source_key = urllib.parse.unquote_plus(\r\n event[\"detail\"][\"requestParameters\"][\"key\"], encoding='utf-8')\r\n \r\n print(\"file_path: \",source_key)\r\n #Loading master config\r\n print(\"Loading master_config\")\r\n audit_config = {}\r\n config_path = \"./config/\" + \\\r\n os.environ['CCM_ENV'] + \"/master_config.json\"\r\n config_content = open(config_path).read()\r\n config_json = json.loads(config_content)\r\n audit_config = config_json[\"audit_config\"]\r\n snow_params = config_json[\"ERROR_NOTIFICATION_SNOW_PARAMS\"]\r\n athena_query_param = config_json[\"ATHENA_QUERY_PARAMS\"]\r\n athena_table_params = config_json[\"ATHENA_TABLE_PARAMS\"]\r\n\r\n # Audit Parameters Based on the Invoking lambda and its operation involved\r\n audit_config[\"component_type_code\"] = \"ETL\"\r\n audit_config[\"component_name\"] = \"PCP Appflow\"\r\n audit_config[\"source_name\"] = \"Patient Connections Platform\"\r\n audit_config[\"target_name\"] = \"Consumer Consent Management\"\r\n audit_config[\"full_file_path\"] = \"s3://\" + \\\r\n source_bucket_name + \"/\" + source_key\r\n audit_config[\"file_version_id\"] = \"\"\r\n\r\n # Creates Job Entry in ABC Framework\r\n print(\"audit config::\", audit_config)\r\n process_execution_id = audit_helper.\\\r\n invoke_edb_abc_log_process_status_event_job_entry(audit_config)\r\n audit_config[\"process_execution_id\"] = process_execution_id\r\n print(\"process_execution_id ::\", process_execution_id)\r\n #print(\"source_key: \",source_key)\r\n s3_write = boto3.client('s3')\r\n record_dict = {}\r\n file_name = \"\"\r\n final_json = \"\"\r\n # prefix = \"\"\r\n # file_list = []\r\n # client = boto3.client(\"s3\")\r\n # result = client.list_objects(Bucket=source_bucket_name, Prefix=source_key, Delimiter='/')\r\n # #print(result)\r\n # for obj in result.get('CommonPrefixes'):\r\n # prefix = obj.get('Prefix')\r\n # #print(prefix)\r\n # file_list = list_files(client,source_bucket_name,prefix)\r\n # for file in file_list:\r\n # #print(file)\r\n json_read = read_s3_file(source_bucket_name, source_key)\r\n data = json.loads(json_read)\r\n #print(data)\r\n if data != '':\r\n record_dict = {k.lower(): v for k, v in data.items()}\r\n print(\"Record_Dict::\",record_dict)\r\n event_type_param = {}\r\n event_type_list = athena_table_params.keys()\r\n print(\"event_type_list\",event_type_list)\r\n for key in event_type_list:\r\n print(\"key\",key)\r\n if key in source_key:\r\n print(\"key\",key)\r\n event_type_param = athena_table_params[key]\r\n print(event_type_param)\r\n if \"changeeventheader\" in record_dict:\r\n if record_dict[\"changeeventheader\"][\"changeType\"] == \"CREATE\":\r\n #and record_dict[\"dtpc_affiliate__c\"] == 'US':\r\n recordid_create = record_dict[\"changeeventheader\"][\"recordIds\"][0]\r\n print(recordid_create)\r\n if recordid_create != '':\r\n last_modified_date = 
record_dict[\"lastmodifieddate\"].replace(\":\",\".\")\r\n create_json = json.dumps(record_dict)\r\n final_json = create_json\r\n file_name = recordid_create + \"-create-\" + str(last_modified_date)\r\n print(\"file_name: \",file_name)\r\n outbound_path = event_type_param[\"folder_path\"]\r\n final_source_key = outbound_path + '/' + file_name+\".json\"\r\n print(\"final_source_key :\", final_source_key)\r\n s3_write.put_object(\r\n Body=final_json, Bucket=source_bucket_name, Key=final_source_key)\r\n else:\r\n raise Exception(\"RecordId is missing: \", record_dict)\r\n elif record_dict[\"changeeventheader\"][\"changeType\"] == \"UPDATE\":\r\n record_ids_list = record_dict[\"changeeventheader\"][\"recordIds\"]\r\n if len(record_ids_list) != 0:\r\n for ele in record_ids_list:\r\n print(ele)\r\n element = \"'\" + ele + \"'\"\r\n payload_condition = event_type_param[\"recordid_condition\"]\r\n query = 'SELECT * FROM '+event_type_param[\"athena_create_table\"]+\\\r\n ' WHERE lastmodifieddate IN(SELECT max(lastmodifieddate) from '\\\r\n +event_type_param[\"athena_create_table\"]+\\\r\n ', UNNEST(\"'+payload_condition[0]+'\".\"'+payload_condition[1]+\\\r\n '\") AS ln(jsondata) WHERE jsondata IN ('+element+'));'\r\n print(query)\r\n athena_query_param['athena_query'] = query\r\n query_result_record_id = athena_helper.perform_athena_search\\\r\n (athena_query_param)\r\n print(\"Athena Query Result for Create Path:::\", query_result_record_id)\r\n update_json = create_complete_payload(data,query_result_record_id)\r\n print(\"update_json: \",update_json)\r\n if len(update_json) != 0:\r\n last_modified_date = record_dict[\"lastmodifieddate\"].replace\\\r\n (\":\",\".\")\r\n final_json = json.dumps(update_json)\r\n file_name = ele + \"-update-\" + str(last_modified_date)\r\n print(\"file_name: \",file_name)\r\n outbound_path = event_type_param[\"folder_path\"]\r\n final_source_key = outbound_path + '/' + file_name+\".json\"\r\n print(\"final_source_key :\", final_source_key)\r\n s3_write.put_object(\r\n Body=final_json, Bucket=source_bucket_name, \\\r\n Key=final_source_key)\r\n else:\r\n print(ele,\" does not have a create payload\")\r\n else:\r\n raise Exception(\"RecordId is missing: \", record_dict)\r\n else:\r\n raise Exception(\"ChangeEventHeader is missing: \", record_dict)\r\n else:\r\n raise Exception(\"Invalid Payload: \", record_dict)\r\n\r\n except (Exception) as err: # pylint: disable=line-too-long,broad-except\r\n print(\"Error occured: {0}\".format(str(err)))\r\n audit_type = \"error\"\r\n error_msg = sys.exc_info()\r\n exc_type = error_msg\r\n exc_obj = error_msg\r\n snow_params[\"flag\"] = \"FAIL\"\r\n snow_params[\"error_message\"] = str(exc_obj)\r\n snow_params[\"error_type\"] = str(exc_type)\r\n audit_config[\"exception_message\"] = str(exc_obj)\r\n if audit_config != {}:\r\n logging.exception(sys.exc_info())\r\n audit_helper.invoke_edb_abc_log_process_status_event(\r\n audit_type, audit_config) # pylint: disable=line-too-long\r\n audit_helper.raise_snow_incident(snow_params)", "def process_data(data):\n # Table Name from Json file\n table_name = data['table_name']\n\n # No of Column\n column_count = data['column_count']\n\n # No of Row\n row_count = data['row_count']\n\n # Table columns schema from Json file\n column_properties = data['column_properties']\n\n # Get the row row_properties\n row_properties = data['row_properties']\n return table_name, column_count, row_count, column_properties, row_properties", "def copy_blobs_in_gcp_storage(source_bucket_name):\n\n # extract file 
and convert in dataframe\n extracted_df = extract()\n\n # transform the df\n transformed_df = transform (extracted_df)\n\n # the function loads clean csv content as csv file in clean-zone-bucket\n load(transformed_df)\n\n\n return \"Function executed sucessfully!\"", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://udacity-data-lake/output/\"\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"data/analytics\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)", "def process_log_data(log_data,\r\n seq_counts,\r\n mapping_file,\r\n fasta_files,\r\n qual_files,\r\n corrected_bc_count,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n start_index=1,\r\n write_unassigned_reads=False,\r\n disable_bc_correction=False,\r\n added_demultiplex_field=None,\r\n save_barcode_frequencies=False):\r\n\r\n final_log_data = [\"demultiplex_fasta.py log data\\n\"]\r\n final_log_data.append(\"Metadata mapping file:\\t%s\" % mapping_file)\r\n final_log_data.append(\"Input FASTA file(s):\\t%s\" %\r\n \",\".join([curr_file.name for curr_file in fasta_files]))\r\n if qual_files:\r\n final_log_data.append(\"Input QUAL file(s):\\t%s\" %\r\n \",\".join([curr_file.name for curr_file in qual_files]))\r\n final_log_data.append(\"Total sequences in input files:\\t%d\" % seq_counts)\r\n final_log_data.append(\"Retain barcode:\\t%s\" % keep_barcode)\r\n final_log_data.append(\"Barcode type:\\t%s\" % barcode_type)\r\n final_log_data.append(\"Max barcode error/mismatches allowed:\\t%s\" %\r\n max_bc_errors)\r\n final_log_data.append(\"Starting sequence identifier:\\t%d\" % start_index)\r\n final_log_data.append(\r\n \"Write unassigned reads:\\t%s\" %\r\n write_unassigned_reads)\r\n final_log_data.append(\"Disable barcode correction:\\t%s\" %\r\n disable_bc_correction)\r\n final_log_data.append(\"Added demultiplex field:\\t%s\" %\r\n added_demultiplex_field)\r\n final_log_data.append(\"Save barcode frequencies:\\t%s\\n\" %\r\n save_barcode_frequencies)\r\n\r\n final_log_data.append(\"Barcodes corrected/not corrected:\\t%d/%d\" %\r\n (corrected_bc_count[0], corrected_bc_count[1]))\r\n\r\n # Make a list of SampleID/Counts/Barcodes/Added demultiplex for sorting\r\n id_counts_bcs = []\r\n counts_col = []\r\n for curr_key in log_data.keys():\r\n id_counts_bcs.append((curr_key.split(',')[-1], log_data[curr_key],\r\n ','.join(curr_key.split(',')[0:-1])))\r\n counts_col.append(int(log_data[curr_key]))\r\n\r\n counts_col = array(counts_col)\r\n\r\n final_log_data.append(\"Number of samples in mapping file:\\t%d\" %\r\n len(counts_col))\r\n final_log_data.append(\"Sample count min/max/mean:\\t%d / %d / %3.2f\" %\r\n (counts_col.min(), counts_col.max(), counts_col.mean()))\r\n\r\n id_counts_bcs = sorted(id_counts_bcs, key=itemgetter(1), reverse=True)\r\n\r\n final_log_data.append(\"Sample\\tSequence Count\\tBarcode/Added Demultiplex\")\r\n for curr_id in id_counts_bcs:\r\n final_log_data.append(\"%s\\t%s\\t%s\" %\r\n (curr_id[0], curr_id[1], curr_id[2]))\r\n\r\n final_log_data.append(\"Seqs written\\t%d\" % counts_col.sum())\r\n final_log_data.append(\"Percent of input seqs written\\t%3.2f\" %\r\n (counts_col.sum() / seq_counts))\r\n\r\n return final_log_data", "def execute(self, context): \n aws_hook = AwsHook(self.aws_credentials)\n 
credentials = aws_hook.get_credentials()\n redshift = PostgresHook(self.redshift_conn_id)\n execution_date = context['execution_date']\n \n self.log.info(f\"Truncating {self.table}\")\n redshift.run(f\"TRUNCATE TABLE {self.table}\")\n \n \n self.log.info(f\"Inserting data into {self.table}\")\n s3_path = f\"s3://{self.s3_bucket}/{self.s3_key}\"\n\n if self.s3_key == \"log_data\":\n year = execution_date.year\n month = execution_date.month\n \n s3_path = '/'.join([s3_path, str(year), str(month)])\n \n formatted_sql = StageToRedshiftOperator.copy_sql.format(\n self.table,\n s3_path,\n credentials.access_key,\n credentials.secret_key,\n self.file_format,\n self.format_path\n )\n \n redshift.run(formatted_sql)", "def runDataExtraction():\r\n config = CONFIG['steps']['DataExtraction']\r\n ci = config['inputs']\r\n co = config['outputs']\r\n columns = ci['columns']\r\n nrows = ci['nrows']\r\n input_bucket = ci['bucket']\r\n no_of_files = ci['no_of_files']\r\n\r\n output_bucket = co['bucket']\r\n csv_name_prefix = co['csv_name_prefix']\r\n\r\n minio_config = CONFIG['artifacts']['minio']\r\n minioClient = create_minio_client(minio_config[\"endpoint_url\"],\r\n access_key=minio_config[\"access_key\"],\r\n secret_key=minio_config[\"secret_key\"],\r\n secure=minio_config['secure'])\r\n\r\n boto_client = boto3.client(\"s3\",\r\n endpoint_url=minio_config[\"endpoint_url\"],\r\n aws_access_key_id=minio_config[\"access_key\"],\r\n aws_secret_access_key=minio_config[\"secret_key\"],\r\n region_name=minio_config[\"region_name\"])\r\n\r\n zip_files = get_files(input_bucket, boto_client, file_type='zip')\r\n\r\n no_of_files_to_process = no_of_files if no_of_files is not None else len(\r\n zip_files)\r\n for zip_file in tqdm(zip_files[:no_of_files_to_process], total=no_of_files_to_process):\r\n process_file(zip_file, input_bucket, output_bucket, minioClient, columns,\r\n nrows=nrows, output_csv_name_prefix=csv_name_prefix)", "def BqTableDataFileProcessor(file_arg):\n data_insert_request_type = GetApiMessage('TableDataInsertAllRequest')\n insert_row_type = data_insert_request_type.RowsValueListEntry\n data_row_type = GetApiMessage('JsonObject')\n\n try:\n data_json = yaml.load(file_arg)\n\n if not data_json or not isinstance(data_json, list):\n raise TableDataFileError(\n 'Error parsing data file: no data records defined in file')\n\n rows = []\n for row in data_json:\n rows.append(insert_row_type(json=encoding.DictToMessage(\n row, data_row_type)))\n\n return rows\n except yaml.YAMLParseError as ype:\n raise TableDataFileError('Error parsing data file [{}]'.format(ype))", "def process_log_file(cur, filepath):\n # open log file\n df = get_file_df(filepath)\n\n # filter by NextSong action\n df = df[df['page'] == 'NextSong']\n\n # convert timestamp column to datetime\n df['ts'] = df['ts'].apply(lambda x: datetime.datetime.fromtimestamp(x/1000)) \n t = df\n \n time_data = []\n for td in t['ts']:\n wd = True if td.weekday() <=6 else False\n time_data.append([str(td.time()), td.hour, td.day, td.week, td.month, td.year, wd])\n column_labels = ('start_time', 'hour', 'day', 'week', 'month', 'year', 'weekday')\n\n # insert time data records\n time_df = pd.DataFrame(time_data, columns=column_labels)\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))\n\n # load user table\n user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']].copy()\n\n # insert user records\n for i, row in user_df.iterrows():\n cur.execute(user_table_insert, row)\n\n # insert songplay records\n for index, 
row in df.iterrows():\n \n # get songid and artistid from song and artist tables\n cur.execute(song_select, (row.song, row.artist, row.length))\n results = cur.fetchone()\n \n if results:\n songid, artistid = results\n else:\n songid, artistid = None, None\n\n # insert songplay record\n #user_id, level, song_id, artist_id, session_id, location, user_agent\n songplay_data = [row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent]\n cur.execute(songplay_table_insert, songplay_data)", "def process_log_event(event, context):\n\n with open('config.json') as fh:\n config = json.load(fh)\n serialized = event['awslogs'].pop('data')\n data = json.loads(zlib.decompress(\n base64.b64decode(serialized), 16+zlib.MAX_WBITS))\n message = [\n \"An error was detected\",\n \"\",\n \"Log Group: %s\" % data['logGroup'],\n \"Log Stream: %s\" % data['logStream'],\n \"Log Owner: %s\" % data['owner'],\n \"\",\n \"Log Contents\",\n \"\"]\n\n for evt in data['logEvents']:\n message.append(message_event(evt))\n message.append(\"\")\n\n params = dict(\n TopicArn=config['topic'],\n Subject=config['subject'],\n Message='\\n'.join(message))\n sns.publish(**params)", "def convert_to_json(basepath, sendto):\n\n logger = logging.getLogger('WikiLog')\n\n k = bucket.new_key(basepath)\n\n filenames = []\n year = month = day = hrs = ''\n\n for key in bucket.list():\n thisfile = key.name.encode('utf-8')\n if 'projectviews' not in thisfile and 'sql' not in thisfile and '.gz' in thisfile and thisfile.startswith(basepath):\n # S3 key name is of the format kt-wiki/pageviews/2016/2016-06/pageviews-20160601-000000.gz\n # Split by / to get last element\n filenames.append(thisfile)\n logger.info(\"Processing file: {}\".format(thisfile))\n fname = thisfile.split('/')\n\n # Get content from filename and save to local\n # Split again to Grab year, month, day, hour value from filename\n key.get_contents_to_filename('/home/ubuntu/WikiView/data/' + fname[-1])\n fname1 = fname[-1]\n data_time = fname1[:-3].split('-')\n year, month, day, hrs = data_time[1][:4], data_time[1][4:6], data_time[1][-2:], data_time[-1]\n\n docname = 'pageviews-' + year + '-' + month + '-' + day + '-' + hrs + '.json'\n dictlist = []\n\n # save file from s3 to local, read, write to json, push json to s3\n with open(docname, 'w') as fp:\n #\n with gzip.open('/home/ubuntu/WikiView/data/'+fname[-1],'r') as fin:\n for line in fin:\n line = line.split(' ')\n doc = {}\n doc['ymdh'] = year + '-' + month + '-' + day + '-' + hrs\n try:\n # format: project, title, views, bytes ~ en Main_Page 242332 4737756101\n prj, title, vcount = line[0], line[1], line[2]\n doc['prj'] = prj\n doc['title'] = title\n doc['vcount'] = vcount\n json.dump(doc,fp)\n fp.write('\\n')\n except:\n logger.error('Error reading gzip file {} at line: {}'.format(thisfile, line))\n pass\n# sys.exc_clear()\n\n # Now, save the json file to \n key_name = 'pageviews-' + year + '-' + month + '-' + day + '-' + hrs + '.json'\n full_key_name = os.path.join(sendto, key_name)\n k = bucket.new_key(full_key_name)\n\n logger.info(\"Sending json file to S3: {}\".format(docname))\n k.set_contents_from_filename(key_name)\n\n # Remove temp file\n logger.info(\"Removing temp file: {} {}\".format('/home/ubuntu/WikiView/data/', fname[-1]))\n os.remove('/home/ubuntu/WikiView/data/'+fname[-1])\n logger.info(\"Removing temp file: {}\".format(key_name))\n os.remove(key_name)\n logger.info('Finished!!!')", "def main(input_filepath, output_filepath):\n logger = logging.getLogger(__name__)\n 
logger.info('making final data set from raw data')\n\n conn = sqlite3.connect('../raw/td_V2.db')\n git_commits = pd.read_sql_query(\"SELECT * FROM GIT_COMMITS\",conn)\n szz_fault_inducing_commits = pd.read_sql_query(\"SELECT * FROM szz_fault_inducing_commits\",conn)\n refactoring_miner = pd.read_sql_query(\"SELECT * FROM refactoring_miner\",conn)\n refactoring_miner = refactoring_miner[refactoring_miner[\"COMMIT_HASH\"].isin(git_commits[\"COMMIT_HASH\"])]\n git_commits_changes = pd.read_sql_query(\"SELECT * FROM GIT_COMMITS_CHANGES\", conn)\n git_commits_changes = git_commits_changes[git_commits_changes[\"COMMIT_HASH\"].isin(refactoring_miner[\"COMMIT_HASH\"])]\n\n preprocess(git_commits, szz_fault_inducing_commits, refactoring_miner, git_commits_changes)", "def get_raw_data(report, bucket, replay_path, query):\n\n logger = logging.getLogger(\"SimpleReplayLogger\")\n s3_client = boto3.client('s3')\n try:\n response = s3_client.get_object(Bucket=bucket.get('bucket_name'), Key=f\"{replay_path}/raw_data/{query}000\")\n except Exception as e:\n logger.error(f\"Unable to get raw data from S3. Results for {query} not found. {e}\")\n df = pd.read_csv(response.get(\"Body\")).fillna(0)\n logger.debug(f\"Parsing results from '{query}' query.\")\n if query == 'latency_distribution':\n report.feature_graph = df\n else:\n for t, vals in report.tables.items():\n if vals.get('sql') == query:\n vals['data'] = read_data(t, df, vals.get('columns'), report)", "def load_dict_to_delta_table(spark, s3_data_bucket, table_schema, table_name, data, overwrite=False):\n table_to_col_names_dict = {}\n table_to_col_names_dict[\"transaction_fabs\"] = TRANSACTION_FABS_COLUMNS\n table_to_col_names_dict[\"transaction_fpds\"] = TRANSACTION_FPDS_COLUMNS\n table_to_col_names_dict[\"transaction_normalized\"] = list(TRANSACTION_NORMALIZED_COLUMNS)\n table_to_col_names_dict[\"awards\"] = list(AWARDS_COLUMNS)\n table_to_col_names_dict[\"financial_accounts_by_awards\"] = list(FINANCIAL_ACCOUNTS_BY_AWARDS_COLUMNS)\n\n table_to_col_info_dict = {}\n for tbl_name, col_info in zip(\n (\"transaction_fabs\", \"transaction_fpds\"), (TRANSACTION_FABS_COLUMN_INFO, TRANSACTION_FPDS_COLUMN_INFO)\n ):\n table_to_col_info_dict[tbl_name] = {}\n for col in col_info:\n table_to_col_info_dict[tbl_name][col.dest_name] = col\n\n # Make sure the table has been created first\n call_command(\n \"create_delta_table\",\n \"--destination-table\",\n table_name,\n \"--alt-db\",\n table_schema,\n \"--spark-s3-bucket\",\n s3_data_bucket,\n )\n\n if data:\n insert_sql = f\"INSERT {'OVERWRITE' if overwrite else 'INTO'} {table_schema}.{table_name} VALUES\\n\"\n row_strs = []\n for row in data:\n value_strs = []\n for col_name in table_to_col_names_dict[table_name]:\n value = row.get(col_name)\n if isinstance(value, (str, bytes)):\n # Quote strings for insertion into DB\n value_strs.append(f\"'{value}'\")\n elif isinstance(value, (date, datetime)):\n # Convert to string and quote\n value_strs.append(f\"\"\"'{value.isoformat()}'\"\"\")\n elif isinstance(value, bool):\n value_strs.append(str(value).upper())\n elif isinstance(value, (Sequence, Set)):\n # Assume \"sequences\" must be \"sequences\" of strings, so quote each item in the \"sequence\"\n value = [f\"'{item}'\" for item in value]\n value_strs.append(f\"ARRAY({', '.join(value)})\")\n elif value is None:\n col_info = table_to_col_info_dict.get(table_name)\n if (\n col_info\n and col_info[col_name].delta_type.upper() == \"BOOLEAN\"\n and not col_info[col_name].handling == \"leave_null\"\n ):\n # Convert 
None/NULL to false for boolean columns unless specified to leave the null\n value_strs.append(\"FALSE\")\n else:\n value_strs.append(\"NULL\")\n else:\n value_strs.append(str(value))\n\n row_strs.append(f\" ({', '.join(value_strs)})\")\n\n sql = \"\".join([insert_sql, \",\\n\".join(row_strs), \";\"])\n spark.sql(sql)", "def format_data(df):\n\n if len(df) == 0:\n print('No logs found, please check your data')\n return None\n\n # Separate the fields in request and response\n print('Extracting request and response ...')\n df1 = pd.concat([df.drop(['request', 'response'], axis=1).reset_index(drop=True),\n df['request'].apply(pd.Series).add_prefix('request_').reset_index(drop=True),\n pd.DataFrame(df['response']\n .tolist()).add_prefix('response_')], axis=1) # type: pd.DataFrame\n df1['request_input'] = pd.json_normalize(df['request'])['input.text']\n\n # Add context and output fields\n print('Extracting context and output ...')\n df2 = pd.concat([df1.drop(['response_context', 'response_output'], axis=1),\n df1['response_context'].apply(pd.Series).add_prefix('response_context_'),\n pd.DataFrame(df1['response_output'].tolist()).add_prefix('response_')],\n axis=1) # type: pd.DataFrame\n # Add context_system fields\n df3 = pd.concat([df2.drop(['response_context_system'], axis=1),\n df2['response_context_system'].apply(pd.Series).add_prefix('response_')],\n axis=1) # type: pd.DataFrame\n\n if 'response_context_response_context_IntentStarted' in df3.columns \\\n and 'response_context_response_context_IntentCompleted' in df3.columns:\n cols = ['log_id', 'response_timestamp', 'response_context_conversation_id', 'request_input', 'response_text',\n 'response_intents', 'response_entities', 'response_nodes_visited', 'response_dialog_request_counter',\n 'response_dialog_stack', 'response_dialog_turn_counter',\n 'response_context_response_context_IntentStarted', 'response_context_response_context_IntentCompleted']\n else:\n cols = ['log_id', 'response_timestamp', 'response_context_conversation_id', 'request_input', 'response_text',\n 'response_intents', 'response_entities', 'response_nodes_visited', 'response_dialog_request_counter',\n 'response_dialog_stack', 'response_dialog_turn_counter']\n\n print('Extracting intents ...')\n # Select a few required columns\n df4 = df3[cols].copy(deep=True) # type: pd.DataFrame\n # Limit fetched intents to a maximum value of 3\n df4.loc[:, 'response_intents'] = df4['response_intents'].apply(lambda x: x[:3])\n # Separate intents into different fields\n df5 = pd.concat([df4.drop(['response_intents'], axis=1),\n pd.DataFrame(df4['response_intents'].values.tolist()).add_prefix(\n 'response_intent_')], axis=1) # type: pd.DataFrame\n # Check if at least 3 intents are identified\n if 'response_intent_2' in df5.columns:\n # Put the 3 intents and confidences into separate fields\n df6 = pd.concat([df5.drop(['response_intent_0', 'response_intent_1',\n 'response_intent_2'], axis=1),\n df5['response_intent_0'].apply(pd.Series).add_prefix('response.top_intent_'),\n df5['response_intent_1'].apply(pd.Series).add_prefix('Intent 2 '),\n df5['response_intent_2'].apply(pd.Series).add_prefix('Intent 3 ')],\n axis=1) # type: pd.DataFrame\n # Convert confidence to numeric type\n cols = ['response.top_intent_confidence', 'Intent 2 confidence', 'Intent 3 confidence']\n df6[cols] = df6[cols].apply(pd.to_numeric, errors='coerce', axis=1)\n # Add confidence gap column\n df6['Confidence gap (between 1 and 2)'] = df6['response.top_intent_confidence'] - df6['Intent 2 confidence']\n elif 
'response_intent_1' in df5.columns:\n # Put the 3 intents and confidences into separate fields\n df6 = pd.concat([df5.drop(['response_intent_0', 'response_intent_1'], axis=1),\n df5['response_intent_0'].apply(pd.Series).add_prefix('response.top_intent_'),\n df5['response_intent_1'].apply(pd.Series).add_prefix('Intent 2 ')],\n axis=1) # type: pd.DataFrame\n # Convert confidence to numeric type\n cols = ['response.top_intent_confidence', 'Intent 2 confidence']\n df6[cols] = df6[cols].apply(pd.to_numeric, errors='coerce', axis=1)\n df6['Intent 3 intent'] = ''\n df6['Intent 3 confidence'] = ''\n # Add confidence gap column\n df6['Confidence gap (between 1 and 2)'] = df6['response.top_intent_confidence'] - df6['Intent 2 confidence']\n else:\n # Create the top intent and its confidence column\n df6 = pd.concat([df5.drop(['response_intent_0'], axis=1),\n df5['response_intent_0'].apply(pd.Series).add_prefix('response.top_intent_')],\n axis=1) # type: pd.DataFrame\n # df6['Confidence gap (between 1 and 2)'] = ''\n # df6['Intent 2 intent'] =''\n # df6['Intent 2 confidence'] = ''\n # df6['Intent 3 intent'] =''\n # df6['Intent 3 confidence'] = ''\n new_cols_list = ['Confidence gap (between 1 and 2)', 'Intent 2 intent', 'Intent 2 confidence',\n 'Intent 3 intent', 'Intent 3 confidence']\n\n df6.reindex(columns=[*df6.columns.tolist(), *new_cols_list], fill_value='')\n\n # Rename columns\n if 'response_context_response_context_IntentStarted' in df6.columns \\\n and 'response_context_response_context_IntentCompleted' in df6.columns:\n df6.rename(columns={'response_nodes_visited': 'response.output.nodes_visited_s',\n 'response_context_conversation_id': 'response.context.conversation_id',\n 'response_timestamp': 'response.timestamp',\n 'response_context_response_context_IntentStarted': 'response_context_IntentStarted',\n 'response_context_response_context_IntentCompleted': 'response_context_IntentCompleted'},\n inplace=True)\n else:\n df6.rename(columns={'response_nodes_visited': 'response.output.nodes_visited_s',\n 'response_context_conversation_id': 'response.context.conversation_id',\n 'response_timestamp': 'response.timestamp'},\n inplace=True)\n # Change format of numeric and date columns\n df6['response.top_intent_confidence'] = pd.to_numeric(df6['response.top_intent_confidence'])\n df6['response.timestamp'] = pd.to_datetime(df6['response.timestamp'])\n df6['Date'] = [datetime.datetime.date(d) for d in df6['response.timestamp']] # extracting date from timestamp\n df6['Customer ID (must retain for delete)'] = '' # Adding a column to retain customer id\n\n print('Completed!')\n return df6", "def stream_to_s3(task_name,data,header,timestamp_suffix,**kwargs):\n if kwargs:\n file = '_'.join([task_name,timestamp_suffix,str(kwargs['batch_id'])])+'.csv'\n else:\n file = '_'.join([task_name,timestamp_suffix])+'.csv'\n\n f = io.StringIO()\n with smart_open.open('s3://{bucket_name}/{task_name}/{file}'.format(bucket_name=bucket_name,task_name=task_name,file=file), 'w') as fout:\n logger.info('Streaming file contents to S3')\n _writer = csv.writer(fout)\n _writer.writerow(header)\n fout.write(f.getvalue())\n \n for row in data:\n f.seek(0)\n f.truncate(0)\n _writer.writerow(row)\n fout.write(f.getvalue())\n \n f.close()\n logger.info('Complete')\n\n return file", "def process_data(self, data):\n try:\n payload = self.extract(data)\n except (ValueError, InvalidToken) as doh:\n self.log.error('Error: {}, Data: {}'.format(doh, data))\n else:\n fields = {}\n tags = {}\n # writing strings to a field in Influx requires a 
double-quote\n tags['username'] = '\"{}\"'.format(payload['user'])\n # Dumbass Influx doesn't let you group by fields or aggregate tags...\n # I want to count the unique occurrences of a user over a period of time\n # to show current connected user counts, *and* be able to group by\n # those usernames over time to show specific user usage. Wish I\n # used TimescaleDB instead of InfluxDB\n fields['user'] = '\"{}\"'.format(payload.pop('user'))\n fields['source'] = '\"{}\"'.format(payload.pop('source'))\n fields['target'] = '\"{}\"'.format(payload.pop('target'))\n fields['packets'] = 1 # each event represents a single packet\n timestamp = payload.pop('time')\n self.influx.write(fields=fields, tags=tags, timestamp=timestamp)", "def process_logs(logs):\n all_data = {}\n for log in logs:\n with open(log) as f:\n data = json.load(f)\n scenario = data[0].get(\"scenario\", None)\n if scenario is None:\n # No scenario name, no way to organize the data\n continue\n\n # Use the log's date as the run identifier\n # This assumes the format is SCENARIO-YYYY-MM-DD.json\n # NOTE: This may not match the GitHub Action run dates due to tests taking\n # a very long time.\n day = datetime.strptime(log[1+len(scenario):-5], \"%Y-%m-%d\").strftime(\"%Y%m%d\")\n if day not in all_data:\n all_data[day] = {}\n\n # Group them by scenario, assume each file is from one scenario per day\n all_data[day][scenario] = data\n return all_data", "def dump_job_data(s3, bucket, key, ecosystem, package, version):\n data = s3.read_object(bucket, key)\n timestamp_str = datetime.datetime.utcnow().strftime(\"%Y-%m-%dT%H:%M:%S.%f\")\n filename = \"s3_data_{e}_{p}_{v}_{t}.json\".format(e=ecosystem,\n p=package,\n v=version,\n t=timestamp_str)\n with open(filename, 'w') as fout:\n json.dump(data, fout)", "def main():\n conn = psycopg2.connect(\"host=127.0.0.1 dbname=sparkifydb user=student password=student\")\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', func=process_song_file)\n print('song file processing is complete')\n process_data(cur, conn, filepath='data/log_data', func=process_log_file)\n print('log file processing is complete')\n conn.close()", "def deduce_schema(self, input_data, *, schema_map=None):\n\n if self.input_format == 'csv':\n if self.csv_dialect:\n reader = csv.DictReader(input_data, dialect=self.csv_dialect)\n else:\n reader = csv.DictReader(input_data)\n elif self.input_format == 'json' or self.input_format is None:\n reader = json_reader(input_data)\n elif self.input_format == 'dict':\n reader = input_data\n else:\n raise Exception(f\"Unknown input_format '{self.input_format}'\")\n\n if schema_map is None:\n schema_map = OrderedDict()\n\n try:\n for json_object in reader:\n\n # Print a progress message periodically.\n self.line_number += 1\n if self.line_number % self.debugging_interval == 0:\n logging.info(f'Processing line {self.line_number}')\n\n # Deduce the schema from this given data record.\n if isinstance(json_object, dict):\n self.deduce_schema_for_record(\n json_object=json_object,\n schema_map=schema_map,\n )\n elif isinstance(json_object, Exception):\n self.log_error(\n f'Record could not be parsed: Exception: {json_object}'\n )\n if not self.ignore_invalid_lines:\n raise json_object\n else:\n self.log_error(\n 'Record should be a JSON Object '\n f'but was a {type(json_object)}'\n )\n if not self.ignore_invalid_lines:\n raise Exception(f'Record must be a JSON Object '\n f'but was a {type(json_object)}')\n finally:\n logging.info(f'Processed {self.line_number} lines')\n\n 
return schema_map, self.error_logs", "def main(input_filepath, output_filepath):\n logger = logging.getLogger(__name__)\n logger.info('making final data set from raw data')\n\n # Get the data/raw/git-refactoring-commits-raw.csv file\n dataset = pd.read_csv(input_filepath, usecols=['Message', 'CommitSHA', 'IsRefactoring'])\n logger.info('Loaded data file ' + input_filepath + ' with ' + str(len(dataset)) + ' rows')\n\n logger.info('Applying pre-processing steps on the \"Message\" column...')\n dataset['Message'] = dataset['Message'].apply(preprocess)\n\n # Save the processed subset on data data/processed/git-refactoring-commits.csv\n logger.info('Saved processed results on ' + output_filepath + ' with ' + str(len(dataset)) + ' rows')\n dataset.to_csv(output_filepath, encoding='utf-8', index=False)", "def create_dataset(input_file_path, output_file_path):\n col_index_map = {'user_id': 0, 'session_id': 1, 'timestamp': 2, 'step': 3, 'action_type': 4, 'reference': 5,\n 'platform': 6, 'city': 7, 'device': 8,\n 'current_filters': 9, 'impressions': 10, 'prices': 11}\n flat_dict = dict()\n with open(input_file_path, 'r') as csvFile:\n reader = csv.reader(csvFile)\n header = next(reader)\n col_names = [col_name for col_name in col_index_map.keys()]\n col_names.pop(0)\n index = 0\n for row in tqdm(reader):\n if len(flat_dict) > 40000:\n index += 1\n with open(output_file_path + \"_\" + str(index) + \".json\", \"w\") as file:\n json.dump(flat_dict, file)\n print(\" JSON : \", index)\n flat_dict = dict()\n col_values = [row[col_index_map[c_n]] for c_n in col_names]\n dict_for_each_row = dict(zip(col_names, col_values))\n to_list = dict_for_each_row['impressions']\n dict_for_each_row['impressions'] = to_list.split('|')\n to_list = dict_for_each_row['prices']\n dict_for_each_row['prices'] = to_list.split('|')\n user_id = row[col_index_map['user_id']]\n if user_id in flat_dict:\n flat_dict[user_id].append(dict_for_each_row)\n else:\n flat_dict[user_id] = [dict_for_each_row]\n\n print(\"Output is Saved\")", "def main():\n spark = create_spark_session()\n input_data = \"s3a://udacity-dend/\"\n output_data = \"s3a://udacity-nanodegree-data-engineer/\"\n \n process_song_data(spark, input_data, output_data) \n process_log_data(spark, input_data, output_data)" ]
[ "0.7315748", "0.73054594", "0.6975627", "0.69641143", "0.6958745", "0.6947524", "0.6941278", "0.69293684", "0.6911164", "0.6907852", "0.686215", "0.6784478", "0.6759412", "0.6692344", "0.66841125", "0.6366269", "0.62756515", "0.6237044", "0.62061685", "0.61751795", "0.59972167", "0.5939527", "0.5866242", "0.57967514", "0.567509", "0.565705", "0.5657001", "0.5622568", "0.56209975", "0.5620446", "0.5595221", "0.5591331", "0.5585552", "0.55787104", "0.55776334", "0.5576928", "0.5568119", "0.55587995", "0.555448", "0.5544945", "0.554229", "0.5541075", "0.5537971", "0.55029804", "0.5502841", "0.54942673", "0.5483128", "0.5476927", "0.5473878", "0.54663646", "0.54637015", "0.54470223", "0.5416641", "0.5385047", "0.5379023", "0.5376929", "0.5370862", "0.5325947", "0.5314946", "0.5303931", "0.5295463", "0.52649665", "0.5258383", "0.52415574", "0.52364767", "0.52061397", "0.51959574", "0.5190549", "0.51582855", "0.5149686", "0.5148432", "0.50758964", "0.50707114", "0.5065309", "0.5039602", "0.5038143", "0.50035596", "0.49930972", "0.4986368", "0.49770397", "0.49699938", "0.49614275", "0.49336246", "0.49171922", "0.49049136", "0.49039418", "0.4896005", "0.48690093", "0.4857497", "0.48566782", "0.485383", "0.48424017", "0.48398623", "0.4828445", "0.48282695", "0.48234314", "0.48178047", "0.4809939", "0.47976464", "0.47946277" ]
0.66948694
13
View function that handles inserting new comments via POST data (form submission)
def post_comment_form(request):
    try:
        comment, previous_version = get_comment(request)
    except InvalidCommentException as e:
        raise

    parent_comment = comment.parent
    tree_root = parent_comment.get_root()
    parent_object = tree_root.content_object

    if not user_can_post_comment(request, comment):
        raise Exception("User can't create comments")

    if is_past_max_depth(comment):
        raise Exception("Max depth reached")

    # If the comment object (NOT the message) hasn't been saved yet...
    if comment._state.adding == True:
        comment = add_comment(comment)

    # Everything has checked out, so we save the new version and return the appropriate response
    version_form, new_version = create_new_version(request, comment)

    return comment
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def comments_new():\n comment = {\n \"title\": request.form.get(\"title\"),\n \"content\": request.form.get(\"content\"),\n \"playlist_id\": ObjectId(request.form.get(\"playlist._id\")),\n }\n print(comment)\n comment_id = db.comments.insert_one(comment).inserted_id\n return redirect(\n url_for(\"playlists.playlists_show\", playlist_id=request.form.get(\"playlist._id\"))\n )", "def save_comments():\n potential_deal_id = int(request.form.get(\"id\"))\n action = request.form.get(\"action\")\n if action.lower() == \"none\":\n action = None\n comments = request.form.get(\"comments\")\n db_handler = DBHandler()\n db_handler.update_by_id(potential_deal_id, action, comments)\n # return redirect(url_for(\"home\"))\n return jsonify({\"success\": True}), 200", "def add_comment(request):\n if request.method != 'POST':\n return HttpResponseRedirect(reverse('wainz.views.composite'))\n else:\n img_id = request.POST['id']\n try:\n img = Image.objects.get(pk=img_id)\n except:\n return HttpResponseRedirect(reverse('wainz.views.composite'))\n comment_text = request.POST['comment']\n #TODO sanitize input\n comment = ImageComment()\n comment.submission_date = timezone.now()\n comment.comment_text= comment_text\n comment.image_id = img_id\n comment.submitter_id = int(request.POST['uid'])\n comment.save()\n return rest.rest_success(request, img_id)", "def on_comment(self, request, board_id):\n error = None\n if request.method == 'POST':\n creator = request.form['creator']\n comment = request.form['comment']\n if len(creator) > 30:\n error = 'creator name too long'\n elif len(comment) > 50:\n error = 'comment too long'\n else:\n self.insert_comment(request, board_id)\n return redirect('/board:' + board_id)\n return self.render_template('comment.html', error=error)", "def post(self, request, pk):\n\n post = Blog.objects.get(pk=int(pk))\n user_id = self.request.session.get('USER_ID')\n\n try:\n user = User.objects.get(pk=user_id)\n except:\n pass\n body = self.request.POST.get('body')\n\n if user_id is None:\n messages.add_message(request, messages.ERROR, \"Please login to add comments.\")\n return HttpResponseRedirect(self)\n\n comments = Comment.objects.create(post=post, author=user, body=body)\n\n d = model_to_dict(post)\n messages.add_message(request, messages.SUCCESS, \"Comment added successfully.\")\n return self.render_to_response(d)", "def newcomment(id):\n\n if g.is_logged == False:\n flash (\"You need to be logged in\")\n return redirect(url_for('show_place', id=id))\n\n if request.method == 'POST':\n rating = request.form['rating']\n\n if rating.isdigit() == False:\n flash (\"Rating must be between a number between 0 and 5 (inclusive)\")\n elif int(rating) < 0 or int(rating) > 5:\n flash (\"Rating must be between 0 and 5 (inclusive)\")\n else:\n db = get_db()\n db.execute('''insert into reviews (rating, title, message, user_id, place_id) values (?, ?, ?,?,?)''', [rating, request.form['title'], request.form['content'], g.user_id, id])\n db.commit()\n\n flash('Your comment was successfully added')\n return redirect(url_for('show_place', id=id))", "def post_comment(id):\n \n form = CommentForm()\n title = 'post comment'\n post = Post.query.filter_by(id=id).first()\n\n if post is None:\n\n abort(404)\n\n if form.validate_on_submit():\n comment = form.comment.data\n new_comment = Comments(opinion = comment, user_id = current_user.id, posts_id = post.id)\n new_comment.save_comment()\n return redirect(url_for('main.view_post', id = post.id))\n\n return render_template('comments.html', form = form, title = 
title)", "def createcomment(request, pk):\n issue = get_object_or_404(Issue, pk=pk)\n if request.method == \"POST\":\n form = CommentCreationForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.issue = issue\n comment.author = request.user\n comment.created_at = timezone.now()\n comment.save()\n return redirect('office:issue', pk=pk)\n else:\n form = CommentForm()\n return render(request, 'blog/add_comment_to_post.html', {'form': form})", "def form_valid(self, form):\n post = self.get_object()\n #WAS: post.comment_add(form.cleaned_data['text'], self.request.user)\n post.add_comment(form.cleaned_data['text'], \n self.request.user,\n added_at=None, \n by_email=False\n )\n return views_support.response_success(self.request)", "def post_comment(request, next=None, using=None):\n # Fill out some initial data fields from an authenticated user, if present\n data = request.POST.copy()\n if request.user.is_authenticated():\n if not data.get('name', ''):\n data[\"name\"] = request.user.get_full_name() or request.user.username\n if not data.get('email', ''):\n data[\"email\"] = request.user.email\n\n # Check to see if the POST data overrides the view's next argument.\n next = data.get(\"next\", next)\n\n # Look up the object we're trying to comment about\n ctype = data.get(\"content_type\")\n object_pk = data.get(\"object_pk\")\n model = models.get_model(*ctype.split(\".\", 1))\n target = model._default_manager.using(using).get(pk=object_pk)\n\n\n # Construct the comment form\n form = comments.get_form()(target, data=data)\n\n # Check security information\n if form.security_errors():\n return None\n # Create the comment\n comment = form.get_comment_object()\n comment.ip_address = request.META.get(\"REMOTE_ADDR\", None)\n if request.user.is_authenticated():\n comment.user = request.user\n\n # Signal that the comment is about to be saved\n responses = signals.comment_will_be_posted.send(\n sender = comment.__class__,\n comment = comment,\n request = request\n )\n\n # Save the comment and signal that it was saved\n comment.save()\n message = get_object_or_404(Message, pk = object_pk)\n message.envoyer_commentaire_notification(comment.pk, request.user.username)\n \n signals.comment_was_posted.send(\n sender = comment.__class__,\n comment = comment,\n request = request\n )\n\n comment_list = [comment]\n return render_to_response('comments/list.html', {'comment_list': comment_list},context_instance=RequestContext(request))", "def comment_add(request,comment=None, error='', message=''):\n categories = Category.objects.all()\n error_fields=[]\n default_comment = Comment(date=datetime.date.today(), time=datetime.datetime.now().time, source='', detail='', category=Category.objects.filter(description='Events')[0], user_email='')\n\n try:\n added = bool(request.POST['add'])\n except:\n added = False\n try:\n action = request.POST['action']\n except:\n action = 'add'\n\n if added == True:\n try:\n new_date = parse(request.POST['date'], dayfirst=True)\n new_time = parse(request.POST['time'])\n except:\n error += ' Datetime invalid or not specified.'\n\n try:\n new_detail = request.POST['detail']\n if new_detail == '':\n error += ' Comment text is blank.'\n except:\n error += ' No comment text provided.'\n\n try:\n new_source = request.POST['source']\n if new_source == '':\n error += ' Source is blank.'\n except:\n error += ' No comment source provided.'\n\n try:\n new_category = Category.objects.filter(pk=int(request.POST['category_id']))[0] #The [0] is OK since the fact that 
category_id is a primary key ensures that the array has only length 1.\n except:\n error += ' Category invalid or nonexistent.'\n\n try:\n new_user_email = request.POST['user_email']\n if new_user_email == '':\n error += ' You haven\\'t provided your e-mail address.'\n except:\n error += ' No user e-mail address provided.'\n\n if error == '':\n try:\n new_comment = Comment(date=new_date, time=new_time, source=new_source, detail=new_detail, category=new_category, user_email=new_user_email)\n new_comment.full_clean()\n try:\n new_comment.save()\n message += 'Your comment was added to the database.'\n except:\n error += 'Failed to access the database.'\n except ValidationError as ve:\n for k in ve.message_dict.keys():\n error_fields.append(k)\n for m in ve.message_dict[k]:\n error += m + ' '\n default_comment = new_comment\n\n if action == 'saveandaddanother' or action == 'add' or error != '':\n return render_to_response('feedback/comment_add.html',\n {'categories': categories,\n 'error': error,\n 'error_fields': error_fields,\n 'message': message,\n 'added': added,\n 'comment': default_comment},\n context_instance=RequestContext(request))\n elif action == 'save':\n return index(request, error=error, message=message)\n else:\n error += 'Invalid submit action requested.'\n return render_to_response('feedback/comment_add.html',\n {'categories': categories,\n 'error': error,\n 'error_fields': error_fields,\n 'added': added,\n 'message': message,\n 'comment': default_comment},\n context_instance=RequestContext(request))", "def add_comment_to_post(request, pk):\n post = get_object_or_404(Post, pk=pk)\n if request.method == 'POST':\n form = CommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.post = post\n comment.save()\n return redirect('post_detail', pk=post.pk)\n else:\n form = CommentForm()\n return render(request, 'blog/comment_form.html', {'form': form})", "def community_post_create_view(request):\n task = \"Create New\"\n form = AddEditPostForm() # An unbound form\n\n if request.method == 'POST': # If the form has been submitted...\n form = AddEditPostForm(request.POST, request.FILES) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n post = form.save(commit=False) # Create a new object from the form, but don't save it to the database\n post.author = request.user # Set the author to the current user\n post.save() # Save the object to the database\n slug_str = \"%s %s\" % (post.title, post.date_posted) # Create a slug from the title and date\n post.slug = slugify(slug_str) # Create the slug\n post.save() # Save the object to the database\n return redirect('community-home') # Redirect to the home page\n\n context = { # Pass the variables to the template\n 'task': task,\n 'form': form,\n }\n return render(request,\n 'pages/patient-community/community-create-update-post.html',\n context) # render the patient community create post page", "def add_comment_view(request, e_pk):\n\n element = get_object_or_404(Element, pk=e_pk)\n group = element.tab.group\n\n if request.user not in group.users.all():\n return redirect(reverse('my_groups_view'))\n\n if request.method == 'POST':\n form = CreateCommentForm(request.POST)\n if form.is_valid():\n comment = Comment(\n text=form.cleaned_data['text'],\n creator=request.user,\n element=element,\n )\n comment.save()\n else:\n return HttpResponseBadRequest()\n\n return redirect(reverse('element_view', args=(e_pk,)))", "def post(self):\n comment_id = self.request.get('comment_id')\n 
post_id = self.request.get('post_id')\n comment = Comment.get_by_id(int(comment_id), parent=comment_key())\n post = Post.get_by_id(int(post_id), parent=blog_key())\n if comment and self.user.key().id() == comment.user.key().id():\n comment.content = self.request.get('content')\n\n have_errors = False\n\n if not comment.content:\n error_content = \"Content is required\"\n have_errors = True\n\n if have_errors:\n self.render(\"edit_comment.html\",\n comment=comment,\n error_content=error_content,\n user=self.user)\n else:\n comment.put()\n time.sleep(0.1)\n\n self.redirect('/blog/%s' % str(post.key().id()))", "def form_valid(self, form):\n action = self.get_object()\n #WAS: return action.comment_add(form.cleaned_data['text'], self.request.user)\n action.comment_add(form.cleaned_data['text'], self.request.user)\n return views_support.response_success(self.request)", "def add_comment(request, listing_id):\n if request.method == \"POST\":\n try:\n listing = Listing.objects.get(pk=listing_id)\n except Listing.DoesNotExist:\n return render(request, \"auctions/errors.html\", {\"error_message\":\n \"something went wrong, the id url argument is not valid\"})\n\n CommentForm = modelform_factory(Comment, exclude=(\"commenter\",\"listing\"))\n # validate and save from the formdata to the database\n form = CommentForm(request.POST)\n try:\n comment = form.save(commit=False)\n comment.commenter = request.user\n comment.listing = listing\n comment.save()\n except:\n # if something went wrong with comment form \n return render(request, \"auctions/errors.html\", {\"error_message\":\n \"something went wrong with the submission of your comment, try again\"})\n\n return redirect(reverse(\"single_listing\", \n args=[listing.title]) +f\"?id={listing.id}\")", "def post(self):\n modified_content = self.request.get('comment_edit')\n comment_id = self.request.get('comment_id')\n comment = Comments.get_by_id(int(comment_id))\n user = self.get_active_user()\n\n if user.key().id() == comment.submitter_id:\n comment.content = modified_content\n comment.put()\n self.redirect('/%s' % str(comment.post_id))\n else:\n self.error(403)", "def post(self):\n post_id = int(self.request.get('post_id'))\n post = Posts.get_by_id(post_id)\n comment = self.request.get('comment')\n submitter_id = self.get_active_user().key().id()\n\n if submitter_id:\n comment = Comments(post_id=post_id, content=comment,\n submitter_id=submitter_id)\n comment.put()\n self.redirect('/%s' % str(post.key().id()))\n else:\n self.error(403)", "def create_comment(request):\n\n # get data\n in_data = getRequestData(request)\n\n # get the Thread associated with the comments\n mythread = Thread.objects.get(id=in_data.get('mythreadid'))\n\n # save in database\n try:\n comment = Comment(pub_date = datetime.datetime.now(pytz.timezone('US/Eastern')), username = in_data.get('myusername'), text = in_data.get('mytext'), score = 0, thread = mythread )\n comment.save()\n except:\n return HttpResponseBadRequest('Error saving to database!')\n\n return JsonResponse(in_data)", "def add_comment(request, entry_pk):\n\n blog = get_object_or_404(BlogEntry, pk=entry_pk)\n\n if not request.user.is_authenticated():\n raise PermissionDenied\n\n form = BlogCommentForm(creator=request.user, blog=blog, data=request.POST)\n\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(blog.get_absolute_url())\n\n return single(request, entry_pk=entry_pk, comment_form=form)", "def add_mvcomment(request, pk):\n\n # Getting the movie object hosting the comment\n movie = 
get_object_or_404(Movie, pk=pk)\n\n # if POST\n if request.method == \"POST\":\n form = MovieCommentForm(request.POST)\n\n # set the author and redirect if valid form\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.movie = movie\n comment.save()\n return redirect('../', pk=movie.pk)\n else:\n form = MovieCommentForm()\n\n # render the form through the template\n return render(request, {'form': form})", "def new_from_post():\n # If you make a post request with a question_id we will assume you want a new question editor\n # we will prepopulate the question new page with data from that question (if it is a valid question id)\n question_id = request.form['question_id'] if request.form['question_id'] else ''\n\n return render_template('questionNew.html', question_id=question_id)", "def create_comment(bid, pid):\n # pylint: disable=unused-argument\n form = CommentForm(request.form)\n if request.method == 'POST':\n if form.validate():\n DB.session.add(Comment(pid, current_user.uid, form.text.data))\n DB.session.commit()\n flash('Comment successfully created!')\n else:\n flash(constants.DEFAULT_SUBMISSION_ERR)\n return redirect(request.referrer)", "def post(self):\n\n # 160 characters limit\n\n if len(client_data[\"content\"]) >= 200:\n self.write_json_with_status(400,{\n 'result' : 'fail',\n 'reason' : 'content too long'\n })\n\n client_data = self.data\n now = str(time.time())\n comment_id = md5(client_data['content']+now)\n\n new_comment = self.event_comment_table.put_item(data={\n 'CommentID' : comment_id,\n 'CreatorID' : creator_id,\n 'Content' : content,\n 'EventID' : event_id,\n 'Timestamp' : now\n })\n \n\n self.write_json({\n 'comment_id' : comment_id\n })", "def questions_collection(request):\n if request.method == \"POST\":\n data = json.loads(request.body)\n task = Task.objects.get(id=data.get(\"taskId\", \"\"))\n commenter = User.objects.get(username=data.get(\"commenter\", \"\"))\n content = data.get(\"content\", \"\")\n\n question = Question(\n task=task,\n commenter=commenter,\n content=content\n )\n question.save()\n return JsonResponse({\"message\": \"Question created successfully\"}, status=201)", "def postVisitView(request, pk, post_id):\n post = Blogger.objects.get(id=pk).post_set.get(id=post_id)\n comments = post.comment_set.all().order_by(\"-date_created\")\n\n data = {\n \"user\": request.user,\n \"post\": post,\n }\n\n form = CommentForm(initial=data)\n\n if request.method == \"POST\":\n form = CommentForm(request.POST, initial=data)\n if form.is_valid():\n form.save()\n return redirect(f\"/blogger/{pk}/post/{post_id}\")\n\n context = {\n \"post\": post,\n \"comments\": comments,\n \"form\": form,\n }\n return render(request, \"blog/post_visit.html\", context)", "def post(self):\n post_id = self.request.get('post_id')\n post = Post.get_by_id(int(post_id), parent=blog_key())\n content = self.request.get('comment')\n\n if content:\n comment = Comment(parent=comment_key(),\n content=content,\n user=self.user,\n post=post)\n comment.put()\n\n time.sleep(0.1)\n self.redirect('/blog/%s' % str(post.key().id()))", "def post_comment_ajax(request, using=None):\n if not request.is_ajax():\n return HttpResponseBadRequest(\"Expecting Ajax call\")\n\n data = request.POST.copy()\n if request.user.is_authenticated():\n if not data.get('name', ''):\n data[\"name\"] = request.user.get_full_name() or request.user.get_username()\n if not data.get('email', ''):\n data[\"email\"] = request.user.email\n\n # Look up the object we're trying to 
comment about\n ctype = data.get(\"content_type\")\n object_pk = data.get(\"object_pk\")\n if ctype is None or object_pk is None:\n return CommentPostBadRequest(\"Missing content_type or object_pk field.\")\n try:\n model = apps.get_model(*ctype.split(\".\", 1))\n target = model._default_manager.using(using).get(pk=object_pk)\n except TypeError:\n return CommentPostBadRequest(\n \"Invalid content_type value: %r\" % escape(ctype))\n except AttributeError:\n return CommentPostBadRequest(\n \"The given content-type %r does not resolve to a valid model.\" % escape(ctype))\n except ObjectDoesNotExist:\n return CommentPostBadRequest(\n \"No object matching content-type %r and object PK %r exists.\" % (\n escape(ctype), escape(object_pk)))\n except (ValueError, ValidationError) as e:\n return CommentPostBadRequest(\n \"Attempting go get content-type %r and object PK %r exists raised %s\" % (\n escape(ctype), escape(object_pk), e.__class__.__name__))\n\n # Do we want to preview the comment?\n preview = \"preview\" in data\n\n # Construct the comment form\n form = django_comments.get_form()(target, data=data)\n\n # Check security information\n if form.security_errors():\n return CommentPostBadRequest(\n \"The comment form failed security verification: %s\" % escape(str(form.security_errors())))\n\n # If there are errors or if we requested a preview show the comment\n if preview:\n comment = form.get_comment_object() if not form.errors else None\n if comment is not None and request.user.is_authenticated():\n comment.user = request.user\n return _ajax_result(request, form, \"preview\", comment, object_id=object_pk)\n if form.errors:\n return _ajax_result(request, form, \"post\", object_id=object_pk)\n\n # Otherwise create the comment\n comment = form.get_comment_object()\n comment.ip_address = get_trusted_ip(request)\n if request.user.is_authenticated():\n comment.user = request.user\n\n # Signal that the comment is about to be saved\n responses = signals.comment_will_be_posted.send(\n sender=comment.__class__,\n comment=comment,\n request=request\n )\n\n for (receiver, response) in responses:\n if response is False:\n return CommentPostBadRequest(\n \"comment_will_be_posted receiver %r killed the comment\" % receiver.__name__)\n\n # Save the comment and signal that it was saved\n comment.save()\n signals.comment_was_posted.send(\n sender=comment.__class__,\n comment=comment,\n request=request\n )\n\n return _ajax_result(request, form, \"post\", comment, object_id=object_pk)", "def _post(self, data):\n new_comment_id = DB_COMMENT_TABLE.insert(data)\n return new_comment_id", "def add_new_comment(request):\n token = request.data.get('token')\n text = request.data.get('text', '')\n post_id = request.data.get('post_id', '')\n permission = request.data.get('permission')\n\n if Token.objects.filter(key=token).exists():\n if len(text) < 10:\n return Response({\"error\": 24})\n if len(text) > 1000:\n return Response({\"error\": 25})\n\n if type(post_id) is int:\n if Post.objects.filter(pk=post_id).exists():\n token = get_object_or_404(Token, key=token)\n post = get_object_or_404(Post, pk=post_id)\n comment = Comment.objects.create(post=post,\n author_id=token.user_id,\n text=text,\n permission=permission)\n serializer = PostSerializer(post, context={'user_id': token.user_id})\n UserFeed.objects.create(user=post.author,\n action_user=token.user,\n post_comment=comment,\n action=\"PostComment\")\n\n printable = set(string.printable)\n msg = filter(lambda x: x in printable, comment.text) \n message = \"{} 
commented: {}\".format(token.user.username, msg)\n\n custom = {\n \"post_id\": post.id,\n \"avatar\": UserProfile.objects.get(user=token.user).avatar.url\n }\n\n if post.author != token.user:\n user_notification = UserNotification.objects.get(user=post.author)\n send_notification(custom, message, user_notification)\n\n # check @ for users \n for item in text.split(' '):\n if item and item[0] == '@':\n username = item[1:].lower()\n user = User.objects.filter(username__iexact=username).first()\n if not user or user == token.user:\n continue\n UserFeed.objects.create(user=user,\n action_user=token.user,\n post_comment=comment,\n action=\"PostCommentComment\")\n msg = filter(lambda x: x in printable, comment.text) \n message = \"{} commented: {}\".format(token.user.username, msg)\n user_notification = UserNotification.objects.get(user=user)\n send_notification(custom, message, user_notification)\n\n return Response({\"success\": 26,\n \"post\": serializer.data})\n else:\n return Response({\"error\": 27})\n else:\n return Response({\"error\": 17})", "def add_comment(request, pk, pk2):\n template_var = base_template_vals(request)\n p = request.POST\n \n if p.has_key(\"content\") and p[\"content\"]:\n if request.user.is_authenticated():\n comment = Comment(event=Event.objects.get(id=pk))\n comment.user = UserProfile.objects.get(django_user=request.user)\n comment.content = p[\"content\"]\n comment.save()\n\n # Sys notification\n from_user = UserProfile.objects.get(django_user=pk2) # Who's event that is commented on\n to_user = Event.objects.get(id=pk).author\n event_id = pk\n sys_notification(to_user, \"add_comment\", from_user, event_id)\n return single(request, pk)", "def post(self, post_id):\n comment_content = self.request.get(\"comment_content\")\n Post.add_comment(int(post_id), int(\n self.user.get_id()), comment_content)\n self.redirect(\"/blog/\" + post_id + \"/comments\")", "def process_comment(request, comment, post):\n\n if request.user.is_authenticated:\n # We already set auth user's name and email in the form's inital vals.\n comment.author = request.user\n\n # Is this a threaded comment?\n if request.POST.get(\"parent_id\"):\n comment.parent = Comment.objects.get(id=request.POST.get(\"parent_id\"))\n\n # If commenter is logged in, override name and email with stored values from User object\n if request.user.is_authenticated:\n comment.name = request.user.get_full_name()\n comment.email = request.user.email\n\n # Set required relationship to Post object\n comment.post = post\n\n # Get commenter's IP and User-Agent string\n # ip = get_ip(request)\n # if ip is not None:\n # comment.ip_address = ip\n comment.user_agent = request.META.get(\"HTTP_USER_AGENT\", \"\")\n\n # Run spam check\n comment.spam = spam_check(comment)\n\n # Strip disallowed HTML tags. 
See tangerine docs to customize.\n comment.body = sanitize_comment(comment.body)\n\n # Call comment approval workflow\n comment.approved = get_comment_approval(comment.email, request.user.is_authenticated)\n if comment.approved:\n messages.add_message(request, messages.SUCCESS, \"Your comment has been posted.\")\n else:\n messages.add_message(request, messages.INFO, \"Your comment has been held for moderation.\")\n\n comment.save()\n\n # Alert post author that comment needs moderation, or that it's been auto-published:\n send_comment_moderation_email(comment)", "def post(self, request, *args, **kwargs):\n serializer = CommentSerializer(data=request.data)\n post_pk = self.kwargs['post_pk']\n post = Post.objects.get(pk=post_pk)\n if serializer.is_valid():\n serializer.save(author=request.user, post=post)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def create_post():\r\n\r\n # Check for and reject empty username or whinge\r\n if not request.values.get(\"username\") or not request.values.get(\"whinge\"):\r\n print(\"Ignoring request to with empty username or whinge\")\r\n else:\r\n # Form data ok; add to DB\r\n con = get_db()\r\n con.execute(\"INSERT INTO posts (submitter,content,ts) VALUES (?,?,?);\",\r\n (\r\n request.values.get(\"username\"), # form field username -> DB column submitter\r\n request.values.get(\"whinge\"), # form field whinge -> DB column content\r\n time.time()\r\n )\r\n )\r\n con.commit()\r\n con.close()\r\n \r\n # TODO: Handle possibility of failed INSERT\r\n\r\n # Send them back to the main page\r\n return redirect(url_for(\"display_top\"))", "def post_detail(request, pk):\n post = get_object_or_404(Post, pk=pk)\n comments = post.comments.all\n post.views += 1\n post.save()\n \n if request.method == \"POST\":\n comment_form = CommentForm(request.POST)\n if comment_form.is_valid() and request.user:\n new_comment = comment_form.save(commit=False)\n new_comment.post = post\n new_comment.author = request.user\n new_comment.save()\n elif request.user:\n comment_form = CommentForm()\n \n return render(request, \"details.html\", {\"post\": post, \"comments\": comments, \"comment_form\": comment_form})", "def post(self):\n\n subject = self.request.get('subject')\n content = self.request.get('content')\n\n have_errors = False\n\n if not subject:\n error_subject = \"Please write down the subject\"\n have_errors = True\n if not content:\n error_content = \"Content is required\"\n have_errors = True\n\n if have_errors:\n self.render(\"newpost.html\",\n subject=subject,\n content=content,\n error_subject=error_subject,\n error_content=error_content,\n user=self.user)\n else:\n post = Post(parent=blog_key(),\n subject=subject,\n content=content,\n user=self.user)\n post.put()\n self.redirect('/blog/%s' % str(post.key().id()))", "def post(self, request, *args, **kwargs):\n del request, args, kwargs # unused\n self.enforce_xsrf(self.ACTION_ID)\n\n notes = []\n for param_key, value in self.request.POST.items():\n if param_key.startswith('note.'):\n note = model.Note.get(self.env.repo, param_key[5:])\n if note:\n if value in ['accept', 'flag']:\n note.reviewed = True\n if value == 'flag':\n note.hidden = True\n notes.append(note)\n db.put(notes)\n\n return django.shortcuts.redirect(self.build_absolute_path())", "def add_accomment(request, pk):\n\n actor = get_object_or_404(Actor, pk=pk)\n if request.method == \"POST\":\n form = ActorCommentForm(request.POST)\n if form.is_valid():\n comment = 
form.save(commit=False)\n comment.author = request.user\n comment.actor = actor\n comment.save()\n return redirect('../', pk=actor.pk)\n else:\n form = ActorCommentForm()\n return render(request, {'form': form})", "def post():\n\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(title=form.title.data,\n date=form.date.data,\n time_spent=form.time_spent.data,\n details=form.details.data,\n remember=form.remember.data)\n return redirect(url_for('index'))\n return render_template('new.html', form=form)", "def add_component_comment(request):\n try:\n person = get_person(request)\n\n print(\"Add components comment. Start.\")\n print(\"Add components comment. Request: \" + str(request))\n\n if request.method == \"POST\":\n # get values from form\n assessment_id = request.POST['assessment_id']\n element_id = None\n try:\n component_id = request.POST['component_id']\n except:\n component_id = None\n try:\n subcomponent_id = request.POST['subcomponent_id']\n except:\n subcomponent_id = None\n try:\n third_component_id = request.POST['third_component_id']\n except:\n third_component_id = None\n try:\n fourth_component_id = request.POST['fourth_component_id']\n except:\n fourth_component_id = None\n try:\n fifth_component_id = request.POST['fifth_component_id']\n except:\n fifth_component_id = None\n comment = request.POST['textComments']\n\n if fifth_component_id:\n element_id = fifth_component_id\n else:\n if fourth_component_id:\n element_id = fourth_component_id\n else:\n if fifth_component_id:\n element_id = fifth_component_id\n else:\n if third_component_id:\n element_id = third_component_id\n else:\n if subcomponent_id:\n element_id = subcomponent_id\n else:\n if component_id:\n element_id = component_id\n\n # get section and assessment\n component = Component.objects.get(id=element_id)\n\n print(\"Add components comment. Component: \" + component.name)\n print(\"Add components comment. 
comment: \" + comment)\n\n # create comment\n my_comment = AssessmentComponentComment()\n my_comment.assessment = Assessment.objects.get(id=assessment_id)\n my_comment.element = component\n my_comment.comment = comment\n my_comment.person = person\n my_comment.save()\n\n # trace action\n trace_action(TRACE_COMMENT, person, \"User added comment in component: \" + component.name)\n\n # send mail\n try:\n send_mail = request.POST['send_mail']\n t = Thread(target=send_comments_email, args=(my_comment.comment, \"Element: \" + component.name, person))\n t.start()\n # send_comments_email(my_comment.comment, section, person)\n except:\n # checkbox not set\n pass\n\n # redirect to section page\n url_to_redirect = \"/component_2/\" + assessment_id + SLASH\n\n if component_id:\n url_to_redirect += component_id + SLASH\n if subcomponent_id:\n url_to_redirect += subcomponent_id + SLASH\n if third_component_id:\n url_to_redirect += third_component_id + SLASH\n if fourth_component_id:\n url_to_redirect += fourth_component_id + SLASH\n if fifth_component_id:\n url_to_redirect += fifth_component_id + SLASH\n\n return redirect(url_to_redirect, context_instance=RequestContext(request))\n else:\n raise Exception(\"GET call to add new section comment\")\n except:\n if debug_is_on():\n raise\n else:\n return render_to_response(TEMPLATE_ERROR, {\"error_description\": sys.exc_info(), \"crpt_url\": CRPT_URL},\n context_instance=RequestContext(request))", "def post_comment(self):\n self.post_question()\n return self.client.post(\"api/v2/1/comments\", headers={\"Authorization\": \"{}\".format(self.token())}, data=json.dumps(self.comment), content_type='application/json')", "def add():\n if request.method == \"POST\":\n result = add_post(\n request.form[\"title\"],\n request.form[\"body\"]\n )\n flash(result)\n return redirect(url_for(\"show\"))\n else:\n return render_template(\"add.html\")", "def write_review(request):\n form = ReviewForm\n\n if request.method == 'POST':\n form_data = {\n 'title': request.POST['title'],\n 'description': request.POST['description'],\n 'author': request.POST['author'],\n }\n\n form = ReviewForm(form_data)\n\n if form.is_valid:\n form.save()\n messages.success(\n request, f'Review added successfully! 
Thanks!')\n else:\n messages.error(\n request, f'Upps something went wrong, please try again')\n\n context = {\n 'form': form\n }\n return render(request, 'reviews/write_review.html', context)", "def submit_textarea():\n print(\"--- submit ---\")\n post_content = request.form[\"content\"]\n author = request.form[\"author\"]\n\n post_object = {\n 'author': author,\n 'content': post_content,\n }\n\n # Submit a transaction\n new_tx_address = \"{}/new_transaction\".format(BASE_URL)\n\n requests.post(new_tx_address,\n json=post_object,\n headers={'Content-type': 'application/json'})\n\n return redirect('/')", "def add_comment(request, model, object_id):\n obj = get_object_or_404(model, id=object_id)\n if request.method == \"POST\":\n form = CommentForm(request.POST)\n if form.is_valid():\n Comment.objects.create(\n content_type = ContentType.objects.get_for_model(model),\n object_id = object_id,\n author = request.user,\n added_at = datetime.datetime.now(),\n comment = form.cleaned_data['comment']\n )\n if request.is_ajax():\n return JsonResponse({'success': True})\n else:\n return HttpResponseRedirect(obj.get_absolute_url())\n elif request.is_ajax():\n return JsonResponse({'success': False, 'errors': form.errors})\n else:\n form = CommentForm()\n\n # Let the appropriate fallback view take care of display/redisplay\n if model is Question:\n return question_comments(request, obj, form=form)\n elif model is Answer:\n return answer_comments(request, object_id, answer=obj, form=form)", "def new_post(request):\n if request.method != 'POST':\n # No data submitted; create a blank form.\n form = PostForm()\n else:\n # POST data submitted; process data.\n form = PostForm(data=request.POST)\n if form.is_valid():\n new_post = form.save(commit=False)\n new_post.owner = request.user\n new_post.save()\n return redirect('blogs:posts')\n\n # Display a blank or invalid form.\n context = {'form': form}\n return render(request, 'blogs/new_post.html', context)", "def signup(request, entry_id):\n harvest = get_object_or_404(Harvest, pk=entry_id)\n if request.method == 'POST':\n form = HarvestSignupForm(request.POST)\n if form.is_valid():\n username = form.cleaned_data['username']\n password = form.cleaned_data['password']\n user = authenticate(username=username, password=password)\n login(request, user)\n harvest = get_object_or_404(Harvest, pk=entry_id)\n volunteer = Volunteer.objects.get(user=user)\n harvest.volunteers.add(volunteer)\n print form.cleaned_data['comments']\n new_comment = Comment(volunteer=volunteer, comment=form.cleaned_data['comments'])\n new_comment.save()\n harvest.comment.add(new_comment)\n return HttpResponse(\"Thanks! 
You're signed up.\")\n else:\n form = HarvestSignupForm()\n return render_to_response(\"signup.html\", {'form':form, 'harvest':harvest}, context_instance=RequestContext(request))", "def post(self):\n subject = self.request.get('subject')\n post_content = self.request.get('post_content')\n submit = self.request.get('submit')\n cancel = self.request.get('cancel')\n user = self.get_active_user()\n created_by = int(user.key().id())\n post_id = self.request.get('post_id')\n\n if not user:\n self.redirect('/login')\n if post_id:\n post = Posts.get_by_id(int(post_id))\n else:\n post = None\n\n if cancel == \"cancel\":\n self.redirect('/%s' % str(post.key().id()))\n return\n if (post and post.submitter_id == user.key().id()) or not post:\n if submit == \"submit\" and subject and post_content:\n if post:\n post.subject = subject\n post.content = post_content\n post.put()\n else:\n post = Posts(subject=subject,\n content=post_content,\n submitter_id=created_by)\n post.put()\n self.redirect('/%s' % str(post.key().id()))\n else:\n self.render_newpage(user=user,\n subject=subject,\n post_content=post_content,\n error=\"\"\"Please provide both a subject and a\n post!\"\"\")\n else:\n self.redirect('/login')", "def comment(request, object_id):\n send_charob = request.user.char_ob\n rec_charob = get_character_from_ob(object_id)\n comment_txt = request.POST[\"comment\"]\n roster.create_comment(send_charob, rec_charob, comment_txt)\n return HttpResponseRedirect(reverse(\"character:sheet\", args=(object_id,)))", "def admincreate(object):\n if request.method == \"POST\":\n\n db = get_db()\n execute_string = 'INSERT INTO ' + object.title()\n\n if object == 'post':\n execute_string += '(title, content, authorId, categoryId) VALUES (\"' + request.form['title'] + '\", \"' + request.form[\"content\"] + '\", \"' + request.form[\"authorid\"] + '\", \"' + request.form[\"categoryid\"] + '\")'\n elif object == 'author':\n execute_string += '(name) VALUES (\"' + request.form['name'] + '\")'\n elif object == 'category':\n execute_string += '(name, description) VALUES (\"' + request.form['name'] + '\", \"' + request.form[\"description\"] + '\")'\n\n db.execute(execute_string)\n db.commit()\n return redirect(url_for(\"adminview\", object=object))\n\n return render_template(\"new.html\", object=object, item={})", "def new_post():\n form = PostForm()\n if form.validate_on_submit():\n post = Post(pub_date=datetime.date.today())\n post.title = form.title.data\n post.content = form.content.data\n post.slug = slugify(post.title)\n db.session.add(post)\n db.session.commit()\n return flask.redirect(flask.url_for(\n 'view_post',\n year=post.pub_date.year,\n month=post.pub_date.month,\n day=post.pub_date.day,\n slug=post.slug\n ))\n return flask.render_template('new.html', form=form)", "def add_comment() -> str:\n if \"markdown\" in request.form:\n if \"file\" in request.form:\n comment = Comment(\n markdown=request.form[\"markdown\"],\n submission_id=Submission.query.filter(\n Submission.filepath.contains(request.form[\"file\"])\n )\n .first()\n .id,\n cell_id=request.form[\"cell_id\"] if \"cell_id\" in request.form else None,\n user=UserModel.get_by_token(session[\"token\"]),\n )\n # If not cell_id this is a general comment\n comment.save()\n else:\n return \"Missing file or cell_id\", 400\n else:\n return \"Missing markdown\", 400\n\n comment_maker = get_template_attribute(\"_macros.html\", \"comment_block\")\n return comment_maker(comment)", "def comment(postid):\n context = {}\n if \"username\" not in flask.session:\n raise 
InvalidUsage('Forbidden', status_code=403)\n\n connection = insta485.model.get_db()\n cursor = connection.execute(\n \"SELECT * FROM comments WHERE postid=:id\", {'id': postid})\n comments = cursor.fetchall()\n ''' \n if bool(comments) is False:\n raise InvalidUsage('Not Found', status_code=404)\n '''\n # User\n logname = flask.session[\"username\"]\n\n if flask.request.method == 'POST':\n data = flask.request.get_json(force=True)\n context['text'] = data['text']\n context['owner'] = logname\n context['owner_show_url'] = '/u/' + logname + '/'\n connection.execute('INSERT INTO comments (owner, postid, text) \\\n VALUES (?,?,?)', (logname, postid, data['text']))\n cursor = connection.execute('SELECT last_insert_rowid() AS id')\n commentid_dic = cursor.fetchone()\n context['commentid'] = commentid_dic['id']\n context['postid'] = postid\n return flask.jsonify(**context), 201\n\n # url\n context[\"url\"] = flask.request.path\n context['comments'] = []\n\n for i in comments:\n one_comment = {}\n one_comment['commentid'] = i['commentid']\n one_comment['owner'] = i['owner']\n one_comment['owner_show_url'] = '/u/' + i['owner'] + '/'\n one_comment['postid'] = postid\n one_comment['text'] = i['text']\n context['comments'].append(one_comment)\n\n return flask.jsonify(**context)", "def post(self, request):\n\n # crear el formulario con los datos del post\n form = PostForm(request.POST)\n\n if form.is_valid():\n #crea el post\n post = form.save()\n\n #generar mensaje de exito\n msg = \"Post creado con éxito\"\n form = PostForm()\n else:\n msg = \"Ha ocurrido un error al guardar el post\" \\\n\n\n # renderiza la plantilla con el formulario\n context = {\n \"form\": form,\n \"msg\": msg\n }\n\n # renderiza y devuelve la plantilla\n return render(request, 'blogs/new-post.html', context)", "def post(self):\n teacher = self.request.get(\"teacher\")\n student = self.request.get(\"student\")\n lessondate = self.request.get(\"lessondate\")\n reason = self.request.get(\"reason\")\n comment = self.request.get(\"comment\")\n\n if teacher and student and lessondate and reason:\n\n # create a new Post object and store it in the database !!!!!!!!!!!!!!!\n loglesson = Teacher(\n teacher=teacher,\n student=student,\n lessondate=lessondate,\n reason = reason,\n comment = comment)\n loglesson.put()\n\n # get the id of the new post, so we can render the post's page (via the permalink)\n id = loglesson.key().id()\n self.redirect(\"/loglesson/%s\" % id)\n else:\n error = \"Please include a teacher, student, lesson date, reason and comment!\"\n self.render_form(teacher, student, lessondate,reason, comment, error)", "def add_awcomment(request, pk):\n\n award = get_object_or_404(Award, pk=pk)\n if request.method == \"POST\":\n form = AwardCommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.award = award\n comment.save()\n return redirect('../', pk=award.pk)\n else:\n form = AwardCommentForm()\n return render(request, {'form': form})", "def post(self):\n\n title = self.request.get(\"title\")\n blogPost = self.request.get(\"blogPost\")\n author = self.request.cookies.get('name')\n\n if title and blogPost:\n\n bp = Blogposts(parent=blog_key(), title=title,\n blogPost=blogPost, author=check_secure_val(author))\n\n bp.put()\n\n self.redirect('/%s' % str(bp.key.integer_id()))\n else:\n error = \"Please submit both a title and a blogpost!\"\n self.render(\"newpost.html\", title=title,\n blogPost=blogPost, error=error)", "def add_post():\n\tt_id = 
db.survey.insert(\n\t\tquestion = request.vars.question,\n\t\tuser_email = request.vars.email,\n\t\tuser_name = get_user_name_from_email(request.vars.email),\n\t\topt1 = request.vars.opt1,\n\t\topt2 = request.vars.opt2,\n\t\topt3 = request.vars.opt3,\n\t\topt4 = request.vars.opt4,\n\t\t#created_on_human = humanize.naturaltime(datetime.datetime.utcnow()),\n\n\t)\n\tt = db.survey(t_id)\n\treturn response.json(dict(post=t))", "def submit_textarea():\n\n post_content = request.form[\"content\"]\n author = request.form[\"author\"]\n\n post_object = {\n 'author': author,\n 'content': post_content\n }\n\n # Submit a tx\n new_tx_address = f\"{CONNECTED_NODE_ADDRESS}/new_transaction\"\n\n request.post(new_tx_address,\n json=post_object, \n headers={'Content-type': 'application/json'})\n # return to homepage\n return redirect('/')", "def _preview(request, context_processors, extra_context, form_class=ThreadedCommentForm):\r\n _adjust_max_comment_length(form_class)\r\n form = form_class(request.POST or None)\r\n context = {\r\n 'next' : _get_next(request),\r\n 'form' : form,\r\n }\r\n if form.is_valid():\r\n new_comment = form.save(commit=False)\r\n context['comment'] = new_comment\r\n else:\r\n context['comment'] = None\r\n return render_to_response(\r\n 'threadedcomments/preview_comment.html',\r\n extra_context, \r\n context_instance = RequestContext(request, context, context_processors)\r\n )", "def test_add_new_comment(self):\n\n result = self.client.post(\"/add_new_comment/2\",\n data={\"user_id\": 25, \"park_id\": \"2\", \"content\": \"My dog loves this park!\"},\n follow_redirects=True)\n self.assertIn(\"<h1 class=\\\"title\\\">Bark Park!</h1>\", result.data)", "def new_answer(request):\n if request.method == \"POST\":\n author = request.POST.get(\"author\")\n content = request.POST.get(\"content\")\n date = datetime.datetime.now()\n answer_tuple = {\n \"author\": author,\n \"content\": content,\n \"votes\": 0,\n \"topic_index\": request.matchdict[\"url\"],\n \"answer_date\": date.strftime(\"%d/%m/%Y\"),\n }\n request.db[\"answer\"].insert(answer_tuple)\n return HTTPFound(location=\"/topic/\" + request.matchdict[\"url\"])\n return HTTPFound(location=\"/\")", "def post_comment(request, send_signal=True):\n # Based on variables passed in we get the comment the user is attempting to create/edit\n try:\n comment, previous_version = get_comment(request)\n except InvalidCommentException as e:\n transaction.rollback()\n return JsonResponse({\n 'ok': False,\n 'error_message': str(e),\n })\n\n # Check if the user doesn't pass the appropriate permission check (on the parent_object)...\n # We call this on the parent comment because the comment itself may not have been saved yet (can't call .get_root on it)\n # TODO: Fix this for root comment? 
(no parent)\n parent_comment = comment.parent\n tree_root = parent_comment.get_root()\n parent_object = tree_root.content_object\n if not user_can_post_comment(request, comment):\n transaction.set_rollback(True)\n return JsonResponse({\n 'ok': False,\n 'error_message': \"You do not have permission to post this comment.\",\n })\n\n # Check to make sure we are not trying to save a comment \"deeper\" than we are allowed...\n if is_past_max_depth(comment):\n transaction.set_rollback(True)\n return JsonResponse({\n 'ok': False,\n 'error_message': \"You cannot respond to this comment.\",\n })\n\n # If the comment object (NOT the message) hasn't been saved yet...\n if comment._state.adding == True:\n comment = add_comment(comment)\n\n # Now that we have a comment object, we get a 'lock' on it to prevent a race condition\n try:\n lock_comment(comment)\n except DatabaseError:\n transaction.set_rollback(True)\n # Someone is already trying to update this comment, so we need to return an appropriate error\n return JsonResponse({\n 'ok': False,\n 'error_message': \"Someone else is currently editing this comment. Please refresh your page and try again.\",\n })\n\n # Now we know we have sole access to the comment object at the moment so we need to check if we are editing the most recent version\n if not_most_recent_version(comment, previous_version):\n transaction.set_rollback(True)\n return JsonResponse({\n 'ok': False,\n 'error_message': \"You are not editing the most recent version of this comment. Please refresh your page and try again.\",\n })\n\n # Everything has checked out, so we save the new version and return the appropriate response\n version_form, new_version = create_new_version(request, comment)\n if version_form.is_valid():\n comment_template, kwargs = get_template(request, comment, parent_object, tree_root, new_version, previous_version, send_signal=send_signal)\n\n return JsonResponse({\n 'ok': True,\n 'html_content': loader.render_to_string(comment_template, context=kwargs)\n })\n else:\n transaction.set_rollback(True)\n return JsonResponse({\n 'ok': False,\n 'error_message': \"There were errors in your submission. 
Please correct them and resubmit.\",\n })", "def post(self):\n subject = self.request.get('subject')\n content = self.request.get('content')\n\n # if user enter good subject and content, redirect them to new post page\n if subject and content:\n p = Post(parent = blog_key(), subject = subject, content = content)\n p.put() # store the post element into database\n self.redirect('/blog/%s' % str(p.key().id()))\n # otherwise, render an error page \n else:\n error = \"subject and content, please!\"\n self.render(\"newpost.html\", subject=subject, content=content, error=error)", "def _create_comment(request, course_key, thread_id=None, parent_id=None):\r\n assert isinstance(course_key, CourseKey)\r\n post = request.POST\r\n\r\n if 'body' not in post or not post['body'].strip():\r\n return JsonError(_(\"Body can't be empty\"))\r\n\r\n course = get_course_with_access(request.user, 'load', course_key)\r\n if course.allow_anonymous:\r\n anonymous = post.get('anonymous', 'false').lower() == 'true'\r\n else:\r\n anonymous = False\r\n\r\n if course.allow_anonymous_to_peers:\r\n anonymous_to_peers = post.get('anonymous_to_peers', 'false').lower() == 'true'\r\n else:\r\n anonymous_to_peers = False\r\n\r\n comment = cc.Comment(\r\n anonymous=anonymous,\r\n anonymous_to_peers=anonymous_to_peers,\r\n user_id=request.user.id,\r\n course_id=course_key.to_deprecated_string(),\r\n thread_id=thread_id,\r\n parent_id=parent_id,\r\n body=post[\"body\"]\r\n )\r\n comment.save()\r\n if post.get('auto_subscribe', 'false').lower() == 'true':\r\n user = cc.User.from_django_user(request.user)\r\n user.follow(comment.thread)\r\n if request.is_ajax():\r\n return ajax_content_response(request, course_key, comment.to_dict())\r\n else:\r\n return JsonResponse(utils.safe_content(comment.to_dict()))", "def post(self, req):\n error_messages = []\n success_message = ''\n\n # Creamos owner y se lo pasamos al form con un objeto pre-establecido\n post_with_owner = Post()\n post_with_owner.owner = req.user\n post_with_owner.blog = Blog.objects.filter(owner=req.user)[0]\n\n form = PostCreateForm(req.POST, instance=post_with_owner)\n if form.is_valid():\n\n new_post = form.save()\n form = PostCreateForm()\n success_message = u'Post guardado con éxito! 
'\n success_message += u'<a href=\"{0}\">'.format(reverse('post_detail', args=[req.user.username, new_post.pk]))\n success_message += u'(ver post)</a>'\n else:\n error_messages.append(u'Formulario incompleto.')\n\n context = {\n 'form': form,\n 'success_message': success_message\n }\n return render(req, 'posts/new_post.html', context)", "def post(request):\n if request.method == \"POST\":\n post = Post()\n post.content = request.POST['content']\n post.author = request.user\n post.save()\n return HttpResponseRedirect(reverse(\"index\"))", "def add_comment(request, pk, redirect_user):\n\n if request.is_ajax():\n create_comment_and_status_notification(request, pk)\n if redirect_user == 'profile':\n return redirect('profile')\n if redirect_user == 'news_feed':\n return redirect('news_feed')", "def post(self):\n title = self.request.get(\"title\")\n body = self.request.get(\"body\")\n\n if title and body:\n\n # create a new Post object and store it in the database\n post = Post(\n title=title,\n body=body\n )\n post.put()\n\n # get the id of the new post, so we can render the post's page (via the permalink)\n id = post.key().id()\n self.redirect(\"/blog/%s\" % id)\n else:\n error = \"we need both a title and a body!\"\n #self.render_form(title, body, error)\n self.render(\"newpost.html\", title, body, error)", "def post(self, request):\n\n # crear el formulario con los datos del POST\n blog_with_user = Blog(owner=request.user)\n form = BlogForm(request.POST, instance=blog_with_user)\n\n if form.is_valid():\n #crea el post\n blog = form.save()\n\n #generar mensaje de exito\n msg = \"Blog creado con éxito\"\n\n # limpiamos el formulario creando uno vacío para pasar a la plantilla\n form = BlogForm()\n else:\n msg = \"Ha ocurrido un error al guardar el blog\" \\\n\n\n # renderiza la plantilla con el formulario\n context = {\n \"form\": form,\n \"msg\": msg\n }\n\n # renderiza y devuelve la plantilla\n return render(request, 'blogs/new-blog.html', context)", "def post(self, request, slug):\n article = ArticleInst.fetch(slug)\n comment = request.data.get('comment', {})\n\n serializer = self.serializer_class(data=comment)\n serializer.is_valid(raise_exception=True)\n status_ = status.HTTP_201_CREATED\n\n try:\n Comment.objects.get(\n article=article,\n body=comment.get('body')\n )\n except Comment.DoesNotExist:\n serializer.save(author=request.user, article=article)\n resp = {'message': 'Comment created'}\n resp['data'] = serializer.data\n\n else:\n resp = {'message': \"Seems you've posted an exact comment before\"}\n status_ = status.HTTP_409_CONFLICT\n return Response(data=resp,\n status=status_\n )", "def about(request):\n data = {}\n if request.method == \"POST\":\n about = AboutForm(request.POST)\n if about.is_valid():\n about.save()\n data['success']=1\n data['message']='Comment successfully added'\n return JsonResponse(data)\n else:\n data['success']=0\n data['message']='Error while adding comment'\n return JsonResponse(data)\n \n else:\n about = AboutForm()\n return render(request, 'login/addcomment.html',\n {'form':about})", "def create():\n if request.method == 'POST':\n title = request.form['title']\n body = request.form['body']\n error = None\n\n if not title:\n error = 'Title is required.'\n\n if error is not None:\n flash(error)\n else:\n db = get_db()\n db.execute(\n 'INSERT INTO post (title, body, author_id)'\n ' VALUES (?, ?, ?)',\n (title, body, g.user['id'])\n )\n db.commit()\n return redirect(url_for('blog.index'))\n\n return render_template('blog/create.html')", "def 
save_comment(self):\n self.save()", "def annonceDetailView(request, pk):\n details = Annonce.objects.get(pk=pk)\n details.view_annonce += 1\n details.save()\n\n # comment = Comment.objects.filter(\n # for_post=details,\n # reply=None).order_by('-create_content')\n # is_favorite = False\n # if details.favorite.filter(id=request.user.id).exists():\n # is_favorite = True\n # else:\n # print('favorite is now False')\n # if request.method == \"POST\":\n # print(request.POST)\n # c_form = commentForm(request.POST or None)\n # if c_form.is_valid():\n # content = request.POST.get('content')\n # reply_id = request.POST.get('comment-id')\n # comment_qs = None # reply is null\n # if reply_id:\n # comment_qs = Comment.objects.get(id=reply_id) \n # comment_use = Comment(\n # commented_by=request.user,\n # for_post=details,\n # content=content,\n # reply=comment_qs\n # )\n # comment_use.save()\n # else:\n # c_form = commentForm()\n # print(f'send {is_favorite}')\n context = {\n # 'is_favorite': is_favorite,\n 'details': details,\n \n # 'comment': comment,\n # 'commentform': c_form,\n }\n # if request.is_ajax():\n # print('Ajax is true')\n # html = render_to_string(\n # 'annonce/comment.html',\n # context, request=request\n # )\n # return JsonResponse({'form': html})\n return render(request, 'Annonce/detail.html', context)", "def add_comment_to_announcement():\n vars = request.vars\n logger.info(\"vars.comment_text: %r\" % (vars.comment_text))\n comment_id = db.Comments.insert(\n comment_text = vars.comment_text,\n score = 1,\n ann_id= vars.ann_id,\n )\n comment = db.Announcements(comment_id)\n\n logger.info(\"api:add_comment_to_announcement ==> comment= %r\" % (comment))\n\n return response.json(comment)", "def snippet_new(request, template_name='dpaste/snippet_new.html'):\n if request.method == \"POST\":\n snippet_form = SnippetForm(data=request.POST, request=request)\n if snippet_form.is_valid():\n new_snippet = snippet_form.save()\n url = new_snippet.get_absolute_url()\n return HttpResponseRedirect(url)\n else:\n snippet_form = SnippetForm(request=request)\n\n template_context = {\n 'snippet_form': snippet_form,\n 'lexer_list': LEXER_LIST,\n 'is_new': True,\n }\n\n return render_to_response(\n template_name,\n template_context,\n RequestContext(request)\n )", "def render_POST(self, request):", "def get(self, request):\n form = PostForm()\n context = {\n 'form': form,\n 'success_message': ''\n }\n return render(request, 'posts/new-post.html', context)", "def create(thing):\n fields = {}\n errors = []\n\n for col in thing.cols:\n new[col.field_name] = request.form.get(col.field_name)\n if col.required and not new[col.field_name]:\n errors.append('%s cannot be empty' % col.human_name)\n\n if errors:\n for e in errors:\n flash(e)\n add_template_variable('thing', thing)\n add_template_variable('fields', fields)\n return my_render_template('generic/create_post.html')\n\n # insert into database\n\n db = get_db()\n cursor = db.cursor()\n\n # create the two strings we use in the query\n field_names = \"'\" + \"', '\".join(thing.field_names) + \"'\"\n question_marks = \", \".join(map(lambda x: '?', thing.field_names.count() ))\n\n cursor.execute(\"insert into posts (%s) values (%s)\" % (field_names, question_marks), (title, body))\n db.commit()\n new_id = cursor.lastrowid\n\n # show new post to the user\n flash(\"You made a new %s\" % thing.human_name)\n return redirect(url_for('show_one', id_=new_id))", "def create_post():\n\n #Get prompt id\n prompt_id = request.form.get('prompt_id')\n\n # Get post text\n 
post_text = request.form.get('user_post')\n\n # Create post timestamp\n created_at = datetime.now()\n user_facing_date = created_at.strftime(\"%B %d, %Y\")\n\n # Save post and related data to database\n post = crud.create_post(session['user_id'], prompt_id, post_text, session['lat'], session['lng'], session['user_facing_location'], created_at)\n\n return render_template('post_data.html', post=post, user_facing_date=user_facing_date)", "def ajax_form_valid(self, form):\n\n try:\n comment = form.save(self.request)\n num_comments = models.CommentModel.objects.permitted().count()\n\n return core_utils.respond_with_json({\n 'success': True,\n 'comment': render_to_string(\n self.template_name,\n RequestContext(self.request, {'comment': comment})\n ),\n 'num_comments': num_comments\n })\n except exceptions.RapidCommentingError as e:\n return core_utils.respond_with_json({\n 'success': False,\n 'reason': e.value\n })", "def newreply(request, post_id):\n if not request.user.is_authenticated():\n return redirect('/login/?next=%s' % request.path)\n else:\n\n reply = Reply.objects.create(\n creator = request.user,\n created = datetime.datetime.now(),\n body = request.POST.get('mensaje'),)\n post = Post.objects.get(id = post_id)\n post.reply.add(reply) \n return redirect('/home/')", "def submit_comment(book_id):\n \n #Information for inserting\n score = request.form.get(\"score\")\n comment = request.form.get(\"comment\")\n\n if score is None or comment is None:\n return render_template(\"error.html\",message=\"Please submit the complete information.\")\n\n #Inserte a new review\n db.execute(\"INSERT INTO reviewer (id_book, id_user, comment, score_user) VALUES (:id_book, :id_user, :comment, :score_user)\",\n {\"id_book\":book_id, \"id_user\":session[\"user_id\"], \"comment\": comment, \"score_user\": score})\n \n db.commit()\n\n #Get the info of the book\n book = db.execute(\"SELECT * FROM book WHERE id = :book_id\",{\"book_id\": book_id}).fetchone()\n if book is None:\n return render_template(\"error.html\", message = \"No such Book.\")\n\n #Get the reviews joined with the name of the user\n stmt = \"SELECT user_library.*, reviewer.* FROM user_library INNER JOIN reviewer ON user_library.id=reviewer.id_user WHERE id_book = :book_id\"\n reviews = db.execute(stmt,{\"book_id\": book_id}).fetchall()\n\n #Get the user_review info\n user_review = db.execute(\"SELECT * FROM reviewer WHERE id_book = :book_id AND id_user = :user_id\",\n {\"book_id\": book_id, \"user_id\": session[\"user_id\"]}).fetchone()\n\n #If this info not exist we could add a comment, else we can not.\n is_commented = True\n if user_review is None:\n is_commented = False\n\n #Insert a new score if a new comment is introduced\n average_score = db.execute(\"SELECT AVG(score_user) FROM reviewer WHERE id_book = :book_id\",{\"book_id\":book_id}).fetchone()\n average_score = average_score.items()\n average_score = average_score[0]\n average_score = float(average_score[1])\n\n db.execute(\"UPDATE book SET score = :average_score WHERE id = :book_id\", {\"average_score\":average_score, \"book_id\": book_id}) \n db.commit()\n\n #Get the info of the book\n book = db.execute(\"SELECT * FROM book WHERE id = :book_id\",{\"book_id\": book_id}).fetchone()\n if book is None:\n return render_template(\"error.html\", message = \"No such Book.\")\n\n #Proccess for rating count of Goofreaders\n goodreader_info = requests.get(\"https://www.goodreads.com/book/review_counts.json\", params={\"key\": KEY, \"isbns\": book.isbn })\n goodreader_info = 
goodreader_info.json()\n goodreader_info = goodreader_info[\"books\"]\n\n average_rating = goodreader_info[0][\"average_rating\"]\n ratings_counts = goodreader_info[0][\"ratings_count\"]\n\n return render_template(\"book_info.html\",book=book, reviews = reviews, is_commented = is_commented\n , average_rating = average_rating, ratings_counts = ratings_counts )", "def _add_comment():\r\n per_page = current_app.config['FLASKY_ANSWERS_PER_PAGE']\r\n id = request.args.get('answer_id')\r\n answer = Answer.query.get_or_404(id)\r\n comment =request.args.get('comment')\r\n answers = Answer.query.get_or_404(id)\r\n page = 1\r\n result= False\r\n if current_user.can(Permission.COMMENT):\r\n comment = Comment(body=comment,\r\n author=current_user._get_current_object(),\r\n answer_id=id)\r\n db.session.add(comment)\r\n db.session.commit()\r\n page = (answer.comments.count()-1)/per_page + 1\r\n result=True\r\n pagination = Comment.query.order_by(Comment.timestamp).filter_by(answer_id=id).paginate(\r\n page,per_page=per_page,error_out=False\r\n )\r\n macro_comment = get_template_attribute(\"_comments.html\", \"render_comments\")\r\n macro_page = get_template_attribute(\"_page.html\", \"render_page\")\r\n comments = pagination.items\r\n return jsonify({'result': result,\r\n 'comment_html': macro_comment(comments),\r\n 'page_html': macro_page(pagination),\r\n 'comments_timestamp': [comment.timestamp for comment in comments],\r\n 'comments_id': [comment.id for comment in comments]\r\n })", "def create_post(request):\n if request.method == 'POST':\n title = request.POST['title']\n content = request.POST['content']\n user_id = request.POST['author_id']\n category = request.POST['category']\n\n slug = \"-\".join(list(map(lambda word: word.lower(), title.split())))\n author = User.objects.get(id=int(user_id))\n\n # save info in models\n post = Post()\n post.author = author\n post.category = category\n post.title = title\n post.content = content\n post.slug = slug\n post.save()\n return redirect('post')\n\n return render(request, 'posts/create_post.html')", "def handle_comments(self):\r\n comments = Comment.objects.all()\r\n for c in comments:\r\n new = ThreadedComment(\r\n content_type = c.content_type,\r\n object_id = c.object_id,\r\n comment = c.comment,\r\n user = c.user,\r\n date_submitted = c.submit_date,\r\n date_modified = c.submit_date,\r\n date_approved = c.submit_date,\r\n is_public = c.is_public,\r\n ip_address = c.ip_address,\r\n is_approved = not c.is_removed\r\n )\r\n new.save()", "def add():\n if request.method == 'GET':\n return render_template('add.html')\n elif request.method == 'POST':\n data = {}\n for key in ('h', 'name', 'summary', 'content', 'published', 'updated', 'category',\n 'slug', 'location', 'in-reply-to', 'repost-of', 'syndication'):\n data[key] = None\n\n for title in request.form:\n data[title] = request.form[title]\n\n for title in request.files:\n data[title] = request.files[title].read()\n\n try:\n photo = request.files['photo']\n except:\n photo = None\n\n for key in data:\n if data[key] == \"\":\n data[key] = None\n\n data['published'] = datetime.now()\n\n location = create_entry(data, image=data['photo'], g=g)\n\n if data['in-reply-to']:\n send_mention('http://' + DOMAIN_NAME + '/e/'+location, data['in-reply-to'])\n\n if request.form.get('twitter'):\n t = Timer(30, bridgy_twitter, [location])\n t.start()\n\n if request.form.get('facebook'):\n t = Timer(30, bridgy_facebook, [location])\n t.start()\n return redirect(location)\n else:\n return redirect('/404'), 404", "def 
post(self, request):\n pass", "def free_comment(request, content_type=None, object_id=None, edit_id=None, parent_id=None, add_messages=False, ajax=False, model=FreeThreadedComment, form_class=FreeThreadedCommentForm, context_processors=[], extra_context={}):\r\n if not edit_id and not (content_type and object_id):\r\n raise Http404 # Must specify either content_type and object_id or edit_id\r\n if \"preview\" in request.POST:\r\n return _preview(request, context_processors, extra_context, form_class=form_class)\r\n if edit_id:\r\n instance = get_object_or_404(model, id=edit_id)\r\n else:\r\n instance = None\r\n _adjust_max_comment_length(form_class)\r\n form = form_class(request.POST, instance=instance)\r\n if form.is_valid():\r\n new_comment = form.save(commit=False)\r\n if not edit_id:\r\n new_comment.ip_address = request.META.get('REMOTE_ADDR', None)\r\n new_comment.content_type = get_object_or_404(ContentType, id = int(content_type))\r\n new_comment.object_id = int(object_id)\r\n if model == ThreadedComment:\r\n new_comment.user = request.user\r\n if parent_id:\r\n new_comment.parent = get_object_or_404(model, id = int(parent_id))\r\n new_comment.save()\r\n if model == ThreadedComment:\r\n if add_messages:\r\n request.user.message_set.create(message=\"Your message has been posted successfully.\")\r\n else:\r\n request.session['successful_data'] = {\r\n 'name' : form.cleaned_data['name'],\r\n 'website' : form.cleaned_data['website'],\r\n 'email' : form.cleaned_data['email'],\r\n }\r\n if ajax == 'json':\r\n return JSONResponse([new_comment,])\r\n elif ajax == 'xml':\r\n return XMLResponse([new_comment,])\r\n else:\r\n return HttpResponseRedirect(_get_next(request))\r\n elif ajax==\"json\":\r\n return JSONResponse({'errors' : form.errors}, is_iterable=False)\r\n elif ajax==\"xml\":\r\n template_str = \"\"\"\r\n<errorlist>\r\n {% for error,name in errors %}\r\n <field name=\"{{ name }}\">\r\n {% for suberror in error %}<error>{{ suberror }}</error>{% endfor %}\r\n </field>\r\n {% endfor %}\r\n</errorlist>\r\n \"\"\"\r\n response_str = Template(template_str).render(Context({'errors' : zip(form.errors.values(), form.errors.keys())}))\r\n return XMLResponse(response_str, is_iterable=False)\r\n else:\r\n return _preview(request, context_processors, extra_context, form_class=form_class)", "def editor_save():\n markdown = request.form.get('markdown')\n html = request.form.get('html')\n title = request.form.get('title')\n if 'post_id' in request.form:\n post_id = int(request.form.get('post_id'))\n edit_post = Post.query.get(post_id)\n edit_post.markdown = markdown\n edit_post.html = html\n edit_post.title = title\n db.session.add(edit_post)\n db.session.commit()\n return jsonify(saved_success=True, new_post=None, post_id=None)\n else:\n new_post = Post(markdown=markdown, html=html, title=title);\n db.session.add(new_post)\n db.session.commit()\n return jsonify(saved_success=True, new_post=True, post_id=new_post.id)", "def render_form(self, title=\"\", body=\"\", error=\"\"):\n self.render(\"newpost.html\", title=title, body=body, error=error)", "def add_feedback(username):\n form = addFeedback()\n\n if \"user\" not in session: \n flash(\"Not logged in\")\n return redirect('/login')\n \n elif form.validate_on_submit():\n title = form.title.data\n content = form.content.data\n recipient = username\n user = session[\"user\"]\n\n new_feedback = Feedback(title=title, content=content, recipient=recipient, user=user)\n db.session.add(new_feedback)\n db.session.commit()\n\n flash(\"Added feedback\")\n 
return redirect(f'/users/{username}')\n else:\n return render_template(\"feedback_form.html\", form=form, username=username)", "def create_post(request):\n\n # modified from: http://django-angular.readthedocs.org/en/latest/angular-model-form.html\n\n # get data\n in_data = getRequestData(request)\n\n try:\n # save in database\n # note that in_data.mytitle throws an error while in_data.get('mytitle') works smoothly\n post = Thread(pub_date = datetime.datetime.now(pytz.timezone('US/Eastern')), username = in_data.get('myusername'), title = in_data.get('mytitle'), description = in_data.get('mydescription'))\n post.save()\n except:\n return HttpResponseBadRequest('Error saving to database!')\n\n return JsonResponse(in_data)", "def _comment():\r\n id = request.args.get('answer_id')\r\n per_page=current_app.config['FLASKY_ANSWERS_PER_PAGE']\r\n answer = Answer.query.get_or_404(id)\r\n page = request.args.get('page', type=int, default=1)\r\n comment =request.args.get('comment')\r\n if current_user.can(Permission.COMMENT) and comment is not None:\r\n comment = Comment(body=comment,\r\n author=current_user._get_current_object(),\r\n answer_id=id)\r\n db.session.add(comment)\r\n db.session.commit()\r\n page = -1\r\n if page == -1:\r\n page = answer.comments.count() / per_page\r\n pagination = Comment.query.order_by(Comment.timestamp).filter_by(answer_id=id).paginate(\r\n page,per_page=per_page,error_out=False\r\n )\r\n macro_comment = get_template_attribute(\"_comments.html\", \"render_comments\")\r\n macro_page = get_template_attribute(\"_page.html\", \"render_page\")\r\n comments = pagination.items\r\n return jsonify({'result': True,\r\n 'comment_html': macro_comment(comments),\r\n 'page_html':macro_page(pagination),\r\n 'comments_timestamp':[comment.timestamp for comment in comments],\r\n 'comments_id':[comment.id for comment in comments]\r\n })", "def test_editing_post_comment(self):\n\n form_data = {\"comment\": \"Here's my new comment!\"}\n new_comment = edit_post_comment(1, form_data)\n\n self.assertIn(\"my new comment\", new_comment.comment_body)", "def feature_comment(request, id=id):\n feature = get_object_or_404(Feature, id=id)\n comment_form = FeatureCommentForm(request.POST, request.FILES)\n if comment_form.is_valid():\n comment = comment_form.save(commit=False)\n comment.user = request.user\n comment.feature = feature\n comment_form.save()\n return redirect(feature_detail, id)", "def add_comment(request):\n \n \"\"\"\n device type\n \"\"\"\n is_Mobile = dmb.process_request(request)\n \n \"\"\"\n User validation \n \"\"\"\n \n \n if request.user.is_anonymous():\n return comm.redirect_login_path(is_Mobile, request)\n result = dayword_comment(request, 1) \n return HttpResponse(json.dumps(result), content_type='application/json')" ]
[ "0.7209647", "0.7182928", "0.70809394", "0.70615923", "0.7031212", "0.70248646", "0.70202553", "0.6970048", "0.6925778", "0.69036263", "0.68516064", "0.68059367", "0.6770113", "0.6715247", "0.6710386", "0.67016625", "0.6607484", "0.66067475", "0.6575949", "0.65087175", "0.6499307", "0.6476845", "0.64617276", "0.637791", "0.6375545", "0.6353876", "0.6315487", "0.6289599", "0.62716705", "0.6266605", "0.62648076", "0.6261812", "0.6259241", "0.62570554", "0.625627", "0.6253319", "0.6239593", "0.6234117", "0.62293494", "0.6222089", "0.62117875", "0.6206138", "0.6204796", "0.6200007", "0.61994505", "0.61745656", "0.61737365", "0.6167005", "0.61664474", "0.61655486", "0.61279416", "0.612402", "0.61196035", "0.61087453", "0.6105337", "0.6104828", "0.6095142", "0.6086812", "0.6054424", "0.60527503", "0.6052165", "0.6046809", "0.6039737", "0.603921", "0.60349", "0.60330176", "0.6030738", "0.6029808", "0.6022376", "0.60217243", "0.6020527", "0.60122335", "0.599389", "0.5989844", "0.5988574", "0.59855855", "0.5981686", "0.5971458", "0.596457", "0.59589964", "0.59544003", "0.59532696", "0.5935298", "0.5935246", "0.59105366", "0.59048736", "0.590446", "0.5897863", "0.5897683", "0.5886206", "0.5885844", "0.5881842", "0.5878892", "0.5854371", "0.5851145", "0.58455896", "0.58395094", "0.582897", "0.5825102", "0.58209664" ]
0.70013684
7
View function that handles inserting new/editing previously existing comments via Ajax
def post_comment(request, send_signal=True):
    # Based on variables passed in we get the comment the user is attempting to create/edit
    try:
        comment, previous_version = get_comment(request)
    except InvalidCommentException as e:
        transaction.rollback()
        return JsonResponse({
            'ok': False,
            'error_message': str(e),
        })

    # Check if the user doesn't pass the appropriate permission check (on the parent_object)...
    # We call this on the parent comment because the comment itself may not have been saved yet (can't call .get_root on it)
    # TODO: Fix this for root comment? (no parent)
    parent_comment = comment.parent
    tree_root = parent_comment.get_root()
    parent_object = tree_root.content_object
    if not user_can_post_comment(request, comment):
        transaction.set_rollback(True)
        return JsonResponse({
            'ok': False,
            'error_message': "You do not have permission to post this comment.",
        })

    # Check to make sure we are not trying to save a comment "deeper" than we are allowed...
    if is_past_max_depth(comment):
        transaction.set_rollback(True)
        return JsonResponse({
            'ok': False,
            'error_message': "You cannot respond to this comment.",
        })

    # If the comment object (NOT the message) hasn't been saved yet...
    if comment._state.adding == True:
        comment = add_comment(comment)

    # Now that we have a comment object, we get a 'lock' on it to prevent a race condition
    try:
        lock_comment(comment)
    except DatabaseError:
        transaction.set_rollback(True)
        # Someone is already trying to update this comment, so we need to return an appropriate error
        return JsonResponse({
            'ok': False,
            'error_message': "Someone else is currently editing this comment. Please refresh your page and try again.",
        })

    # Now we know we have sole access to the comment object at the moment so we need to check if we are editing the most recent version
    if not_most_recent_version(comment, previous_version):
        transaction.set_rollback(True)
        return JsonResponse({
            'ok': False,
            'error_message': "You are not editing the most recent version of this comment. Please refresh your page and try again.",
        })

    # Everything has checked out, so we save the new version and return the appropriate response
    version_form, new_version = create_new_version(request, comment)
    if version_form.is_valid():
        comment_template, kwargs = get_template(request, comment, parent_object, tree_root, new_version, previous_version, send_signal=send_signal)
        return JsonResponse({
            'ok': True,
            'html_content': loader.render_to_string(comment_template, context=kwargs)
        })
    else:
        transaction.set_rollback(True)
        return JsonResponse({
            'ok': False,
            'error_message': "There were errors in your submission. Please correct them and resubmit.",
        })
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_comments():\n potential_deal_id = int(request.form.get(\"id\"))\n action = request.form.get(\"action\")\n if action.lower() == \"none\":\n action = None\n comments = request.form.get(\"comments\")\n db_handler = DBHandler()\n db_handler.update_by_id(potential_deal_id, action, comments)\n # return redirect(url_for(\"home\"))\n return jsonify({\"success\": True}), 200", "def on_comment(self, request, board_id):\n error = None\n if request.method == 'POST':\n creator = request.form['creator']\n comment = request.form['comment']\n if len(creator) > 30:\n error = 'creator name too long'\n elif len(comment) > 50:\n error = 'comment too long'\n else:\n self.insert_comment(request, board_id)\n return redirect('/board:' + board_id)\n return self.render_template('comment.html', error=error)", "def comments_new():\n comment = {\n \"title\": request.form.get(\"title\"),\n \"content\": request.form.get(\"content\"),\n \"playlist_id\": ObjectId(request.form.get(\"playlist._id\")),\n }\n print(comment)\n comment_id = db.comments.insert_one(comment).inserted_id\n return redirect(\n url_for(\"playlists.playlists_show\", playlist_id=request.form.get(\"playlist._id\"))\n )", "def add_comment(request):\n if request.method != 'POST':\n return HttpResponseRedirect(reverse('wainz.views.composite'))\n else:\n img_id = request.POST['id']\n try:\n img = Image.objects.get(pk=img_id)\n except:\n return HttpResponseRedirect(reverse('wainz.views.composite'))\n comment_text = request.POST['comment']\n #TODO sanitize input\n comment = ImageComment()\n comment.submission_date = timezone.now()\n comment.comment_text= comment_text\n comment.image_id = img_id\n comment.submitter_id = int(request.POST['uid'])\n comment.save()\n return rest.rest_success(request, img_id)", "def add_comment(request, model, object_id):\n obj = get_object_or_404(model, id=object_id)\n if request.method == \"POST\":\n form = CommentForm(request.POST)\n if form.is_valid():\n Comment.objects.create(\n content_type = ContentType.objects.get_for_model(model),\n object_id = object_id,\n author = request.user,\n added_at = datetime.datetime.now(),\n comment = form.cleaned_data['comment']\n )\n if request.is_ajax():\n return JsonResponse({'success': True})\n else:\n return HttpResponseRedirect(obj.get_absolute_url())\n elif request.is_ajax():\n return JsonResponse({'success': False, 'errors': form.errors})\n else:\n form = CommentForm()\n\n # Let the appropriate fallback view take care of display/redisplay\n if model is Question:\n return question_comments(request, obj, form=form)\n elif model is Answer:\n return answer_comments(request, object_id, answer=obj, form=form)", "def annonceDetailView(request, pk):\n details = Annonce.objects.get(pk=pk)\n details.view_annonce += 1\n details.save()\n\n # comment = Comment.objects.filter(\n # for_post=details,\n # reply=None).order_by('-create_content')\n # is_favorite = False\n # if details.favorite.filter(id=request.user.id).exists():\n # is_favorite = True\n # else:\n # print('favorite is now False')\n # if request.method == \"POST\":\n # print(request.POST)\n # c_form = commentForm(request.POST or None)\n # if c_form.is_valid():\n # content = request.POST.get('content')\n # reply_id = request.POST.get('comment-id')\n # comment_qs = None # reply is null\n # if reply_id:\n # comment_qs = Comment.objects.get(id=reply_id) \n # comment_use = Comment(\n # commented_by=request.user,\n # for_post=details,\n # content=content,\n # reply=comment_qs\n # )\n # comment_use.save()\n # else:\n # c_form = 
commentForm()\n # print(f'send {is_favorite}')\n context = {\n # 'is_favorite': is_favorite,\n 'details': details,\n \n # 'comment': comment,\n # 'commentform': c_form,\n }\n # if request.is_ajax():\n # print('Ajax is true')\n # html = render_to_string(\n # 'annonce/comment.html',\n # context, request=request\n # )\n # return JsonResponse({'form': html})\n return render(request, 'Annonce/detail.html', context)", "def post_comment_ajax(request, using=None):\n if not request.is_ajax():\n return HttpResponseBadRequest(\"Expecting Ajax call\")\n\n data = request.POST.copy()\n if request.user.is_authenticated():\n if not data.get('name', ''):\n data[\"name\"] = request.user.get_full_name() or request.user.get_username()\n if not data.get('email', ''):\n data[\"email\"] = request.user.email\n\n # Look up the object we're trying to comment about\n ctype = data.get(\"content_type\")\n object_pk = data.get(\"object_pk\")\n if ctype is None or object_pk is None:\n return CommentPostBadRequest(\"Missing content_type or object_pk field.\")\n try:\n model = apps.get_model(*ctype.split(\".\", 1))\n target = model._default_manager.using(using).get(pk=object_pk)\n except TypeError:\n return CommentPostBadRequest(\n \"Invalid content_type value: %r\" % escape(ctype))\n except AttributeError:\n return CommentPostBadRequest(\n \"The given content-type %r does not resolve to a valid model.\" % escape(ctype))\n except ObjectDoesNotExist:\n return CommentPostBadRequest(\n \"No object matching content-type %r and object PK %r exists.\" % (\n escape(ctype), escape(object_pk)))\n except (ValueError, ValidationError) as e:\n return CommentPostBadRequest(\n \"Attempting go get content-type %r and object PK %r exists raised %s\" % (\n escape(ctype), escape(object_pk), e.__class__.__name__))\n\n # Do we want to preview the comment?\n preview = \"preview\" in data\n\n # Construct the comment form\n form = django_comments.get_form()(target, data=data)\n\n # Check security information\n if form.security_errors():\n return CommentPostBadRequest(\n \"The comment form failed security verification: %s\" % escape(str(form.security_errors())))\n\n # If there are errors or if we requested a preview show the comment\n if preview:\n comment = form.get_comment_object() if not form.errors else None\n if comment is not None and request.user.is_authenticated():\n comment.user = request.user\n return _ajax_result(request, form, \"preview\", comment, object_id=object_pk)\n if form.errors:\n return _ajax_result(request, form, \"post\", object_id=object_pk)\n\n # Otherwise create the comment\n comment = form.get_comment_object()\n comment.ip_address = get_trusted_ip(request)\n if request.user.is_authenticated():\n comment.user = request.user\n\n # Signal that the comment is about to be saved\n responses = signals.comment_will_be_posted.send(\n sender=comment.__class__,\n comment=comment,\n request=request\n )\n\n for (receiver, response) in responses:\n if response is False:\n return CommentPostBadRequest(\n \"comment_will_be_posted receiver %r killed the comment\" % receiver.__name__)\n\n # Save the comment and signal that it was saved\n comment.save()\n signals.comment_was_posted.send(\n sender=comment.__class__,\n comment=comment,\n request=request\n )\n\n return _ajax_result(request, form, \"post\", comment, object_id=object_pk)", "def add_comment(request, pk, redirect_user):\n\n if request.is_ajax():\n create_comment_and_status_notification(request, pk)\n if redirect_user == 'profile':\n return redirect('profile')\n if redirect_user 
== 'news_feed':\n return redirect('news_feed')", "def _comment():\r\n id = request.args.get('answer_id')\r\n per_page=current_app.config['FLASKY_ANSWERS_PER_PAGE']\r\n answer = Answer.query.get_or_404(id)\r\n page = request.args.get('page', type=int, default=1)\r\n comment =request.args.get('comment')\r\n if current_user.can(Permission.COMMENT) and comment is not None:\r\n comment = Comment(body=comment,\r\n author=current_user._get_current_object(),\r\n answer_id=id)\r\n db.session.add(comment)\r\n db.session.commit()\r\n page = -1\r\n if page == -1:\r\n page = answer.comments.count() / per_page\r\n pagination = Comment.query.order_by(Comment.timestamp).filter_by(answer_id=id).paginate(\r\n page,per_page=per_page,error_out=False\r\n )\r\n macro_comment = get_template_attribute(\"_comments.html\", \"render_comments\")\r\n macro_page = get_template_attribute(\"_page.html\", \"render_page\")\r\n comments = pagination.items\r\n return jsonify({'result': True,\r\n 'comment_html': macro_comment(comments),\r\n 'page_html':macro_page(pagination),\r\n 'comments_timestamp':[comment.timestamp for comment in comments],\r\n 'comments_id':[comment.id for comment in comments]\r\n })", "def _add_comment():\r\n per_page = current_app.config['FLASKY_ANSWERS_PER_PAGE']\r\n id = request.args.get('answer_id')\r\n answer = Answer.query.get_or_404(id)\r\n comment =request.args.get('comment')\r\n answers = Answer.query.get_or_404(id)\r\n page = 1\r\n result= False\r\n if current_user.can(Permission.COMMENT):\r\n comment = Comment(body=comment,\r\n author=current_user._get_current_object(),\r\n answer_id=id)\r\n db.session.add(comment)\r\n db.session.commit()\r\n page = (answer.comments.count()-1)/per_page + 1\r\n result=True\r\n pagination = Comment.query.order_by(Comment.timestamp).filter_by(answer_id=id).paginate(\r\n page,per_page=per_page,error_out=False\r\n )\r\n macro_comment = get_template_attribute(\"_comments.html\", \"render_comments\")\r\n macro_page = get_template_attribute(\"_page.html\", \"render_page\")\r\n comments = pagination.items\r\n return jsonify({'result': result,\r\n 'comment_html': macro_comment(comments),\r\n 'page_html': macro_page(pagination),\r\n 'comments_timestamp': [comment.timestamp for comment in comments],\r\n 'comments_id': [comment.id for comment in comments]\r\n })", "def comment_add(request,comment=None, error='', message=''):\n categories = Category.objects.all()\n error_fields=[]\n default_comment = Comment(date=datetime.date.today(), time=datetime.datetime.now().time, source='', detail='', category=Category.objects.filter(description='Events')[0], user_email='')\n\n try:\n added = bool(request.POST['add'])\n except:\n added = False\n try:\n action = request.POST['action']\n except:\n action = 'add'\n\n if added == True:\n try:\n new_date = parse(request.POST['date'], dayfirst=True)\n new_time = parse(request.POST['time'])\n except:\n error += ' Datetime invalid or not specified.'\n\n try:\n new_detail = request.POST['detail']\n if new_detail == '':\n error += ' Comment text is blank.'\n except:\n error += ' No comment text provided.'\n\n try:\n new_source = request.POST['source']\n if new_source == '':\n error += ' Source is blank.'\n except:\n error += ' No comment source provided.'\n\n try:\n new_category = Category.objects.filter(pk=int(request.POST['category_id']))[0] #The [0] is OK since the fact that category_id is a primary key ensures that the array has only length 1.\n except:\n error += ' Category invalid or nonexistent.'\n\n try:\n new_user_email = 
request.POST['user_email']\n if new_user_email == '':\n error += ' You haven\\'t provided your e-mail address.'\n except:\n error += ' No user e-mail address provided.'\n\n if error == '':\n try:\n new_comment = Comment(date=new_date, time=new_time, source=new_source, detail=new_detail, category=new_category, user_email=new_user_email)\n new_comment.full_clean()\n try:\n new_comment.save()\n message += 'Your comment was added to the database.'\n except:\n error += 'Failed to access the database.'\n except ValidationError as ve:\n for k in ve.message_dict.keys():\n error_fields.append(k)\n for m in ve.message_dict[k]:\n error += m + ' '\n default_comment = new_comment\n\n if action == 'saveandaddanother' or action == 'add' or error != '':\n return render_to_response('feedback/comment_add.html',\n {'categories': categories,\n 'error': error,\n 'error_fields': error_fields,\n 'message': message,\n 'added': added,\n 'comment': default_comment},\n context_instance=RequestContext(request))\n elif action == 'save':\n return index(request, error=error, message=message)\n else:\n error += 'Invalid submit action requested.'\n return render_to_response('feedback/comment_add.html',\n {'categories': categories,\n 'error': error,\n 'error_fields': error_fields,\n 'added': added,\n 'message': message,\n 'comment': default_comment},\n context_instance=RequestContext(request))", "def add_comment(request, pk, pk2):\n template_var = base_template_vals(request)\n p = request.POST\n \n if p.has_key(\"content\") and p[\"content\"]:\n if request.user.is_authenticated():\n comment = Comment(event=Event.objects.get(id=pk))\n comment.user = UserProfile.objects.get(django_user=request.user)\n comment.content = p[\"content\"]\n comment.save()\n\n # Sys notification\n from_user = UserProfile.objects.get(django_user=pk2) # Who's event that is commented on\n to_user = Event.objects.get(id=pk).author\n event_id = pk\n sys_notification(to_user, \"add_comment\", from_user, event_id)\n return single(request, pk)", "def newcomment(id):\n\n if g.is_logged == False:\n flash (\"You need to be logged in\")\n return redirect(url_for('show_place', id=id))\n\n if request.method == 'POST':\n rating = request.form['rating']\n\n if rating.isdigit() == False:\n flash (\"Rating must be between a number between 0 and 5 (inclusive)\")\n elif int(rating) < 0 or int(rating) > 5:\n flash (\"Rating must be between 0 and 5 (inclusive)\")\n else:\n db = get_db()\n db.execute('''insert into reviews (rating, title, message, user_id, place_id) values (?, ?, ?,?,?)''', [rating, request.form['title'], request.form['content'], g.user_id, id])\n db.commit()\n\n flash('Your comment was successfully added')\n return redirect(url_for('show_place', id=id))", "def free_comment(request, content_type=None, object_id=None, edit_id=None, parent_id=None, add_messages=False, ajax=False, model=FreeThreadedComment, form_class=FreeThreadedCommentForm, context_processors=[], extra_context={}):\r\n if not edit_id and not (content_type and object_id):\r\n raise Http404 # Must specify either content_type and object_id or edit_id\r\n if \"preview\" in request.POST:\r\n return _preview(request, context_processors, extra_context, form_class=form_class)\r\n if edit_id:\r\n instance = get_object_or_404(model, id=edit_id)\r\n else:\r\n instance = None\r\n _adjust_max_comment_length(form_class)\r\n form = form_class(request.POST, instance=instance)\r\n if form.is_valid():\r\n new_comment = form.save(commit=False)\r\n if not edit_id:\r\n new_comment.ip_address = 
request.META.get('REMOTE_ADDR', None)\r\n new_comment.content_type = get_object_or_404(ContentType, id = int(content_type))\r\n new_comment.object_id = int(object_id)\r\n if model == ThreadedComment:\r\n new_comment.user = request.user\r\n if parent_id:\r\n new_comment.parent = get_object_or_404(model, id = int(parent_id))\r\n new_comment.save()\r\n if model == ThreadedComment:\r\n if add_messages:\r\n request.user.message_set.create(message=\"Your message has been posted successfully.\")\r\n else:\r\n request.session['successful_data'] = {\r\n 'name' : form.cleaned_data['name'],\r\n 'website' : form.cleaned_data['website'],\r\n 'email' : form.cleaned_data['email'],\r\n }\r\n if ajax == 'json':\r\n return JSONResponse([new_comment,])\r\n elif ajax == 'xml':\r\n return XMLResponse([new_comment,])\r\n else:\r\n return HttpResponseRedirect(_get_next(request))\r\n elif ajax==\"json\":\r\n return JSONResponse({'errors' : form.errors}, is_iterable=False)\r\n elif ajax==\"xml\":\r\n template_str = \"\"\"\r\n<errorlist>\r\n {% for error,name in errors %}\r\n <field name=\"{{ name }}\">\r\n {% for suberror in error %}<error>{{ suberror }}</error>{% endfor %}\r\n </field>\r\n {% endfor %}\r\n</errorlist>\r\n \"\"\"\r\n response_str = Template(template_str).render(Context({'errors' : zip(form.errors.values(), form.errors.keys())}))\r\n return XMLResponse(response_str, is_iterable=False)\r\n else:\r\n return _preview(request, context_processors, extra_context, form_class=form_class)", "def ajax_form_valid(self, form):\n\n try:\n comment = form.save(self.request)\n num_comments = models.CommentModel.objects.permitted().count()\n\n return core_utils.respond_with_json({\n 'success': True,\n 'comment': render_to_string(\n self.template_name,\n RequestContext(self.request, {'comment': comment})\n ),\n 'num_comments': num_comments\n })\n except exceptions.RapidCommentingError as e:\n return core_utils.respond_with_json({\n 'success': False,\n 'reason': e.value\n })", "def create_comment(request):\n\n # get data\n in_data = getRequestData(request)\n\n # get the Thread associated with the comments\n mythread = Thread.objects.get(id=in_data.get('mythreadid'))\n\n # save in database\n try:\n comment = Comment(pub_date = datetime.datetime.now(pytz.timezone('US/Eastern')), username = in_data.get('myusername'), text = in_data.get('mytext'), score = 0, thread = mythread )\n comment.save()\n except:\n return HttpResponseBadRequest('Error saving to database!')\n\n return JsonResponse(in_data)", "def comments():\n return render_template(\"/scene/comments/comments.html\")", "def post_comment(id):\n \n form = CommentForm()\n title = 'post comment'\n post = Post.query.filter_by(id=id).first()\n\n if post is None:\n\n abort(404)\n\n if form.validate_on_submit():\n comment = form.comment.data\n new_comment = Comments(opinion = comment, user_id = current_user.id, posts_id = post.id)\n new_comment.save_comment()\n return redirect(url_for('main.view_post', id = post.id))\n\n return render_template('comments.html', form = form, title = title)", "def update_comment(request, course_id, comment_id):\r\n comment = cc.Comment.find(comment_id)\r\n if 'body' not in request.POST or not request.POST['body'].strip():\r\n return JsonError(_(\"Body can't be empty\"))\r\n comment.body = request.POST[\"body\"]\r\n comment.save()\r\n if request.is_ajax():\r\n return ajax_content_response(request, SlashSeparatedCourseKey.from_deprecated_string(course_id), comment.to_dict())\r\n else:\r\n return 
JsonResponse(utils.safe_content(comment.to_dict()))", "def edit_comment():\n # Implement me!\n\n logger.info(\"vars: %r\" % request.vars)\n logger.info(\"vars_comment_text: %r\" % request.vars.comment_text)\n logger.info(\"vars id: %r\" % request.vars.comment_id)\n logger.info(\"comment_text: %r\" % db(db.Comments.id == request.vars.comment_id))\n\n #comment.comment_text = request.vars.comment_text\n #comment.edited_on = datetime.datetime.utcnow()\n db(db.Comments.id == request.vars.comment_id).update(comment_text=request.vars.comment_text, edited_on=datetime.datetime.utcnow())\n db.commit()\n logger.info(\"comment_text: %r\" % db(db.Comments.id == request.vars.comment_id))\n return \"ok\"", "def add_comment_view(request, e_pk):\n\n element = get_object_or_404(Element, pk=e_pk)\n group = element.tab.group\n\n if request.user not in group.users.all():\n return redirect(reverse('my_groups_view'))\n\n if request.method == 'POST':\n form = CreateCommentForm(request.POST)\n if form.is_valid():\n comment = Comment(\n text=form.cleaned_data['text'],\n creator=request.user,\n element=element,\n )\n comment.save()\n else:\n return HttpResponseBadRequest()\n\n return redirect(reverse('element_view', args=(e_pk,)))", "def load_comments(request):\n # TODO: Add the ability to return comment tree in JSON format.\n # First we get the root of the comment tree being requested\n try:\n tree_root, parent_object = _get_or_create_tree_root(request)\n except InvalidCommentException as e:\n return JsonResponse({\n 'ok': False,\n 'error_message': str(e),\n })\n\n # Check if the user doesn't pass the appropriate permission check (on the parent_object)...\n if not user_has_permission(request, parent_object, 'can_view_comments'):\n return JsonResponse({\n 'ok': False,\n 'error_message': \"You do not have permission to view comments for this object.\",\n })\n\n # Once we have our desired nodes, we tack on all of the select/prefetch related stuff\n nodes = tree_root.get_family().select_related('deleted_user_info', 'created_by', 'parent', 'content_type')\\\n .prefetch_related(Prefetch('versions', queryset=CommentVersion.objects.order_by('-date_posted')\\\n .select_related('posting_user', 'deleted_user_info')))\n\n # The 'X_KWARGS' header is populated by settings.kwarg in comments.js\n kwargs = json.loads(request.META.get('HTTP_X_KWARGS', {}))\n kwargs.update({\n 'nodes': nodes,\n 'parent_object': parent_object,\n 'max_depth': tree_root.max_depth\n })\n\n comments_template = get_attr_val(request, parent_object, 'comments_template', 'comments/comments.html', **kwargs)\n\n # In the parent_object, sites can define a function called 'filter_nodes' if they wish to apply any additional filtering to the nodes queryset before it's rendered to the template.\n # Default value is the nodes tree with the deleted comments filtered out.\n nodes = get_attr_val(request, parent_object, \"filter_nodes\", default=nodes.filter(deleted=False), **kwargs)\n kwargs.update({\"nodes\": nodes, 'request': request})\n\n # Checks/assigns permissions to each node (so the template doesn't have to)\n _process_node_permissions(**kwargs)\n\n return JsonResponse({\n 'ok': True,\n 'html_content': loader.render_to_string(comments_template, context=kwargs, request=request),\n 'number_of_comments': tree_root.get_descendant_count()\n })", "def post(self):\n comment_id = self.request.get('comment_id')\n post_id = self.request.get('post_id')\n comment = Comment.get_by_id(int(comment_id), parent=comment_key())\n post = Post.get_by_id(int(post_id), 
parent=blog_key())\n if comment and self.user.key().id() == comment.user.key().id():\n comment.content = self.request.get('content')\n\n have_errors = False\n\n if not comment.content:\n error_content = \"Content is required\"\n have_errors = True\n\n if have_errors:\n self.render(\"edit_comment.html\",\n comment=comment,\n error_content=error_content,\n user=self.user)\n else:\n comment.put()\n time.sleep(0.1)\n\n self.redirect('/blog/%s' % str(post.key().id()))", "def add_comment_to_announcement():\n vars = request.vars\n logger.info(\"vars.comment_text: %r\" % (vars.comment_text))\n comment_id = db.Comments.insert(\n comment_text = vars.comment_text,\n score = 1,\n ann_id= vars.ann_id,\n )\n comment = db.Announcements(comment_id)\n\n logger.info(\"api:add_comment_to_announcement ==> comment= %r\" % (comment))\n\n return response.json(comment)", "def createcomment(request, pk):\n issue = get_object_or_404(Issue, pk=pk)\n if request.method == \"POST\":\n form = CommentCreationForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.issue = issue\n comment.author = request.user\n comment.created_at = timezone.now()\n comment.save()\n return redirect('office:issue', pk=pk)\n else:\n form = CommentForm()\n return render(request, 'blog/add_comment_to_post.html', {'form': form})", "def comment(postid):\n context = {}\n if \"username\" not in flask.session:\n raise InvalidUsage('Forbidden', status_code=403)\n\n connection = insta485.model.get_db()\n cursor = connection.execute(\n \"SELECT * FROM comments WHERE postid=:id\", {'id': postid})\n comments = cursor.fetchall()\n ''' \n if bool(comments) is False:\n raise InvalidUsage('Not Found', status_code=404)\n '''\n # User\n logname = flask.session[\"username\"]\n\n if flask.request.method == 'POST':\n data = flask.request.get_json(force=True)\n context['text'] = data['text']\n context['owner'] = logname\n context['owner_show_url'] = '/u/' + logname + '/'\n connection.execute('INSERT INTO comments (owner, postid, text) \\\n VALUES (?,?,?)', (logname, postid, data['text']))\n cursor = connection.execute('SELECT last_insert_rowid() AS id')\n commentid_dic = cursor.fetchone()\n context['commentid'] = commentid_dic['id']\n context['postid'] = postid\n return flask.jsonify(**context), 201\n\n # url\n context[\"url\"] = flask.request.path\n context['comments'] = []\n\n for i in comments:\n one_comment = {}\n one_comment['commentid'] = i['commentid']\n one_comment['owner'] = i['owner']\n one_comment['owner_show_url'] = '/u/' + i['owner'] + '/'\n one_comment['postid'] = postid\n one_comment['text'] = i['text']\n context['comments'].append(one_comment)\n\n return flask.jsonify(**context)", "def about(request):\n data = {}\n if request.method == \"POST\":\n about = AboutForm(request.POST)\n if about.is_valid():\n about.save()\n data['success']=1\n data['message']='Comment successfully added'\n return JsonResponse(data)\n else:\n data['success']=0\n data['message']='Error while adding comment'\n return JsonResponse(data)\n \n else:\n about = AboutForm()\n return render(request, 'login/addcomment.html',\n {'form':about})", "def handle_comments(self):\r\n comments = Comment.objects.all()\r\n for c in comments:\r\n new = ThreadedComment(\r\n content_type = c.content_type,\r\n object_id = c.object_id,\r\n comment = c.comment,\r\n user = c.user,\r\n date_submitted = c.submit_date,\r\n date_modified = c.submit_date,\r\n date_approved = c.submit_date,\r\n is_public = c.is_public,\r\n ip_address = c.ip_address,\r\n is_approved = not 
c.is_removed\r\n )\r\n new.save()", "def comment(request, object_id):\n send_charob = request.user.char_ob\n rec_charob = get_character_from_ob(object_id)\n comment_txt = request.POST[\"comment\"]\n roster.create_comment(send_charob, rec_charob, comment_txt)\n return HttpResponseRedirect(reverse(\"character:sheet\", args=(object_id,)))", "def save_comment(self):\n self.save()", "def post(self, request, pk):\n\n post = Blog.objects.get(pk=int(pk))\n user_id = self.request.session.get('USER_ID')\n\n try:\n user = User.objects.get(pk=user_id)\n except:\n pass\n body = self.request.POST.get('body')\n\n if user_id is None:\n messages.add_message(request, messages.ERROR, \"Please login to add comments.\")\n return HttpResponseRedirect(self)\n\n comments = Comment.objects.create(post=post, author=user, body=body)\n\n d = model_to_dict(post)\n messages.add_message(request, messages.SUCCESS, \"Comment added successfully.\")\n return self.render_to_response(d)", "def save_comment(data):\n data['comment_id'] = len(commentslist) + 1\n data['message'] =\"message\"\n data['author'] = \"author\"\n data['date_created'] = datetime.datetime.now()\n # save to list\n commentslist.append(data)", "def add_comment_to_post(request, pk):\n post = get_object_or_404(Post, pk=pk)\n if request.method == 'POST':\n form = CommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.post = post\n comment.save()\n return redirect('post_detail', pk=post.pk)\n else:\n form = CommentForm()\n return render(request, 'blog/comment_form.html', {'form': form})", "def questions_collection(request):\n if request.method == \"POST\":\n data = json.loads(request.body)\n task = Task.objects.get(id=data.get(\"taskId\", \"\"))\n commenter = User.objects.get(username=data.get(\"commenter\", \"\"))\n content = data.get(\"content\", \"\")\n\n question = Question(\n task=task,\n commenter=commenter,\n content=content\n )\n question.save()\n return JsonResponse({\"message\": \"Question created successfully\"}, status=201)", "def comments(self, request, pk=None):\n post = self.get_object()\n comments = Comment.objects.filter(post=post).order_by('created_at')\n serializer = PostCommentsSerializer(comments, many=True)\n return Response(serializer.data, status.HTTP_200_OK)", "def add_mvcomment(request, pk):\n\n # Getting the movie object hosting the comment\n movie = get_object_or_404(Movie, pk=pk)\n\n # if POST\n if request.method == \"POST\":\n form = MovieCommentForm(request.POST)\n\n # set the author and redirect if valid form\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.movie = movie\n comment.save()\n return redirect('../', pk=movie.pk)\n else:\n form = MovieCommentForm()\n\n # render the form through the template\n return render(request, {'form': form})", "def post_comment(request, next=None, using=None):\n # Fill out some initial data fields from an authenticated user, if present\n data = request.POST.copy()\n if request.user.is_authenticated():\n if not data.get('name', ''):\n data[\"name\"] = request.user.get_full_name() or request.user.username\n if not data.get('email', ''):\n data[\"email\"] = request.user.email\n\n # Check to see if the POST data overrides the view's next argument.\n next = data.get(\"next\", next)\n\n # Look up the object we're trying to comment about\n ctype = data.get(\"content_type\")\n object_pk = data.get(\"object_pk\")\n model = models.get_model(*ctype.split(\".\", 1))\n target = 
model._default_manager.using(using).get(pk=object_pk)\n\n\n # Construct the comment form\n form = comments.get_form()(target, data=data)\n\n # Check security information\n if form.security_errors():\n return None\n # Create the comment\n comment = form.get_comment_object()\n comment.ip_address = request.META.get(\"REMOTE_ADDR\", None)\n if request.user.is_authenticated():\n comment.user = request.user\n\n # Signal that the comment is about to be saved\n responses = signals.comment_will_be_posted.send(\n sender = comment.__class__,\n comment = comment,\n request = request\n )\n\n # Save the comment and signal that it was saved\n comment.save()\n message = get_object_or_404(Message, pk = object_pk)\n message.envoyer_commentaire_notification(comment.pk, request.user.username)\n \n signals.comment_was_posted.send(\n sender = comment.__class__,\n comment = comment,\n request = request\n )\n\n comment_list = [comment]\n return render_to_response('comments/list.html', {'comment_list': comment_list},context_instance=RequestContext(request))", "def add_new_comment(request):\n token = request.data.get('token')\n text = request.data.get('text', '')\n post_id = request.data.get('post_id', '')\n permission = request.data.get('permission')\n\n if Token.objects.filter(key=token).exists():\n if len(text) < 10:\n return Response({\"error\": 24})\n if len(text) > 1000:\n return Response({\"error\": 25})\n\n if type(post_id) is int:\n if Post.objects.filter(pk=post_id).exists():\n token = get_object_or_404(Token, key=token)\n post = get_object_or_404(Post, pk=post_id)\n comment = Comment.objects.create(post=post,\n author_id=token.user_id,\n text=text,\n permission=permission)\n serializer = PostSerializer(post, context={'user_id': token.user_id})\n UserFeed.objects.create(user=post.author,\n action_user=token.user,\n post_comment=comment,\n action=\"PostComment\")\n\n printable = set(string.printable)\n msg = filter(lambda x: x in printable, comment.text) \n message = \"{} commented: {}\".format(token.user.username, msg)\n\n custom = {\n \"post_id\": post.id,\n \"avatar\": UserProfile.objects.get(user=token.user).avatar.url\n }\n\n if post.author != token.user:\n user_notification = UserNotification.objects.get(user=post.author)\n send_notification(custom, message, user_notification)\n\n # check @ for users \n for item in text.split(' '):\n if item and item[0] == '@':\n username = item[1:].lower()\n user = User.objects.filter(username__iexact=username).first()\n if not user or user == token.user:\n continue\n UserFeed.objects.create(user=user,\n action_user=token.user,\n post_comment=comment,\n action=\"PostCommentComment\")\n msg = filter(lambda x: x in printable, comment.text) \n message = \"{} commented: {}\".format(token.user.username, msg)\n user_notification = UserNotification.objects.get(user=user)\n send_notification(custom, message, user_notification)\n\n return Response({\"success\": 26,\n \"post\": serializer.data})\n else:\n return Response({\"error\": 27})\n else:\n return Response({\"error\": 17})", "def post(self):\n modified_content = self.request.get('comment_edit')\n comment_id = self.request.get('comment_id')\n comment = Comments.get_by_id(int(comment_id))\n user = self.get_active_user()\n\n if user.key().id() == comment.submitter_id:\n comment.content = modified_content\n comment.put()\n self.redirect('/%s' % str(comment.post_id))\n else:\n self.error(403)", "def add_comment(request, entry_pk):\n\n blog = get_object_or_404(BlogEntry, pk=entry_pk)\n\n if not 
request.user.is_authenticated():\n raise PermissionDenied\n\n form = BlogCommentForm(creator=request.user, blog=blog, data=request.POST)\n\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(blog.get_absolute_url())\n\n return single(request, entry_pk=entry_pk, comment_form=form)", "def comment_added(self, event):\n pass", "def edit(request, comment_id, next=None):\n comment = get_object_or_404(\n get_model(), pk=comment_id, site__pk=settings.SITE_ID\n )\n \n # Make sure user has correct permissions to change the comment,\n # or return a 401 Unauthorized error.\n if not (request.user == comment.user and request.user.has_perm(\"comments.change_comment\")\n or request.user.has_perm(\"comments.can_moderate\")):\n return HttpResponse(\"Unauthorized\", status=401)\n \n # Populate POST data with all required initial data\n # unless they are already in POST\n data = request.POST.copy()\n if not data.get(\"user_name\", \"\"):\n data[\"user_name\"] = request.user.get_full_name() or request.user.username\n if not data.get(\"user_email\"):\n data[\"user_email\"] = request.user.email\n \n next = data.get(\"next\", next)\n CommentEditForm = comments_extension.get_edit_form()\n form = CommentEditForm(data, instance=comment)\n\n if form.security_errors():\n # NOTE: security hash fails!\n return CommentEditBadRequest(\n \"The comment form failed security verification: %s\" % \\\n escape(str(form.security_errors())))\n\n # If there are errors, or if a preview is requested\n if form.errors or \"preview\" in data:\n app_label, model = (form.instance.content_type.app_label, form.instance.content_type.model)\n template_search_list = [\n \"comments/%s/%s/edit-preview.html\" % (app_label, model),\n \"comments/%s/edit-preview.html\" % model,\n \"comments/edit-preview.html\"\n ]\n return render_to_response(\n template_search_list, {\n \"comment_obj\": comment,\n \"comment\": form.data.get(\"comment\", \"\"),\n \"form\": form,\n \"next\": next,\n },\n RequestContext(request, {})\n )\n \n # Otherwise, try to save the comment and emit signals\n if form.is_valid():\n MODERATOR_EDITED = \"moderator edited\"\n flag, created = CommentFlag.objects.get_or_create(\n comment = form.instance,\n user = request.user,\n flag = MODERATOR_EDITED\n )\n \n form.instance.is_removed = False\n form.save()\n\n comment_was_flagged.send(\n sender = comment.__class__,\n comment = comment,\n flag = flag,\n created = created,\n request = request\n )\n \n return utils.next_redirect(\n request, fallback=next or 'comments-comment-done', c=comment._get_pk_val()\n )\n \n else:\n # If we got here, raise Bad Request error.\n return CommentEditBadRequest(\"Could not complete request!\")", "def comment_modal(request, comment_id):\n comment = Comment.objects.get(id=comment_id)\n tags = Tag.objects.all()\n return render_to_response('feedback/tagmodal.html', {\n 'object': comment, 'type': 'comment', 'tags': tags,\n }, context_instance=RequestContext(request))", "def _create_comment(request, course_key, thread_id=None, parent_id=None):\r\n assert isinstance(course_key, CourseKey)\r\n post = request.POST\r\n\r\n if 'body' not in post or not post['body'].strip():\r\n return JsonError(_(\"Body can't be empty\"))\r\n\r\n course = get_course_with_access(request.user, 'load', course_key)\r\n if course.allow_anonymous:\r\n anonymous = post.get('anonymous', 'false').lower() == 'true'\r\n else:\r\n anonymous = False\r\n\r\n if course.allow_anonymous_to_peers:\r\n anonymous_to_peers = post.get('anonymous_to_peers', 'false').lower() == 'true'\r\n 
else:\r\n anonymous_to_peers = False\r\n\r\n comment = cc.Comment(\r\n anonymous=anonymous,\r\n anonymous_to_peers=anonymous_to_peers,\r\n user_id=request.user.id,\r\n course_id=course_key.to_deprecated_string(),\r\n thread_id=thread_id,\r\n parent_id=parent_id,\r\n body=post[\"body\"]\r\n )\r\n comment.save()\r\n if post.get('auto_subscribe', 'false').lower() == 'true':\r\n user = cc.User.from_django_user(request.user)\r\n user.follow(comment.thread)\r\n if request.is_ajax():\r\n return ajax_content_response(request, course_key, comment.to_dict())\r\n else:\r\n return JsonResponse(utils.safe_content(comment.to_dict()))", "def add_comment(request, listing_id):\n if request.method == \"POST\":\n try:\n listing = Listing.objects.get(pk=listing_id)\n except Listing.DoesNotExist:\n return render(request, \"auctions/errors.html\", {\"error_message\":\n \"something went wrong, the id url argument is not valid\"})\n\n CommentForm = modelform_factory(Comment, exclude=(\"commenter\",\"listing\"))\n # validate and save from the formdata to the database\n form = CommentForm(request.POST)\n try:\n comment = form.save(commit=False)\n comment.commenter = request.user\n comment.listing = listing\n comment.save()\n except:\n # if something went wrong with comment form \n return render(request, \"auctions/errors.html\", {\"error_message\":\n \"something went wrong with the submission of your comment, try again\"})\n\n return redirect(reverse(\"single_listing\", \n args=[listing.title]) +f\"?id={listing.id}\")", "def post_comment(self):\n self.post_question()\n return self.client.post(\"api/v2/1/comments\", headers={\"Authorization\": \"{}\".format(self.token())}, data=json.dumps(self.comment), content_type='application/json')", "def save_comment(data):\n data['comment_id'] = len(commentslist) + 1\n data['date_created'] = datetime.datetime.now()\n # save to list\n commentslist.append(data)", "def add_component_comment(request):\n try:\n person = get_person(request)\n\n print(\"Add components comment. Start.\")\n print(\"Add components comment. Request: \" + str(request))\n\n if request.method == \"POST\":\n # get values from form\n assessment_id = request.POST['assessment_id']\n element_id = None\n try:\n component_id = request.POST['component_id']\n except:\n component_id = None\n try:\n subcomponent_id = request.POST['subcomponent_id']\n except:\n subcomponent_id = None\n try:\n third_component_id = request.POST['third_component_id']\n except:\n third_component_id = None\n try:\n fourth_component_id = request.POST['fourth_component_id']\n except:\n fourth_component_id = None\n try:\n fifth_component_id = request.POST['fifth_component_id']\n except:\n fifth_component_id = None\n comment = request.POST['textComments']\n\n if fifth_component_id:\n element_id = fifth_component_id\n else:\n if fourth_component_id:\n element_id = fourth_component_id\n else:\n if fifth_component_id:\n element_id = fifth_component_id\n else:\n if third_component_id:\n element_id = third_component_id\n else:\n if subcomponent_id:\n element_id = subcomponent_id\n else:\n if component_id:\n element_id = component_id\n\n # get section and assessment\n component = Component.objects.get(id=element_id)\n\n print(\"Add components comment. Component: \" + component.name)\n print(\"Add components comment. 
comment: \" + comment)\n\n # create comment\n my_comment = AssessmentComponentComment()\n my_comment.assessment = Assessment.objects.get(id=assessment_id)\n my_comment.element = component\n my_comment.comment = comment\n my_comment.person = person\n my_comment.save()\n\n # trace action\n trace_action(TRACE_COMMENT, person, \"User added comment in component: \" + component.name)\n\n # send mail\n try:\n send_mail = request.POST['send_mail']\n t = Thread(target=send_comments_email, args=(my_comment.comment, \"Element: \" + component.name, person))\n t.start()\n # send_comments_email(my_comment.comment, section, person)\n except:\n # checkbox not set\n pass\n\n # redirect to section page\n url_to_redirect = \"/component_2/\" + assessment_id + SLASH\n\n if component_id:\n url_to_redirect += component_id + SLASH\n if subcomponent_id:\n url_to_redirect += subcomponent_id + SLASH\n if third_component_id:\n url_to_redirect += third_component_id + SLASH\n if fourth_component_id:\n url_to_redirect += fourth_component_id + SLASH\n if fifth_component_id:\n url_to_redirect += fifth_component_id + SLASH\n\n return redirect(url_to_redirect, context_instance=RequestContext(request))\n else:\n raise Exception(\"GET call to add new section comment\")\n except:\n if debug_is_on():\n raise\n else:\n return render_to_response(TEMPLATE_ERROR, {\"error_description\": sys.exc_info(), \"crpt_url\": CRPT_URL},\n context_instance=RequestContext(request))", "def problem_comments_append(self, identifier, comment, html=None):\n params = {\"text\": comment}\n if html is not None:\n params[\"html\"] = html\n \n self._post(\"problems/%d/comments\" % identifier, json=params)", "def add_accomment(request, pk):\n\n actor = get_object_or_404(Actor, pk=pk)\n if request.method == \"POST\":\n form = ActorCommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.actor = actor\n comment.save()\n return redirect('../', pk=actor.pk)\n else:\n form = ActorCommentForm()\n return render(request, {'form': form})", "def comment_added(self, event):\n import simplejson\n comment = str(event[\"comment\"])\n author_name = str(event[\"author\"][\"name\"])\n change_url = str(event[\"change\"][\"url\"])\n change_subject = str(event[\"change\"][\"subject\"])\n comment = simplejson.dumps({\n \"issue\": {\n \"notes\": self._prepare_comment_added_template(event)\n }\n })\n # get a unique list of issue IDs\n subject_issue_ids = self.__get_issue_ids(change_subject)\n comment_issue_ids = self.__get_issue_ids(comment)\n issue_ids = list(set(subject_issue_ids + comment_issue_ids))\n for issue_id in issue_ids:\n self.__add_comment(issue_id, comment)", "def post_detail(request, pk):\n post = get_object_or_404(Post, pk=pk)\n comments = post.comments.all\n post.views += 1\n post.save()\n \n if request.method == \"POST\":\n comment_form = CommentForm(request.POST)\n if comment_form.is_valid() and request.user:\n new_comment = comment_form.save(commit=False)\n new_comment.post = post\n new_comment.author = request.user\n new_comment.save()\n elif request.user:\n comment_form = CommentForm()\n \n return render(request, \"details.html\", {\"post\": post, \"comments\": comments, \"comment_form\": comment_form})", "def add_comment() -> str:\n if \"markdown\" in request.form:\n if \"file\" in request.form:\n comment = Comment(\n markdown=request.form[\"markdown\"],\n submission_id=Submission.query.filter(\n Submission.filepath.contains(request.form[\"file\"])\n )\n .first()\n .id,\n 
cell_id=request.form[\"cell_id\"] if \"cell_id\" in request.form else None,\n user=UserModel.get_by_token(session[\"token\"]),\n )\n # If not cell_id this is a general comment\n comment.save()\n else:\n return \"Missing file or cell_id\", 400\n else:\n return \"Missing markdown\", 400\n\n comment_maker = get_template_attribute(\"_macros.html\", \"comment_block\")\n return comment_maker(comment)", "def customer_edit_view(h):\n global html\n html = h\n \n user_id = html.var(\"customer_id\"); #get the user id from the url query\n\n common_elements = customer_common_elements()\n \n css_list = common_elements[\"css_list\"]\n\n javascript_list = common_elements[\"javascript_list\"]\n\n all_btn = common_elements[\"all_btn\"]\n\n html.new_header(\"Edit Customers\", \"customer_management.py\", all_btn, css_list, javascript_list)\n customer_string = \"\"\"\n <div>\n <form action=\"customer_put_ajax.py\" method=\"get\" id=\"add_customer_form\" name=\"add_customer_form\" autocomplete=\"on\" >\n <div class=\"form-div\">\n <table class=\"tt-table\" cellspacing=\"0\" cellpadding=\"0\" width=\"100%\">\n <tr>\n <th class=\"cell-title\">Edit Customer</th>\n </tr>\n </table>\n <div class=\"form-body\">\n <div class=\"row-elem\">\n <input type=\"hidden\" id=\"user_id\" name=\"user_id\" disabled='disabled' />\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"username\">Customer User Name</label>\n <input type=\"text\" id=\"user_name\" name=\"user_name\" disabled='disabled'\n title=\"Choose Unique User Name. <br/>Must be at least 5 characters.\" />\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"password\">Password</label>\n <input type=\"password\" id=\"password\" name=\"password\" title=\"Must be at least 8 characters. \"/>\n </div>\n \"\"\"\n customer_string += \"\"\"\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"groups\">Select Group</label>\n \"\"\"\n customer_string += (customer_group_customer_widget())\n customer_string += \"\"\"\n </div>\n \"\"\"\n customer_string += \"\"\"\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"first_name\">First Name</label>\n <input type=\"text\" id=\"first_name\" name=\"first_name\" title=\"Please Enter First name.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"last_name\">Last Name</label>\n <input type=\"text\" id=\"last_name\" name=\"last_name\" title=\"Please Enter Last name.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"company_name\">Company</label>\n <input type=\"text\" id=\"company_name\" name=\"company_name\" title=\"Please Enter Company Name.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"designation\">Designation</label>\n <input type=\"text\" id=\"designation\" name=\"designation\" title=\"Please Enter Designation.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"mobile_no\">Mobile Number</label>\n <input type=\"text\" id=\"mobile_no\" name=\"mobile_no\" \n title=\"Please Enter Mobile Number<br/> Don't include +91 or 0.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"telephone_no\">Telephone Number</label>\n <input type=\"text\" id=\"telephone_no\" name=\"telephone_no\" \n title=\"Please Enter Mobile Number<br/> Don't include +91 or 0.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"fax\">Fax</label>\n <input type=\"text\" id=\"fax\" name=\"fax\" \n title=\"Please Enter Mobile Number<br/> Don't include +91 
or 0.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"email_id\">E-Mail ID</label>\n <input type=\"text\" id=\"email_id\" name=\"email_id\" title=\"Please Enter E-Mail ID.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"city_id\">City</label>\n <input type=\"text\" id=\"city_id\" name=\"city_id\" title=\"Please Enter City Name.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"state_id\">State</label>\n <input type=\"text\" id=\"state_id\" name=\"state_id\" title=\"Please Enter State.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"country_id\">Country</label>\n <input type=\"text\" id=\"country_id\" name=\"country_id\" title=\"Please Enter Country.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"usage\">Usage</label>\n <select id='usage' name='usage'>\n <option value=0>Personal</option>\n <option value=1>Commercial</option>\n </select>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"purpose\">Purpose</label>\n <input type=\"text\" id=\"purpose\" name=\"purpose\" title=\"Please Enter Purpose.\"/>\n </div>\n <div class=\"row-elem\">\n <label class=\"lbl lbl-big\" for=\"address\">Address</label>\n <textarea id=\"address\" name=\"address\" title=\"Please Enter own Address.\"></textarea>\n </div>\n </div>\n </div>\n <div class=\"form-div-footer\">\n <button type=\"submit\" class=\"yo-small yo-button\"><span class=\"add\">Save</span></button>\n <button type=\"reset\" class=\"yo-small yo-button\" id=\"close_add_user\"><span class=\"cancel\">Cancel</span></button>\n </div>\n </form>\n </div>\n \"\"\" \n customer_string += \"\"\"\n <script>\n put_customer_values(\"%s\");\n post_customers(action=\"put\", user_id = \"%s\");\n </script>\n \"\"\" %(user_id,user_id)\n html.write(customer_string)\n html.new_footer()", "def mai_ajax_edit_news(request, id1, template_name='generic/form.html', form_class=NewsForm):\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n dictionary = form.cleaned_data\n dictionary.update({'news_id': id1})\n prep_data(('admin_clm/news/edit/', dictionary), request.session)\n\n return messages_ajax.success(_('News entry edited.'))\n\n else:\n rest_data = prep_data(('admin_clm/news/get_by_id/', {'news_id': id1}), request.session)\n\n rest_data['sticky'] = rest_data['sticky'] != 0\n form = form_class(rest_data)\n\n return messages_ajax.success(render_to_string(template_name, {'form': form,\n 'text': _('Edit news data:'),\n 'confirmation': _('Save'),\n 'id': id1},\n context_instance=RequestContext(request)),\n status=1)", "def _preview(request, context_processors, extra_context, form_class=ThreadedCommentForm):\r\n _adjust_max_comment_length(form_class)\r\n form = form_class(request.POST or None)\r\n context = {\r\n 'next' : _get_next(request),\r\n 'form' : form,\r\n }\r\n if form.is_valid():\r\n new_comment = form.save(commit=False)\r\n context['comment'] = new_comment\r\n else:\r\n context['comment'] = None\r\n return render_to_response(\r\n 'threadedcomments/preview_comment.html',\r\n extra_context, \r\n context_instance = RequestContext(request, context, context_processors)\r\n )", "def question_comments(request, question, form=None):\n populate_foreign_key_caches(User, (\n ((question,), ('author', 'last_edited_by', 'closed_by')),\n ),\n fields=('username', 'gravatar', 'reputation', 'gold', 'silver',\n 'bronze'))\n\n content_type = 
ContentType.objects.get_for_model(Question)\n comments = Comment.objects.filter(content_type=content_type,\n object_id=question.id)\n\n if form is None:\n form = CommentForm()\n\n return render_to_response('question.html', {\n 'title': u'Comments on %s' % question.title,\n 'question': question,\n 'tags': question.tags.all(),\n 'comments': comments,\n 'comment_form': form,\n }, context_instance=RequestContext(request))", "def comments(self, request, pk=None):\n\n if request.method == 'GET':\n user_wall_post = self.get_object()\n post_comments = UserWallPostComment.objects.filter(user_wall_post=user_wall_post)\n post_comment_serializer = UserWallPostCommentSerializer(post_comments, many=True)\n return Response(post_comment_serializer.data)\n\n user_wall_post = self.get_object()\n post_comment_serializer = UserWallPostCommentSerializer(data=request.data)\n post_comment_serializer.is_valid(raise_exception=True)\n post_comment_serializer.save(comment_by=self.request.user, user_wall_post=user_wall_post)\n\n to_user = user_wall_post.owner\n from_user = request.user\n UserNotification.create_post_friend_comment_notification(from_user, to_user, 'Right', id=pk)\n return Response(data=post_comment_serializer.data, status=201)", "def form_valid(self, form):\n post = self.get_object()\n #WAS: post.comment_add(form.cleaned_data['text'], self.request.user)\n post.add_comment(form.cleaned_data['text'], \n self.request.user,\n added_at=None, \n by_email=False\n )\n return views_support.response_success(self.request)", "def post(self):\n\n # 160 characters limit\n\n if len(client_data[\"content\"]) >= 200:\n self.write_json_with_status(400,{\n 'result' : 'fail',\n 'reason' : 'content too long'\n })\n\n client_data = self.data\n now = str(time.time())\n comment_id = md5(client_data['content']+now)\n\n new_comment = self.event_comment_table.put_item(data={\n 'CommentID' : comment_id,\n 'CreatorID' : creator_id,\n 'Content' : content,\n 'EventID' : event_id,\n 'Timestamp' : now\n })\n \n\n self.write_json({\n 'comment_id' : comment_id\n })", "def add_comment(request):\n \n \"\"\"\n device type\n \"\"\"\n is_Mobile = dmb.process_request(request)\n \n \"\"\"\n User validation \n \"\"\"\n \n \n if request.user.is_anonymous():\n return comm.redirect_login_path(is_Mobile, request)\n result = dayword_comment(request, 1) \n return HttpResponse(json.dumps(result), content_type='application/json')", "def add_comment(article_id):\n\n if 'username' in session:\n user = mongo.db.user.find_one({'username': session['username']})\n \n if request.method == 'POST':\n articles = mongo.db.articles\n article = articles.find_one_and_update({'_id': ObjectId(article_id) },\n {'$push':\n {'comments':\n {'username': session['username'],\n 'date': datetime.utcnow(),\n 'text': request.form.get('comment')\n }\n }\n })\n \n comment = mongo.db.article_comments\n comment.insert_one({'user': user['_id'],\n 'from_user': session['username'],\n 'article': article['_id'],\n 'article_title': article['title'],\n 'date': datetime.utcnow(),\n 'to_user': article['author'],\n 'text': request.form.get('comment')\n })\n \n flash('Your comment has been added.', 'success')\n return redirect(url_for('blog'))\n \n flash('Please login to post a comment.', 'info')\n return redirect(url_for('login'))", "def form_valid(self, form):\n action = self.get_object()\n #WAS: return action.comment_add(form.cleaned_data['text'], self.request.user)\n action.comment_add(form.cleaned_data['text'], self.request.user)\n return views_support.response_success(self.request)", 
"def question_comments(request, lot_id, question_pk):\n question = get_object_or_404(Question, lot__slug=lot_id, pk=question_pk)\n question.views += 1\n question.save()\n return render(request, 'comments_on_question.html', {'question': question})", "def post(self, request, slug):\n article = ArticleInst.fetch(slug)\n comment = request.data.get('comment', {})\n\n serializer = self.serializer_class(data=comment)\n serializer.is_valid(raise_exception=True)\n status_ = status.HTTP_201_CREATED\n\n try:\n Comment.objects.get(\n article=article,\n body=comment.get('body')\n )\n except Comment.DoesNotExist:\n serializer.save(author=request.user, article=article)\n resp = {'message': 'Comment created'}\n resp['data'] = serializer.data\n\n else:\n resp = {'message': \"Seems you've posted an exact comment before\"}\n status_ = status.HTTP_409_CONFLICT\n return Response(data=resp,\n status=status_\n )", "def post_comment_form(request):\n try:\n comment, previous_version = get_comment(request)\n except InvalidCommentException as e:\n raise\n\n parent_comment = comment.parent\n tree_root = parent_comment.get_root()\n parent_object = tree_root.content_object\n if not user_can_post_comment(request, comment):\n raise Exception(\"User can't create comments\")\n\n if is_past_max_depth(comment):\n raise Exception(\"Max depth reached\")\n\n # If the comment object (NOT the message) hasn't been saved yet...\n if comment._state.adding == True:\n comment = add_comment(comment)\n\n # Everything has checked out, so we save the new version and return the appropriate response\n version_form, new_version = create_new_version(request, comment)\n\n return comment", "def api_draft_comments(request):\n try:\n def sanitize(comment):\n patch = models.Patch.get_by_id(int(comment.patch_id),\n parent=request.patchset.key)\n assert not patch is None\n message_id = str(comment.message_id) if message_id in comment else None,\n return {\n user: request.user,\n issue: request.issue,\n patch: patch,\n lineno: int(comment.lineno),\n left: bool(comment.left),\n text: str(comment.text),\n message_id: message_id,\n }\n return [\n {message_id: _add_or_update_comment(**comment).message_id}\n for comment in map(sanitize, json.load(request.data))\n ]\n except Exception as err:\n return HttpTextResponse('An error occurred.', status=500)", "def put(self):\n client_data = self.data\n comment_id = client_data['comment_id']\n\n try:\n comment = self.event_comment_table.get_item(CommentID=comment_id)\n except:\n self.write_json_with_status(400,{\n 'result' : 'fail',\n 'reason' : 'invalid comment id'\n })\n\n if self.current_userid != comment[\"CreatorID\"]:\n self.write_json_with_status(403,{\n 'result' : 'fail',\n 'reason' : 'Anthantication failed'\n })\n\n comment['Coentent'] = client_data['data']\n comment['Timestamp'] = str(time.time())\n comment.partial_save();\n\n self.write_json({\n 'comment_id' : comment_id,\n 'Timestamp' : comment['Timestamp']\n })", "def detail(request, article_id):\n article = get_object_or_404(Article, pk=article_id)\n form = PartialCommentForm() #blank form\n return render_to_response('article/detail.html', locals(),\n context_instance=RequestContext(request))", "def test_add_new_comment(self):\n\n result = self.client.post(\"/add_new_comment/2\",\n data={\"user_id\": 25, \"park_id\": \"2\", \"content\": \"My dog loves this park!\"},\n follow_redirects=True)\n self.assertIn(\"<h1 class=\\\"title\\\">Bark Park!</h1>\", result.data)", "def comment(*args, **kwargs):\r\n kwargs['model'] = ThreadedComment\r\n 
kwargs['form_class'] = ThreadedCommentForm\r\n return free_comment(*args, **kwargs)", "def add_comment(self):\n comment = Comment(\n title=self.title,\n comment=self.comment,\n rating=self.rating,\n user_from_id=g.user.id,\n user_to_id=self.user_to_id\n )\n db.session.add(comment)\n db.session.commit()\n return comment", "def mutate(parent, info, comment_details):\n\n user = User.find_or_fail(comment_details.user_id)\n post = Post.find_or_fail(comment_details.post_id)\n\n comment = Comments()\n comment.body = comment_details.body\n\n user.comments().save(comment)\n post.comments().save(comment)\n\n return comment", "def test_adds_comment_to_activity(self):\n response = self.client.get(\n reverse(\n 'crt_forms:crt-forms-show',\n kwargs={'id': self.pk}),\n )\n content = str(response.content)\n self.assertTrue(self.note in content)", "def feature_comment(request, id=id):\n feature = get_object_or_404(Feature, id=id)\n comment_form = FeatureCommentForm(request.POST, request.FILES)\n if comment_form.is_valid():\n comment = comment_form.save(commit=False)\n comment.user = request.user\n comment.feature = feature\n comment_form.save()\n return redirect(feature_detail, id)", "def bug(request, bugid):\n user = request.user\n if user.is_authenticated:\n if request.method == \"POST\":\n user = request.user\n comment = request.POST['comment']\n ticket = get_object_or_404(Ticket, pk=bugid)\n if comment.strip() == '':\n messages.error(request, 'Comment message is required.')\n return redirect('bug', bugid=ticket.pk)\n\n comment = Comment(user=user, comment=comment, ticket=ticket)\n comment.save()\n messages.success(request, 'Thanks for your comment.')\n return redirect('bug', bugid=ticket.pk)\n\n current_bug = get_object_or_404(Ticket, pk=bugid)\n comments = Comment.objects.all().filter(ticket=bugid)\n votes = Vote.objects.all().filter(ticket=bugid).count()\n context = {\n 'bug': current_bug,\n 'comments': comments,\n 'votes': votes\n }\n return render(request, 'bug.html', context)", "def add_awcomment(request, pk):\n\n award = get_object_or_404(Award, pk=pk)\n if request.method == \"POST\":\n form = AwardCommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.award = award\n comment.save()\n return redirect('../', pk=award.pk)\n else:\n form = AwardCommentForm()\n return render(request, {'form': form})", "def test_editing_comment(self):\n\n data = {\"comment\": \"Edited comment body.\"}\n result = self.client.post(\"/comment/1/edit.json\", data=data)\n\n self.assertEqual(result.status_code, 200)\n self.assertIn(b\"Edited comment\", result.data)", "def endorse_comment(request, course_id, comment_id):\r\n comment = cc.Comment.find(comment_id)\r\n comment.endorsed = request.POST.get('endorsed', 'false').lower() == 'true'\r\n comment.save()\r\n return JsonResponse(utils.safe_content(comment.to_dict()))", "def _post(self, data):\n new_comment_id = DB_COMMENT_TABLE.insert(data)\n return new_comment_id", "def visit(self, node):\n self.body.append('<div id =\"discourse-comments\"></div>')", "def post(self, request, *args, **kwargs):\n serializer = CommentSerializer(data=request.data)\n post_pk = self.kwargs['post_pk']\n post = Post.objects.get(pk=post_pk)\n if serializer.is_valid():\n serializer.save(author=request.user, post=post)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def edit_accomment(request, pk):\n\n comment = 
get_object_or_404(ActorComment, pk=pk)\n form = ActorCommentForm()\n if request.method == \"POST\":\n form = ActorCommentForm(request.POST, instance=comment)\n\n if form.is_valid():\n form.save()\n url = '../../' + str(comment.actor.pk)\n return redirect(url)\n\n context = {\n 'form': form,\n 'comment': comment,\n }\n return render(request, context)", "def post(self):\n post_id = int(self.request.get('post_id'))\n post = Posts.get_by_id(post_id)\n comment = self.request.get('comment')\n submitter_id = self.get_active_user().key().id()\n\n if submitter_id:\n comment = Comments(post_id=post_id, content=comment,\n submitter_id=submitter_id)\n comment.put()\n self.redirect('/%s' % str(post.key().id()))\n else:\n self.error(403)", "def fig_comments(request,fig_id):\n try:\n figconf = FigConf.objects.get(id=fig_id)\n except:\n raise Http404\n\n context = {'figconf': figconf}\n return render(request, 'sgdata/fig_comments.html', context)", "def start_comment_edit(self, comment_id):\r\n old_body = self.get_comment_body(comment_id)\r\n self._find_within(\"#comment_{} .action-edit\".format(comment_id)).first.click()\r\n EmptyPromise(\r\n lambda: (\r\n self.is_comment_editor_visible(comment_id) and\r\n not self.is_comment_visible(comment_id) and\r\n self._get_comment_editor_value(comment_id) == old_body\r\n ),\r\n \"Comment edit started\"\r\n ).fulfill()", "def process_comment(request, comment, post):\n\n if request.user.is_authenticated:\n # We already set auth user's name and email in the form's inital vals.\n comment.author = request.user\n\n # Is this a threaded comment?\n if request.POST.get(\"parent_id\"):\n comment.parent = Comment.objects.get(id=request.POST.get(\"parent_id\"))\n\n # If commenter is logged in, override name and email with stored values from User object\n if request.user.is_authenticated:\n comment.name = request.user.get_full_name()\n comment.email = request.user.email\n\n # Set required relationship to Post object\n comment.post = post\n\n # Get commenter's IP and User-Agent string\n # ip = get_ip(request)\n # if ip is not None:\n # comment.ip_address = ip\n comment.user_agent = request.META.get(\"HTTP_USER_AGENT\", \"\")\n\n # Run spam check\n comment.spam = spam_check(comment)\n\n # Strip disallowed HTML tags. 
See tangerine docs to customize.\n comment.body = sanitize_comment(comment.body)\n\n # Call comment approval workflow\n comment.approved = get_comment_approval(comment.email, request.user.is_authenticated)\n if comment.approved:\n messages.add_message(request, messages.SUCCESS, \"Your comment has been posted.\")\n else:\n messages.add_message(request, messages.INFO, \"Your comment has been held for moderation.\")\n\n comment.save()\n\n # Alert post author that comment needs moderation, or that it's been auto-published:\n send_comment_moderation_email(comment)", "def add_snippet(request):\n original_id = request.GET.get('oid', None)\n \n if request.method == 'POST':\n form = forms.AddSnippetForm(request.POST)\n if form.is_valid():\n new_snippet = form.save(commit=False)\n new_snippet.author = request.user\n if original_id:\n new_snippet.original_id = original_id\n new_snippet.save()\n return HttpResponseRedirect(new_snippet.get_absolute_url())\n else:\n form = forms.AddSnippetForm()\n return render_to_response('cab/add_snippet_form.html',\n { 'form': form },\n context_instance=RequestContext(request))", "def post(self, request, slug, id):\n article = ArticleInst.fetch(slug)\n comment = request.data.get('comment', {})\n posted_comment = CommentAPIView.check_comment(\n id, article)\n\n serializer = self.serializer_class(data=comment)\n serializer.is_valid(raise_exception=True)\n status_ = status.HTTP_201_CREATED\n\n try:\n CommentReply.objects.get(\n comment_to=posted_comment,\n author=request.user,\n body=comment.get('body').strip()\n )\n except CommentReply.DoesNotExist:\n serializer.save(author=request.user,\n article=article,\n comment_to=posted_comment)\n resp = {'message': f'Replied to comment of ID {id}'}\n resp['data'] = serializer.data\n\n else:\n resp = {'message': \"Seems you've posted an exact comment before\"}\n status_ = status.HTTP_409_CONFLICT\n return Response(data=resp,\n status=status_\n )", "def update_comments(self):\n self.nb_comments = self.comments.count()\n self.save()", "def edit_mvcomment(request, pk):\n\n comment = get_object_or_404(MovieComment, pk=pk)\n\n if request.method == \"POST\":\n form = MovieCommentForm(request.POST, instance=comment)\n\n if form.is_valid():\n form.save()\n url = '../../' + str(comment.movie.pk)\n return redirect(url)\n else:\n form = MovieCommentForm(instance=comment)\n\n context = {\n 'form': form,\n 'comment': comment,\n }\n return render(request, context)", "def add_vote():\n \n\n comment_id = request.form.get(\"comment_id\")\n voted_item = request.form.get(\"voted_item\")\n\n\n comment = Comment.query.get(int(comment_id))\n \n \n vote_check = Vote.query.filter(Vote.comment_id == int(comment_id), Vote.user_id == session['user_id']).first()\n if vote_check:\n db.session.delete(vote_check)\n db.session.commit()\n else:\n vote_added = Vote(user_id = session['user_id'], comment_id = int(comment_id), up_vote = True)\n db.session.add(vote_added)\n db.session.commit()\n\n \n \n result = {'vote': comment.vote_count(), \"comment_id\": comment_id}\n return jsonify(result)", "def like_comment(request):\n\n # get data\n in_data = getRequestData(request)\n\n # increment comment score\n try:\n comment = Comment.objects.get(id=in_data.get('mycommentid'))\n comment.score += 1\n comment.save()\n except:\n return HttpResponseBadRequest('Error saving to database!')\n\n return JsonResponse(in_data)", "def edit_comment(self, id, body, **args):\n args.update(id=id, body=body)\n return self.fetch(\"/comment\", post_args=args)", "def post(self):\n post_id = 
self.request.get('post_id')\n post = Post.get_by_id(int(post_id), parent=blog_key())\n content = self.request.get('comment')\n\n if content:\n comment = Comment(parent=comment_key(),\n content=content,\n user=self.user,\n post=post)\n comment.put()\n\n time.sleep(0.1)\n self.redirect('/blog/%s' % str(post.key().id()))", "def handle_free_comments(self):\r\n comments = FreeComment.objects.all()\r\n for c in comments:\r\n new = FreeThreadedComment(\r\n content_type = c.content_type,\r\n object_id = c.object_id,\r\n comment = c.comment,\r\n name = c.person_name,\r\n website = '',\r\n email = '',\r\n date_submitted = c.submit_date,\r\n date_modified = c.submit_date,\r\n date_approved = c.submit_date,\r\n is_public = c.is_public,\r\n ip_address = c.ip_address,\r\n is_approved = c.approved\r\n )\r\n new.save()", "def process_comments(session, comments):\n for c in tqdm(comments, desc=\"Injecting comments into DB\"):\n db_comment = session.query(Comment).get(c['id'])\n if db_comment:\n db_comment.update(session, **c)\n else:\n Comment.create(session, **c)", "def thread_comments(request, myid):\n\n try:\n\t# get comments using the thread (foreign key) id\n mycomments = Comment.objects.filter(thread__id=myid)\n except:\n return HttpResponse(status=404)\n\n if request.method == 'GET':\n serializer = CommentSerializer(mycomments, many=True)\n return drfJSONResponse(serializer.data)", "def create_comment(bid, pid):\n # pylint: disable=unused-argument\n form = CommentForm(request.form)\n if request.method == 'POST':\n if form.validate():\n DB.session.add(Comment(pid, current_user.uid, form.text.data))\n DB.session.commit()\n flash('Comment successfully created!')\n else:\n flash(constants.DEFAULT_SUBMISSION_ERR)\n return redirect(request.referrer)", "def community_post_create_view(request):\n task = \"Create New\"\n form = AddEditPostForm() # An unbound form\n\n if request.method == 'POST': # If the form has been submitted...\n form = AddEditPostForm(request.POST, request.FILES) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n post = form.save(commit=False) # Create a new object from the form, but don't save it to the database\n post.author = request.user # Set the author to the current user\n post.save() # Save the object to the database\n slug_str = \"%s %s\" % (post.title, post.date_posted) # Create a slug from the title and date\n post.slug = slugify(slug_str) # Create the slug\n post.save() # Save the object to the database\n return redirect('community-home') # Redirect to the home page\n\n context = { # Pass the variables to the template\n 'task': task,\n 'form': form,\n }\n return render(request,\n 'pages/patient-community/community-create-update-post.html',\n context) # render the patient community create post page" ]
[ "0.67972344", "0.6647041", "0.663194", "0.66208017", "0.65695584", "0.6565645", "0.6551287", "0.65155035", "0.6482362", "0.6476776", "0.6422695", "0.6392417", "0.6366266", "0.6361407", "0.63019305", "0.6271926", "0.6269781", "0.6195616", "0.61950845", "0.61534417", "0.61481893", "0.6133915", "0.6068977", "0.6055021", "0.6047715", "0.6018806", "0.6007634", "0.59891266", "0.59636855", "0.59418595", "0.5923893", "0.59204197", "0.5912901", "0.5903977", "0.5891716", "0.58888763", "0.5875274", "0.5867666", "0.58551806", "0.5854555", "0.58514994", "0.584737", "0.5833851", "0.5833368", "0.582483", "0.5811046", "0.5809055", "0.5768143", "0.57428527", "0.57402086", "0.5731248", "0.571119", "0.5709281", "0.56822777", "0.56713784", "0.5658102", "0.5651685", "0.56411266", "0.56252265", "0.56248486", "0.56127256", "0.56085235", "0.56051594", "0.55994487", "0.55970085", "0.558299", "0.55344003", "0.5523614", "0.55220056", "0.5519018", "0.550611", "0.55019873", "0.54991096", "0.5484717", "0.54765564", "0.5458783", "0.54375726", "0.54203475", "0.54121155", "0.5409582", "0.5409345", "0.5391747", "0.5390299", "0.5390108", "0.53791136", "0.53765523", "0.5342886", "0.5338847", "0.5337659", "0.53329486", "0.5329789", "0.53165454", "0.53129256", "0.5312698", "0.5307602", "0.53064066", "0.5304", "0.5301681", "0.52971876", "0.52938384" ]
0.6193762
19
View function that returns the comment tree for the desired parent object.
def load_comments(request):
    # TODO: Add the ability to return comment tree in JSON format.
    # First we get the root of the comment tree being requested
    try:
        tree_root, parent_object = _get_or_create_tree_root(request)
    except InvalidCommentException as e:
        return JsonResponse({
            'ok': False,
            'error_message': str(e),
        })

    # Check if the user doesn't pass the appropriate permission check (on the parent_object)...
    if not user_has_permission(request, parent_object, 'can_view_comments'):
        return JsonResponse({
            'ok': False,
            'error_message': "You do not have permission to view comments for this object.",
        })

    # Once we have our desired nodes, we tack on all of the select/prefetch related stuff
    nodes = tree_root.get_family().select_related('deleted_user_info', 'created_by', 'parent', 'content_type')\
        .prefetch_related(Prefetch('versions', queryset=CommentVersion.objects.order_by('-date_posted')\
        .select_related('posting_user', 'deleted_user_info')))

    # The 'X_KWARGS' header is populated by settings.kwarg in comments.js
    kwargs = json.loads(request.META.get('HTTP_X_KWARGS', {}))
    kwargs.update({
        'nodes': nodes,
        'parent_object': parent_object,
        'max_depth': tree_root.max_depth
    })
    comments_template = get_attr_val(request, parent_object, 'comments_template', 'comments/comments.html', **kwargs)

    # In the parent_object, sites can define a function called 'filter_nodes' if they wish to apply any additional
    # filtering to the nodes queryset before it's rendered to the template.
    # Default value is the nodes tree with the deleted comments filtered out.
    nodes = get_attr_val(request, parent_object, "filter_nodes", default=nodes.filter(deleted=False), **kwargs)
    kwargs.update({"nodes": nodes, 'request': request})

    # Checks/assigns permissions to each node (so the template doesn't have to)
    _process_node_permissions(**kwargs)

    return JsonResponse({
        'ok': True,
        'html_content': loader.render_to_string(comments_template, context=kwargs, request=request),
        'number_of_comments': tree_root.get_descendant_count()
    })
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_by_parent(parent_id):\n return CommentsTreeDAO(root_id=parent_id)", "def parent(self) -> Comment | praw.models.Submission:\n # pylint: disable=no-member\n if self.parent_id == self.submission.fullname:\n return self.submission\n\n if self.parent_id in self.submission._comments_by_id:\n # The Comment already exists, so simply return it\n return self.submission._comments_by_id[self.parent_id]\n # pylint: enable=no-member\n\n parent = Comment(self._reddit, self.parent_id.split(\"_\", 1)[1])\n parent._submission = self.submission\n return parent", "def parent(self, parent_object, limit_parent_language=True):\n return self.all().parent(parent_object, limit_parent_language)", "def children(self):\n return Comment.objects.filter(parent=self)", "def get_comments_from_parent(doc):\n\ttry:\n\t\tif is_virtual_doctype(doc.reference_doctype):\n\t\t\t_comments = \"[]\"\n\t\telse:\n\t\t\t_comments = frappe.db.get_value(doc.reference_doctype, doc.reference_name, \"_comments\") or \"[]\"\n\n\texcept Exception as e:\n\t\tif frappe.db.is_missing_table_or_column(e):\n\t\t\t_comments = \"[]\"\n\n\t\telse:\n\t\t\traise\n\n\ttry:\n\t\treturn json.loads(_comments)\n\texcept ValueError:\n\t\treturn []", "def get_parent(self) :\n return self.parent", "def parent_comments_in_reverse_order(self):\n return self.exclude(parent__isnull=False).order_by(\"-created_at\")\\\n .select_related(\"user\")", "def comments(self):\r\n return RepoCommitsComments(self.parent)", "def get_parent(self): # real signature unknown; restored from __doc__\n pass", "def get_parent(self):\n return self.parent", "def get_parent(self):\n return self.parent", "def get_parent(self):\n return self.parent", "def get_comments_by_parent_genus_type(self, comment_genus_type):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type\n # STILL NEED TO IMPLEMENT!!!\n return objects.CommentList([])", "def get_parent(self):\n return self.__parent", "def get_parent(self):\n return self.__parent", "def getParent():", "def parent(self):\n return None", "def parent(self):\n return None", "def parent(self):\n return None", "def parent(self):\n return None", "def getParent(obj):", "def get_parent(self):\n return self._parent", "def parent(self):\n\t\treturn self._parent", "def parent(self):\n return self.get_parent().specific", "def parent(self, node):\r\n return self.find_node(node).parent.content", "def comment(self, comment_id):\r\n return RepoCommitsComment(self.parent, comment_id)", "def modify_comment_tree(self, comment_tree):\n return comment_tree", "def find_parent(self):\n pass", "def parent(self):\r\n return self._parent", "def get_parent(self):\n if self.parent:\n return self.parent()\n else:\n return None", "def parent(self):\n \n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._parent", "def parent(self):\n return self._collection.parent(self)", "def parent(self, n):\n return n._parent", "def comments(self):\r\n return Comments(self)", "def comments(self):\r\n return Comments(self)", "def comments(self):\r\n return Comments(self)", "def parent(self):\n if 
self._parent is not None:\n return self._parent()\n else:\n return None", "def getParents(obj):", "def parent(self):\n address = self.parent_address\n try:\n parent = Page.objects.get(address=address)\n except Page.DoesNotExist:\n parent = None\n\n return parent", "def parent(self) -> Union[\"ExpressionNode\", None]:\n return self.__parent", "def get_comment_url(content_object, parent=None):\r\n kwargs = get_contenttype_kwargs(content_object)\r\n if parent:\r\n if not isinstance(parent, ThreadedComment):\r\n raise template.TemplateSyntaxError, \"get_comment_url requires its parent object to be of type ThreadedComment\"\r\n kwargs.update({'parent_id' : getattr(parent, 'pk', getattr(parent, 'id'))})\r\n return reverse('tc_comment_parent', kwargs=kwargs)\r\n else:\r\n return reverse('tc_comment', kwargs=kwargs)", "def parent(self):\n if self.__parent is None:\n return None\n parent = self.__parent()\n if parent is None:\n self.__parent = parent\n return parent", "def _get_parent_record(self) -> Link:\n rel = \"parent\"\n href = self.api_endpoint\n return Link(href=href, rel=rel)", "def get_parent(self):\n return BinaryNode.or_none(self.parent)", "def get_post_comments_recur(comment, comments, parent_comment_id, parent_post_id):\n if 'data' in comment:\n comment_data = comment['data']\n\n new_comment = None\n\n # a new comment exists at this layer, add it to the total list of comments\n if 'body' in comment_data:\n new_comment = {\n \"score\": comment_data['score'],\n \"body\": comment_data['body'],\n \"subreddit\": comment_data['subreddit'],\n \"author\": comment_data['author'],\n \"parent_comment_id\": parent_comment_id,\n \"parent_post_id\": parent_post_id,\n \"created\": comment_data['created'],\n \"comment_id\": comment_data['id']\n }\n comments.append(new_comment)\n\n next_parent_comment_id = parent_comment_id if new_comment is None else new_comment['comment_id']\n\n # recurse on children\n if 'children' in comment_data:\n for child in comment_data['children']:\n comments = get_post_comments_recur(child, comments, next_parent_comment_id, parent_post_id)\n\n # recurse on replies\n if 'replies' in comment_data:\n comments = get_post_comments_recur(comment_data['replies'], comments, next_parent_comment_id, parent_post_id)\n\n return comments", "def parent(self) -> Optional[DictionaryObject]:\n return self.get(\"/Parent\")", "async def get_comments_tree(request):\n\n comment_id = request.match_info.get('comment_id')\n if comment_id:\n # valitation was in route (\\d+)\n comment_id = int(comment_id)\n tree = CommentsTreeDAO.create_by_parent(comment_id)\n\n else:\n entity_type = request.match_info.get('entity_type')\n if not entity_type:\n return web.HTTPBadRequest(reason=\"Entity params error!\")\n # valitation was in route (\\d+)\n entity_id = int(request.match_info.get('entity_id'))\n tree = CommentsTreeDAO.create_by_entity(entity_type, entity_id)\n\n await tree.fetch(request['conn'])\n\n return web.json_response(await tree.rows)", "def get_parent(self):\n return self.lodgeit.get_paste_by_id(self.parent_id)", "def comments(self):\r\n return c.Comments(self)", "def comments(self):\r\n return c.Comments(self)", "def comments(self):\r\n return c.Comments(self)", "def parent(self):\r\n if not self._meta.parent:\r\n return None\r\n\r\n if not self.__parent__:\r\n self.__parent__ = self._meta.parent()\r\n\r\n return self.__parent__", "def generate_discreet_comment_tree(tribe):\n\n p1 = generate_random_post(tribe, user=get_random_user())\n p2 = generate_random_post(tribe, user=get_random_user(), 
parent_comment=p1)\n p3 = generate_random_post(tribe, user=get_random_user(), parent_comment=p1)\n p4 = generate_random_post(tribe, user=get_random_user())\n p5 = generate_random_post(tribe, user=get_random_user(), parent_comment=p4)\n p6 = generate_random_post(tribe, user=get_random_user(), parent_comment=p5)\n\n posts = [p1, p2, p3, p4, p5, p6]\n for post in posts:\n post.save()\n\n return posts", "def _get_comment_order(self):\n\n comment_tuples = CommentOrdererBase.get_comment_order(self)\n if not comment_tuples:\n return comment_tuples\n elif isinstance(comment_tuples[-1], MissingChildrenTuple):\n missing_children_tuple = comment_tuples.pop()\n else:\n missing_children_tuple = None\n\n special_responder_ids = self.link.responder_ids\n\n # unfortunately we need to look up all the Comments for QA\n comment_ids = {ct.comment_id for ct in comment_tuples}\n comments_by_id = Comment._byID(comment_ids, data=True)\n\n # figure out which comments will be kept (all others are discarded)\n kept_comment_ids = set()\n for comment_tuple in comment_tuples:\n if comment_tuple.depth == 0:\n kept_comment_ids.add(comment_tuple.comment_id)\n continue\n\n comment = comments_by_id[comment_tuple.comment_id]\n parent = comments_by_id[comment.parent_id] if comment.parent_id else None\n\n if comment.author_id in special_responder_ids:\n kept_comment_ids.add(comment_tuple.comment_id)\n continue\n\n if parent and parent.author_id in special_responder_ids:\n kept_comment_ids.add(comment_tuple.comment_id)\n continue\n\n if hasattr(comment, \"distinguished\") and comment.distinguished != \"no\":\n kept_comment_ids.add(comment_tuple.comment_id)\n continue\n\n # add all ancestors to kept_comment_ids\n for comment_id in sorted(kept_comment_ids):\n # sort the comments so we start with the most root level comments\n comment = comments_by_id[comment_id]\n parent_id = comment.parent_id\n\n counter = 0\n while (parent_id and\n parent_id not in kept_comment_ids and\n counter < g.max_comment_parent_walk):\n kept_comment_ids.add(parent_id)\n counter += 1\n\n comment = comments_by_id[parent_id]\n parent_id = comment.parent_id\n\n # remove all comment tuples that aren't in kept_comment_ids\n comment_tuples = [comment_tuple for comment_tuple in comment_tuples\n if comment_tuple.comment_id in kept_comment_ids\n ]\n\n if missing_children_tuple:\n comment_tuples.append(missing_children_tuple)\n\n return comment_tuples", "def get_tree_json(self):\n title = self.name\n if self.abbreviation:\n title = title + '(' + self.abbreviation + ')'\n result = {\"title\": title,\n \"key\": self.pk,\n \"tooltip\": self.description,\n \"extraClasses\": 'tag',\n \"lazy\": True,\n \"data\": {\"parentId\": None,\n \"name\": self.name,\n \"abbreviation\": self.abbreviation,\n \"description\": self.description\n }\n }\n parent = self.get_parent()\n if parent:\n result['data']['parentId'] = parent.pk\n return result", "def parent(self):\n return getattr(self, \"parent_%s\" % self.discriminator)", "def parent(self):\n return self if self.is_root else self.__parent", "def get_queryset(self, *args, **kwargs):\n return CommentQuerySet(self.model, using=self._db).order_by(\n self.tree_id_attr,\n self.left_attr\n )", "def comments(self):\r\n return comments.Comments(self)", "def parent(self):\n raise NotImplemented", "def get_parent():\n\n data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()\n return getParentView(data, request.params)", "def make_comment_data(self, comment_id, parent_id=None, children=[]): # 
pylint: disable=W0102\n return make_minimal_cs_comment({\n \"id\": comment_id,\n \"parent_id\": parent_id,\n \"course_id\": str(self.course.id),\n \"thread_id\": self.thread_id,\n \"thread_type\": \"discussion\",\n \"username\": self.user.username,\n \"user_id\": str(self.user.id),\n \"created_at\": \"2015-06-03T00:00:00Z\",\n \"updated_at\": \"2015-06-03T00:00:00Z\",\n \"body\": \"Original body\",\n \"children\": children,\n })", "def parent_model(self):\n return self.prop.parent.class_", "def get_parent(self):\n if not self._parent:\n self._parent = yield self.parent_resource.get(self.parent_id)\n\n raise Return(self._parent)", "def parent(self):\n other = self\n while True:\n for rev in other._hgmo['parents']:\n parent = Push(rev)\n if parent.id != self.id:\n return parent\n other = parent", "def get_parent_model(self):\n return self._model", "def get_comment_model(self):\n return get_model()", "def GetParent(self):\n return self.parent", "def get_question(self):\n if self.is_question:\n return self\n elif self.is_answer or self.is_comment and self.parent.is_question:\n return self.parent\n elif self.is_comment and self.parent.is_answer:\n return self.parent.parent\n raise IntegrityError('Content object {} is orphan.'.format(self.pk))", "def parent(self) -> Optional[Heirarchical]:\n return None", "def get_parents(self):\n return self.parents", "def child_comments_in_order(self):\n return self.order_by(\"created_at\").select_related(\"user\")", "def parent(self, p):\n node = self._validate(p)\n return self._make_position(node._parent)", "def get_outer(self):\n return self.parent", "def parent_document(cls):\n return cls.parent_resources()[0]", "def comments(self):\r\n return GistComments(self)", "def test_post_matches_parent_when_parent_is_comment(self):\n post = create_a_post()\n parent = Comment.create(body=\"I'm a parent comment\", post=post)\n comment = Comment.create(body=\"I'm a child comment\", parent=parent)\n self.assertIs(comment.post, parent.post)", "def get_parent(self):\n return self._find_by_locator().parent", "def get_comment_url_json(content_object, parent=None):\r\n try:\r\n return get_comment_url_ajax(content_object, parent, ajax_type=\"json\")\r\n except template.TemplateSyntaxError:\r\n raise template.TemplateSyntaxError, \"get_comment_url_json requires its parent object to be of type ThreadedComment\"\r\n return ''", "def get_comment(self, object_id):\n return self.get_object(\"comment\", object_id)", "def __str__(self):\n\t\treturn str(self.__parent)", "def comments(self):\n return comments.Comments(self)", "def outer(self):\n if self.parent is not None:\n self.parent.lines += self.lines\n # print (f\"{self.name} -> {self.parent.name}\")\n return self.parent", "def GetParent(self):\r\n\r\n return self._parent", "def parent(self):\n raise NotImplementedError()", "def parent(self):\n return self.key().parent()", "def parentItem(self):\n return None", "def viewer(self):\n return self.parent", "def parent(self, parent_object):\n lookup = get_parent_lookup_kwargs(parent_object)\n return self.filter(**lookup)" ]
[ "0.69411963", "0.67797315", "0.6721547", "0.66233695", "0.65036964", "0.64508355", "0.63603806", "0.63551664", "0.6318134", "0.6311694", "0.6311694", "0.6311694", "0.6236535", "0.6184287", "0.6184287", "0.61701477", "0.6166827", "0.6166827", "0.6166827", "0.6166827", "0.6165333", "0.6122415", "0.6119646", "0.6116784", "0.6099386", "0.60552555", "0.6053595", "0.60477865", "0.60151964", "0.60133815", "0.60016525", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.59930086", "0.599175", "0.5979098", "0.59737754", "0.59737754", "0.59737754", "0.59599745", "0.594734", "0.5923973", "0.5920671", "0.5917566", "0.59098625", "0.5889549", "0.58767724", "0.58754504", "0.58668315", "0.585592", "0.5855254", "0.5849559", "0.5849559", "0.5849559", "0.5841959", "0.5820543", "0.58045244", "0.5804151", "0.5803902", "0.57974595", "0.5776215", "0.5773311", "0.5760429", "0.57603866", "0.57519686", "0.57499284", "0.5742533", "0.57390153", "0.57383746", "0.57308424", "0.57247543", "0.5722535", "0.5707429", "0.5698502", "0.5689061", "0.56888366", "0.5669979", "0.5655717", "0.5651006", "0.5646798", "0.5645619", "0.56276083", "0.5614676", "0.5612813", "0.561213", "0.5603199", "0.5601433", "0.5587246", "0.558158", "0.5578278", "0.5578196", "0.5578142" ]
0.5738746
76
Interpret and store the decoded json dictionary from the handshake header file. Start an empty data tracking dictionary, according to data_labels.
def __init__(self, sock, header_dict):
    super().__init__(header_dict)
    self.sock = sock
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _read_header(\n self, header, filename, run_check_acceptability=True, background_lsts=True\n ):\n # get telescope information\n latitude = header[\"latitude\"][()]\n longitude = header[\"longitude\"][()]\n altitude = header[\"altitude\"][()]\n self.telescope_location_lat_lon_alt_degrees = (latitude, longitude, altitude)\n self.instrument = header[\"instrument\"][()].tobytes().decode(\"utf8\")\n self.telescope_name = header[\"telescope_name\"][()].tobytes().decode(\"utf8\")\n\n # get source information\n self.object_name = header[\"object_name\"][()].tobytes().decode(\"utf8\")\n\n # set history appropriately\n self.history = header[\"history\"][()].tobytes().decode(\"utf8\")\n if not uvutils._check_history_version(self.history, self.pyuvdata_version_str):\n self.history += self.pyuvdata_version_str\n\n # check for vis_units\n if \"vis_units\" in header:\n self.vis_units = header[\"vis_units\"][()].tobytes().decode(\"utf8\")\n else:\n # default to uncalibrated data\n self.vis_units = \"UNCALIB\"\n\n # check for optional values\n if \"dut1\" in header:\n self.dut1 = float(header[\"dut1\"][()])\n if \"earth_omega\" in header:\n self.earth_omega = float(header[\"earth_omega\"][()])\n if \"gst0\" in header:\n self.gst0 = float(header[\"gst0\"][()])\n if \"rdate\" in header:\n self.rdate = header[\"rdate\"][()].tobytes().decode(\"utf8\")\n if \"timesys\" in header:\n self.timesys = header[\"timesys\"][()].tobytes().decode(\"utf8\")\n if \"x_orientation\" in header:\n self.x_orientation = header[\"x_orientation\"][()].tobytes().decode(\"utf8\")\n if \"blt_order\" in header:\n blt_order_str = header[\"blt_order\"][()].tobytes().decode(\"utf8\")\n self.blt_order = tuple(blt_order_str.split(\", \"))\n if self.blt_order == (\"bda\",):\n self._blt_order.form = (1,)\n\n if \"antenna_diameters\" in header:\n self.antenna_diameters = header[\"antenna_diameters\"][()]\n if \"uvplane_reference_time\" in header:\n self.uvplane_reference_time = int(header[\"uvplane_reference_time\"][()])\n if \"eq_coeffs\" in header:\n self.eq_coeffs = header[\"eq_coeffs\"][()]\n if \"eq_coeffs_convention\" in header:\n self.eq_coeffs_convention = (\n header[\"eq_coeffs_convention\"][()].tobytes().decode(\"utf8\")\n )\n\n # check for phasing information\n self.phase_type = header[\"phase_type\"][()].tobytes().decode(\"utf8\")\n if self.phase_type == \"phased\":\n self._set_phased()\n self.phase_center_ra = float(header[\"phase_center_ra\"][()])\n self.phase_center_dec = float(header[\"phase_center_dec\"][()])\n self.phase_center_epoch = float(header[\"phase_center_epoch\"][()])\n if \"phase_center_frame\" in header:\n self.phase_center_frame = (\n header[\"phase_center_frame\"][()].tobytes().decode(\"utf8\")\n )\n elif self.phase_type == \"drift\":\n self._set_drift()\n else:\n self._set_unknown_phase_type()\n\n # get antenna arrays\n # cast to native python int type\n self.Nants_data = int(header[\"Nants_data\"][()])\n self.Nants_telescope = int(header[\"Nants_telescope\"][()])\n self.ant_1_array = header[\"ant_1_array\"][:]\n self.ant_2_array = header[\"ant_2_array\"][:]\n self.antenna_names = [\n n.tobytes().decode(\"utf8\") for n in header[\"antenna_names\"][:]\n ]\n self.antenna_numbers = header[\"antenna_numbers\"][:]\n self.antenna_positions = header[\"antenna_positions\"][:]\n\n # set telescope params\n try:\n self.set_telescope_params()\n except ValueError as ve:\n warnings.warn(str(ve))\n\n # get baseline array\n self.baseline_array = self.antnums_to_baseline(\n self.ant_1_array, self.ant_2_array\n )\n 
self.Nbls = len(np.unique(self.baseline_array))\n\n # get uvw array\n self.uvw_array = header[\"uvw_array\"][:, :]\n\n # get time information\n self.time_array = header[\"time_array\"][:]\n integration_time = header[\"integration_time\"]\n self.integration_time = integration_time[:]\n proc = None\n if \"lst_array\" in header:\n self.lst_array = header[\"lst_array\"][:]\n # check that lst_array in file is self-consistent\n if run_check_acceptability:\n (\n latitude,\n longitude,\n altitude,\n ) = self.telescope_location_lat_lon_alt_degrees\n lst_array = uvutils.get_lst_for_time(\n self.time_array, latitude, longitude, altitude\n )\n if not np.all(\n np.isclose(\n self.lst_array,\n lst_array,\n rtol=self._lst_array.tols[0],\n atol=self._lst_array.tols[1],\n )\n ):\n warnings.warn(\n \"LST values stored in {file} are not self-consistent \"\n \"with time_array and telescope location. Consider \"\n \"recomputing with utils.get_lst_for_time.\".format(file=filename)\n )\n else:\n # compute lst_array from time_array and telescope location\n proc = self.set_lsts_from_time_array(background=background_lsts)\n\n # get frequency information\n self.freq_array = header[\"freq_array\"][:, :]\n self.channel_width = float(header[\"channel_width\"][()])\n self.spw_array = header[\"spw_array\"][:]\n\n # get polarization information\n self.polarization_array = header[\"polarization_array\"][:]\n\n # get data shapes\n self.Nfreqs = int(header[\"Nfreqs\"][()])\n self.Npols = int(header[\"Npols\"][()])\n self.Ntimes = int(header[\"Ntimes\"][()])\n self.Nblts = int(header[\"Nblts\"][()])\n self.Nspws = int(header[\"Nspws\"][()])\n\n # get extra_keywords\n if \"extra_keywords\" in header:\n self.extra_keywords = {}\n for key in header[\"extra_keywords\"].keys():\n if header[\"extra_keywords\"][key].dtype.type in (np.string_, np.object_):\n self.extra_keywords[key] = (\n header[\"extra_keywords\"][key][()].tobytes().decode(\"utf8\")\n )\n else:\n self.extra_keywords[key] = header[\"extra_keywords\"][key][()]\n\n if proc is not None:\n # if lsts are in the background wait for them to return\n proc.join()\n\n return", "def _main_header(self, hdr):\n d = {}\n # Called readDefAnalysis in OpenMIMS\n d['sample type'], d['data included'], d['sample x'], d['sample y'], \\\n d['analysis type'], d['user name'], d['sample z'], date, time = \\\n unpack(self._bo + '4i 32s 16s i 12x 16s 16s', hdr.read(112))\n\n d['data included'] = bool(d['data included'])\n d['user name'] = self._cleanup_string(d['user name'])\n d['analysis type'] = self._cleanup_string(d['analysis type']).lower()\n date = self._cleanup_string(date)\n time = self._cleanup_string(time)\n d['date'] = self._cleanup_date(date + ' ' + time)\n\n if self.header['file type'] in (27, 29, 39):\n # Called MaskImage/readMaskIm in OpenMIMS\n d['original filename'], d['analysis duration'], d['frames'], \\\n d['scan type'], d['magnification'], d['size type'], \\\n d['size detector'], d['beam blanking'], d['presputtering'], \\\n d['presputtering duration'] = \\\n unpack(self._bo + '16s 3i 3h 2x 3i', hdr.read(48))\n\n d['AutoCal'] = self._autocal(hdr)\n d['HVControl'] = {}\n d['HVControl']['hvcontrol enabled'] = False\n\n elif self.header['file type'] in (22, 41):\n # Called MaskSampleStageImage/readMaskIss in OpenMIMS\n d['original filename'], d['analysis duration'], d['scan type'], \\\n d['steps'], d['step size x'], d['step size y'], d['step size?'], \\\n d['step waittime'], d['frames'], d['beam blanking'], \\\n d['presputtering'], d['presputtering duration'] = \\\n 
unpack(self._bo + '16s 6i d 4i', hdr.read(64))\n\n d['scan type'] = _stage_scan_types.get(d['scan type'], str(d['scan type']))\n\n d['AutoCal'] = self._autocal(hdr)\n d['HVControl'] = self._hvcontrol(hdr)\n # Don't know if this unused byte needs to go after HVControl or after SigRef.\n hdr.seek(4, 1)\n\n elif self.header['file type'] in (21, 26):\n # Not in OpenMIMS\n # this bit same as image, 1 extra unused/unknown\n d['original filename'], d['analysis duration'], d['frames'], \\\n d['scan type'], d['magnification'], d['size type'], \\\n d['size detector'], d['beam blanking'], d['presputtering'], \\\n d['presputtering duration'] = \\\n unpack(self._bo + '16s 4x 3i 3h 2x 3i', hdr.read(52))\n\n # this bit same as stage scan\n d['AutoCal'] = self._autocal(hdr)\n d['HVControl'] = self._hvcontrol(hdr)\n\n # 24 bytes unknown, not sure if they go here or before AutoCal\n hdr.seek(24, 1)\n\n elif self.header['file type'] == 31:\n # Don't know if this is correct, all 0s anyway\n d['original filename'], d['scan type'], \\\n d['beam blanking'], d['presputtering'] = \\\n unpack(self._bo + '16s 3i 4x', hdr.read(32))\n\n elif self.header['file type'] == 35:\n d['original filename'], d['scan type'], d['analysis duration'], \\\n d['frames'], d['beam blanking'], d['presputtering'] = \\\n unpack(self._bo + '16s 5i 40x', hdr.read(76))\n\n d['AutoCal'] = self._autocal(hdr)\n d['HVControl'] = self._hvcontrol(hdr)\n\n else:\n raise TypeError('What type of image are you? {}'.format(self.header['file type']))\n\n # Continue main header for all types\n d['SigRef'] = self._sigref(hdr)\n d['masses'] = unpack(self._bo + 'i', hdr.read(4))[0]\n\n # scan type is set for stage scan analysis, set others\n if isinstance(d['scan type'], int):\n if d['scan type'] == 0:\n d['scan type'] = ''\n else:\n d['scan type'] = str(d['scan type'])\n\n d['beam blanking'] = bool(d['beam blanking'])\n d['presputtering'] = bool(d['presputtering'])\n d['original filename'] = self._cleanup_string(d['original filename'])\n\n if self.header['file type'] in (21, 26, 27, 29, 35, 39):\n if self.header['file version'] >= 4108:\n n = 60\n else:\n n = 10\n elif self.header['file type'] in (22, 31, 40, 41):\n n = 20\n else:\n n = 0\n\n # Not sure what this is, memory pointers? 
Not needed.\n # d['mass table ptr'] = unpack(self._bo + 2*n*'h', hdr.read(n*4))\n hdr.seek(n*4, 1)\n\n if self.header['file type'] in (21, 22, 26, 40, 41, 35):\n hdr.seek(4, 1) # 4 bytes unused\n\n # Mass table, dict by species label.\n d['MassTable'] = collections.OrderedDict()\n for m in range(d['masses']):\n mi = {}\n mi['trolley index'], unknown, mi['mass'], mi['matrix or trace'], \\\n mi['detector'], mi['wait time'], mi['frame count time'] = \\\n unpack(self._bo + '2i d 2i 2d', hdr.read(40))\n\n if self.header['file type'] == 31:\n if d['analysis type'].endswith('trolley step scan'):\n # start and end are in mm, step is in μm; convert to mm\n mi['radius start'], mi['radius end'], \\\n mi['radius step'], mi['b field bits'] = \\\n unpack(self._bo + '3d i', hdr.read(28))\n mi['radius step'] /= 1000\n else:\n mi['voltage start'], mi['voltage end'], \\\n mi['voltage step'], mi['b field bits'] = \\\n unpack(self._bo + '3d i', hdr.read(28))\n else:\n mi['offset'], mi['b field bits'] = unpack(self._bo + '2i', hdr.read(8))\n\n mi.update(self._species(hdr))\n\n if self.header['file type'] == 31:\n hdr.seek(4, 1)\n\n # Add correction controls, my own addition.\n mi['background corrected'] = False\n mi['deadtime corrected'] = False\n mi['yield corrected'] = False\n\n label = mi.pop('label')\n # This is true for NS50L and file version 4108.\n # Anywhere else different?\n # Maybe confirm this with the Trolleys dict,\n # there is an Esi trolley.\n if mi['trolley index'] == 8:\n label = 'SE'\n\n d['MassTable'][label] = mi\n\n # Create a few convenient lists\n d['label list'] = tuple(d['MassTable'].keys())\n d['label list fmt'] = tuple(format_species(m) for m in d['label list'])\n d['mass list'] = tuple(d['MassTable'][m]['mass'] for m in d['label list'])\n\n return d", "def _read_data(self) -> None:\n raw_data = self.__mmap[:].decode('ascii').rstrip('\\0')\n self.__data = json.loads(raw_data)", "def _parseData(self, payload):\n out=[]\n bytesParsed = 0\n while bytesParsed < len(payload):\n\n #check for the extended Code Level, code and length\n #count the number of EXCODE_BYTE\n #extendedCodeLevel = sum([1 for x in data if x == EXCODE_BYTE] )\n #bytesParsed += extendedCodeLevel\n\n #identify the length of the expected bytes in the payload\n code = payload[bytesParsed]\n bytesParsed +=1\n if code > 0x7F:\n # multi-byte code, length > 1\n length = payload[bytesParsed]\n bytesParsed +=1\n else:\n length = 1\n\n if code == SENSOR_STATUS:\n # value of 0==no contact, 200==contact\n #print \"leadoff: %i\" % payload[bytesParsed]\n out.append( {'timestamp': self.curtime, 'leadoff': payload[bytesParsed] } )\n bytesParsed +=1\n\n elif code == HEART_RATE:\n #print \"HR: %i\" % payload[bytesParsed]\n out.append( {'timestamp': self.curtime, 'HR': payload[bytesParsed:] } )\n bytesParsed +=1\n\n elif code == CONFIG_BYTE:\n #print \"config: %i\" % payload[bytesParsed]\n out.append( {'timestamp': self.curtime, 'config': payload[bytesParsed:] } )\n bytesParsed +=1\n\n elif code == RAW_ECG:\n # raw value is between -32768 and 32767, in twos compliment form\n # if the raw value is higher than 32768, it should be rolled around to allow for negative values\n raw = payload[bytesParsed]*256 + payload[bytesParsed]\n if raw >= 32768: \n raw = raw - 65536\n #print \"ecg: %i\" % ecg\n\n # create the timestamp on each ECG sample, starting from the first\n if self.starttime is None:\n self.starttime = time.time()\n self.curtime = self.starttime\n else:\n self.curtime = self.curtime + 1./self.Fs\n\n out.append( {'timestamp': 
self.curtime, 'ecg_raw': raw } )\n bytesParsed += length\n\n elif code == DEBUG_1:\n #print \"debug1: \" + str(payload[bytesParsed:]).strip('[]')\n out.append( {'timestamp': self.curtime, 'debug1': payload[bytesParsed:] } )\n bytesParsed += length\n\n elif code == DEBUG_2:\n #print \"debug2: \" + str(payload[bytesParsed:]).strip('[]')\n out.append( {'timestamp': self.curtime, 'debug2': payload[bytesParsed:] } )\n bytesParsed += length\n\n else:\n print \"unknown code: %i\" % code\n\n return out", "def parse_data(self, frame, header_structure):\n raise NotImplementedError()", "def read_metadata_record(raw_features_string):\n full_metadata = json.loads(raw_features_string)\n return {\"sha256\": full_metadata[\"sha256\"], \"appeared\": full_metadata[\"appeared\"], \"label\": full_metadata[\"label\"]}", "def record() -> Dict[str, int]:\n data: Dict[str, List[str]] = json.loads(request.json)\n\n mac: str = data['mac']\n letters: List[str] = data['keys_pressed']\n\n if not Path(f'.{os.sep}data').exists():\n Path(f'.{os.sep}data').mkdir()\n\n data_file: str = \"data/keylog.json\"\n if os.path.exists(data_file):\n with open(data_file, \"r\") as fp:\n data = json.load(fp)\n else:\n data = {}\n\n previous_string: str = ''\n if mac in data.keys():\n previous_string = data[mac]['string']\n\n previous_string += process_key_string(letters)\n data[mac] = {}\n data[mac][\"string\"] = previous_string\n data[mac][\"frequency_map\"] = generate_frequency_map(previous_string)\n\n with open(data_file, \"w+\") as fp:\n json.dump(data, fp)\n\n return {\"response\": 0}", "def _decode_next_layer(self, dict_, length=None):\n # make next layer protocol name\n proto = str(self._prot or 'Raw').lower()\n\n # make BytesIO from frame package data\n bytes_ = io.BytesIO(self._file.read(dict_['len']))\n info, protochain = self._import_next_layer(bytes_, length)\n\n # write info and protocol chain into dict\n self._protos = ProtoChain(self._prot, protochain)\n dict_[proto] = info\n dict_['protocols'] = self._protos.chain\n return dict_", "def parse_handshake(self, data):\n\n if (data[0] != len(PSTR) or data[1:20] != PSTR\n or data[28:48] != self.factory.torrent.info_hash):\n\n self.transport.loseConnection()\n else:\n self.handshaked = True\n\n reserved = data[20:28]\n if reserved[7] & ord('\\x04'):\n self.fast_extension = True\n\n if reserved[7] & ord('\\x01'):\n self.dht = True", "def __init__(self, header: CmdHeader, raw_data: bytes) -> None:\n super().__init__(header, raw_data)\n self.status, *self.values = unpack_from(f'<{self.header.params_count}I', raw_data)", "def load_data_file(self):\n with open(self.files['data'], 'r') as infile:\n data = json.load(infile)\n self.boundary_nodes = data['boundary_nodes']\n self.nodes = {int(k): v for k, v in data['nodes'].items()}\n self.levels = data['levels']\n infile.close()", "def deserialise_hs01(buffer):\n check_schema_identifier(buffer, FILE_IDENTIFIER)\n event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buffer, 0)\n\n dims = []\n for i in range(event_hist.DimMetadataLength()):\n bins_fb = _create_array_object_for_type(\n event_hist.DimMetadata(i).BinBoundariesType()\n )\n\n # Get bins\n bins_offset = event_hist.DimMetadata(i).BinBoundaries()\n bins_fb.Init(bins_offset.Bytes, bins_offset.Pos)\n bin_boundaries = bins_fb.ValueAsNumpy()\n\n hist_info = {\n \"length\": event_hist.DimMetadata(i).Length(),\n \"bin_boundaries\": bin_boundaries,\n \"unit\": event_hist.DimMetadata(i).Unit().decode(\"utf-8\")\n if event_hist.DimMetadata(i).Unit()\n else \"\",\n 
\"label\": event_hist.DimMetadata(i).Label().decode(\"utf-8\")\n if event_hist.DimMetadata(i).Label()\n else \"\",\n }\n dims.append(hist_info)\n\n metadata_timestamp = event_hist.LastMetadataTimestamp()\n\n data_fb = _create_array_object_for_type(event_hist.DataType())\n data_offset = event_hist.Data()\n data_fb.Init(data_offset.Bytes, data_offset.Pos)\n shape = event_hist.CurrentShapeAsNumpy().tolist()\n data = data_fb.ValueAsNumpy().reshape(shape)\n\n # Get the errors\n errors_offset = event_hist.Errors()\n if errors_offset:\n errors_fb = _create_array_object_for_type(event_hist.ErrorsType())\n errors_fb.Init(errors_offset.Bytes, errors_offset.Pos)\n errors = errors_fb.ValueAsNumpy().reshape(shape)\n else:\n errors = []\n\n hist = {\n \"source\": event_hist.Source().decode(\"utf-8\") if event_hist.Source() else \"\",\n \"timestamp\": event_hist.Timestamp(),\n \"current_shape\": shape,\n \"dim_metadata\": dims,\n \"data\": data,\n \"errors\": errors,\n \"last_metadata_timestamp\": metadata_timestamp,\n \"info\": event_hist.Info().decode(\"utf-8\") if event_hist.Info() else \"\",\n }\n return hist", "def load_payload(self, server_payload) -> None:", "def data_dict0():\n\n # 0- Sample from detectron2 -> 5 different sections.\n info_val0 = [{\"date_created\": \"2020-03-15 04:59:45.442988\",\n \"description\": \"Automatically generated COCO json file for Detectron2.\"}]\n images0 = [{\"id\": \"image\", \"width\": 100,\n \"height\": 100, \"file_name\": \"image.png\"}]\n annotations0 = [{\"id\": 1, \"image_id\": \"image\", \"bbox\": [70.0, 30.0, 30.0, 40.0],\n \"area\": 1200.0, \"iscrowd\": 0, \"category_id\": 0}]\n categories0 = [{\"id\": 0, \"name\": \"first\"}]\n licence0 = 'null'\n\n return [{\"info\": info_val0,\n \"images\": images0,\n \"annotations\": annotations0,\n \"categories\": categories0,\n \"licenses\": licence0}]", "def decode(data): #@NoSelf", "def processReadback(resp):\n a = np.fromstring(resp, dtype='<u1')\n return {\n 'build': a[51],\n 'serDAC': a[56],\n 'noPllLatch': bool((a[58] & 0x80) > 0),\n 'ackoutI2C': a[61],\n 'I2Cbytes': a[69:61:-1],\n 'executionCounter': (a[53] << 8) + a[52]\n }", "def _readheader(lines):\n hdrdict = {}\n #input list of 26 lines of header\n #station and channel\n line = lines[5]\n parts = line.strip().split()\n fname = parts[1]\n fparts = fname.split('_')\n hdrdict['station'] = fparts[-2]+'_'+fparts[-1]\n\n #the \"Component\" lines look like either: Component S00W, Component S90E, Component Up\n compstr = lines[12].strip().split()[1]\n hdrdict['channel'] = get_comp_name(compstr)\n\n #instrument\n hdrdict['instrument'] = lines[3].split()[1].strip()\n \n #location string\n line = lines[6]\n hdrdict['location'] = line.strip()\n #event origin, buffer start year/month\n line = lines[16]\n parts = line.strip().split()\n bufyear = int(parts[8])\n bufmonth = int(parts[9])\n #epicentral location, buffer start day/hour\n line = lines[17]\n parts = line.strip().split()\n bufday = int(parts[8])\n bufhour = int(parts[9])\n #numpoints, buffer start min/sec\n line = lines[19]\n parts = line.strip().split()\n hdrdict['npts'] = int(parts[0])\n bufmin = int(parts[8])\n millisec = int(parts[9])\n bufsec = int(millisec/1000)\n bufmicrosec = int(np.round(millisec/1000.0 - bufsec))\n hdrdict['starttime'] = UTCDateTime(datetime(bufyear,bufmonth,bufday,bufhour,bufmin,bufsec,bufmicrosec))\n #part C\n #frequency, calibration value and some other stuff we don't care about\n line = lines[20]\n parts = line.strip().split()\n hdrdict['sampling_rate'] = float(parts[0])\n 
hdrdict['delta'] = 1.0/hdrdict['sampling_rate']\n hdrdict['calib'] = float(parts[7])\n #site location info, this time in dd\n line = lines[21]\n parts = line.strip().split()\n hdrdict['lat'] = float(parts[0]) * -1\n hdrdict['lon'] = float(parts[1])\n hdrdict['height'] = 0.0\n #duration\n line = lines[22]\n parts = line.strip().split()\n hdrdict['duration'] = float(parts[0])\n hdrdict['endtime'] = hdrdict['starttime'] + hdrdict['duration']\n #max acceleration - good for sanity check\n line = lines[23]\n parts = line.strip().split()\n hdrdict['maxacc'] = float(parts[0])\n hdrdict['network'] = 'NZ'\n hdrdict['units'] = 'acc'\n return hdrdict", "def __init__(self, header: CmdHeader, raw_data: bytes) -> None:\n super().__init__(header, raw_data)\n self.status, self.length, *self.values = unpack_from(f'<{self.header.params_count}I', raw_data)\n self.data = raw_data[8:8 + self.length] if self.length > 0 else b''", "def readHead(self):\n filesize = self.rhd.tell()\n \n #the order in which all of this is called is critcal\n self.header_identifier = hex(np.uint32(struct.unpack('<I', self.rhd.read(4))))\n v = np.int8(struct.unpack('BBBB', self.rhd.read(4)))\n\n #read each property of the header\n self.version = str(v[0]) + '.' + str(v[2])\n self.sample_rate = np.float32(struct.unpack('f', self.rhd.read(4)))[0] \n self.dsp_enabled = np.int8(struct.unpack('BB', self.rhd.read(2)))[0]\n self.actual_dsp_cutoff_frequency = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.actual_lower_bandwidth = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.actual_upper_bandwidth = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.desired_dsp_cutoff_frequency = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.desired_lower_bandwidth = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.desired_upper_bandwidth = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.notch_cutoff_mode = np.int8(struct.unpack('BB', self.rhd.read(2)))[0]\n self.desired_impedance_test_frequency = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n self.actual_impedance_test_frequency = np.float32(struct.unpack('f', self.rhd.read(4)))[0]\n #list of 3 notes\n self.note = [_qstring(self.rhd),_qstring(self.rhd),_qstring(self.rhd)]\n self.number_of_temperature_sensors = np.int16(struct.unpack('h', self.rhd.read(2)))[0]\n self._TEMP_SENSORS = self.number_of_temperature_sensors\n self.board_mode = np.int16(struct.unpack('h', self.rhd.read(2)))[0]\n self.number_of_signal_groups = np.int16(struct.unpack('h', self.rhd.read(2)))[0]\n\n #dict of signal groups\n self.signal_groups = {} \n for i in range(self.number_of_signal_groups):\n sg = Signal_Group(self)\n self.signal_groups[sg.signal_group_name] = sg\n \n #dict of channels\n self.channels = {}\n for key, group in self.signal_groups.iteritems():\n self.channels.update(group.channels)", "def test_serialises_and_deserialises_hs00_message_correctly_for_full_1d_data(self):\n original_hist = {\n \"source\": \"some_source\",\n \"timestamp\": 123456,\n \"current_shape\": [5],\n \"dim_metadata\": [\n {\n \"length\": 5,\n \"unit\": \"m\",\n \"label\": \"some_label\",\n \"bin_boundaries\": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]),\n }\n ],\n \"last_metadata_timestamp\": 123456,\n \"data\": np.array([1.0, 2.0, 3.0, 4.0, 5.0]),\n \"errors\": np.array([5.0, 4.0, 3.0, 2.0, 1.0]),\n \"info\": \"info_string\",\n }\n\n buf = serialise_hs00(original_hist)\n hist = deserialise_hs00(buf)\n\n assert hist[\"source\"] == original_hist[\"source\"]\n assert 
hist[\"timestamp\"] == original_hist[\"timestamp\"]\n assert hist[\"current_shape\"] == original_hist[\"current_shape\"]\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][0], original_hist[\"dim_metadata\"][0]\n )\n assert np.array_equal(hist[\"data\"], original_hist[\"data\"])\n assert np.array_equal(hist[\"errors\"], original_hist[\"errors\"])\n assert hist[\"info\"] == original_hist[\"info\"]\n assert (\n hist[\"last_metadata_timestamp\"] == original_hist[\"last_metadata_timestamp\"]\n )", "def read_metadata_record(raw_features_string):\n all_data = json.loads(raw_features_string)\n metadata_keys = {\"sha256\", \"appeared\", \"label\", \"avclass\"}\n return {k: all_data[k] for k in all_data.keys() & metadata_keys}", "def test_serialises_and_deserialises_hs00_message_correctly_for_minimal_1d_data(\n self,\n ):\n original_hist = {\n \"timestamp\": 123456,\n \"current_shape\": [5],\n \"dim_metadata\": [\n {\n \"length\": 5,\n \"unit\": \"m\",\n \"label\": \"some_label\",\n \"bin_boundaries\": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]),\n }\n ],\n \"data\": np.array([1.0, 2.0, 3.0, 4.0, 5.0]),\n }\n buf = serialise_hs00(original_hist)\n\n hist = deserialise_hs00(buf)\n assert hist[\"source\"] == \"\"\n assert hist[\"timestamp\"] == original_hist[\"timestamp\"]\n assert hist[\"current_shape\"] == original_hist[\"current_shape\"]\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][0], original_hist[\"dim_metadata\"][0]\n )\n assert np.array_equal(hist[\"data\"], original_hist[\"data\"])\n assert len(hist[\"errors\"]) == 0\n assert hist[\"info\"] == \"\"", "def _read_new_header(self, raw):\n\n byte_count = 0\n\n data_size = 4\n self.label = struct.unpack('<4s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.version = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.revision = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 28\n self.date = struct.unpack('<28s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.file_format = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.file_type = struct.unpack('<4s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.original_file_name = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.reference_file_name = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.related_file_name_a = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.related_file_name_b = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.related_file_name_c = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 84\n self.annotate = struct.unpack('<84s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 36\n self.instrument_model = struct.unpack('<36s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 36\n self.instrument_serial_number = struct.unpack('<36s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 36\n 
self.software_version_number = struct.unpack('<36s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 36\n self.crystal_material = struct.unpack('<36s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.laser_wavelength_microns = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.laser_null_doubling = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.padding = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.dispersion_constant_xc = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.dispersion_constant_xm = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.dispersion_constant_xb = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.num_chan = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.interferogram_size = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.scan_direction = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.acquire_mode = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.emissivity = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.apodization = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.zero_fill = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.run_time_math = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.fft_size = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.number_of_coadds = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.single_sided = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.chan_display = struct.unpack('<l',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.amb_temperature = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.inst_temperature = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.wbb_temperature = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.cbb_temperature = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.temperature_dwr = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.emissivity_dwr = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 8\n self.laser_temperature = struct.unpack('<d',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n 
data_size = 40\n self.spare_i = struct.unpack('<llllllllll',\n raw[byte_count:byte_count+data_size])\n byte_count += data_size\n\n data_size = 80\n self.spare_f = struct.unpack('<dddddddddd',\n raw[byte_count:byte_count+data_size])\n byte_count += data_size\n\n data_size = 68\n self.spare_na = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.spare_nb = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.spare_nc = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.spare_nd = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 68\n self.spare_ne = struct.unpack('<68s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.header_end = struct.unpack('<4s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size", "def load_data(filename) :\n data_dict = None\n with open(filename, 'r') as infile :\n data_dict = json.load(infile)\n\n# Reset Counters\n for counter in data_dict['counters'] :\n data_dict['counters'][counter] = 0\n\n return data_dict", "def process_data():\n with open('configClear_v2.json') as config:\n json_config = json.load(config)\n cisco_ios_xe_native = json_config.get('frinx-uniconfig-topology:configuration').get('Cisco-IOS-XE-native:native')\n\n config_container = {\n \"BDI\" : cisco_ios_xe_native.get('interface').get('BDI'),\n \"Loopback\" : cisco_ios_xe_native.get('interface').get('Loopback'),\n \"Port-channel\" : cisco_ios_xe_native.get('interface').get('Port-channel'),\n \"TenGigabitEthernet\" : cisco_ios_xe_native.get('interface').get('TenGigabitEthernet'),\n \"GigabitEthernet\" : cisco_ios_xe_native.get( 'interface' ).get( 'GigabitEthernet' )\n }\n\n\n with create_connection() as conn:\n for k,v in config_container.items():\n for i in v:\n cur = conn.cursor()\n cur.execute(\"\"\"\n INSERT INTO device_interface_config( name, description, config, port_channel_id, max_frame_size )\n VALUES( %s, %s, %s, %s, %s);\n \"\"\", (f\"{k}{i.get('name')}\",\n i.get('description'),\n json.dumps(i),\n i.get( 'Cisco-IOS-XE-ethernet:channel-group' ),\n i.get('mtu')\n )\n )", "def get_protocol_init_data(self):\n\t\tcontents = self.archive.read_file('replay.initData')\n\t\treturn self.protocol.decode_replay_initdata(contents)", "def _parse_cookie_data() -> dict:\n\n with open('Data/cookies.json') as cookie_data_file:\n cookie_data_dict = json.load(cookie_data_file)\n\n # Cast the necessary data\n for cookie_type in cookie_data_dict:\n cookie_type['weight'] = float(cookie_type['weight'])\n cookie_type['target'] = CookieHuntTarget(cookie_type['target'])\n\n return cookie_data_dict", "def load(self) -> None:\n data = get_dictionary()\n if 'error' in data:\n quit()\n self.data = data", "def __init__(self, data: RawData, HDDO_version: int=0,\r\n compatibility_limit: int=0):\r\n\r\n self.__data = data\r\n self.__version = HDDO_version\r\n self.__compatibility_limit = compatibility_limit\r\n self.__script = []\r\n self.__series_signature = ''\r\n self.__pha = ''\r\n self.__identity_info = {}\r\n self.__message = ''\r\n self.__is_closed = False\r\n self.__hash_base = ''\r\n self.__inner_hash = ''\r\n self.__is_transmitted = False\r\n self.__outer_hash = ''", "def parse_dict(self, data):\n self.public_key = VerifyingKey(data['public_key'])\n self.signature = base58.b58decode(data['signature']) 
if data['signature'] else None", "def decode_faceshift_datastream(self, data):\n \n #block_id = struct.unpack_from('H', data)\n #print(\"Received block id \" + str(block_id)) ;\n\n offset = 0\n block_id, version, block_size = struct.unpack_from('HHI', data, offset)\n \n #print(\"ID, v, size = \" + str(block_id) + \",\" + str(version) + \",\" + str(block_size) )\n \n offset += 8\n\n if(block_id == BLOCK_ID_TRACKING_STATE):\n n_blocks, = struct.unpack_from('H', data, offset)\n #print(\"n_blocks = \" + str(n_blocks))\n offset += 2\n\n track_ok = 0 # Will be a byte: 1 if tracking ok, 0 otherwise.\n head_rotation_quat = None # Will be filled with the rotation using mathutils.Quaternion\n blend_shape_values = [] # Will be a list of float in the range 0-1\n #eyes_values = None # Will be a sequence of 4 angle values\n markers_position = [] # Will be a list of mathutils.Vector\n \n curr_block = 0\n while(curr_block < n_blocks):\n block_id, version, block_size = struct.unpack_from('HHI', data, offset)\n #print(\"ID, v, size = \" + str(block_id) + \",\" + str(version) + \",\" + str(block_size) )\n \n # put the offset at the beginning of the block\n offset += 8\n \n if(block_id == 101): # Frame Information blobk (timestamp and tracking status)\n ts, track_ok = struct.unpack_from('dB', data, offset)\n #print(\"timestamp, track_ok \" + str(ts) + \", \" + str(track_ok) )\n #offset += 9\n elif(block_id == 102): # Pose block (head rotation and position)\n x,y,z,w = struct.unpack_from('ffff', data, offset)\n #head_rotation_quat = mathutils.Quaternion((w,x,y,z))\n elif(block_id == 103): # Blendshapes block (blendshape values)\n n_coefficients, = struct.unpack_from('I', data, offset)\n #print(\"Blend shapes count=\"+ str(n_coefficients) )\n i = 0\n coeff_list = \"\"\n while(i < n_coefficients):\n # Offset of the block, plus the 4 bytes for int n_coefficients, plus 4 bytes per float\n val, = struct.unpack_from('f', data, offset + 4 + (i*4))\n blend_shape_values.append(val)\n coeff_list += repr(val) + \" \"\n i += 1\n print(\"Values: \" + coeff_list)\n elif(block_id == 104): # Eyes block (eyes gaze)\n leye_theta, leye_phi, reye_theta, reye_phi = struct.unpack_from('ffff', data, offset)\n elif(block_id == 105): # Markers block (absolute position of mark points)\n n_markers, = struct.unpack_from('H', data, offset)\n #print(\"n markers=\"+str(n_markers))\n i = 0\n while(i < n_markers):\n # Offset of the block, plus the 2 bytes for int n_markers, plus 4 bytes for each x,y,z floats\n x, y, z = struct.unpack_from('fff', data, offset + 2 + (i*4*3))\n #print(\"m\" + str(i) + \" \" + str(x) + \"\\t\" + str(y) + \"\\t\" + str(z))\n markers_position.append(mathutils.Vector((x,y,z)))\n i += 1\n \n curr_block += 1\n offset += block_size\n \n msg = fsMsgTrackingState()\n\n msg.m_timestamp = ts\n\n self.pub.publish(msg)\n\n # end -- while on blocks. 
Track State scan complete", "def data():\n print (\"&\")\n res = {}\n\t\n # Load Data\n with open(DATA_PATH_TRAIN, 'rb') as f:\n data = pickle.load(f)\n\t\t\n for d in data:\n for j in range(len(d)):\n if not d[j][\"addinfo\"][\"path\"] in res:\n res[d[j][\"addinfo\"][\"path\"]] = {}\n d[j][\"environment\"][\"text\"] = d[j][\"addinfo\"][\"text\"]\n res[d[j][\"addinfo\"][\"path\"]][d[j][\"addinfo\"][\"line\"]] = d[j][\"environment\"]\n\t \t\n with open(DATA_PATH_TEST, 'rb') as f:\n data = pickle.load(f)\n\t\t\n for d in data:\n for j in range(len(d)):\n if not d[j][\"addinfo\"][\"path\"] in res:\n res[d[j][\"addinfo\"][\"path\"]] = {}\n d[j][\"environment\"][\"text\"] = d[j][\"addinfo\"][\"text\"]\n res[d[j][\"addinfo\"][\"path\"]][d[j][\"addinfo\"][\"line\"]] = d[j][\"environment\"]\n\t\t\t\n with open('tasks/env/data/data.json', 'w') as outfile:\n json.dump(res, outfile)", "def build_header_2_40(self):\n self.header_2 = b'\\x0e\\x00\\x00\\x00AssignmentList\\x01\\x00\\x00\\x000' + \\\n (b'\\x0c\\x00\\x00\\x00ComputerName' + len(self.agent_hostname).to_bytes(4, 'little') + self.agent_hostname) + \\\n (b'\\n\\x00\\x00\\x00DomainName\\t\\x00\\x00\\x00WORKGROUP'\n b'\\x12\\x00\\x00\\x00EventFilterVersion\\x01\\x00\\x00\\x000'\n b'\\x19\\x00\\x00\\x00GuidRegenerationSupported\\x01\\x00\\x00\\x001'\n b'\\t\\x00\\x00\\x00IPAddress\\x0f\\x00\\x00\\x00192.168.236.199') + \\\n b'\\n\\x00\\x00\\x00NETAddress' + len(self.agent_mac_address).to_bytes(4, 'little') +self.agent_mac_address + \\\n (b'\\x0b\\x00\\x00\\x00PackageType\\x0b\\x00\\x00\\x00AgentPubKey'\n b'\\n\\x00\\x00\\x00PlatformID\\n\\x00\\x00\\x00W2KW:5:0:4'\n b'\\r\\x00\\x00\\x00PolicyVersion\\x01\\x00\\x00\\x000'\n b'\\x0c\\x00\\x00\\x00PropsVersion\\x0e\\x00\\x00\\x0020170724000500'\n b'\\x0e\\x00\\x00\\x00SequenceNumber\\x01\\x00\\x00\\x003') + \\\n b'\\r\\x00\\x00\\x00ServerKeyHash' + len(self.serverkeyhash).to_bytes(4, 'little') + self.serverkeyhash + \\\n (b'\\x0f\\x00\\x00\\x00SiteinfoVersion\\x01\\x00\\x00\\x000'\n b'\\x15\\x00\\x00\\x00SupportedSPIPEVersion\\x0b\\x00\\x00\\x003.0;4.0;5.0'\n b'\\x0b\\x00\\x00\\x00TaskVersion\\x01\\x00\\x00\\x000') + \\\n b'\\x0f\\x00\\x00\\x00TransactionGUID' + len(self.transaction_guid).to_bytes(4, 'little') + self.transaction_guid\n return self.header_2", "def _unpack(self, headerBytes):\n xtraH = struct.unpack(self.PACKAGING_FORMAT, headerBytes)\n\n self.qubit_id = xtraH[0]\n self.remote_app_id = xtraH[1]\n self.remote_node = xtraH[2]\n self.datetime = xtraH[3]\n self.remote_port = xtraH[4]\n self.outcome = xtraH[5]", "def _load_data(self, save_temp=False):\n # directly read processed data and encode\n print ('Start tokenizing data...')\n self.data = json.loads(\n open(self.cfg.data_path+self.cfg.data_file, 'r', encoding='utf-8').read().lower())\n self.train, self.dev, self.test = [], [], []\n print ('Start encoding data...')\n p = progressbar.ProgressBar(len(self.data))\n p.start()\n p_idx = 0\n for fn, dial in self.data.items():\n p.update(p_idx)\n p_idx += 1\n if '.json' in fn:\n fn = fn.replace('.json', '')\n if 'all' in self.cfg.exp_domains or self.exp_files.get(fn):\n if self.dev_files.get(fn):\n self.dev.append(self._get_encoded_data(fn, dial))\n elif self.test_files.get(fn):\n self.test.append(self._get_encoded_data(fn, dial))\n else:\n if self.data_mode == 'train':\n self.train.append(self._get_encoded_data(fn, dial))\n elif self.data_mode == 'test':\n pass\n else:\n raise Exception('Wrong Reader Data Mode!!!')\n p.finish()", "def load_and_fix(self):\n # Read in json\n 
self.read_json()\n\n if self.size_to_load:\n self.data = self.data[:self.size_to_load]\n\n # Add names from database given _bsn:\n self.extend_dataframe_with_personnames()\n\n # Clean rows in the data_frame where the names column is empty - > thus no response from the database\n self.clean_none_response()\n\n # Fix path from A09.pdf to A09.json\n self.fix_path()\n\n # Get the correct names from the database response\n self.parse_names_from_response()\n\n print(\" --- Final Shape Data ---\")\n print(self.data.shape)\n print(list(self.data))\n\n # Save pickled object in ./data map\n self.save_obj(self.data, self.file_name_to_save)", "def _load_data(self):\n self.mapper = Mapper()\n self.mapper.generate_vocabulary(self.review_summary_file)\n self.X_fwd, self.X_bwd, self.Y = self.mapper.get_tensor(reverseflag=True)\n # Store all the mapper values in a dict for later recovery\n self.mapper_dict = dict()\n self.mapper_dict['seq_length'] = self.mapper.get_seq_length()\n self.mapper_dict['vocab_size'] = self.mapper.get_vocabulary_size()\n self.mapper_dict['rev_map'] = self.mapper.get_reverse_map()\n # Split into test and train data\n self._split_train_tst()", "def test_serialises_and_deserialises_hs00_message_correctly_for_full_2d_data(self):\n original_hist = {\n \"source\": \"some_source\",\n \"timestamp\": 123456,\n \"current_shape\": [2, 5],\n \"dim_metadata\": [\n {\n \"length\": 2,\n \"unit\": \"b\",\n \"label\": \"y\",\n \"bin_boundaries\": np.array([10.0, 11.0, 12.0]),\n },\n {\n \"length\": 5,\n \"unit\": \"m\",\n \"label\": \"x\",\n \"bin_boundaries\": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]),\n },\n ],\n \"last_metadata_timestamp\": 123456,\n \"data\": np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]]),\n \"errors\": np.array([[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]]),\n \"info\": \"info_string\",\n }\n buf = serialise_hs00(original_hist)\n\n hist = deserialise_hs00(buf)\n assert hist[\"source\"] == original_hist[\"source\"]\n assert hist[\"timestamp\"] == original_hist[\"timestamp\"]\n assert hist[\"current_shape\"] == original_hist[\"current_shape\"]\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][0], original_hist[\"dim_metadata\"][0]\n )\n self._check_metadata_for_one_dimension(\n hist[\"dim_metadata\"][1], original_hist[\"dim_metadata\"][1]\n )\n assert np.array_equal(hist[\"data\"], original_hist[\"data\"])\n assert np.array_equal(hist[\"errors\"], original_hist[\"errors\"])\n assert hist[\"info\"] == original_hist[\"info\"]\n assert (\n hist[\"last_metadata_timestamp\"] == original_hist[\"last_metadata_timestamp\"]\n )", "def getInitialData(nsmapi):\r\n # Done 6-1-2020\r\n # TODO extract ids not using the regex?\r\n initData = {}\r\n\r\n url = f\"/healthcheck\"\r\n print(\"Running basic healthcheck\")\r\n healthcheckData = nsmapi.call(url, method=\"PUT\", message='{\"id\":[\"default\"]}')\r\n initData[\"healthcheck\"] = healthcheckData\r\n\r\n for i in range(20):\r\n print(f\".\", end=\"\", flush=True)\r\n time.sleep(.5)\r\n print(\"\")\r\n\r\n print(\"Getting initial sensor data\")\r\n url = \"/sensors\"\r\n basicData = json.dumps(nsmapi.call(url))\r\n dataType = url[1:].replace(\"/\", \"_\")\r\n initData[dataType] = []\r\n for id in re.findall(\"\\\"sensorId\\\":.*?, \\\"name\\\":.*?,\", basicData):\r\n if id[-1] == \",\":\r\n id = id[:-1]\r\n id = id.replace(\"\\\"\", \"\")\r\n id = id.replace(\": \", \":\")\r\n num, name = id.split(\",\")\r\n num = num.split(\":\")[-1]\r\n name = name.split(\":\")[-1]\r\n idName = 
f\"{num},{name}\"\r\n initData[dataType].append(idName)\r\n\r\n print(\"Getting initial domain data\")\r\n url = \"/domain\"\r\n basicData = json.dumps(nsmapi.call(url))\r\n dataType = url[1:].replace(\"/\", \"_\")\r\n initData[dataType] = []\r\n for id in re.findall(\"\\\"id\\\":.*?, \\\"name\\\":.*?,\", basicData):\r\n if id[-1] == \",\":\r\n id = id[:-1]\r\n id = id.replace(\"\\\"\", \"\")\r\n id = id.replace(\": \", \":\")\r\n num, name = id.split(\",\")\r\n num = num.split(\":\")[-1]\r\n name = name.split(\":\")[-1]\r\n idName = f\"{num},{name}\"\r\n initData[dataType].append(idName)\r\n\r\n policyURLs = [\r\n \"/domain/{domainId}/ipspolicies\",\r\n \"/domain/{domainId}/firewallpolicy\",\r\n \"/domain/{domainId}/connectionlimitingpolicies\",\r\n \"/domain/{domainId}/qospolicy\",\r\n \"/protectionoptionspolicy\",\r\n \"/domain/{domainId}/malwarepolicy\",\r\n \"/domain/{domainId}/policygroups\"\r\n ]\r\n\r\n print(\"Getting initial policy data\")\r\n initData[\"policy\"] = {}\r\n for domain in initData[\"domain\"]:\r\n domainId, domainName = domain.split(\",\")\r\n initData[\"policy\"][domainId] = {}\r\n for url in policyURLs:\r\n url = url.replace(\"{domainId}\", domainId)\r\n policyData = nsmapi.call(url)\r\n key = list(policyData.keys())[0]\r\n policyType = url.split(\"/\")[-1].replace(\"policy\", \"\").replace(\"policies\", \"\")\r\n initData[\"policy\"][domainId][policyType] = []\r\n for policy in policyData[key]:\r\n policy = json.dumps(policy)\r\n # pattern = \"\\\"([^\\\"]*?)(id|ID|iD|Id){0,1}(name){0,1}\\\": (.*?),\" - don't seem to work\r\n # extracted = re.findall(pattern, policy) - don'tens seem to works\r\n # initData[\"policy\"][domainId][policyType][\"full\"] = policy\r\n for polK, polV in json.loads(policy).items():\r\n if \"omain\" not in polK.lower():\r\n if \"name\" in polK.lower():\r\n name = polV\r\n elif \"id\" in polK.lower():\r\n id = polV\r\n initData[\"policy\"][domainId][policyType].append((id,name))\r\n\r\n print(\"Got Initial Data\")\r\n\r\n return initData", "def _get_gedi1b_main_data_dict(self) -> dict:\n data = {\n # General identifiable data\n \"granule_name\": [self.parent_granule.filename] * self.n_shots,\n \"shot_number\": self[\"shot_number\"][:],\n \"beam_type\": [self.beam_type] * self.n_shots,\n \"beam_name\": [self.name] * self.n_shots,\n # Temporal data\n \"delta_time\": self[\"delta_time\"][:],\n # Quality data\n \"degrade\": self[\"geolocation/degrade\"][:],\n \"stale_return_flag\": self[\"stale_return_flag\"][:],\n \"solar_elevation\": self[\"geolocation/solar_elevation\"][:],\n \"solar_azimuth\": self[\"geolocation/solar_elevation\"][:],\n \"rx_energy\": self[\"rx_energy\"][:],\n # DEM\n \"dem_tandemx\": self[\"geolocation/digital_elevation_model\"][:],\n \"dem_srtm\": self[\"geolocation/digital_elevation_model_srtm\"][:],\n # geolocation bin0\n \"latitude_bin0\": self[\"geolocation/latitude_bin0\"][:],\n \"latitude_bin0_error\": self[\"geolocation/latitude_bin0_error\"][:],\n \"longitude_bin0\": self[\"geolocation/longitude_bin0\"][:],\n \"longitude_bin0_error\": self[\"geolocation/longitude_bin0_error\"][:],\n \"elevation_bin0\": self[\"geolocation/elevation_bin0\"][:],\n \"elevation_bin0_error\": self[\"geolocation/elevation_bin0_error\"][:],\n # geolocation lastbin\n \"latitude_lastbin\": self[\"geolocation/latitude_lastbin\"][:],\n \"latitude_lastbin_error\": self[\"geolocation/latitude_lastbin_error\"][:],\n \"longitude_lastbin\": self[\"geolocation/longitude_lastbin\"][:],\n \"longitude_lastbin_error\": 
self[\"geolocation/longitude_lastbin_error\"][:],\n \"elevation_lastbin\": self[\"geolocation/elevation_lastbin\"][:],\n \"elevation_lastbin_error\": self[\"geolocation/elevation_lastbin_error\"][:],\n # relative waveform position info in beam and ssub-granule\n \"waveform_start\": self[\"rx_sample_start_index\"][:] - 1,\n \"waveform_count\": self[\"rx_sample_count\"][:],\n }\n return data", "def __init__(self, autoload=True):\n self.header_size = 20\n self.messages = {}\n self.message_hashes = {}\n self.message_rhashes = {}\n self.handlers = {}\n self.remainder = \"\"\n if autoload:\n self.load_definitions()", "def __init__(self, data):\n\t\tself.protocol_version, self.le_state, self.playback_state, \\\n\t\t self.source, self.le_flags, self.playback_flags, \\\n\t\t self.source_flags, self.fullness, self.point_rate, \\\n\t\t self.point_count = \\\n\t\t\tstruct.unpack(\"<BBBBHHHHII\", data)", "def dataio_prep(hparams):\n\n # Define audio pipeline\n @sb.utils.data_pipeline.takes(\"wav\")\n @sb.utils.data_pipeline.provides(\"sig\")\n def audio_pipeline(wav):\n \"\"\"Load the signal, and pass it and its length to the corruption class.\n This is done on the CPU in the `collate_fn`.\"\"\"\n sig = sb.dataio.dataio.read_audio(wav)\n return sig\n\n # Initialization of the label encoder. The label encoder assignes to each\n # of the observed label a unique index (e.g, 'spk01': 0, 'spk02': 1, ..)\n label_encoder = sb.dataio.encoder.CategoricalEncoder()\n\n # Define label pipeline:\n @sb.utils.data_pipeline.takes(\"emo\")\n @sb.utils.data_pipeline.provides(\"emo\", \"emo_encoded\")\n def label_pipeline(emo):\n yield emo\n emo_encoded = label_encoder.encode_label_torch(emo)\n yield emo_encoded\n\n # Define datasets. We also connect the dataset with the data processing\n # functions defined above.\n datasets = {}\n for dataset in [\"train\", \"valid\", \"test\"]:\n datasets[dataset] = sb.dataio.dataset.DynamicItemDataset.from_json(\n json_path=hparams[f\"{dataset}_annotation\"],\n replacements={\"data_root\": hparams[\"data_folder\"]},\n dynamic_items=[audio_pipeline, label_pipeline],\n output_keys=[\"id\", \"sig\", \"emo_encoded\"],\n )\n # Load or compute the label encoder (with multi-GPU DDP support)\n # Please, take a look into the lab_enc_file to see the label to index\n # mappinng.\n\n lab_enc_file = os.path.join(hparams[\"save_folder\"], \"label_encoder.txt\")\n label_encoder.load_or_create(\n path=lab_enc_file,\n from_didatasets=[datasets[\"train\"]],\n output_key=\"emo\",\n )\n\n return datasets", "def _readFixedHeader(self):\n # Init empty fixed header dictionary. Use an ordered dictionary to\n # achieve the same order as in the Mini-SEED manual.\n self.fixed_header = SimpleOrderedDict()\n # Read and unpack.\n self.file.seek(self.record_offset, 0)\n fixed_header = self.file.read(48)\n encoding = ('%s20c2H3Bx4H4Bl2H' % self.endian)\n header_item = unpack(encoding, fixed_header)\n # Write values to dictionary.\n self.fixed_header['Sequence number'] = int(''.join(header_item[:6]))\n self.fixed_header['Data header/quality indicator'] = header_item[6]\n self.fixed_header['Station identifier code'] = \\\n ''.join(header_item[8:13]).strip()\n self.fixed_header['Location identifier'] = \\\n ''.join(header_item[13:15]).strip()\n self.fixed_header['Channel identifier'] = \\\n ''.join(header_item[15:18]).strip()\n self.fixed_header['Network code'] = \\\n ''.join(header_item[18:20]).strip()\n # Construct the starttime. This is only the starttime in the fixed\n # header without any offset. 
See page 31 of the SEED manual for the\n # time definition.\n self.fixed_header['Record start time'] = \\\n UTCDateTime(year=header_item[20], julday=header_item[21],\n hour=header_item[22], minute=header_item[23],\n second=header_item[24], microsecond=header_item[25] * 100)\n self.fixed_header['Number of samples'] = int(header_item[26])\n self.fixed_header['Sample rate factor'] = int(header_item[27])\n self.fixed_header['Sample rate multiplier'] = int(header_item[28])\n self.fixed_header['Activity flags'] = int(header_item[29])\n self.fixed_header['I/O and clock flags'] = int(header_item[30])\n self.fixed_header['Data quality flags'] = int(header_item[31])\n self.fixed_header['Number of blockettes that follow'] = \\\n int(header_item[32])\n self.fixed_header['Time correction'] = int(header_item[33])\n self.fixed_header['Beginning of data'] = int(header_item[34])\n self.fixed_header['First blockette'] = int(header_item[35])", "def _unpack(self, headerBytes):\n pass", "def __init__(self, buff):\n fmt = 'hiSSS[SY ]'\n response = struct_helpers.unpack_from(fmt, buff, 0)\n\n self.error_code = response[0]\n self.generation_id = response[1]\n self.group_protocol = response[2]\n self.leader_id = response[3]\n self.member_id = response[4]\n # TODO - parse metadata bytestring into ConsumerGroupProtocolMetadata?\n self.members = {_id: meta for _id, meta in response[5]}", "def parse_header_dict(self, header_dict=None):\n if header_dict is not None:\n self.header_dict = header_dict\n\n assert isinstance(self.header_dict, dict)\n\n for key, value in self.header_dict.items():\n if \"wire\" in key:\n if key.find(\"n\") == 0:\n self.ex_length = float(value.split()[0])\n self.ex_azimuth = float(value.split()[1])\n elif key.find(\"e\") == 0:\n self.ey_length = float(value.split()[0])\n self.ey_azimuth = float(value.split()[1])\n elif \"system\" in key:\n self.box_id = value.split(\";\")[0].strip()\n self.mag_id = value.split(\";\")[1].strip()\n elif \"gps\" in key:\n gps_list = value.split()\n self.header_gps_stamp = MTime(\n dateutil.parser.parse(\n \" \".join(gps_list[0:2]), dayfirst=True\n )\n )\n self.header_gps_latitude = self._get_latitude(\n gps_list[2], gps_list[3]\n )\n self.header_gps_longitude = self._get_longitude(\n gps_list[4], gps_list[5]\n )\n self.header_gps_elevation = float(gps_list[6])\n elif \"run\" in key:\n self.run_id = value.replace('\"', \"\")\n else:\n setattr(self, key.replace(\" \", \"_\").replace(\"/\", \"_\"), value)", "def _load_data(self):\n if self._api_response.status_code == 200:\n self._dataset = self._api_response.json()\n self._fill_day_dicts()", "def _detectors1(self, hdr):\n d = {}\n d['FCs'] = self._exit_slits(hdr)\n\n for n in range(1, 6):\n det = 'Detector {}'.format(n)\n d[det] = self._exit_slits(hdr)\n\n d['LD'] = {}\n d['LD']['exit slit width'], d['LD']['exit slit coeff a'], \\\n d['LD']['exit slit coeff b'], d['E0S'], \\\n d['pressure multicollection chamber'], \\\n d['FCs']['fc background setup positive'], \\\n d['FCs']['fc background setup negative'] = \\\n unpack(self._bo + '4d 32s 2i', hdr.read(72))\n\n d['pressure multicollection chamber'] = \\\n self._cleanup_string(d['pressure multicollection chamber'])\n\n for n in range(1, 6):\n det = 'Detector {}'.format(n)\n d[det].update(self._electron_multiplier(hdr))\n\n d['LD'].update(self._electron_multiplier(hdr))\n\n d['EMBig'] = self._exit_slits(hdr)\n d['EMBig'].update(self._electron_multiplier(hdr))\n\n # 8 bytes unused\n hdr.seek(8, 1)\n return d", "def main() -> Dict[int, int]:\n # DOCUMENTS DIRECTORY\n # 
Path of the directory that contains the .txt documents. One .txt document by company. IT NEEDS TO BE \"/data\" when you upload it in data challenge platform. For test in local, you can modifiy to match your data path.\n if dev:\n documents_directory = \"../example_dataset/data\"\n else: \n documents_directory = \"/data\"\n\n path_to_files: List[str] = [os.path.join(documents_directory, file) for file in os.listdir(documents_directory)]\n assert len(path_to_files) == 10 # 10 files in documents directory\n path_to_files.sort() # Sort list of path file by alphabetical order to match ground truth annotations order : IT IS ESSENTIAL.\n\n # INITIALIZATION OF YOUR OBJECTS\n data_model = FormDataModel.from_json_file(\n os.path.join(os.path.dirname(__file__), \"resources\", \"data-model.json\")\n )\n country_referential = CountryReferential.from_csv(\n os.path.join(os.path.dirname(__file__), \"resources\", \"countries_code.csv\")\n )\n form_company_filling = FormCompanyFilling([\n BasicExtractor(\n question_ids=NOT_COUNTRY_QUESTIONS_NUMBERS,\n form_data_model=data_model,\n ),\n BasicCountryExtractor(\n question_ids=COUNTRY_QUESTIONS_NUMBERS,\n form_data_model=data_model,\n country_code_referential=country_referential,\n\n )\n ])\n\n # COMPUTE PREDICTION BY FILE (ie company)\n print(\"##################################\")\n print(\"RUNNING PREDICTION\")\n results: Dict[int, int] = {}\n start_time = time.time()\n for i, path in enumerate(path_to_files):\n start = time.time()\n print(f\"File : {path}\")\n with open(path, \"r\") as input_file:\n text = input_file.read()\n print(\"... Encoding ...\")\n start_encoding = time.time()\n embeddings = model.encode(prepare_sentences(text))\n # embeddings = []\n print(\"Successfully encoded\")\n print(\"Encoding time : \", time.time() - start_encoding)\n\n form_company_response = form_company_filling.fill(text, embeddings, model)\n form_company_response.sort_by_question_id() # ESSENTIAL : Sort the response by question number for each company\n for answer in form_company_response.answers:\n question_number = answer.question_id + i * 22 # ESSENTIAL : each company has 22 questions. 
Each question_number in results should be unique\n results[question_number] = answer.answer_id\n # gc.collect()\n print(\"File time :\", time.time()-start, '\\n')\n # CHECK FORMAT RESULTS IS DATACHALLENGE PLATFORM COMPATIBLE\n assert len(results) == len(path_to_files) * (len(COUNTRY_QUESTIONS_NUMBERS) + len(NOT_COUNTRY_QUESTIONS_NUMBERS))\n assert set(list(results.keys())) == {i for i in range(1,221)}\n print(\"Full Time\", time.time()-start_time)\n return results", "def __init__(self, data: bytes):\n super().__init__()\n self._expected_packet_type = MessageType.MAIN\n self._expected_data_size = 34\n self._data_raw = b''\n self._packet_type = MessageType.UNDEFINED\n self._packet_number = 0\n self.time_stamp_1MHz = 0\n self.accelerometer_x = 0\n self.accelerometer_y = 0\n self.accelerometer_z = 0\n self.magnetometer_x = 0\n self.magnetometer_y = 0\n self.magnetometer_z = 0\n self.gyroscope_x = 0\n self.gyroscope_y = 0\n self.gyroscope_z = 0\n self.quaternion_q0 = 0\n self.quaternion_q1 = 0\n self.quaternion_q2 = 0\n self.quaternion_q3 = 0\n self.flags = 0\n self.shield_and_kinetis_byte = 0\n self._is_valid = False\n self._parse_data(data)", "def __record(self, data={}):\n # Create a config file\n if not self.config_file:\n self.config_file = \"./default.config\"\n\n # Create empty config json file if not there\n if not os.path.isfile(self.config_file):\n with open(self.config_file, 'w') as f:\n d = json.dumps({})\n f.write(d)\n\n # This structure ensures that for different servers, there can exist\n # multiple licenses (`license_key`), which contains one `device_token`\n # each and multiple users (identified by `user_email`), each with its\n # own `user_token`.\n\n # Overwrite values with current values\n if not data or data == {}:\n with open(self.config_file, 'r') as f:\n data = json.load(f)\n data[self.server] = {}\n if self.license_key != '':\n data[self.server][self.license_key] = {}\n if self.device_token != '':\n data[self.server][self.license_key][\"device_token\"] = self.device_token\n if self.user.email != '':\n data[self.server][self.license_key][self.user.email] = {}\n if self.user_token != '':\n data[self.server][self.license_key][self.user.email][\"user_token\"] = self.user_token\n else:\n data = data\n\n # Clean up the remaining values (i.e. 
get rid of it if it's empty)\n copied = copy.deepcopy(data)\n for server in copied.keys():\n if server not in self.__valid_servers.keys():\n data.pop(server, None)\n\n for key in copied[server].keys():\n data[server].pop('', None)\n data[server].pop(' ', None)\n if copied[server][key] == {}:\n data[server].pop(key, None)\n\n for k in copied[server][key].keys():\n if k != \"device_token\":\n data[server][key].pop('', None)\n data[server][key].pop(' ', None)\n if copied[server][key][k] == {} or copied[server][key][k] == \"\":\n data[server][key].pop(k, None)\n\n with open(self.config_file, 'w') as f:\n d = json.dumps(data)\n f.write(d)", "def deserialize(self, str):\n try:\n if self.header is None:\n self.header = std_msgs.msg.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.canmsg = str[start:end].decode('utf-8')\n else:\n self.canmsg = str[start:end]\n _x = self\n start = end\n end += 30\n (_x.track_id, _x.track_lat_rate, _x.track_group_changed, _x.track_status, _x.track_angle, _x.track_range, _x.track_bridge_object, _x.track_rolling_count, _x.track_width, _x.track_range_accel, _x.track_med_range_mode, _x.track_range_rate,) = _get_struct_Bf2B2f2B2fBf().unpack(str[start:end])\n self.track_group_changed = bool(self.track_group_changed)\n self.track_bridge_object = bool(self.track_bridge_object)\n self.track_rolling_count = bool(self.track_rolling_count)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill", "def _decode_header(self):\n #header = self.file_content[0:6]\n log_screen_descr = self.file_content[6:13]\n self.canvas_width = log_screen_descr[0] + (log_screen_descr[1]<<8)\n self.canvas_height = log_screen_descr[2] + (log_screen_descr[3]<<8)\n # is there a global color table? (usually yes)\n flags = log_screen_descr[4]\n self.glob_col_table = (flags & 0b10000000) != 0\n\n # determine the number of bits per primary color value\n self.color_resolution = (flags & 0b01110000) >> 4\n self.bits_per_pixel = self.color_resolution + 1\n\n # If the value is 1, then the colors in the global color table are sorted\n # in order of \"decreasing importance,\" which typically means \"decreasing\n # frequency\" in the image\n self.sort_flag = (flags & 0b00001000) != 0\n\n # If this value is N, then the actual table size is 2^(N+1).\n self.glob_col_table_sz = 1 << ((flags & 0b00000111)+1)\n\n self.bg_color_index = log_screen_descr[5]\n self.pix_asp_ratio = log_screen_descr[6]", "def _unpack(self, headerBytes):\n xtraH = struct.unpack(self.PACKAGING_FORMAT, headerBytes)\n\n self.qubit_id = xtraH[0]\n self.remote_app_id = xtraH[1]\n self.remote_node = xtraH[2]\n self.cmdLength = xtraH[3]\n self.remote_port = xtraH[4]\n self.step = xtraH[5]", "def upload_initial_data():\n data = {\"updated\":1512866292573,\"data\":[[[\"DAY %\",\"PPS (CAD)\",\"TKR\",\"NAME\",\"SECTOR\",\"EX.\",\"ALT. 
TKR\",\"VOL\",\"VOL @ CRNT $\",\"MC ($MM)\"],[\"5.84%\",\"1.45\",\"ABCN\",\"ABcann Medicinals\",\"BioTech\",\"CVE\",\"ABCCF\",\"901,940\",\"1,307,813\",\"78.49\"],[\"6.14%\",\"7.09\",\"ACB\",\"Aurora Cannabis\",\"Cultivation & Retail\",\"TSE\",\"ACBFF\",\"13,927,205\",\"98,743,883.45\",\"2,861.53\"],[\"9.09%\",\"0.24\",\"ACG\",\"Alliance Growers \",\"AgTech\",\"CNSX\",\"--\",\"1,189,385\",\"285,452.4\",\"\"],[\"0.00%\",\"0.10\",\"AFI\",\"Affinor Growers\",\"AgTech\",\"CNSX\",\"RSSFF\",\"210,310\",\"21,031\",\"\"],[\"3.22%\",\"13.47\",\"APH\",\"Aphria\",\"Cultivation & Retail\",\"TSE\",\"APHQF\",\"2,663,133\",\"35,872,401.51\",\"2,042.08\"],[\"13.95%\",\"0.25\",\"ATT\",\"Abattis Bio\",\"BioTech\",\"CNSX\",\"ATTBF\",\"2,706,357\",\"676,589.25\",\"39.86\"],[\"0.00%\",\"2.03\",\"BE\",\"Beleave\",\"Cultivation & Retail\",\"CNSX\",\"BLEVF\",\"597,909\",\"1,213,755.27\",\"\"],[\"1.28%\",\"1.58\",\"BLO\",\"Cannabix Tech\",\"LawTech\",\"CNSX\",\"BLOZF\",\"465,869\",\"736,073.02\",\"136.61\"],[\"-2.20%\",\"0.89\",\"CBW\",\"Cannabis Wheaton \",\"Investing & Finance\",\"CVE\",\"KWFLF\",\"815,477\",\"725,774.53\",\"234.57\"],[\"-0.65%\",\"19.93\",\"CMED\",\"Cannimed\",\"Cultivation & Retail\",\"TSE\",\"CMMDF\",\"130,722\",\"2,605,289.46\",\"457.69\"],[\"12.73%\",\"0.62\",\"CMM\",\"Canabo Medical\",\"MedCare\",\"CVE\",\"CAMDF\",\"330,404\",\"204,850.48\",\"23.54\"],[\"-2.87%\",\"2.71\",\"CRZ\",\"Cannaroyalty\",\"Investing & Finance\",\"CNSX\",\"CNNRF\",\"961,449\",\"2,605,526.79\",\"115.09\"],[\"-6.67%\",\"0.28\",\"CYX\",\"Calyx Bio\",\"AgTech\",\"CVE\",\"CLYXF\",\"2,120,562\",\"593,757.36\",\"24.23\"],[\"0.00%\",\"1.23\",\"DOJA\",\"DOJA Cannabis\",\"Cultivation & Retail\",\"CNSX\",\"DJACF\",\"206,635\",\"254,161.05\",\"72.27\"],[\"-4.40%\",\"0.44\",\"DVA\",\"Delivra\",\"BioTech\",\"CVE\",\"--\",\"89,485\",\"39,373.4\",\"19.55\"],[\"6.52%\",\"0.25\",\"EAT\",\"Nutritional High\",\"Marijuana Edibles & Extracts\",\"CNSX\",\"SPLIF\",\"3,067,636\",\"766,909\",\"61.54\"],[\"-1.20%\",\"1.64\",\"EMC\",\"Emblem\",\"Cultivation & Retail\",\"CVE\",\"EMMBF\",\"411,764\",\"675,292.96\",\"130.60\"],[\"2.05%\",\"3.98\",\"EMH\",\"Emerald\",\"Cultivation & Retail\",\"CVE\",\"TBQBF\",\"1,430,067\",\"5,691,666.66\",\"374.34\"],[\"-5.88%\",\"0.48\",\"FFT\",\"Future Farm Tech\",\"AgTech\",\"CNSX\",\"AGSTF\",\"1,291,240\",\"619,795.2\",\"0.61\"],[\"1.06%\",\"1.90\",\"FIRE\",\"Supreme Pharma\",\"Cultivation & Retail\",\"CVE\",\"SPRWF\",\"1,275,906\",\"2,424,221.4\",\"391.96\"],[\"5.26%\",\"0.10\",\"GHG\",\"Global Hemp\",\"Cultivation & Retail\",\"CNSX\",\"GBHPF\",\"764,350\",\"76,435\",\"\"],[\"3.28%\",\"0.31\",\"GLH\",\"Golden Leaf\",\"Marijuana Products\",\"CNSX\",\"GLDFF\",\"4,298,567\",\"1,332,555.77\",\"116.96\"],[\"-1.96%\",\"0.50\",\"HC\",\"High Hampton Holdings\",\"Investing & Finance\",\"CNSX\",\"--\",\"727,116\",\"363,558\",\"\"],[\"1.89%\",\"0.54\",\"HIP\",\"Newstirke Resources \",\"Cultivation & Retail\",\"CVE\",\"NWKRF\",\"431,875\",\"233,212.5\",\"210.35\"],[\"8.91%\",\"1.10\",\"HVST\",\"Harvest One Cannabis\",\"Cultivation & Retail\",\"CVE\",\"HRVOF\",\"2,192,877\",\"2,412,164.7\",\"98.10\"],[\"8.89%\",\"0.98\",\"ICC\",\"International Cannabis\",\"Cultivation & Retail\",\"CVE\",\"ICCLF\",\"123,538\",\"121,067.24\",\"110.84\"],[\"0.00%\",\"1.62\",\"IMH\",\"Invictus MD\",\"Investing & Finance\",\"CVE\",\"IVITF\",\"781,924\",\"1,266,716.88\",\"129.87\"],[\"12.50%\",\"0.90\",\"IN\",\"Inmed Pharma\",\"BioTech\",\"CNSX\",\"IMLFF\",\"3,846,586\",\"3,461,927.4\",\"\"],[\"2.27%\",\"1.80\",\"ISOL\",\"Isodiol 
International \",\"Hemp Products\",\"CNSX\",\"LAGBF\",\"8,514,952\",\"15,326,913.6\",\"\"],[\"7.84%\",\"0.28\",\"KALY\",\"Kalytera Therapeutics\",\"BioTech\",\"CVE\",\"QUEZD\",\"5,634,186\",\"1,577,572.08\",\"34.74\"],[\"-1.72%\",\"0.57\",\"LDS\",\"Lifestyle Delivery Systems\",\"BioTech\",\"CNSX\",\"LDSYF\",\"685,628\",\"390,807.96\",\"51.44\"],[\"0.19%\",\"15.50\",\"LEAF\",\"MedReleaf Corp\",\"Cultivation & Retail\",\"TSE\",\"MEDFF\",\"229,190\",\"3,552,445\",\"1,459.18\"],[\"2.33%\",\"0.44\",\"LIB\",\"Liberty Leaf Holdings\",\"Investing & Finance\",\"CNSX\",\"LIBFF\",\"4,555,082\",\"2,004,236.08\",\"\"],[\"10.42%\",\"1.59\",\"LXX\",\"Lexaria Bio\",\"Hemp Products\",\"CNSX\",\"LXRP\",\"1,523,338\",\"2,422,107.42\",\"\"],[\"-1.38%\",\"2.14\",\"MARI\",\"Maricann Group\",\"Cultivation & Retail\",\"CNSX\",\"MRRCF\",\"678,106\",\"1,451,146.84\",\"157.10\"],[\"3.26%\",\"0.95\",\"MDM\",\"Marapharm\",\"Cultivation & Retail\",\"CNSX\",\"MRPHF\",\"209,019\",\"198,568.05\",\"\"],[\"0.00%\",\"0.57\",\"MGW\",\"Maple Leaf Green World\",\"Cultivation & Retail\",\"CVE\",\"MGWFF\",\"367,479\",\"209,463.03\",\"83.83\"],[\"7.37%\",\"1.02\",\"MJ\",\"True Leaf\",\"Hemp Pet Chews\",\"CNSX\",\"TLFMF\",\"164,101\",\"167,383.02\",\"\"],[\"2.27%\",\"4.50\",\"MJN\",\"Pharmacan /Cronos\",\"Investing & Finance\",\"CVE\",\"PRMCF\",\"419,922\",\"1,889,649\",\"675.43\"],[\"4.23%\",\"2.71\",\"MYM\",\"My Marijuana\",\"Cultivation & Retail\",\"CNSX\",\"--\",\"1,066,122\",\"2,889,190.62\",\"\"],[\"4.40%\",\"0.95\",\"N\",\"Namaste Tech\",\"Consumption Devices\",\"CNSX\",\"NXTTF\",\"5,714,764\",\"5,429,025.8\",\"192.50\"],[\"0.00%\",\"0.10\",\"NF\",\"New Age Farm\",\"Hemp Products\",\"CNSX\",\"NWGFF\",\"3,938,476\",\"393,847.6\",\"\"],[\"-7.27%\",\"0.25\",\"NSP\",\"Naturally Splendid\",\"Hemp Products\",\"CVE\",\"NSPDF\",\"484,812\",\"121,203\",\"24.42\"],[\"4.99%\",\"3.79\",\"OGI\",\"Organigram\",\"Cultivation & Retail\",\"CVE\",\"OGRMF\",\"3,654,843\",\"13,851,854.97\",\"375.89\"],[\"1.15%\",\"0.88\",\"PUF\",\"PUF Ventures\",\"Consumption Devices\",\"CNSX\",\"PUFXF\",\"719,534\",\"633,189.92\",\"45.85\"],[\"10.68%\",\"1.14\",\"RHT\",\"Reliq Health Tech\",\"Mobile Software\",\"CVE\",\"RQHTF\",\"1,564,567\",\"1,783,606.38\",\"98.74\"],[\"4.05%\",\"1.80\",\"RTI\",\"Radient Technologies\",\"Extraction\",\"CVE\",\"RDDTF\",\"2,181,473\",\"3,926,651.4\",\"345.53\"],[\"3.64%\",\"0.28\",\"RVV\",\"Revive Therapeutics\",\"Medication\",\"CVE\",\"RVVTF\",\"399,705\",\"111,917.4\",\"15.50\"],[\"-2.90%\",\"0.67\",\"SUN\",\"Wildflower\",\"Hemp Products\",\"CNSX\",\"WLDFF\",\"87,197\",\"58,421.99\",\"29.48\"],[\"-0.67%\",\"4.45\",\"SXP\",\"Supremex\",\"Packaging\",\"TSE\",\"SUMXF\",\"27,015\",\"120,216.75\",\"126.40\"],[\"0.00%\",\"0.76\",\"TBP\",\"Tetra Bio-Pharma\",\"BioTech\",\"CVE\",\"GRPOF\",\"497,745\",\"378,286.2\",\"88.67\"],[\"2.44%\",\"2.10\",\"TER\",\"TerrAscend Corp\",\"Cultivation & Retail\",\"CNSX\",\"--\",\"270,176\",\"567,369.6\",\"\"],[\"4.29%\",\"0.73\",\"THC\",\"THC Biomed\",\"BioTech\",\"CNSX\",\"THCBF\",\"818,162\",\"597,258.26\",\"81.29\"],[\"3.55%\",\"3.21\",\"THCX\",\"Hydropothecary Corp\",\"Cultivation & Retail\",\"CVE\",\"HYYDF\",\"1,581,640\",\"5,077,064.4\",\"282.37\"],[\"8.22%\",\"0.79\",\"TNY\",\"Tinley Beverage Co\",\"Beverage\",\"CNSX\",\"QRSRF\",\"945,154\",\"746,671.66\",\"57.81\"],[\"3.49%\",\"7.70\",\"TRST\",\"CannTrust\",\"Cultivation & Biotech\",\"CNSX\",\"CNTTF\",\"368,892\",\"2,840,468.4\",\"699.98\"],[\"-8.04%\",\"1.03\",\"VGW\",\"Valens 
Groworks\",\"BioTech\",\"CNSX\",\"MYMSF\",\"23,285\",\"23,983.55\",\"62.77\"],[\"0.00%\",\"0.52\",\"VIN\",\"Vinergy Resources\",\"Investing & Finance\",\"CNSX\",\"VNNYF\",\"0\",\"\",\"\"],[\"-2.50%\",\"0.39\",\"VP\",\"Vodis Pharma\",\"Cultivation & Retail\",\"CNSX\",\"VDQSF\",\"52,661\",\"20,537.79\",\"\"],[\"6.67%\",\"0.80\",\"VRT\",\"Veritas Pharma\",\"BioTech\",\"CNSX\",\"VRTHF\",\"377,901\",\"302,320.8\",\"\"],[\"6.41%\",\"19.42\",\"WEED\",\"Canopy Growth\",\"Cultivation & Retail\",\"TSE\",\"TWMJF\",\"4,940,034\",\"95,935,460.28\",\"3,706.63\"],[\"6.25%\",\"2.38\",\"WMD\",\"WeedMD\",\"Cultivation & Retail\",\"CVE\",\"WDDMF\",\"1,174,148\",\"2,794,472.24\",\"124.71\"],[\"3.36%\",\"14.75\",\"HMMJ\",\"Horizons Marijuana Life Sciences\",\"Canadian Marijuana ETF\",\"TSE\",\"HMLSF\",\"336,579\",\"4,964,540.25\",\"197.64\"]]],\"sheetnames\":[\"ALLSHOW\"]}\n \n exchange_suffixes = {'TSE': 'TO', 'CVE': 'V'}\n\n # create sector\n sector, _ = Sector.objects.get_or_create(name=\"Cannabis\", slug='cannabis')\n\n # create currency\n currency, _ = Currency.objects.get_or_create(symbol='CAD', defaults={'character':'$', 'name':'Canadian Dollar'})\n us_currency, _ = Currency.objects.get_or_create(symbol='USD', defaults={'character':'$', 'name':'US Dollar'})\n\n # OTC exchange\n otc, _ = Exchange.objects.get_or_create(symbol='OTC', defaults={'name':'OTC', 'currency': us_currency})\n\n # iterate over each item in our table, make the items\n for row in data[\"data\"][0][1:]:\n # percent = float(row[0].replace(\"%\",\"\"))\n suffix = exchange_suffixes[row[5]] if row[5] in exchange_suffixes else ''\n exchange, _ = Exchange.objects.get_or_create(symbol=row[5], defaults={'name':row[5], 'currency':currency, 'ticker_suffix': suffix})\n company, _ = Company.objects.get_or_create(name=row[3], defaults={'sector':sector})\n stock, _ = Stock.objects.get_or_create(ticker=row[2], defaults={\n 'company': company,\n 'exchange': exchange,\n 'market_cap': float(row[9].replace(\",\",\"\")) * 1000000 if row[9] else 0.0,\n # 'previous_close': float(row[1]) - float(row[1]) * percent / 100,\n # 'open': float(row[1]),\n # 'current': float(row[1]),\n # 'volume': float(row[8].replace(\",\",\"\")) if row[8] else 0.0,\n })\n stock.save()\n\n if row[4]:\n tag, _ = Tag.objects.get_or_create(name=row[4])\n company.tags.add(tag)\n\n if row[6] and not row[6] == \"--\":\n stock, _ = Stock.objects.get_or_create(ticker=row[6], defaults={'company':company, 'exchange':otc})\n\n\n print data", "def Load(self):\n\t\tfile = open(self.fileName, 'r')\n\t\tself.hdr = file.readline().split('\\n')[0].split(',')\n\t\t\n\t\tfor line in file.readlines():\n\t\t\ttokens = line.split('\\n')[0].split(',')\n\t\t\tif int(tokens[1]) == 0:\n\t\t\t\tself.h0.append(tokens[0])\n\t\t\telse:\n\t\t\t\tself.h1.append(tokens[0])\n\t\tfile.close()\n\t\tself.numH1 = len(self.h1)\n\t\tself.numH0 = len(self.h0)", "def read_header(infile):\n h = dict()\n fid = open(infile, 'r+b')\n h['filename'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 20))\n h['parent_filename'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 20))\n h['comments1'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 80))\n h['comments2'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 80))\n h['energy_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['config_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['file_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['trans_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['scan_type'] = np.fromfile(fid, dtype = 
np.int16, count = 1)\n h['data_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['date_modified'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 16))\n h['frequency'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['mat_velocity'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['num_pts'] = np.fromfile(fid, dtype = np.int32, count = 1)\n h['num_polarization_channels'] =np.fromfile(fid, dtype = np.int16,count = 1)\n h['spare00'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['adc_min_voltage'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['adc_max_voltage'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['band_width'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['spare01'] = np.fromfile(fid, dtype = np.int16, count = 5)\n h['polarization_type'] = np.fromfile(fid, dtype = np.int16, count = 4)\n h['record_header_size'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['word_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['word_precision'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['min_data_value'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['max_data_value'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['avg_data_value'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['data_scale_factor'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['data_units'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['surf_removal'] = np.fromfile(fid, dtype = np.uint16, count = 1)\n h['edge_weighting'] = np.fromfile(fid, dtype = np.uint16, count = 1)\n h['x_units'] = np.fromfile(fid, dtype = np.uint16, count = 1)\n h['y_units'] = np.fromfile(fid, dtype = np.uint16, count = 1)\n h['z_units'] = np.fromfile(fid, dtype = np.uint16, count = 1)\n h['t_units'] = np.fromfile(fid, dtype = np.uint16, count = 1)\n h['spare02'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['x_return_speed'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_return_speed'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_return_speed'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['scan_orientation'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['scan_direction'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['data_storage_order'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['scanner_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['x_inc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_inc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_inc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['t_inc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['num_x_pts'] = np.fromfile(fid, dtype = np.int32, count = 1)\n h['num_y_pts'] = np.fromfile(fid, dtype = np.int32, count = 1)\n h['num_z_pts'] = np.fromfile(fid, dtype = np.int32, count = 1)\n h['num_t_pts'] = np.fromfile(fid, dtype = np.int32, count = 1)\n h['x_speed'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_speed'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_speed'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['x_acc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_acc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_acc'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['x_motor_res'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_motor_res'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_motor_res'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['x_encoder_res'] = np.fromfile(fid, dtype = np.float32, count = 1)\n 
h['y_encoder_res'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_encoder_res'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['date_processed'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 8))\n h['time_processed'] = b''.join(np.fromfile(fid, dtype = 'S1', count = 8))\n h['depth_recon'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['x_max_travel'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_max_travel'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['elevation_offset_angle'] = np.fromfile(fid,dtype = np.float32, count = 1)\n h['roll_offset_angle'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_max_travel'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['azimuth_offset_angle'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['adc_type'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['spare06'] = np.fromfile(fid, dtype = np.int16, count = 1)\n h['scanner_radius'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['x_offset'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['y_offset'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['z_offset'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['t_delay'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['range_gate_start'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['range_gate_end'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['ahis_software_version'] = np.fromfile(fid, dtype = np.float32, count = 1)\n h['spare_end'] = np.fromfile(fid, dtype = np.float32, count = 10)\n return h", "def _decode_data_dict(self, data_dict: dict) -> dict:\n if ENCODED_DATA_KEY not in data_dict:\n return data_dict\n\n encoded_data = data_dict[ENCODED_DATA_KEY]\n _buffer = io.BytesIO(encoded_data)\n data_dict = torch.load(\n _buffer,\n map_location=\"cpu\"\n # Not using ray.cloudpickle here as it doesn't\n # define an Unpickler (as it is not necessary).\n )\n return data_dict", "def headersFromRawFile(self, rawFile: str, headers: Dict) -> None:\n dFile = open(os.path.join(self.dataPath, rawFile), \"r\", encoding=\"ISO-8859-1\")\n generalHeaderString = dFile.read(1000) # this should be long enough\n generalSplit = generalHeaderString.split()\n # read GENERAL HEADER\n generalHeader = {}\n generalHeader[\"recLength\"] = int(generalSplit[0])\n generalHeader[\"fileType\"] = generalSplit[1]\n generalHeader[\"wordLength\"] = int(generalSplit[2])\n generalHeader[\"version\"] = generalSplit[3]\n generalHeader[\"procId\"] = generalSplit[4]\n generalHeader[\"numCh\"] = int(generalSplit[5])\n generalHeader[\"totalRec\"] = int(generalSplit[6])\n generalHeader[\"firstEvent\"] = int(generalSplit[7])\n generalHeader[\"numEvent\"] = int(generalSplit[8])\n generalHeader[\"extend\"] = int(generalSplit[9])\n\n # read EVENT HEADER - there can be multiple of these, but normally only the one\n # Multiple events are largely deprecated. 
Only a single event is used\n eventHeaders = []\n fileSize = os.path.getsize(os.path.join(self.dataPath, rawFile))\n record = generalHeader[\"firstEvent\"]\n for ir in range(0, generalHeader[\"numEvent\"]):\n seekPt = (record - 1) * generalHeader[\"recLength\"]\n if not seekPt > fileSize:\n # seek from beginning of file\n dFile.seek(seekPt, 0)\n # read extra to make sure\n eventString = dFile.read(1000)\n eventSplit = eventString.split()\n eH = {}\n eH[\"start\"] = int(eventSplit[0])\n eH[\"startms\"] = int(eventSplit[1])\n eH[\"stop\"] = int(eventSplit[2])\n eH[\"stopms\"] = int(eventSplit[3])\n eH[\"cvalue1\"] = float(eventSplit[4])\n eH[\"cvalue2\"] = float(eventSplit[5])\n eH[\"cvalue3\"] = float(eventSplit[6])\n eH[\"EHInfile\"] = int(eventSplit[7])\n eH[\"nextEH\"] = int(eventSplit[8])\n eH[\"previousEH\"] = int(eventSplit[9])\n eH[\"numData\"] = int(eventSplit[10])\n eH[\"startData\"] = int(eventSplit[11])\n eH[\"extended\"] = int(eventSplit[12])\n eventHeaders.append(eH)\n if eH[\"nextEH\"] < generalHeader[\"totalRec\"]:\n record = eH[\"nextEH\"] # set to go to next eH\n else:\n break # otherwise break out of for loops\n # close the data file\n dFile.close()\n # now compare number of samples with that calculated previously\n if eventHeaders[0][\"numData\"] != headers[\"num_samples\"]:\n self.printWarning(\"Data file: {}\".format(dFile))\n self.printWarning(\n \"Number of samples in raw file header {} does not equal that calculated from data {}\".format(\n eventHeaders[0][\"numData\"], headers[\"num_samples\"]\n )\n )\n self.printWarning(\"Number of samples calculated from data will be used\")\n # set the byte offset for the file\n self.dataByteOffset[rawFile] = (\n eventHeaders[0][\"startData\"] - 1\n ) * generalHeader[\"recLength\"]\n self.recChannels[rawFile] = generalHeader[\"numCh\"]", "def decodemeta(data):\n d = {}\n for l in data.split('\\0'):\n if l:\n key, value = l.split(':')\n d[key] = value\n return d", "def loads(self, data):\n self._id = data.get('id', -1)\n self._created = data.get('created', 0) # datetime.strptime(data.get('created', '1970-01-01T00:00:00'), '%Y-%m-%dT%H:%M:%S').timestamp()\n self._stage = data.get('stage', 0) # self.stage_from_str(data.get('stage', ''))\n self._dir = data.get('direction', 0) # self.direction_from_str(data.get('direction', ''))\n self._timeframe = data.get('timeframe') # timeframe_from_str(data.get('timeframe', 't'))\n self._expiry = data.get('expiry', 0) # datetime.strptime(data.get('expiry', '1970-01-01T00:00:00'), '%Y-%m-%dT%H:%M:%S').timestamp()", "def _read_old_header(self, raw):\n\n byte_count = 0\n\n data_size = 4\n self.label = struct.unpack('<4s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.version = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.revision = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 26\n self.date = struct.unpack('<26s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.file_format = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.file_type = struct.unpack('<4s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.original_file_name = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.reference_file_name = 
struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.related_file_name_a = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.related_file_name_b = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.related_file_name_c = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.related_file_name_d = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 82\n self.annotate = struct.unpack('<82s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 33\n self.instrument_model = struct.unpack('<33s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 33\n self.instrument_serial_number = struct.unpack('<33s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 33\n self.software_version_number = struct.unpack('<33s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 33\n self.crystal_material = struct.unpack('<33s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.laser_wavelength_microns = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.laser_null_doubling = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.optical_ratio = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.dispersion_constant_xc = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.dispersion_constant_xm = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.dispersion_constant_xb = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.interferogram_size = struct.unpack('<H',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.interferogram_center.append(struct.unpack('<H',\n raw[byte_count:byte_count+data_size])[0])\n byte_count += data_size\n\n data_size = 2\n self.interferogram_center.append(struct.unpack('<H',\n raw[byte_count:byte_count+data_size])[0])\n byte_count += data_size\n\n data_size = 2\n self.acquire_mode = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.emissivity = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.apodization = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.zero_fill = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.run_time_math = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.fft_size = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.number_of_coadds = struct.unpack('<h',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 2\n self.number_of_igrams = struct.unpack('<h',\n 
raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.amb_temperature = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.inst_temperature = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.wbb_temperature = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.cbb_temperature = struct.unpack('<f',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 20\n self.spare_i = struct.unpack('<hhhhhhhhhh',\n raw[byte_count:byte_count+data_size])\n byte_count += data_size\n\n data_size = 40\n self.spare_f = struct.unpack('<ffffffffff',\n raw[byte_count:byte_count+data_size])\n byte_count += data_size\n\n data_size = 40\n self.spare_l = struct.unpack('<ffffffffff',\n raw[byte_count:byte_count+data_size])\n byte_count += data_size\n\n data_size = 65\n self.spare_na = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.spare_nb = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.spare_nc = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.spare_nd = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 65\n self.spare_ne = struct.unpack('<65s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size\n\n data_size = 4\n self.header_end = struct.unpack('<4s',\n raw[byte_count:byte_count+data_size])[0]\n byte_count += data_size", "def verify_header (filename, htypes=None):\n\n # dictionary\n dict_head = {\n # raw header\n # commenting out SIMPLE, BSCALE and BZERO - basic keywords\n # that will be present in images but not in binary fits tables\n #'SIMPLE': {'htype':'raw', 'dtype':bool, 'DB':False, 'None_OK':True},\n #'BSCALE': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n #'BZERO': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n 'BITPIX': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'NAXIS': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'NAXIS1': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'NAXIS2': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'BUNIT': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n #'CCD-AMP': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'SET-TEMP': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n 'CCD-TEMP': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'XBINNING': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'YBINNING': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n #'CCD-SET': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'ALTITUDE': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'AZIMUTH': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'DOMEAZ': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n 'RADESYS': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'EPOCH': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n 'RA': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'RA-REF': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n #'RA-TEL': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n 'DEC': 
{'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'DEC-REF': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n #'DEC-TEL': {'htype':'raw', 'dtype':float, 'DB':False, 'None_OK':True},\n 'HA': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'FLIPSTAT': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'EXPTIME': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'ISTRACKI': {'htype':'raw', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'ACQSTART': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'ACQEND': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'GPSSTART': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'GPSEND': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'GPS-SHUT': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'DATE-OBS': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'MJD-OBS': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'LST': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'UTC': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'TIMESYS': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'ORIGIN': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'MPC-CODE': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'TELESCOP': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'CL-BASE': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RH-MAST': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RH-DOME': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RH-AIRCO': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RH-PIER': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PRESSURE': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-PIER': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-DOME': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-ROOF': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-AIRCO': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-MAST': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-STRUT': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-CRING': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-SPIDER': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-FWN': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-FWS': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-M2HOLD': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-GUICAM': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-M1': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-CRYWIN': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-CRYGET': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-CRYCP': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PRES-CRY': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'WINDAVE': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'WINDGUST': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'WINDDIR': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'SITELAT': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'SITELONG': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'ELEVATIO': {'htype':'raw', 'dtype':int, 'DB':True, 'None_OK':True},\n #'WEATIME': {'htype':'raw', 'dtype':str, 'DB':False, 
'None_OK':True},\n 'FILTER': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n #'FILTERID': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'CCD-ID': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'CONTROLL': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'DETSPEED': {'htype':'raw', 'dtype':int, 'DB':True, 'None_OK':True},\n 'CCD-NW': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'CCD-NH': {'htype':'raw', 'dtype':int, 'DB':False, 'None_OK':True},\n 'INSTRUME': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'FOCUSPOS': {'htype':'raw', 'dtype':int, 'DB':True, 'None_OK':True},\n 'IMAGETYP': {'htype':'raw', 'dtype':str, 'DB':False, 'None_OK':True},\n 'OBJECT': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'AIRMASS': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':False},\n 'ORIGFILE': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':False},\n 'OBSERVER': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'ABOTVER': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'PROGNAME': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'PROGID': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'GUIDERST': {'htype':'raw', 'dtype':str, 'DB':True, 'None_OK':True},\n 'GUIDERFQ': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'TRAKTIME': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'ADCX': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n 'ADCY': {'htype':'raw', 'dtype':float, 'DB':True, 'None_OK':True},\n #\n # full header\n 'BB-V': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':False},\n 'BB-START': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':False},\n 'KW-V': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':False},\n 'LOG': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'LOG-IMA': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'N-INFNAN': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'XTALK-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'XTALK-F': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'NONLIN-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'NONLIN-F': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'GAIN-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'GAIN': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'GAIN1': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'GAIN16': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'OS-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'BIASMEAN': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'BIASM1': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'BIASM16': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RDNOISE': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RDN1': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RDN16': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'BIAS1A0': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'BIAS1A1': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'VFITOK1': {'htype':'full', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'BIAS16A0': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'BIAS16A1': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'VFITOK16': {'htype':'full', 'dtype':bool, 'DB':False, 'None_OK':True},\n 
'MBIAS-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'MBIAS-F': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'MB-NDAYS': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'SATURATE': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'NOBJ-SAT': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'MFLAT-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'MFLAT-F': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'MF-NDAYS': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'MFRING-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'MFRING-F': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'FRRATIO': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'COSMIC-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'NCOSMICS': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'SAT-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'NSATS': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'REDFILE': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'MASKFILE': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'S-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'S-V': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'S-NOBJ': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'S-FWHM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'S-FWSTD': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'S-SEEING': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'S-SEESTD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'S-ELONG': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'S-ELOSTD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'S-BKG': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'S-BKGSTD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'S-VIGNET': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'BKG-CORR': {'htype':'full', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'BKG-CHI2': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'BKG-CF1': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'BKG-CF16': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'BKG-FDEG': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'BKG-FC0': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'A-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'A-V': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'A-INDEX': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'A-PSCALE': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'A-PSCALX': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'A-PSCALY': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'A-ROT': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'A-ROTX': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'A-ROTY': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'A-CAT-F': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'A-NAST': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'A-TNAST': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'A-NAMAX': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'A-DRA': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 
'A-DRASTD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'A-DDEC': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'A-DDESTD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PSF-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'PSF-V': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n 'PSF-RAD': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-RADP': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-SIZE': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'PSF-FRAC': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-SAMP': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-CFGS': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'PSF-NOBJ': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'PSF-FIX': {'htype':'full', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'PSF-PLDG': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PSF-CHI2': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PSF-FWHM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-SEE': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PSF-PMIN': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-PMAX': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-PMED': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-PSTD': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-BMIN': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-BMAX': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-BMED': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-BSTD': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-EMNM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-EMXM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-EMDM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-ESTM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FMNM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FMXM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FMDM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FSTM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-EMNG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-EMXG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-EMDG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-ESTG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FMNG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FMXG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FMDG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PSF-FSTG': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PC-P': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'PC-CAT-F': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':True},\n 'PC-NCAL': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'PC-TNCAL': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PC-FNCAL': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PC-NCMAX': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PC-NCMIN': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 
'PC-ZPFDG': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PC-ZPF0': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PC-TNSUB': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PC-NSUB': {'htype':'full', 'dtype':int, 'DB':False, 'None_OK':True},\n 'PC-MZPD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PC-MZPS': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PC-ZPDEF': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'PC-ZP': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PC-ZPSTD': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PC-EXTCO': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'AIRMASSC': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'RA-CNTR': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'DEC-CNTR': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'PC-AIRM': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'NSIGMA': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'LIMEFLUX': {'htype':'full', 'dtype':float, 'DB':False, 'None_OK':True},\n 'LIMMAG': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'NOBJECTS': {'htype':'full', 'dtype':int, 'DB':True, 'None_OK':True},\n 'RADECOFF': {'htype':'full', 'dtype':float, 'DB':True, 'None_OK':True},\n 'FORMAT-P': {'htype':'full', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'DUMCAT': {'htype':'full', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'QC-FLAG': {'htype':'full', 'dtype':str, 'DB':True, 'None_OK':False},\n 'DATEFILE': {'htype':'full', 'dtype':str, 'DB':False, 'None_OK':True},\n #\n # transient header\n 'SWARP-P': {'htype':'trans', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'SWARP-V': {'htype':'trans', 'dtype':str, 'DB':False, 'None_OK':True},\n 'Z-REF': {'htype':'trans', 'dtype':str, 'DB':False, 'None_OK':True},\n 'Z-DXYLOC': {'htype':'trans', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'Z-DX': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-DY': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-DXSTD': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-DYSTD': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-FNRLOC': {'htype':'trans', 'dtype':bool, 'DB':False, 'None_OK':True},\n 'Z-FNR': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-FNRSTD': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-P': {'htype':'trans', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'Z-V': {'htype':'trans', 'dtype':str, 'DB':False, 'None_OK':True},\n 'Z-SIZE': {'htype':'trans', 'dtype':int, 'DB':False, 'None_OK':True},\n 'Z-BSIZE': {'htype':'trans', 'dtype':int, 'DB':False, 'None_OK':True},\n 'Z-SCMED': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-SCSTD': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'Z-FPEMED': {'htype':'trans', 'dtype':float, 'DB':False, 'None_OK':True},\n 'Z-FPESTD': {'htype':'trans', 'dtype':float, 'DB':False, 'None_OK':True},\n 'T-NSIGMA': {'htype':'trans', 'dtype':int, 'DB':True, 'None_OK':True},\n 'T-LFLUX': {'htype':'trans', 'dtype':float, 'DB':False, 'None_OK':True},\n 'T-NTRANS': {'htype':'trans', 'dtype':int, 'DB':True, 'None_OK':True},\n 'T-FTRANS': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-LMAG': {'htype':'trans', 'dtype':float, 'DB':True, 'None_OK':True},\n 'T-NFAKE': {'htype':'trans', 'dtype':int, 'DB':False, 'None_OK':True},\n 'T-FAKESN': 
{'htype':'trans', 'dtype':float, 'DB':False, 'None_OK':True},\n 'MC-P': {'htype':'trans', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'MC-V': {'htype':'trans', 'dtype':str, 'DB':False, 'None_OK':True},\n 'MC-MODEL': {'htype':'trans', 'dtype':str, 'DB':False, 'None_OK':True},\n 'TDUMCAT': {'htype':'trans', 'dtype':bool, 'DB':True, 'None_OK':False},\n 'TQC-FLAG': {'htype':'trans', 'dtype':str, 'DB':True, 'None_OK':False},\n }\n\n # read header of filename\n if isfile (filename):\n header = read_hdulist (filename, get_data=False, get_header=True)\n else:\n # return success=False if it does not exist\n log.warning ('file {} does not exist; not able to verify its header'\n .format(filename))\n return False\n\n\n # force [htypes] to be a list\n htypes_list = list(htypes)\n\n # loop keys in dict_head\n for key in dict_head.keys():\n\n # only check keywords with htype matching the input [htypes]\n if dict_head[key]['htype'] not in htypes_list:\n continue\n\n # check that key is present in header\n if key in header:\n\n # provide warning if dtype not as expected and header\n # keyword value is not 'None'\n if (dict_head[key]['dtype'] != type(header[key]) and\n header[key] != 'None'):\n log.warning ('dtype of keyword {}: {} does not match the '\n 'expected dtype: {} in header of {}'\n .format(key, type(header[key]),\n dict_head[key]['dtype'], filename))\n\n # if key goes to DataBase and value is 'None' or None\n # while 'None_OK' is False, raise an exception\n if (dict_head[key]['DB'] and not dict_head[key]['None_OK'] and\n (header[key] is None or header[key] == 'None')):\n msg = ('DataBase keyword {} not allowed to have \\'None\\' or '\n 'None value in header of {}'.format(key, filename))\n log.error (msg)\n raise ValueError (msg)\n\n\n else:\n msg = 'keyword {} not present in header of {}'.format(key, filename)\n # if keyword will be ingested into the database, raise an exception\n if dict_head[key]['DB']:\n log.error (msg)\n raise KeyError (msg)\n\n else:\n log.warning (msg)\n\n\n return", "def read_data(filename):\n # Store debug mode\n debug = params.debug\n params.debug = None\n\n # Initialize dictionary\n header_dict = {}\n\n headername = filename + \".hdr\"\n\n with open(headername, \"r\") as f:\n # Replace characters for easier parsing\n hdata = f.read()\n hdata = hdata.replace(\",\\n\", \",\")\n hdata = hdata.replace(\"\\n,\", \",\")\n hdata = hdata.replace(\"{\\n\", \"{\")\n hdata = hdata.replace(\"\\n}\", \"}\")\n hdata = hdata.replace(\" \\n \", \"\")\n hdata = hdata.replace(\";\", \"\")\n hdata = hdata.split(\"\\n\")\n\n # Loop through and create a dictionary from the header file\n for i, string in enumerate(hdata):\n if ' = ' in string:\n header_data = string.split(\" = \")\n header_dict.update({header_data[0].rstrip(): header_data[1].rstrip()})\n elif ' : ' in string:\n header_data = string.split(\" : \")\n header_dict.update({header_data[0].rstrip(): header_data[1].rstrip()})\n\n # Reformat wavelengths\n header_dict[\"wavelength\"] = header_dict[\"wavelength\"].replace(\"{\", \"\")\n header_dict[\"wavelength\"] = header_dict[\"wavelength\"].replace(\"}\", \"\")\n header_dict[\"wavelength\"] = header_dict[\"wavelength\"].replace(\" \", \"\")\n header_dict[\"wavelength\"] = header_dict[\"wavelength\"].split(\",\")\n\n # Create dictionary of wavelengths\n wavelength_dict = {}\n for j, wavelength in enumerate(header_dict[\"wavelength\"]):\n wavelength_dict.update({float(wavelength): float(j)})\n\n # Replace datatype ID number with the numpy datatype\n dtype_dict = {\"1\": np.uint8, 
\"2\": np.int16, \"3\": np.int32, \"4\": np.float32, \"5\": np.float64, \"6\": np.complex64,\n \"9\": np.complex128, \"12\": np.uint16, \"13\": np.uint32, \"14\": np.uint64, \"15\": np.uint64}\n header_dict[\"data type\"] = dtype_dict[header_dict[\"data type\"]]\n\n # Read in the data from the file\n raw_data = np.fromfile(filename, header_dict[\"data type\"], -1)\n\n # Reshape the raw data into a datacube array\n array_data = raw_data.reshape(int(header_dict[\"lines\"]),\n int(header_dict[\"bands\"]),\n int(header_dict[\"samples\"])).transpose((0, 2, 1))\n\n if \"default bands\" in header_dict:\n header_dict[\"default bands\"] = header_dict[\"default bands\"].replace(\"{\", \"\")\n header_dict[\"default bands\"] = header_dict[\"default bands\"].replace(\"}\", \"\")\n default_bands = header_dict[\"default bands\"].split(\",\")\n\n pseudo_rgb = cv2.merge((array_data[:, :, int(default_bands[0])],\n array_data[:, :, int(default_bands[1])],\n array_data[:, :, int(default_bands[2])]))\n\n else:\n max_wavelength = max([float(i) for i in wavelength_dict.keys()])\n min_wavelength = min([float(i) for i in wavelength_dict.keys()])\n # Check range of available wavelength\n if max_wavelength >= 635 and min_wavelength <= 490:\n id_red = _find_closest(np.array([float(i) for i in wavelength_dict.keys()]), 710)\n id_green = _find_closest(np.array([float(i) for i in wavelength_dict.keys()]), 540)\n id_blue = _find_closest(np.array([float(i) for i in wavelength_dict.keys()]), 480)\n\n pseudo_rgb = cv2.merge((array_data[:, :, [id_blue]],\n array_data[:, :, [id_green]],\n array_data[:, :, [id_red]]))\n else:\n # Otherwise take 3 wavelengths, first, middle and last available wavelength\n id_red = int(header_dict[\"bands\"]) - 1\n id_green = int(id_red / 2)\n pseudo_rgb = cv2.merge((array_data[:, :, [0]],\n array_data[:, :, [id_green]],\n array_data[:, :, [id_red]]))\n\n # Gamma correct pseudo_rgb image\n pseudo_rgb = pseudo_rgb ** (1 / 2.2)\n # Scale each of the channels up to 255\n pseudo_rgb = cv2.merge((rescale(pseudo_rgb[:, :, 0]),\n rescale(pseudo_rgb[:, :, 1]),\n rescale(pseudo_rgb[:, :, 2])))\n\n max_wl = float(str(header_dict[\"wavelength\"][-1]).rstrip())\n min_wl = float(str(header_dict[\"wavelength\"][0]).rstrip())\n\n # Create an instance of the spectral_data class\n spectral_array = Spectral_data(array_data=array_data, max_wavelength=max_wl,\n min_wavelength=min_wl, d_type=header_dict[\"data type\"],\n wavelength_dict=wavelength_dict, samples=int(header_dict[\"samples\"]),\n lines=int(header_dict[\"lines\"]), interleave=header_dict[\"interleave\"],\n wavelength_units=header_dict[\"wavelength units\"], array_type=\"datacube\",\n pseudo_rgb=pseudo_rgb, filename=filename)\n\n # Reset debug mode\n params.debug = debug\n\n if params.debug == \"plot\":\n # Gamma correct pseudo_rgb image\n plot_image(pseudo_rgb)\n elif params.debug == \"print\":\n print_image(pseudo_rgb, os.path.join(params.debug_outdir, str(params.device) + \"_pseudo_rgb.png\"))\n\n return spectral_array", "def __init__(self,\n resp_data,\n ):\n self.raw_data = resp_data.dict()\n\n # Packet parsed for host db processing\n self.parsed_data = {'insert': {'product': self.get_product_pack(),\n 'selling_status': self.get_selling_status_pack(),\n 'shipping_info': self.get_shipping_info_pack(),\n 'listing_info': self.get_listing_info_pack(),\n },\n 'items_received': resp_data.dict()['searchResult']['_count']}", "def processLnkHeader(self):\n\t\t# Read LNK heaer from file\n\t\theader_raw = self.fpLnk.read(HEADER_LEN)\n\t\t# Build new dictionary 
containing header data\n\t\tself.header = parseStructuredData(header_raw, LNK_HEADER)", "def __init__(self, header: CmdHeader, raw_data: bytes) -> None:\n assert isinstance(header, CmdHeader)\n assert isinstance(raw_data, (bytes, bytearray))\n self.header = header\n self.raw_data = raw_data\n self.status, = unpack_from(\"<L\", raw_data)", "def __init__(self, header: CmdHeader, raw_data: bytes) -> None:\n super().__init__(header, raw_data)\n self.status, self.length = unpack_from('<2I', raw_data)", "def __init__(self, header: CmdHeader, raw_data: bytes) -> None:\n super().__init__(header, raw_data)\n self.status, self.length = unpack_from('<2I', raw_data)", "def __init__(self, header: CmdHeader, raw_data: bytes) -> None:\n super().__init__(header, raw_data)\n self.status, self.length = unpack_from('<2I', raw_data)", "def load(self):\n basepath = os.path.dirname(os.path.abspath(__file__))\n filename = os.sep.join([basepath, c.FOLDER_JSON, c.FILE_GAME_VERSIONS])\n Handler.ALL_VERS_DATA = {} # reset known data; do not retain defunct information\n with open(filename, \"r\") as f:\n data = json.loads( f.read() )\n self.update(data)\n self._updated = False\n #for v,record in iteritems(Handler.ALL_VERS_DATA):\n # print(type(v), v)\n #for k,v in iteritems(record): ", "def _read_headers(self):\n # Read the textual header.\n self._read_textual_header()\n # The next 400 bytes are from the Binary File Header.\n binary_file_header = self.file.read(400)\n bfh = SEGYBinaryFileHeader(binary_file_header, self.endian)\n self.binary_file_header = bfh\n self.data_encoding = self.binary_file_header.data_sample_format_code\n # If bytes 3506-3506 are not zero, an extended textual header follows\n # which is not supported so far.\n if bfh.number_of_3200_byte_ext_file_header_records_following != 0:\n msg = 'Extended textual headers are supported yet. 
' + \\\n 'Please contact the developers.'\n raise NotImplementedError(msg)", "def _build_header_dictionary(self):\n start = 0\n #print self.raw_data\n for a in range(20):\n redatapuller = re.compile(\"\\r\\n\\r\\n\\r\\n(?P<word>.*?)\\t.*?\\n\", re.DOTALL)\n m = redatapuller.search(self.raw_data[start:])\n if not(m):\n break\n self.header_dictionary[m.group(\"word\")] = start + m.end()\n if a==0:\n self.header_dictionary[\"main\"] = start + m.end()\n start += m.end()", "def parse(data):\n try:\n header = decompress(data)\n version, game, save, log = parse_version(header, data)\n if version not in (Version.USERPATCH15, Version.DE, Version.HD):\n raise RuntimeError(f\"{version} not supported\")\n de = parse_de(header, version, save)\n hd = parse_hd(header, version, save)\n metadata, num_players = parse_metadata(header)\n map_ = parse_map(header, version)\n players, mod, device = parse_players(header, num_players, version, save)\n scenario = parse_scenario(header, num_players, version, save)\n lobby = parse_lobby(header, version, save)\n except (struct.error, zlib.error, AssertionError, MemoryError, ValueError) as e:\n raise RuntimeError(f\"could not parse: {e}\")\n return dict(\n version=version,\n game_version=game,\n save_version=save,\n log_version=log,\n players=players,\n map=map_,\n de=de,\n hd=hd,\n mod=de.get('dlc_ids') if de else mod,\n metadata=metadata,\n scenario=scenario,\n lobby=lobby,\n device=device\n )", "def on_frame(self, frame: str) -> None:\n\n logger.debug(\"Frame: {}\".format(frame))\n try:\n message = json.loads(frame)\n except:\n logger.exception(\"Could not decode the JSON message\")\n self.transport.close()\n return\n\n mtype = message.get('type', None)\n self.log_state(mtype)\n if mtype == 'NEGOTIATION_RESPONSE':\n logger.debug(\"NEGOTIATION RESPONSE\")\n\n # Receive the chosen algorithms by the server \n self.process_negotiation_response(message)\n\n # Generate DH client private and public keys\n bytes_public_key,p,g,y=self.crypto.dh_client()\n \n message = {'type':'DH_PARAMETERS','parameters':{'p':p,'g':g,'public_key':str(bytes_public_key,'ISO-8859-1')}}\n self._send(message)\n self.state = STATE_DH\n \n return\n\n elif mtype == 'DH_PARAMETERS_RESPONSE':\n logger.debug('DH_PARAMETERS_RESPONSE')\n public_key=bytes(message['parameters']['public_key'],'ISO-8859-1')\n \n #Create shared key with the server public key\n self.crypto.create_shared_key(public_key)\n \n # Generate a symmetric key\n self.crypto.symmetric_key_gen()\n logger.debug(\"Key: {}\".format(self.crypto.symmetric_key))\n\n if self.state == STATE_ROTATION:\n self.state = STATE_OPEN\n self.send_file(self.file_name)\n \n elif self.state == STATE_DH:\n secure_message = self.encrypt_message({'type': 'OPEN', 'file_name': self.file_name})\n self._send(secure_message)\n self.send_mac()\n self.state = STATE_OPEN\n\n return\n\n elif mtype == 'INTEGRITY_CONTROL':\n flag = message['data']\n if flag == 'True':\n self._send(self.encrypt_message({'type': 'CLOSE'}))\n self.send_mac()\n logger.info(\"File transfer finished. Closing transport\")\n self.transport.close()\n\n elif mtype == 'OK': # Server replied OK. 
We can advance the state\n if self.state == STATE_OPEN:\n logger.info(\"Channel open\")\n self.send_file(self.file_name)\n elif self.state == STATE_DATA: # Got an OK during a message transfer.\n # Reserved for future use\n pass\n else:\n logger.warning(\"Ignoring message from server\")\n return\n\n elif mtype == 'ERROR':\n logger.warning(\"Got error from server: {}\".format(message.get('data', None)))\n \n else:\n logger.warning(\"Invalid message type\")\n\n logger.debug('Closing')\n self.transport.close()\n self.loop.stop()", "def to_data(self) -> dict:\n return {'pingData': {'challenge': self.ping_challenge}}", "def load_id_dict(self) -> None:\n sys.stdout.write(\"Loading identifier dictionaries...\\n\")\n assert os.path.exists(self.mapping_file)\n with open(self.mapping_file, 'r') as f:\n self.forward_map, self.backward_map = json.load(f)\n self.forward_map = {int(k): v for k, v in self.forward_map.items()}", "def __init__(self, header=None):\r\n\r\n self.data = []", "def _initialize_data(self):\n self.reset_count = 0\n self._idn_no_firmware = \"KEPCO,BOP 50-20,E1234,\"\n self._firmware = 2.6\n self._init_data()", "def _init_data(self) -> None:\n self.dtype = dict()\n self.shape = dict()\n self.size = dict()\n self.attrs = dict()\n self.data_ptr = dict()\n\n if self.mode == 'r':\n for k in self.fp.keys():\n self.dtype[k] = self.fp[k].dtype\n self.shape[k] = self.fp[k].shape\n self.size[k] = self.fp[k].shape[0]\n self.data_ptr[k] = 0", "def _read_chunk_head(self, data):\n if ';' in data:\n data, ext = data.split(';', 1)\n else:\n ext = ''\n\n length = int(data.strip(), 16)\n\n if length == 0:\n resp = self.current_response\n if resp._decompressor:\n resp.body += resp._decompressor.flush()\n del resp._decompressor\n\n self._stream.on_read = self._read_additional_headers\n resp._additional_headers = ''\n self._stream.read_delimiter = CRLF\n\n else:\n self._stream.on_read = self._read_chunk_body\n self._stream.read_delimiter = length + 2", "def _load(self):\n # Extract the ASCII header (5 first lines)\n with open(self._xst_bin, 'rb') as f:\n header = list(islice(f, 0, 5))\n assert header[0] == b'HeaderStart\\n',\\\n 'Wrong header start'\n assert header[-1] == b'HeaderStop\\n',\\\n 'Wrong header stop'\n header = [s.decode('utf-8') for s in header]\n hd_size = sum([len(s) for s in header])\n\n # Parse informations into a metadata dictionnary\n keys = ['freq', 'ma', 'accu']\n search = ['Freq.List', 'Mr.List', 'accumulation']\n types = ['float64', 'int', 'int']\n for key, word, typ in zip(keys, search, types):\n for h in header:\n if word in h:\n self.meta[key] = np.array(\n h.split('=')[1].split(','),\n dtype=typ\n )\n\n # Deduce the dtype for decoding\n n_ma = self.meta['ma'].size\n n_sb = self.meta['freq'].size\n dtype = np.dtype(\n [('jd', 'float64'),\n ('data', 'complex64', (n_sb, n_ma*n_ma*2 + n_ma))]\n )\n\n # Decoding the binary file\n tmp = np.memmap(\n filename=self._xst_bin,\n dtype='int8',\n mode='r',\n offset=hd_size\n )\n decoded = tmp.view(dtype)\n\n self.data = decoded['data'] / self.meta['accu']\n self.time = Time(decoded['jd'], format='jd', precision=0)\n\n return", "def _decode(self):\n \n self.version = int(data_to_hex_str(self.packet[0])[2])\n self.header_len = int(data_to_hex_str(self.packet[0])[3]) * 4\n self.type_of_service = data_to_hex_str(self.packet[1:2])\n self.total_len = int(data_to_hex_str(self.packet[2:4]), 16)\n self.id = data_to_hex_str(self.packet[4:6])\n \n #parse the flags fields(reservedbit, don't fragment, more fragment)\n if ((ord(self.packet[6]) & (1 << 
7)) != 0):\n self.flags_reservedbit = 1\n else:\n self.flags_reservedbit = 0\n #endof if\n \n if ((ord(self.packet[6]) & (1 << 6)) != 0):\n self.flags_dont_fragment = 1\n else:\n self.flags_dont_fragment = 0\n #endof if\n \n if ((ord(self.packet[6]) & (1 << 5)) != 0):\n self.flags_more_fragment = 1\n else:\n self.flags_more_fragment = 0\n #endof if\n \n #parse the offset field(in packet[6:7]): 00011111 & packet[6] (to filter flags) -->> get packet[6:7] in hex_str\n #tmp = str(31 & ord(self.packet[6]))\n self.fragment_offset = int(data_to_hex_str(self.packet[6:8]), 16)\n if (self.fragment_offset >= (1 << 13)):\n #take away the flags fields: 00011111 11111111 & self.fragment_offset\n self.fragment_offset = self.fragment_offset & ((1 << 13) - 1) \n \n self.TTL = ord(self.packet[8])\n self.protocol = IPPROTO[ord(self.packet[9])]\n self.header_checksum = data_to_hex_str(self.packet[10:12])\n \n self.src = str(ord(self.packet[12])) + '.' + str(ord(self.packet[13])) + '.' + \\\n str(ord(self.packet[14])) + '.' + str(ord(self.packet[15]))\n self.dst = str(ord(self.packet[16])) + '.' + str(ord(self.packet[17])) + '.' + \\\n str(ord(self.packet[18])) + '.' + str(ord(self.packet[19]))\n \n if (self.header_len > 20):\n self.opt_paddings = self.packet[20 : (self.header_len)]", "def act_on_header(self, dct):\n self.socket.send_pyobj(dict(dct), protocol=2)", "def deserialize(self, reader: serialization.BinaryReader) -> None:\n self.index_start = reader.read_uint32()\n self.count = reader.read_int16()\n if self.count < 1 or self.count == 0 or self.count > HeadersPayload.MAX_HEADERS_COUNT:\n raise ValueError(\"Deserialization error - invalid count\")", "def _parseHeader(self):\n # Big or little endian for the header.\n self._getEndianess()\n # Read the fixed header.\n self._readFixedHeader()\n # Get the present blockettes.\n self._getBlockettes()\n # Calculate the starttime.\n self._calculateStarttime()", "def _decode1(self, body, data):\r\n if \" \" in body:\r\n evtype,body = body.split(\" \",1)\r\n else:\r\n evtype,body = body,\"\"\r\n evtype = evtype.upper()\r\n if evtype == \"CIRC\":\r\n m = re.match(r\"(\\d+)\\s+(\\S+)(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"CIRC event misformatted.\")\r\n ident,status,path,purpose,reason,remote = m.groups()\r\n ident = int(ident)\r\n if path:\r\n if \"PURPOSE=\" in path:\r\n remote = reason\r\n reason = purpose\r\n purpose=path\r\n path=[]\r\n elif \"REASON=\" in path:\r\n remote = reason\r\n reason = path\r\n purpose = \"\"\r\n path=[]\r\n else:\r\n path_verb = path.strip().split(\",\")\r\n path = []\r\n for p in path_verb:\r\n path.append(p.replace(\"~\", \"=\").split(\"=\")[0])\r\n else:\r\n path = []\r\n\r\n if purpose and \"REASON=\" in purpose:\r\n remote=reason\r\n reason=purpose\r\n purpose=\"\"\r\n\r\n if purpose: purpose = purpose[9:]\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n event = CircuitEvent(evtype, ident, status, path, purpose, reason,\r\n remote, body)\r\n elif evtype == \"STREAM\":\r\n #plog(\"DEBUG\", \"STREAM: \"+body)\r\n m = re.match(r\"(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)?:(\\d+)(\\sREASON=\\S+)?(\\sREMOTE_REASON=\\S+)?(\\sSOURCE=\\S+)?(\\sSOURCE_ADDR=\\S+)?(\\s+PURPOSE=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM event misformatted.\")\r\n ident,status,circ,target_host,target_port,reason,remote,source,source_addr,purpose = m.groups()\r\n ident,circ = map(int, (ident,circ))\r\n if not target_host: # This can happen on SOCKS_PROTOCOL failures\r\n 
target_host = \"(none)\"\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n if source: source = source[8:]\r\n if source_addr: source_addr = source_addr[13:]\r\n if purpose:\r\n purpose = purpose.lstrip()\r\n purpose = purpose[8:]\r\n event = StreamEvent(evtype, ident, status, circ, target_host,\r\n int(target_port), reason, remote, source, source_addr,\r\n purpose, body)\r\n elif evtype == \"ORCONN\":\r\n m = re.match(r\"(\\S+)\\s+(\\S+)(\\sAGE=\\S+)?(\\sREAD=\\S+)?(\\sWRITTEN=\\S+)?(\\sREASON=\\S+)?(\\sNCIRCS=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"ORCONN event misformatted.\")\r\n target, status, age, read, wrote, reason, ncircs = m.groups()\r\n\r\n #plog(\"DEBUG\", \"ORCONN: \"+body)\r\n if ncircs: ncircs = int(ncircs[8:])\r\n else: ncircs = 0\r\n if reason: reason = reason[8:]\r\n if age: age = int(age[5:])\r\n else: age = 0\r\n if read: read = int(read[6:])\r\n else: read = 0\r\n if wrote: wrote = int(wrote[9:])\r\n else: wrote = 0\r\n event = ORConnEvent(evtype, status, target, age, read, wrote,\r\n reason, ncircs, body)\r\n elif evtype == \"STREAM_BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM_BW event misformatted.\")\r\n event = StreamBwEvent(evtype, body, *m.groups())\r\n elif evtype == \"BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"BANDWIDTH event misformatted.\")\r\n read, written = map(long, m.groups())\r\n event = BWEvent(evtype, read, written, body)\r\n elif evtype in (\"DEBUG\", \"INFO\", \"NOTICE\", \"WARN\", \"ERR\"):\r\n event = LogEvent(evtype, body)\r\n elif evtype == \"NEWDESC\":\r\n ids_verb = body.split(\" \")\r\n ids = []\r\n for i in ids_verb:\r\n ids.append(i.replace(\"~\", \"=\").split(\"=\")[0].replace(\"$\",\"\"))\r\n event = NewDescEvent(evtype, ids, body)\r\n elif evtype == \"ADDRMAP\":\r\n # TODO: Also parse errors and GMTExpiry\r\n m = re.match(r'(\\S+)\\s+(\\S+)\\s+(\\\"[^\"]+\\\"|\\w+)', body)\r\n if not m:\r\n raise ProtocolError(\"ADDRMAP event misformatted.\")\r\n fromaddr, toaddr, when = m.groups()\r\n if when.upper() == \"NEVER\": \r\n when = None\r\n else:\r\n when = time.strptime(when[1:-1], \"%Y-%m-%d %H:%M:%S\")\r\n event = AddrMapEvent(evtype, fromaddr, toaddr, when, body)\r\n elif evtype == \"NS\":\r\n event = NetworkStatusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"NEWCONSENSUS\":\r\n event = NewConsensusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"BUILDTIMEOUT_SET\":\r\n m = re.match(\r\n r\"(\\S+)\\sTOTAL_TIMES=(\\d+)\\sTIMEOUT_MS=(\\d+)\\sXM=(\\d+)\\sALPHA=(\\S+)\\sCUTOFF_QUANTILE=(\\S+)\",\r\n body)\r\n set_type, total_times, timeout_ms, xm, alpha, quantile = m.groups()\r\n event = BuildTimeoutSetEvent(evtype, set_type, int(total_times),\r\n int(timeout_ms), int(xm), float(alpha),\r\n float(quantile), body)\r\n elif evtype == \"GUARD\":\r\n m = re.match(r\"(\\S+)\\s(\\S+)\\s(\\S+)\", body)\r\n entry, guard, status = m.groups()\r\n event = GuardEvent(evtype, entry, guard, status, body)\r\n elif evtype == \"TORCTL_TIMER\":\r\n event = TimerEvent(evtype, data)\r\n else:\r\n event = UnknownEvent(evtype, body)\r\n\r\n return event", "def prepare_data(self):\n self.empty = 'yes' if len(self.data) == 0 else 'no'\n self._keys = sorted(key for key in self.data.keys())\n\n # Validate the keys\n for key in self._keys:\n if not is_valid_matlab_field_label(key):\n msg = \"'{}' is not a valid MATLAB field label\".format(key)\n raise ValueError(msg)\n self.field_names = 
\" \".join(self._keys)", "def __read_json(self):\n try:\n with open(self.X_path) as Xfile, open(self.Y_path) as Yfile:\n print('Starting to read the json files')\n t1 = time.perf_counter()\n Ydata = [obj[0] for obj in json.load(Yfile)]\n Xdata = json.load(Xfile)\n # Note: some samples (31) are empty\n # Xdata = [obj for obj in json.load(Xfile) if len(obj)>1]\n for record in Xdata:\n del record['sha256'] # Remove signature from features\n t2 = time.perf_counter()\n print(f'Finished reading the json files. '\n f'Elapsed time: {(t2 - t1) / 60.0} minutes')\n except Exception as e:\n print(f'Exception while reading json files ({e})')\n raise e\n\n self.Y = np.array(Ydata, dtype=np.uint8)\n print('Y.shape', self.Y.shape)\n self.vectorizer = DictVectorizer(sparse=True, dtype=np.uint8)\n self.X = self.vectorizer.fit_transform(Xdata)\n print('X.shape (All):', self.X.shape)\n # If feature selection is applied\n if self.n_features != -1:\n support = SelectKBest(chi2, k=self.n_features).fit(self.X, self.Y)\n self.vectorizer.restrict(support.get_support())\n self.X = support.transform(self.X)\n print('X.shape (Reduced):', self.X.shape)\n\n self.feature_names = self.vectorizer.feature_names_\n malware_idx = np.where(self.Y == 1)[0]\n self.X_malware = self.X[malware_idx, :]\n goodware_idx = np.where(self.Y == 0)[0]\n self.X_goodware = self.X[goodware_idx, :]\n\n\n try:\n with shelve.open(self.shelf_path, 'c') as shelf:\n print('Saving data to a shelf')\n shelf['X'] = self.X\n shelf['X_malware'] = self.X_malware\n shelf['X_goodware'] = self.X_goodware\n shelf['Y'] = self.Y\n shelf['feature_names'] = self.feature_names\n print('Finished saving the shelf')\n except Exception as e:\n print(f'Exception while saving data to a shelf ({e})')", "def __init__(self, data: dict):\n super().__init__(data)\n self._supports_validation = False\n self._ping_data_raw = data['pingData']", "def _decode_5104(data):\n\n text = []\n start_byte = 0\n while start_byte + 2 < len(data):\n tag = data[start_byte:start_byte + 2]\n if tag == b'#u':\n start_byte += 2\n text_size = struct.unpack(\n '<h', data[start_byte:start_byte + 2])[0]\n start_byte += 2\n text.append(data[start_byte:start_byte + text_size].decode('utf8'))\n start_byte += text_size\n start_byte += 6\n elif tag == b'$u':\n start_byte += 2\n text.append(struct.unpack(\n '<h', data[start_byte:start_byte + 2])[0])\n start_byte += 2\n start_byte += 6\n elif tag == b',u':\n start_byte += 2\n text.append(struct.unpack(\n '<h', data[start_byte:start_byte + 2])[0])\n start_byte += 2\n else:\n start_byte += 1\n\n return {'analyst': text[0],\n 'date': text[2],\n 'image_name': text[4],\n 'instrument_model': text[5],\n 'instrument_serial_number': text[6],\n 'instrument_software_version': text[7],\n 'accumulations': text[9],\n 'detector': text[11],\n 'source': text[12],\n 'beam_splitter': text[13],\n 'apodization': text[15],\n 'spectrum_type': text[16],\n 'beam_type': text[17],\n 'phase_correction': text[20],\n 'ir_accessory': text[26],\n 'igram_type': text[28],\n 'scan_direction': text[29],\n 'background_scans': text[32]}", "def get_json(self):\n data = {}\n data['ip'] = self.ip\n\n try:\n data['country'] = self.processedvtdata[\"country\"]\n except KeyError:\n data['country'] = 'None'\n try:\n data['as'] = self.processedvtdata[\"as_owner\"]\n except KeyError:\n data['as'] = 'None'\n try:\n data['rdns'] = self.processedvtdata[\"self.reversedns\"]\n except KeyError:\n data['rdns'] = 'None'\n try:\n data['label'] = self.expertlabel\n except AttributeError:\n data['label'] = ''\n\n 
# geodata\n #{\"status\":\"success\",\"country\":\"Yemen\",\"countryCode\":\"YE\",\"region\":\"SA\",\"regionName\":\"Amanat Alasimah\",\"city\":\"Sanaa\",\"zip\":\"\",\"lat\":15.3522,\"lon\":44.2095,\"timezone\":\"Asia/Aden\",\"isp\":\"Public Telecommunication Corporation\",\"org\":\"YemenNet\",\"as\":\"AS30873 Public Telecommunication Corporation\",\"query\":\"134.35.218.63\"}\n if self.geodata:\n data['geodata'] = self.geodata\n \n # vt resolutions. Is a list\n data['vt'] = {}\n try:\n if self.processedvtdata['resolutions'] != 'None':\n data['vt']['resolutions'] = []\n for count, resolution_tuple in enumerate(self.processedvtdata['resolutions']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = resolution_tuple[0]\n temp['domain'] = resolution_tuple[1]\n data['vt']['resolutions'].append(temp)\n except KeyError:\n pass\n\n # vt urls. Is a list\n try:\n if self.processedvtdata['detected_urls'] != 'None':\n data['vt']['detected_urls'] = []\n for count, url_tuple in enumerate(self.processedvtdata['detected_urls']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = url_tuple[0]\n temp['url'] = url_tuple[1][0]\n temp['detections'] = str(url_tuple[1][1]) + '/' + str(url_tuple[1][2])\n data['vt']['detected_urls'].append(temp)\n except KeyError:\n pass\n\n\n # vt detected communicating samples. Is a list\n try:\n if self.processedvtdata['detected_communicating_samples'] != 'None':\n data['vt']['detected_communicating_samples'] = []\n for count, communcating_tuple in enumerate(self.processedvtdata['detected_communicating_samples']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = communcating_tuple[0]\n temp['detections'] = str(communcating_tuple[1][0]) + '/' + str(communcating_tuple[1][1])\n temp['sha256'] = communcating_tuple[1][2]\n data['vt']['detected_communicating_samples'].append(temp)\n except AttributeError:\n pass\n\n # vt detected downloaded samples. Is a list\n try:\n if self.processedvtdata['detected_downloaded_samples'] != 'None':\n data['vt']['detected_downloaded_samples'] = []\n for count, detected_tuple in enumerate(self.processedvtdata['detected_downloaded_samples']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = detected_tuple[0]\n temp['detections'] = str(detected_tuple[1][0]) + '/' + str(detected_tuple[1][1])\n temp['sha256'] = detected_tuple[1][2]\n data['vt']['detected_downloaded_samples'].append(temp)\n except AttributeError:\n pass\n\n # vt referrer downloaded samples. 
Is a list\n try:\n if self.processedvtdata['detected_referrer_samples'] != 'None':\n data['vt']['detected_referrer_samples'] = []\n for count, referrer_tuple in enumerate(self.processedvtdata['detected_referrer_samples']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['sha256'] = referrer_tuple[0]\n temp['detections'] = str(referrer_tuple[1][0]) + '/' + str(referrer_tuple[1][1])\n data['vt']['detected_referrer_samples'].append(temp)\n except AttributeError:\n pass\n\n # pt data\n data['pt'] = {}\n if self.processedptdata:\n count = 0\n data['pt']['passive_dns'] = []\n for result in self.processedptdata_results:\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['lastseen'] = result[0]\n temp['firstseen'] = result[1][0]\n temp['hostname'] = result[1][1]\n data['pt']['passive_dns'].append(temp)\n count += 1\n\n # shodan data\n try:\n if self.shodandata:\n data['shodan'] = self.shodandata\n except AttributeError:\n pass\n\n data = json.dumps(data)\n return data", "def load_data(self) -> None:", "def __init__(self):\n self.data = []\n self.record = {}", "def _readHeader(self):\n self.ControllerVersion = self._readInt(0)\n self.LogicOutput = self._readInt(2)\n self.AppHiCapLowNoise = self._readInt(4)\n self.TimingMode = self._readInt(8)\n self.Exposure = self._readFloat(10)\n self.DetTemperature = self._readFloat(36)\n self.DetectorType = self._readInt(40)\n self.TriggerDiode = self._readInt(44)\n self.DelayTime = self._readFloat(46)\n self.ShutterControl = self._readInt(50)\n self.AbsorbLive = self._readInt(52)\n self.AbsorbMode = self._readInt(54)\n self.CanDoVirtualChip = self._readInt(56)\n self.ThresholdMinLive = self._readInt(58)\n self.ThresholdMin = self._readFloat(60)\n self.ThresholdMaxLive = self._readInt(64)\n self.ThresholdMax = self._readFloat(66)\n self.ADCOffset = self._readInt(188)\n self.ADCRate = self._readInt(190)\n self.ADCType = self._readInt(192)\n self.ADCRes = self._readInt(194)\n self.ADCBitAdj = self._readInt(196)\n self.Gain = self._readInt(198)\n self.GeometricOps = self._readInt(600)", "def startProtocol(self):\n #init state trackers\n self.initialMessageSentToServer = False\n self.sentPacketToPeerPublicAndPrivateAddresses = False\n self.mirroredBackWhicheverPacketMadeIt = False\n self.peerConnectionEstablished = False\n #init peer info\n self.peerPublicAddress = None\n self.peerPrivateAddress = None\n self.peerAddress = None \n self.peerUserName = None\n\n #talk to handshake server\n if iAmServer:\n data = {\n 'registering-server': True,\n 'user-name': userName,\n 'private-ip': myPrivateIp, \n 'private-port': myPrivatePort\n }\n else:\n data = {\n 'registering-server': False,\n 'user-name': userName,\n 'private-ip': myPrivateIp, \n 'private-port': myPrivatePort,\n 'server-name' : serverName,\n 'server-password': serverPassword,\n }\n self.transport.write(json.dumps(data).encode(), (HANDSHAKE_SERVER_IP, HANDSHAKE_SERVER_PORT))\n print(\"sent initial message to server \" + HANDSHAKE_SERVER_IP + \":\" + str(HANDSHAKE_SERVER_PORT))\n self.initialMessageSentToServer = True", "def _decode(self, input_dict):\n pass", "def _read_header_line_2(lines: list) -> dict:\n fields = (\n \"detection_status\",\n \"warning\",\n \"cloud_base_data\",\n \"warning_flags\",\n )\n values = [[line[0], line[1], line[3:20], line[21:].strip()] for line in lines]\n return values_to_dict(fields, values)", "def __init__(self, fileName):\n self.recordDict = {}\n for line in open(fileName, 'r') :\n sipRecord = json.loads(line)\n 
self.recordDict[sipRecord['addressOfRecord']] = line", "def __process_ethframe(self, eth_header: bytes) -> Dict[str, Any]:\n eth = struct.unpack('!6s6sH', eth_header)\n eth_protocol = socket.ntohs(eth[2])\n\n return {\n 'header_length': Sniffer.ETH_HEADER_LENGTH,\n 'protocol': eth_protocol,\n }" ]
[ "0.5554169", "0.5521527", "0.53812873", "0.5335025", "0.5277362", "0.5266372", "0.5250536", "0.52459896", "0.5201471", "0.5174532", "0.5169621", "0.5158002", "0.5134929", "0.5060775", "0.5059112", "0.5053946", "0.50465447", "0.50403374", "0.50372404", "0.50230026", "0.5013707", "0.49840298", "0.49820694", "0.4975421", "0.4965126", "0.4963513", "0.49616337", "0.49530494", "0.49451962", "0.49432674", "0.49310958", "0.49284548", "0.492647", "0.49049306", "0.48948961", "0.48936507", "0.4893332", "0.4893026", "0.48888153", "0.48815712", "0.4877547", "0.48764375", "0.48679805", "0.48677358", "0.48607337", "0.4859309", "0.48580712", "0.48528638", "0.48472285", "0.48445553", "0.48437163", "0.48431298", "0.48375133", "0.48353112", "0.48173434", "0.48115897", "0.48106772", "0.48067665", "0.48040423", "0.4801262", "0.48003197", "0.4799278", "0.4794196", "0.47918835", "0.47916383", "0.47877154", "0.47851872", "0.47802532", "0.47771934", "0.47771934", "0.47771934", "0.47759876", "0.47702926", "0.47677368", "0.47675332", "0.47671244", "0.47659585", "0.47612363", "0.4756934", "0.4755744", "0.47514", "0.47491455", "0.47451773", "0.47420615", "0.47401464", "0.47389933", "0.47360614", "0.4734958", "0.47304857", "0.47236165", "0.4721751", "0.47178397", "0.47090822", "0.4702159", "0.46964377", "0.4692878", "0.46821427", "0.46793064", "0.46718496", "0.46713755", "0.46675187" ]
0.0
-1
Called by the GUI clock at regular intervals according to self.sampling_rate. Send a data request signal to the Arduino and interpret the json output. Return the entire history of all recorded data (regardless of whether or not a null value was read or if the data series are not 'checked' in the GUI).
def sample(self):
    """Request one JSON sample from the Arduino over Bluetooth and record it."""
    # Ask the Arduino for a reading; it replies with a single newline-terminated JSON line.
    self.sock.send(b'B')
    data = b''
    try:
        while True:
            chunk = self.sock.recv(255)
            data += chunk
            if chunk.find(b'\n') != -1:
                break
    except Exception as err:
        print(err)

    reading = json.loads(data.decode())
    # Skip recording when any series reports a null (zero) value.
    if not any(x == 0 for x in reading.values()):
        for label in self.data_labels:
            self.data[label].append(reading[label])
        # self.data["time"].append(time.time() - self.start)

    # Return the entire history of all recorded data, whether or not this
    # sample was recorded and regardless of which series are checked in the GUI.
    return self.data
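The query above describes a polling callback driven by a GUI clock; the short, self-contained sketch below illustrates that pattern outside the dataset record. StubSampler, its _request_json stand-in, and the field names are assumptions for illustration only: each tick requests one JSON sample, appends it per label, and returns the full history every time.

import json
import time


class StubSampler:
    """Illustrative stand-in for a Bluetooth-backed sampler."""

    def __init__(self, labels, sampling_rate=0.5):
        self.data_labels = labels
        self.sampling_rate = sampling_rate          # seconds between requests
        self.data = {label: [] for label in labels}
        self.start = time.time()

    def _request_json(self):
        # Stand-in for the serial/Bluetooth round trip: one JSON line per request.
        return json.dumps({label: 42 for label in self.data_labels}).encode()

    def sample(self):
        reading = json.loads(self._request_json().decode())
        for label in self.data_labels:
            self.data[label].append(reading[label])
        return self.data                            # full history on every tick


if __name__ == "__main__":
    sampler = StubSampler(["temp", "humidity"])
    for _ in range(3):
        history = sampler.sample()
        time.sleep(sampler.sampling_rate)
    print(history)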
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def recive_data(self):\n # read all available data\n while self.ser.inWaiting() > self.INPUT_DATA_SIZE+1:\n data = array.array('c')\n # search the header\n data.append(self.ser.read(1))\n while data[0] != chr(1):\n data[0] = self.ser.read(1)\n \n # wait for all available data\n while self.ser.inWaiting() < (self.INPUT_DATA_SIZE-1):\n time.sleep(0.03);\n \n # recives data\n data = self.ser.read(self.INPUT_DATA_SIZE-1)\n \n # prove if you want graphical data\n if self.pushButton_monitor.isChecked():\n # decodes the data\n t = struct.unpack('I', data[3]+data[2]+data[1]+data[0])\n r = struct.unpack('f', data[4]+data[5]+data[6]+data[7])\n x0 = struct.unpack('f', data[8]+data[9]+data[10]+data[11])\n x1 = struct.unpack('f', data[12]+data[13]+data[14]+data[15])\n u = struct.unpack('f', data[16]+data[17]+data[18]+data[19])\n \n self.time = t[0]*25e-9\n \n # prepare the string output\n aux_str = \" t = \"+str(self.time)+\"\\t\"\n aux_str += \" r = \"+str(r[0])+\"\\t\"\n aux_str += \" u = \"+str(u[0])+\"\\t\"\n aux_str += \" x1 = \"+str(x1[0])+\"\\t\"\n aux_str += \" x0 = \"+str(x0[0])+\"\\n\"\n # print string output\n self.textBrowser.insertPlainText(aux_str)\n \n # append data to the arrays\n self.graf_t.append(self.time)\n self.graf_r.append(r[0])\n self.graf_x0.append(x0[0])\n self.graf_x1.append(x1[0])\n self.graf_u.append(u[0])\n \n # remove one value if the arrays have maximum length\n if self.graf_t.buffer_info()[1] >= NUM_SAMPLES:\n self.graf_t.pop(0)\n self.graf_r.pop(0)\n self.graf_x0.pop(0)\n self.graf_x1.pop(0)\n self.graf_u.pop(0)\n \n # reload number of samples lavel\n self.label_samples_value.setText(str(self.graf_t.buffer_info()[1]))\n # reload number of waiting chars in serial rx buffer\n self.label_rx_buff_value.setText(str(self.ser.inWaiting()))\n\n # reload mutex area\n self.updated_data = 1\n \n # prove if there are available id's\n if (self.actionPC_Monitor.isChecked() and data[20] == chr(2)):\n # if it is true, looks how much id's\n i = struct.unpack('B', data[21])\n\n if i[0] < STACK_SIZE:\n for z in range(i[0]):\n new_device = struct.unpack('B', data[z+22])\n new_string = str(new_device[0])\n \n llista = self.listWidget_link.findItems(new_string, QtCore.Qt.MatchExactly)\n if len(llista) == 0:\n self.listWidget_link.addItem(new_string)", "def run(self):\n while(not self.stop_event.is_set()):\n # read values until stop is sent\n response1 = _read_once(1,self.serial)\n response2 = _read_once(2,self.serial)\n #print(response)\n self.data1[\"d\"].append(response1) # Push response to the data list for later\n self.data2[\"d\"].append(response2) # Push response to the data list for later\n curTime = time.time()\n self.data1[\"t\"].append(curTime)\n self.data2[\"t\"].append(curTime)\n #sleep(0.0001) # I need to be small enough to capture peaks.\n return", "def record_data(self, no_of_samples, interval):\r\n\r\n #tempory storage while function is completing\r\n temp_return_list = []\r\n\r\n #colec\r\n for i in range(0,no_of_samples):\r\n\r\n print(i)\r\n sensor_value = self.sen.get_sensor_value()\r\n\r\n temp_return_list.append([sensor_value,(i*interval)])\r\n\r\n time.sleep(interval)\r\n\r\n \r\n \r\n self.return_data = temp_return_list", "def gather_data(self, *args, **kwargs):\n instrument_arg = kwargs.get('instrument', 'EUR_USD')\n granularity_arg = kwargs.get('granularity', 'M1')\n candle_format = kwargs.get('candleFormat', 'bidask')\n start_time = kwargs.get('start', '2014-10-01T00:00:00.000000Z')\n count_arg = kwargs.get('count', 5000)\n out_data = []\n data_complete = 
False\n while(not data_complete):\n response = self.oanda.get_history(instrument=instrument_arg,\n granularity=granularity_arg,\n candleFormat=candle_format,\n start=start_time,\n count=count_arg)\n raw_data = response['candles']\n if (len(out_data) == 0):\n out_data = out_data + raw_data\n elif (len(out_data) > 1):\n # raw_data[0] is already in out_data as raw_data[-1] from last\n # iteration\n out_data = out_data + raw_data[1:]\n start_time = raw_data[-1]['time']\n if (len(raw_data) < 5000):\n data_complete = True\n\n out_data = self._list_to_df(out_data)\n return out_data", "def result(self):\n\n chart_series = [] # will hold all the series created\n\n # determine the sensor to plot from the sensor selected by the user.\n the_sensor = bmsapp.models.Sensor.objects.get(pk=self.request_params['select_sensor'])\n\n # get the requested averaging interval in hours\n averaging_hours = float(self.request_params['averaging_time'])\n\n # determine the start time for selecting records\n st_ts, end_ts = self.get_ts_range()\n\n # get the database records\n df = self.reading_db.dataframeForOneID(the_sensor.sensor_id, st_ts, end_ts, pytz.timezone(self.timezone))\n\n if not df.empty:\n\n # info needed to create each series (selection list, series name, visible)\n if self.schedule:\n occupied_times = df.ts.apply(self.schedule.is_occupied)\n unoccupied_times = -occupied_times\n\n series_info = [(None, 'All Data', True),\n (occupied_times, 'Occupied Periods', False),\n (unoccupied_times, 'Unoccupied Periods', False)]\n else:\n # no schedule, so just return the 'All Data' series\n series_info = [(None, 'All Data', True)]\n\n for mask, series_name, visibility in series_info:\n if mask is None:\n select_df = df\n else:\n select_df = df[mask]\n\n if averaging_hours:\n select_df = bmsapp.data_util.resample_timeseries(select_df, averaging_hours)\n\n histogram_series = bmsapp.data_util.histogram_from_series(select_df.val)\n\n chart_series.append({'x': [x for x,y in histogram_series],\n 'y': [y for x,y in histogram_series],\n 'type': 'scatter',\n 'mode': 'lines', \n 'name': series_name, \n 'visible': 'true' if visibility else 'legendonly'\n })\n\n opt = self.get_chart_options('plotly')\n opt['data'] = chart_series\n opt['layout']['title'] = the_sensor.title + ' Histogram: ' + self.building.title\n opt['layout']['xaxis']['title'] = the_sensor.unit.label\n opt['layout']['xaxis']['type'] = 'linear'\n opt['layout']['yaxis']['title'] = '% of Readings'\n opt['layout']['yaxis']['rangemode'] = 'tozero'\n\n html = basechart.chart_config.chart_container_html(opt['layout']['title'])\n\n return {'html': html, 'objects': [('plotly', opt)]}", "def processData(self):\n recordSet = AresChartsService.toMultiSeries(self.vals, self.chartKeys, self.selectedX , self.chartVals, extKeys=self.extKeys)\n self.aresObj.jsGlobal.add(\"data_%s = %s\" % (self.htmlId, json.dumps(recordSet)))", "def get_all(self):\n try:\n return self.current_data\n except:\n print('No data received from sensor')", "def reqData(self):\r\n #self.reqGlobalCancel()\r\n #self.add_historical(\"Stock('TSLA', 'SMART', 'USD')\")\r\n #self.add_historical(\"Stock('IBM', 'SMART', 'USD')\")\r\n #self.add_historical(\"Stock('MSFT', 'SMART', 'USD')\")\r\n self.add_historical(\"Stock('FB', 'SMART', 'USD')\")", "async def request_data(\r\n self,\r\n **kwargs, # pylint: disable=unused-argument\r\n ) -> None:\r\n await self._send(\"report 2\")\r\n\r\n if self.device_info.is_meter_integrated():\r\n await self._send(\"report 3\")\r\n\r\n if 
self.device_info.is_data_logger_integrated():\r\n await self._send(\"report 100\")", "def automatic_meter_reading_demo():\n\n\t#Get sensor data from ellitrack\n\tserial = 17112915\n\n\t#get period dates\n\tfirst_day_of_the_month = datetime.datetime.today().replace(day=1)\n\tyesterday = datetime.datetime.now().date() - datetime.timedelta(days=1)\n\t#seven_days_earlier = yesterday - datetime.timedelta(days=7)\n\n\tperiod_from = yesterday.strftime(\"%d-%m-%Y\")\n\tperiod_from = period_from + ' +00:00:00'\n\tperiod_to = yesterday.strftime(\"%d-%m-%Y\")\n\tperiod_to = period_to + \" +23:59:59\"\n\n\t#print(period_from, period_to)\n\n\t#initialize request session\n\tlogin_url = \"http://www.ellitrack.nl/auth/login/\"\n\n\t#authenticate session\n\tellitrack_session = requests.Session()\n\tellitrack_session.post(login_url, data={'username': \"markdeblois\", 'password': \"upandegani\"})\n\n\t#get data for pure fresh ellitrack\n\tdata_url = \"http://www.ellitrack.nl/measurement/downloadexport/serialnumber/%s/type/period/n/0\" \\\n \"/periodtype/date/periodfrom/%s/periodto/%s/format/json\" % (serial, period_from, period_to)\n\tresp = ellitrack_session.get(data_url)\n\n\tif resp.status_code == 200:\n\t\toutput = StringIO(resp.text)\n\t\tell_df = pandas.read_csv(output, sep=\"\\t\")\n\n\t\t#unnecessary columns removed\n\t\tell_df_stripped = ell_df[ell_df.columns[0:2]]\n\n\t\t#get prev and curr readings\n\t\tprev_reading = ell_df_stripped['1 Energie'].min()\n\t\tcurr_reading = ell_df_stripped['1 Energie'].max()\n\n\t\t#Insert data into Meter Reading Sheet Doctype\n\t\tdata = {\n\t\t\t\"doctype\": \"Meter Reading Capture\",\n\t\t\t\"reading_date\": first_day_of_the_month,\n\t\t}\n\t\t\n\t\t#billing month\n\t\tget_curr_month = first_day_of_the_month.strftime(\"%B\")\n\n\t\tautomatic_readings_data = {\n\t\t\t\t\"doctype\": \"Meter Reading Sheet\",\n\t\t\t\t\"automatic_meter_id\": \"%s\" %serial,\n\t\t\t\t\"previous_automatic_readings\": \"%s\" %prev_reading,\n\t\t\t\t\"current_automatic_readings\": \"%s\" %curr_reading,\n\t\t\t\t\"parent\": \"All Territories %s 2018\" %get_curr_month,\n\t\t\t\t\"parenttype\": \"Meter Reading Capture\",\n\t\t\t\t\"parentfield\": \"meter_reading_sheet\"\n\t\t\t}\n\n\t\t#check if doc exists\n\t\tname = \"All Territories \"+get_curr_month+\" 2018\"\n\t\tdoc_list = frappe.get_list(\"Meter Reading Capture\", filters={'name':name})\n\t\tif doc_list:\n\t\t\tmdoc = frappe.get_doc(\"Meter Reading Capture\", name)\n\n\t\t\ttry:\n\t\t\t\tmrs_doc = frappe.get_doc(automatic_readings_data)\n\t\t\t\tmrs_doc.insert()\n\t\t\t\tfrappe.db.commit()\n\t\t\texcept Exception as err:\n\t\t\t\treturn err\n\t\telse:\n\t\t\t#create new doc\n\t\t\tdoc = frappe.new_doc(\"Meter Reading Capture\")\n\t\t\tdoc.reading_date = first_day_of_the_month\n\t\t\tdoc.billing_period = get_curr_month+\" 2018\"\n\t\t\tdoc.route = \"All Territories\"\n\t\t\tdoc.route_and_billing_period = name\n\n\t\t\ttry:\n\t\t\t\tdoc.insert()\n\t\t\t\tfrappe.db.commit()\n\t\t\texcept Exception as err:\n\t\t\t\treturn err\n\n\t\t\ttry:\n\t\t\t\t#update doc with the meter readings\n\t\t\t\tmrs_doc = frappe.get_doc(automatic_readings_data)\n\t\t\t\tmrs_doc.insert()\n\t\t\t\tfrappe.db.commit()\n\t\t\texcept Exception as err:\n\t\t\t\treturn err", "def read(self):\n # return previous data if read() was already called less than 1 second ago\n time_since_last_read = (\n time.time_ns()*self.NS_TO_MS - self.query_time) * self.MS_TO_SEC\n if time_since_last_read < 1:\n return self.readings\n\n data_bytes = self.sensor.readline()\n data = 
data_bytes.decode('utf-8')\n\n # extract wind direction (deg) data\n min_direction = float(data[7:10])\n avg_direction = float(data[15:18])\n max_direction = float(data[23:26])\n\n # extract wind speed (m/s) data\n min_speed = float(data[31:34])\n avg_speed = float(data[39:42])\n max_speed = float(data[47:50])\n\n self.readings = [\n {\n \"type\": \"windDirection\",\n \"value\": {\n \"minDirection\": min_direction,\n \"avgDirection\": avg_direction,\n \"maxDirection\": max_direction\n }\n },\n {\n \"type\": \"windSpeed\",\n \"value\": {\n \"minSpeed\": min_speed,\n \"avgSpeed\": avg_speed,\n \"maxSpeed\": max_speed\n }\n }\n ]\n\n # update the last time of read() being called\n self.query_time = time.time_ns() * self.NS_TO_MS\n\n return self.readings", "def get_data(self):\n try:\n data_string = self.ser.readline().decode()\n except UnicodeDecodeError as e:\n return self.get_data()\n \n if not data_string: # check for empty string\n return self.get_data()\n \n if data_string[0] == '|' and data_string[-1] == '\\n' and\\\n self.reset_confirmed:\n # if the data_string is valid, process it\n try:\n data_string = data_string.strip() \n data = data_string.split(',')\n assert len(data) == 7, \"Bad data Length\" \n data = [float(val) for val in data[1:]]\n data[0] /= 1000\n if self.ser.in_waiting: self.ser.reset_input_buffer()\n return data\n except (AssertionError, ValueError) as e:\n print(\"Error:\", type(e), e)\n if self.ser.in_waiting: self.ser.reset_input_buffer()\n return self.get_data()\n\n\n elif data_string[0] == '+' and data_string[-1] == '\\n' and\\\n self.reset_confirmed:\n # if the data_string is a valid time stamp, process it\n # self.system_timestamp = \"\\nSystem start time is: \"\\\n # \"%s\" % strftime(\"%Y/%m/%d %H:%M:%S\", localtime())\n self.arduino_timestamp = data_string.strip()\n print(self.arduino_timestamp)\n return self.get_data()\n \n elif data_string[0] == '/' and data_string[-1] == '\\n':\n # if string begins with / then it is a debug message and should\n # just be returned\n if \"setup finished\" in data_string.lower(): \n self.reset_confirmed = True\n print(data_string.strip())\n return self.get_data()\n else:\n # if the data_string is invalid try again\n return self.get_data()", "def stremingData(self):\n ports = com_select()\n self.comport = None\n if not ports:\n QMessageBox.warning(self, 'No Connection',\n \"No unit found please verify that the IMU is connected\")\n else:\n for port in ports:\n if test_con(port):\n self.comport = port\n break\n if self.comport:\n if self.ui.qtBStream.text() == 'Stream':\n self.ui.qtBStream.setText('Stop')\n self.streamData = GetValue(self.comport)\n self.streamData.start()\n iter(self.streamData)\n self.timer = QTimer()\n self.timer.timeout.connect(self.graphupdate)\n self.i = 0\n self.__x = []\n if self.recF:\n if self.ui.qtBrowse.text() == '':\n self.file_select()\n if self.ui.qtBrowse.text() == '':\n QMessageBox.warning(self,\n 'No file',\n 'No file selected')\n self.stremingData()\n return 0\n self.timer.start()\n elif self.ui.qtBStream.text() == 'Stop':\n self.ui.qtBStream.setText('Stream')\n self.timer.stop()\n self.streamData.stop()\n self.data = [[] for i in range(8)]\n else:\n QMessageBox.warning(self, 'No Connection com search',\n \"No unit found please verify that the IMU is connected\")", "def readOneData(self):\n\n\t\tif self._mt5Client is not None:\n\t\t\tdatas = self._mt5Client.getData()\n\n\t\t\tif datas is not None:\n\t\t\t\tPERIOD = int(self._config['data']['predict'])\n\t\t\t\tHALF_PERIOD = 
int(PERIOD/2)\n\n\t\t\t\tdata = []\n\n\t\t\t\t#Time Got\n\t\t\t\tself._LAST_PERIOD_PREDICTED_END = datas['time']\n\n\t\t\t\t#time open high low close tick_volume spread real_\n\t\t\t\t#Switch the price type calucation\n\n\t\t\t\tw_p = self._config['data']['price']\n\t\t\t\tv = 0\n\n\t\t\t\tif(w_p == CHART_PRICES_TYPE['O']):\n\t\t\t\t\tv = float(datas['open']) \n\n\t\t\t\telif(w_p == CHART_PRICES_TYPE['C']):\n\t\t\t\t\t\n\t\t\t\t\tv = float(datas['close']) \n\n\t\t\t\telif(w_p == CHART_PRICES_TYPE['H']):\n\t\t\t\t\t\n\t\t\t\t\tv = float(datas['high'])\n\n\t\t\t\telif(w_p == CHART_PRICES_TYPE['L']):\n\n\t\t\t\t\tv = float(datas['low']) \n\n\t\t\t\telif(w_p == CHART_PRICES_TYPE['HL/2']):\n\t\t\t\t\tv = ( float(datas['low']) + float(datas['high']) ) /2\n\t\t\t\t\n\t\t\t\tself.notify(msg={\n\t\t\t\t\t\t\t\t\t'prices': {\n\t\t\t\t\t\t\t\t\t\t'values': {\n\t\t\t\t\t\t\t\t\t\t\t'RP': str(v)\n\t\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\t} \n\t\t\t\t\t\t\t\t} \n\t\t\t\t)\n\n\t\t\t\tdata.append(100000 * v ) \n\n\t\t\t\tself._TEMPORARY_GLOBAL_DATA.append(data[-1])\n\n\t\t\t\tself._GLOBAL_DATA.append(data[-1])\n\n\t\t\t\treturn data", "def data_process():\n global localtime\n global value_dict\n sensor_types = sEtting.sensor_types\n sensor_values = []\n msg = None\n value_dict = collections.OrderedDict.fromkeys(sEtting.payload_header)\n value_dict[\"ver_format\"] = sEtting.ver_format\n value_dict[\"FAKE_GPS\"] = sEtting.fake_gps\n value_dict[\"app\"] = sEtting.app\n value_dict[\"ver_app\"] = sEtting.ver_app\n value_dict[\"device_id\"] = sEtting.device_id\n value_dict[\"date\"] = localtime.strftime(\"%Y-%m-%d\")\n value_dict[\"time\"] = localtime.strftime(\"%H:%M:%S\")\n value_dict[\"device\"] = sEtting.device\n\n for sensor in sensor_types:\n if sensor == 'pm25-at':\n value_dict[\"s_d0\"] = get_reading_csv(sensor)\n elif sensor == 'temperature':\n value_dict[\"s_t0\"] = get_reading_csv(sensor)\n elif sensor == 'humidity':\n value_dict[\"s_h0\"] = get_reading_csv(sensor)\n elif sensor == 'pm10-at':\n value_dict[\"s_d1\"] = get_reading_csv(sensor)\n else:\n print 'Not support sensor type.'\n if sEtting.fake_gps == 1:\n value_dict[\"gps_lat\"] = sEtting.fgps_lat\n value_dict[\"gps_lon\"] = sEtting.fgps_lon\n value_dict[\"gps_alt\"] = sEtting.fgps_alt\n value_dict[\"gps_fix\"] = 0\n else:\n value_dict[\"gps_lat\"] = get_gps()[0]\n value_dict[\"gps_lon\"] = get_gps()[1]\n value_dict[\"gps_alt\"] = get_gps()[2]\n value_dict[\"gps_fix\"] = gpsd.fix.mode\n value_dict[\"gps_num\"] = 0\n #if debug_enable == '0':\n msg = \"|\" + \"|\".join([\"=\".join([key, str(val)])\n for key, val in value_dict.items()])\n return msg\n #elif debug_enable == '1':\n # msg_debug = \",\".join([\"=\".join([key, str(val)]) for key, val in value_dict.items()])\n # return msg_debug", "def recive_data(self, data_waveformreceived):\r\n self.adcollector.save_as_binary(self.savedirectory)\r\n self.channel_number = len(data_waveformreceived)\r\n if self.channel_number == 1: \r\n if 'Vp' in self.readinchan:\r\n self.data_collected_0 = data_waveformreceived[0]\r\n \r\n self.PlotDataItem_patch_voltage = PlotDataItem(self.xlabelhere_all, self.data_collected_0)\r\n #use the same color as before, taking advantages of employing same keys in dictionary\r\n self.PlotDataItem_patch_voltage.setPen('w')\r\n self.pw_data.addItem(self.PlotDataItem_patch_voltage)\r\n \r\n self.textitem_patch_voltage = pg.TextItem(('Vp'), color=('w'), anchor=(1, 1))\r\n self.textitem_patch_voltage.setPos(0, 1)\r\n self.pw_data.addItem(self.textitem_patch_voltage)\r\n elif 'Ip' 
in self.readinchan:\r\n self.data_collected_0 = data_waveformreceived[0]\r\n \r\n self.PlotDataItem_patch_current = PlotDataItem(self.xlabelhere_all, self.data_collected_0)\r\n #use the same color as before, taking advantages of employing same keys in dictionary\r\n self.PlotDataItem_patch_current.setPen('c')\r\n self.pw_data.addItem(self.PlotDataItem_patch_current)\r\n \r\n self.textitem_patch_current = pg.TextItem(('Ip'), color=('w'), anchor=(1, 1))\r\n self.textitem_patch_current.setPos(0, 1)\r\n self.pw_data.addItem(self.textitem_patch_current) \r\n elif 'PMT' in self.readinchan: # repeatnum, PMT_data_index_array, averagenum, ScanArrayXnum\r\n self.data_collected_0 = data_waveformreceived[0]*-1\r\n self.data_collected_0 = self.data_collected_0[0:len(self.data_collected_0)-1]\r\n \r\n # pmt data could come from raster scanning mode or from contour scanning mode.\r\n try:\r\n for i in range(self.repeatnum):\r\n self.PMT_image_reconstructed_array = self.data_collected_0[np.where(self.PMT_data_index_array_repeated == i+1)]\r\n Dataholder_average = np.mean(self.PMT_image_reconstructed_array.reshape(self.averagenum, -1), axis=0)\r\n Value_yPixels = int(len(self.samples_1)/self.ScanArrayXnum)\r\n self.PMT_image_reconstructed = np.reshape(Dataholder_average, (Value_yPixels, self.ScanArrayXnum))\r\n \r\n # Stack the arrays into a 3d array\r\n if i == 0:\r\n self.PMT_image_reconstructed_stack = self.PMT_image_reconstructed\r\n else:\r\n self.PMT_image_reconstructed_stack = np.concatenate((self.PMT_image_reconstructed_stack, self.PMT_image_reconstructed), axis=0)\r\n \r\n Localimg = Image.fromarray(self.PMT_image_reconstructed) #generate an image object\r\n Localimg.save(os.path.join(self.savedirectory, datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'_PMT_'+self.saving_prefix+'_'+str(i)+'.tif')) #save as tif\r\n \r\n plt.figure()\r\n plt.imshow(self.PMT_image_reconstructed, cmap = plt.cm.gray)\r\n plt.show()\r\n except:\r\n np.save(os.path.join(self.savedirectory, datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'_PMT_'+self.saving_prefix+'_'+'flatten'), self.data_collected_0)\r\n \r\n elif self.channel_number == 2: \r\n if 'PMT' not in self.readinchan:\r\n self.data_collected_0 = data_waveformreceived[0]\r\n \r\n self.PlotDataItem_patch_voltage = PlotDataItem(self.xlabelhere_all, self.data_collected_0)\r\n #use the same color as before, taking advantages of employing same keys in dictionary\r\n self.PlotDataItem_patch_voltage.setPen('w')\r\n self.pw_data.addItem(self.PlotDataItem_patch_voltage)\r\n \r\n self.textitem_patch_voltage = pg.TextItem(('Vp'), color=('w'), anchor=(1, 1))\r\n self.textitem_patch_voltage.setPos(0, 1)\r\n self.pw_data.addItem(self.textitem_patch_voltage) \r\n \r\n self.data_collected_1 = data_waveformreceived[1]\r\n \r\n self.PlotDataItem_patch_current = PlotDataItem(self.xlabelhere_all, self.data_collected_1)\r\n #use the same color as before, taking advantages of employing same keys in dictionary\r\n self.PlotDataItem_patch_current.setPen('c')\r\n self.pw_data.addItem(self.PlotDataItem_patch_current)\r\n \r\n self.textitem_patch_current = pg.TextItem(('Ip'), color=('w'), anchor=(1, 1))\r\n self.textitem_patch_current.setPos(0, 1)\r\n self.pw_data.addItem(self.textitem_patch_current) \r\n elif 'PMT' in self.readinchan:\r\n self.data_collected_0 = data_waveformreceived[0]*-1\r\n self.data_collected_0 = self.data_collected_0[0:len(self.data_collected_0)-1]\r\n \r\n try:\r\n for i in range(self.repeatnum):\r\n self.PMT_image_reconstructed_array = 
self.data_collected_0[np.where(self.PMT_data_index_array_repeated == i+1)]\r\n Dataholder_average = np.mean(self.PMT_image_reconstructed_array.reshape(self.averagenum, -1), axis=0)\r\n Value_yPixels = int(len(self.samples_1)/self.ScanArrayXnum)\r\n self.PMT_image_reconstructed = np.reshape(Dataholder_average, (Value_yPixels, self.ScanArrayXnum))\r\n \r\n # Stack the arrays into a 3d array\r\n if i == 0:\r\n self.PMT_image_reconstructed_stack = self.PMT_image_reconstructed\r\n else:\r\n self.PMT_image_reconstructed_stack = np.concatenate((self.PMT_image_reconstructed_stack, self.PMT_image_reconstructed), axis=0)\r\n \r\n Localimg = Image.fromarray(self.PMT_image_reconstructed) #generate an image object\r\n Localimg.save(os.path.join(self.savedirectory, datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'_PMT_'+self.saving_prefix+'_'+str(i)+'.tif')) #save as tif\r\n \r\n plt.figure()\r\n plt.imshow(self.PMT_image_reconstructed, cmap = plt.cm.gray)\r\n plt.show()\r\n except:\r\n np.save(os.path.join(self.savedirectory, datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'_PMT_'+self.saving_prefix+'_'+'contourscanning'), self.data_collected_0)\r\n \r\n if 'Vp' in self.readinchan:\r\n self.data_collected_1 = data_waveformreceived[1]\r\n \r\n self.PlotDataItem_patch_voltage = PlotDataItem(self.xlabelhere_all, self.data_collected_1)\r\n #use the same color as before, taking advantages of employing same keys in dictionary\r\n self.PlotDataItem_patch_voltage.setPen('w')\r\n self.pw_data.addItem(self.PlotDataItem_patch_voltage)\r\n \r\n self.textitem_patch_voltage = pg.TextItem(('Vp'), color=('w'), anchor=(1, 1))\r\n self.textitem_patch_voltage.setPos(0, 1)\r\n self.pw_data.addItem(self.textitem_patch_voltage)\r\n elif 'Ip' in self.readinchan:\r\n self.data_collected_1 = data_waveformreceived[1]\r\n \r\n self.PlotDataItem_patch_current = PlotDataItem(self.xlabelhere_all, self.data_collected_1)\r\n #use the same color as before, taking advantages of employing same keys in dictionary\r\n self.PlotDataItem_patch_current.setPen('c')\r\n self.pw_data.addItem(self.PlotDataItem_patch_current)\r\n \r\n self.textitem_patch_current = pg.TextItem(('Ip'), color=('w'), anchor=(1, 1))\r\n self.textitem_patch_current.setPos(0, 1)\r\n self.pw_data.addItem(self.textitem_patch_current)", "def sample(self):\n timestamp = time.time()\n try:\n res = requests.get(self.url)\n except requests.exceptions.ConnectionError as error:\n LOG.warning(\"%s %s\", self, error)\n return\n if 199 < res.status_code < 300:\n self.data.append((timestamp, res.json()))\n LOG.debug(\"%s appended data sample\", self)\n else:\n LOG.warning(\"Error %s loading data from %s\", res.status_code, self)\n self.data = self.data[-self.max_samples:]", "def getData(self):\n self.ser.write(b'g')\n readString = self.ser.readline()\n print(readString)\n readString = readString.decode(\"utf-8\")\n splittedString = readString.split('\\t')\n for i, num in enumerate(splittedString):\n try:\n splittedString[i] = int(float(num))\n except ValueError:\n pass\n self.accString.set('Accleration\\nX: %.5f\\nY: %.5f\\nZ: %.5f' %\n (splittedString[0], splittedString[1],\n splittedString[2]))\n self.logFile.write(readString)\n self.comJob = root.after(10, self.getData)", "def get_values(self):\n if self.__work_state == work_state[\"Sleeping\"]:\n raise RuntimeError(\"{}: the sensor is sleeping and will not send any values. 
Will wake it up first.\".format(self.sensor_name))\n if self.__report_mode == report_mode[\"Passive\"]:\n raise RuntimeError(\"{}: the sensor is in passive report mode and will not automatically send values. You need to call request() to get values.\".format(self.sensor_name))\n\n self.__duty_cycle_start = time.time()\n while self.duty_cycle == 0 or time.time() < self.__duty_cycle_start + self.__read_timeout:\n response_data = self.__response()\n if len(response_data) > 0:\n self.logger.info(\"{}: received response from sensor {} bytes.\".format(self.sensor_name, len(response_data)))\n return self.__extract_values_from_response(response_data)\n raise IOError(\"{}: no data within read timeout of {} has been received.\".format(self.sensor_name, self.__read_timeout))", "def collect_samples(serialPort,NO_SENSORS,NO_SAMPLES,log):\n run = '1'\n badSamples = 0\n count = 1\n log_temp = []\n temp = [0] * 20\n NO_FIELDS = (NO_SENSORS * 3) + 1\n \n while (run == '1'):\n # If the input buffer is not empty read the data out into rawData using \\n as a delimiter.\n if (serialPort.inWaiting()>0):\n rawData = serialPort.readline()\n print(rawData)\n \n # If invalid data is recieved this prevents program crash\n try:\n # Decode the bytes into a string\n data = rawData.decode()\n \n # Split x, y, z and newline values into a list\n if (count >= (NO_SAMPLES + 1)):\n endTime_temp = data.split(\" \", 2)\n if (len(endTime_temp) == 2 and '' not in endTime_temp):\n endTime = int(endTime_temp[0])\n else:\n endTime = 780\n print('Time not recieved')\n print('Lost Samples: ' + str(badSamples))\n run = '0'\n else:\n data_readings = data.split(\" \", NO_FIELDS)\n print(data_readings)\n \n # A correct sample should contain 16 values and not include null and so this is used\n # to validate the data and record any samples that are discarded in this way\n if (len(data_readings) == NO_FIELDS and '' not in data_readings):\n # Discard newline characters before saving data\n int_data_readings = list(map(int,data_readings[:(NO_FIELDS - 1)]))\n log_temp.append(int_data_readings)\n else:\n badSamples += 1\n except:\n print('Invalid data recieved')\n \n count += 1\n\n samplingPeriod = (endTime/NO_SAMPLES)/NO_SENSORS\n timeStamp = 0.0\n\n for i in range(0,len(log_temp)):\n for j in range(0,NO_SENSORS):\n temp[0+(j*4)] = log_temp[i][0+(j*3)]\n temp[1+(j*4)] = log_temp[i][1+(j*3)]\n temp[2+(j*4)] = log_temp[i][2+(j*3)]\n temp[3+(j*4)] = timeStamp\n timeStamp += samplingPeriod\n log.append(temp.copy())", "def collect_data(self):\r\n self.vcp.read(self.vcp.inWaiting())\r\n while True:\r\n data = self.vcp.readline()\r\n data = data.decode(\"ASCII\")\r\n timestamp = \",\" + datetime.datetime.now().strftime(\"%H:%M:%S\")\r\n data_timestamp = data + timestamp\r\n if not self.data_pause:\r\n self.update_anemometer_log.emit(data_timestamp)\r\n if self.stop_timer:\r\n break", "def send_data():\n range = request.args.get('range', '30')\n time = arrow.utcnow().replace(minutes=-int(range))\n data = Temperature.query\\\n .filter(Temperature.timestamp > time).order_by(Temperature.timestamp.desc()).all()\n return jsonify(results=[i.serialize for i in data])", "def _obtain_data(self):\n (self.data_df, self.column_df, self.station_name, self.log_file, self.station_lat, self.station_lon,\n self.station_elev, self.ws_anemometer_height, self.missing_fill_value, self.script_mode,\n self.auto_mode, self.fill_mode, self.metadata_mode, self.generate_bokeh, self.metadata_df,\n metadata_series) = input_functions.obtain_data(self.config_path, 
self.metadata_path)\n\n if self.script_mode == 1: # correcting data\n self.mc_iterations = 1000 # Number of iters for MC simulation of thornton running solar radiation gen\n else:\n self.mc_iterations = 50 # if we're not correcting data then only do a few iterations to save time\n\n print(\"\\nSystem: Raw data successfully extracted from station file.\")\n\n # Extract individual variables from data frame back into to numpy arrays.\n self.data_year = np.array(self.data_df.year)\n self.data_month = np.array(self.data_df.month)\n self.data_day = np.array(self.data_df.day)\n self.data_tavg = np.array(self.data_df.tavg)\n self.data_tmax = np.array(self.data_df.tmax)\n self.data_tmin = np.array(self.data_df.tmin)\n self.data_tdew = np.array(self.data_df.tdew)\n self.data_ea = np.array(self.data_df.ea)\n self.data_rhavg = np.array(self.data_df.rhavg)\n self.data_rhmax = np.array(self.data_df.rhmax)\n self.data_rhmin = np.array(self.data_df.rhmin)\n self.data_rs = np.array(self.data_df.rs)\n self.data_ws = np.array(self.data_df.ws)\n self.data_precip = np.array(self.data_df.precip)\n\n self.output_file_path = \"correction_files/\" + self.station_name + \"_output\" + \".xlsx\"", "def getSpectrumData(self):\n spectrum = []\n\n if not self.lastStatus.isHighSpeed:\n raise NotImplementedError('Full speed mode not implemented for {0}.'.format(self.model))\n\n packetCount = self.lastStatus.packetCount\n exposureTime = self.lastStatus.integrationTime\n\n for packet in range(packetCount):\n inputEndpoint = self.inputEndpoints[0]\n if self.idProduct == USB4000.classIdProduct and packet <= 3:\n inputEndpoint = self.inputEndpoints[1]\n\n values = self.readReply(inputEndpoint, unpackingFormat='<'+'H'*256, timeout=exposureTime*2)\n spectrum.extend(np.array(values))\n\n confirmation = self.readReply(inputEndpoint, size=1)\n if confirmation[0] != 0x69:\n self.flushEndpoints()\n raise RuntimeError('Spectrometer is desynchronized. 
Should disconnect')\n\n if self.discardTrailingSamples > 0:\n spectrum = spectrum[:-self.discardTrailingSamples]\n if self.discardLeadingSamples > 0:\n spectrum = spectrum[self.discardLeadingSamples:]\n\n return np.array(spectrum)", "def logging_data(self):\n with open('sensor_data.log','w') as f:\n json.dump(self.read_continuous_data, f)", "def linedata():\n get_values = request.args\n pc = get_values.get('pc') is not None # Per Capita\n gr = get_values.get('gr') is not None # Growth Rate\n place_args, _ = get_place_args(get_values)\n plot_data, _ = datachart_handler.get_plot_data(place_args, pc, gr)\n return json.dumps(plot_data)", "def LoadingData(self, ticker, FullHistory=False):\r\n if FullHistory == False:\r\n url = \"https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol={}&apikey={}\"\r\n else:\r\n url = \"https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol={}&outputsize=full&apikey={}\"\r\n\r\n try:\r\n response = requests.get(url.format(ticker, self.key))\r\n response.raise_for_status()\r\n except requests.exceptions.RequestException as e:\r\n raise SystemExit(e)\r\n\r\n # The API returns 200 status even after you have a typo\r\n try:\r\n outputjson = response.json()['Time Series (Daily)']\r\n except:\r\n print(\"Please check ticker for typos or mismatches\")\r\n outputjson = None\r\n\r\n return outputjson, ticker", "def getTemperatureMeasurements(self):\n # self.board.readline()\n self.stop = False\n times = []\n temps = [[], [], []]\n \n # A synchronisation string containing the characters tx is sent before each set of measurements,\n # we ensure correct reading of the measurements by waiting for this string\n while str(self.board.readline()).strip('b\\'\\\\rn') != 'tx':\n pass\n \n while not self.stop:\n # A synchronisation string containing the characters tx is sent before each set of measurements\n tx = self.board.readline()\n if str(tx).strip('b\\'\\\\rn') == 'tx':\n rawData1 = self.board.readline()\n rawData2 = self.board.readline()\n rawData3 = self.board.readline()\n rawData4 = self.board.readline()\n \n \n timeStamp = str(rawData1).strip('b\\'\\\\rn')\n temp1 = str(rawData2).strip('b\\'\\\\rn')\n temp2 = str(rawData3).strip('b\\'\\\\rn')\n temp3 = str(rawData4).strip('b\\'\\\\rn')\n try:\n times.append(float(timeStamp) / 1000)\n temps[0].append(float(temp1) / 128)\n temps[1].append(float(temp2) / 128)\n temps[2].append(float(temp3) / 128)\n # print(f'\\rtime: {float(timeStamp) / 1000:.2f} s, Temperature measured on sensor 1: {float(temp1) / 128:.2f} °C,'\n # f'sensor 2: {float(temp2) / 128:.2f} °C, sensor 3: {float(temp3) / 128:.2f} °C', sep='', end='', flush=True)\n except:\n print(rawData1, rawData2, rawData3, rawData4)\n \n \n if self.stop:\n print('\\nMeasurement finished...')\n \n self.data_stack[self.fetch_kinds[0]] = times\n self.data_stack[self.fetch_kinds[1]] = temps[0]\n self.data_stack[self.fetch_kinds[2]] = temps[1]\n self.data_stack[self.fetch_kinds[3]] = temps[2]\n \n if (len(self.data_stack['Sensor 1 Temp']) != len(times) or len(self.data_stack['Sensor 2 Temp']) != len(times) or len(self.data_stack['Sensor 3 Temp']) != len(times)):\n print(\"Warning: There may be some missing values!\")", "def slot_fullhistory(self, dummy_sender, data):\r\n (history) = data\r\n\r\n if not len(history):\r\n self.debug(\"### history download was empty\")\r\n return\r\n\r\n def get_time_round(date):\r\n \"\"\"round timestamp to current candle timeframe\"\"\"\r\n return int(date / self.timeframe) * self.timeframe\r\n\r\n #remove 
existing recent candle(s) if any, we will create them fresh\r\n date_begin = get_time_round(int(history[0][\"date\"]))\r\n while len(self.candles) and self.candles[0].tim >= date_begin:\r\n self.candles.pop(0)\r\n\r\n new_candle = OHLCV(0, 0, 0, 0, 0, 0) #this is a dummy, not actually inserted\r\n count_added = 0\r\n for trade in history:\r\n date = int(trade[\"date\"])\r\n price = int(trade[\"price_int\"])\r\n volume = int(trade[\"amount_int\"])\r\n time_round = get_time_round(date)\r\n if time_round > new_candle.tim:\r\n if new_candle.tim > 0:\r\n self._add_candle(new_candle)\r\n count_added += 1\r\n new_candle = OHLCV(\r\n time_round, price, price, price, price, volume)\r\n new_candle.update(price, volume)\r\n\r\n # insert current (incomplete) candle\r\n self._add_candle(new_candle)\r\n count_added += 1\r\n self.debug(\"### got %d updated candle(s)\" % count_added)\r\n self.ready_history = True\r\n self.signal_fullhistory_processed(self, None)\r\n self.signal_changed(self, (self.length()))", "def sample(self):\n logger.info(\"%s: collect sensor data\", self.__class__.__name__)\n samples = []\n self._fetch_data(samples)\n return samples", "def AcquiredData (self, arguments=None) :\n\t\tself.OODriver.Wrapper_getSpectrum(self.wrapperHandle,self.spectrometerIndex,self.bufferHandle)\n\t\t\n\t\tif self.OODriver.Wrapper_isSaturated(self.wrapperHandle,self.spectrometerIndex) :\n\t\t\tprint \"Warning: OcenOptics spectrometer is saturated!\"\n\t\t\t\n\t\ttry : return self.buffer[self.spectral_interval]\n\t\texcept AttributeError : return self.buffer", "def got_data(self, data):\n if self.get_current_state() == SBE37ProtocolState.DIRECT_ACCESS:\n # direct access mode\n if len(data) > 0:\n mi_logger.debug(\"SBE37Protocol._got_data(): <\" + data + \">\") \n if self._driver_event:\n self._driver_event(DriverAsyncEvent.DIRECT_ACCESS, data)\n # TODO: what about logging this as an event?\n return\n \n if len(data)>0:\n # Call the superclass to update line and prompt buffers.\n CommandResponseInstrumentProtocol.got_data(self, data)\n \n # If in streaming mode, process the buffer for samples to publish.\n cur_state = self.get_current_state()\n if cur_state == SBE37ProtocolState.AUTOSAMPLE:\n if SBE37_NEWLINE in self._linebuf:\n lines = self._linebuf.split(SBE37_NEWLINE)\n self._linebuf = lines[-1]\n for line in lines:\n self._extract_sample(line)", "def get_data(self):\n self._send_command(self._adapter.get_data())", "def sendHistoricAndPrediction(data):\n global df_hisotorical\n df_hisotorical = get_historical_data(data[\"UserID\"])\n data[\"seqMode\"] = 0\n data[\"versionline\"] = 0\n rounded_pred = predict(data)\n n = sendReport(data, rounded_pred, data[\"satzID\"], \"intv5\")\n\n return rounded_pred", "def _log_data_received(self, timestamp, data, logconf):\n\n # Check so that the incoming data belongs to what we are currently\n # logging\n \n if self._previous_config:\n if self._previous_config.name == logconf.name:\n self._plot.add_data(data, timestamp)\n\n self.avgsumnumb=self.avgsumnumb+1;\n if self.avgsumnumb >= 50:\n print(\"Total:\", self.avgsumnumb)\n for name in data:\n print(\"Average\", name, self.avgsumvalue[name]/self.avgsumnumb)\n self.avgsumvalue[name]=0;\n self.avgsumnumb=0;\n for name in data:\n self.avgsumvalue[name]=self.avgsumvalue[name]+data[name];\n #print(self.avgsumvalue[name])\n #logger.info(\"Here %s\", data[name])\n #self._items[name].add_point(data[name], ts)\n #self.avgsumvalue[name]+=data[name];\n #logger.info(\"Here %s\", self.avgsumvalue[name])", "def 
on_redraw_timer(self, event):\n \n if self.sampling_timer.IsRunning():\n self.daq.get_data()\n self.draw_plot()\n else:\n self.control_box.txt_info_box.SetLabel('Measurement complete')\n self.calculate()\n return", "def get_new_data(self):\n\n # record bar parse performance\n self.logger.debug(\"Started parsing new ticks.\")\n start_parse = time.time()\n for exchange in self.exchanges:\n exchange.parse_ticks()\n end_parse = time.time()\n duration = round(end_parse - start_parse, 5)\n\n self.logger.debug(\n \"Parsed \" + str(self.total_instruments) +\n \" instruments' ticks in \" + str(duration) + \" seconds.\")\n self.track_tick_processing_performance(duration)\n\n # wrap new 1 min bars in market events\n new_market_events = []\n for exchange in self.exchanges:\n bars = exchange.get_new_bars()\n for symbol in exchange.get_symbols():\n for bar in bars[symbol]:\n event = MarketEvent(exchange.get_name(), bar)\n new_market_events.append(event)\n # add bars to save-to-db-later queue\n # TODO: store new bars concurrently with a processpool\n self.bars_save_to_db.put(event)\n return new_market_events", "def get_data_sync(self, out_format: str='json', chk_interval=0.25, max_chks=65535):\n if self.data:\n return self.data\n check_cnt = 0\n while True:\n if check_cnt >= max_chks:\n break\n self.data_ready = self.check_available()\n if self.data_ready:\n break\n else:\n check_cnt += 1\n sleep(chk_interval)\n if not self.data_ready:\n raise DataNotReady(\"The run {} has not yet finished, data not available yet.\".format(self))\n resp = self.ph.conn.request(\n 'GET', self.ph.URLS['getdata'].format(self.run_token), dict(api_key=self.ph.api_key, format=out_format))\n data = resp.data.decode('utf-8')\n self.data = self.parse_json_data(data)\n return self.data", "def generate_data(self):\n print(\"generate_data - init\")\n with open(self.input_file, \"r\") as f:\n\n # read JSON data from input file\n data = json.loads(f.read())\n\n for idx, row in enumerate(data): \n # serialize Python dict to string\n msg = self.serialize_json(row)\n #print(f\"Linha: {row}\")\n self.send(self.topic, msg)\n self.flush()\n #print(\"Sleeping\")\n time.sleep(1)", "def getCurentData(self):\n if not self.labExperiment:\n super().getCurentData()\n else:\n return np.array(self.connection.query('get_actuator_data'))", "def data():\n if request.method == 'POST':\n params = extract_variables(['temp', 'humid', 'pressure', 'light',\n 'smoke', 'time', 'sensor_id'], request)\n # Test to see if the minimum required variables were parsed\n # TODO: Make this smarter\n if not (params['temp'] or params['humid'] or params['pressure'])\\\n or not params[\"sensor_id\"]:\n # There's either no information or no sensor_id and time\n abort(400)\n # If there's no timestamp we'll assign one\n # This makes it possible to use micro controller based sensors\n # without RTC\n if not params['time']:\n params['time'] = time.time()\n # Make sure sensor id is a valid integer\n try:\n int(params[\"sensor_id\"])\n except ValueError:\n abort(400)\n\n try:\n # Try inserting values\n query_db('INSERT INTO climate VALUES (?, ?, ?, ?, ?, ?, ?)',\n [params['temp'], params['humid'], params['pressure'],\n params['light'], params['smoke'],\n params['time'], params[\"sensor_id\"]], commit=True)\n except sqlite3.IntegrityError:\n # Sensor not in database yet, add it with a temporary name\n query_db('INSERT INTO sensors VALUES (?, ?)',\n [params[\"sensor_id\"], str(uuid.uuid4())], commit=True)\n # Then insert values...\n query_db('INSERT INTO climate VALUES (?, ?, ?, ?, 
?, ?, ?)',\n [params['temp'], params['humid'], params['pressure'],\n params['light'], params['smoke'],\n params['time'], params[\"sensor_id\"]], commit=True)\n return 'Sensor added and data saved'\n return 'Data saved'\n elif request.method == 'GET':\n params = extract_variables(['start_time', 'end_time', 'sensor_id'], request)\n data = query_climate_range(**params)\n return jsonify(results=data)", "def read_and_display_data(hat, samples_per_channel, num_channels): \n \n # Read all of the available samples (up to the size of the read_buffer which\n # is specified by the user_buffer_size). Since the read_request_size is set\n # to -1 (READ_ALL_AVAILABLE), this function returns immediately with\n # whatever samples are available (up to user_buffer_size) and the timeout\n # parameter is ignored.\n total_samples_read = 0\n read_request_size = READ_ALL_AVAILABLE\n completeFlag = 0 \n timeout = 5.0\n \n # file switch: w = Write to a file\n # file switch: w+ = Write to a file, if it doesn't exist create it\n # file switch: a = Append to a file\n # file switch: a+ = Append to a file, if is doesn't exist create it.\n # file switch: x = will create a file, returns an error if the file exist\n \n\n # If the scan starts, create a file name based upon current date and time.\n # Retrieve the Current Working Directory and generate the full path \n # to where to write the collected data as a .csv file. Open the file \n # begin writing the data to the file. When done, close the file.\n \n try:\n if os.path.exists(basepath):\n if not (os.path.exists(mypath)):\n os.mkdir(mypath)\n else:\n os.mkdir(basepath)\n os.chdir(basepath)\n os.mkdir(mypath)\n except OSError as exc:\n raise\n \n os.chdir(mypath)\n fileDateTime = datetime.strftime(datetime.now(), \"(%m_%d_%Y)-(%H-%M-%S)\")\n #filePath = mypath + \"/\" + DAQ_NAME + \"_\" + fileName + \".csv\"\n filePath = mypath + \"/\" + DAQ_NAME + \"_\" + fileDateTime + \".csv\"\n csvfile = open(filePath, \"w+\")\n csvwriter = csv.writer(csvfile) \n \n # Recording LED\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(RECORDING_LED, GPIO.OUT, initial=GPIO.LOW)\n GPIO.output(RECORDING_LED,GPIO.HIGH)\n \n while total_samples_read < samples_per_channel:\n read_result = hat.a_in_scan_read(read_request_size, timeout)\n\n # Check for an overrun error\n if read_result.hardware_overrun:\n print('\\n\\nHardware overrun\\n')\n break\n elif read_result.buffer_overrun:\n print('\\n\\nBuffer overrun\\n')\n break\n elif not (read_result.running and completeFlag == 0):\n completeFlag = 1\n print('\\n (2) Recording Completed - Buffer Draining')\n\n samples_read_per_channel = int(len(read_result.data) / num_channels)\n total_samples_read += samples_read_per_channel\n \n totalSamples = len(read_result.data) \n\n if samples_read_per_channel > 0:\n index = samples_read_per_channel * num_channels - num_channels\n \n new_index = 0\n myArray=[] #create an empty array\n for i in range(0, totalSamples, num_channels):\n myArray.append([]) #add a row to the array (COLUMN)\n for j in range(num_channels):\n\t\t\t\t\t#append a num_channels of data to the array (ROW)\n myArray[new_index].append(read_result.data[i + j]) \n new_index+=1\n\n csvwriter.writerows(myArray) #Write the array to file\n csvfile.flush\n\n # Cleanup\n csvfile.close() \n print('\\n (3) Buffer Drained - Data Saved to CSV File\\n')\n GPIO.cleanup()\n GPIO.setmode(GPIO.BCM)\n \n # Complete LED\n GPIO.setup(COMPLETE_LED, GPIO.OUT, initial=GPIO.LOW)\n GPIO.output(COMPLETE_LED,GPIO.HIGH)\n time.sleep(5)\n GPIO.cleanup()\n hat.a_in_scan_cleanup()\n 
global CMD_RECEIVED\n CMD_RECEIVED = 1\n \n # Restarts script to prepare for another recording\n main()", "def _response_power_buffer(self, message):\n if message.logaddr.value == self._last_log_address:\n self._last_log_collected = True\n # Collect logged power usage\n for i in range(1, 5):\n if getattr(message, \"logdate%d\" % (i,)).value != None:\n dt = getattr(message, \"logdate%d\" % (i,)).value\n if getattr(message, \"pulses%d\" % (i,)).value == 0:\n self.power_history[dt] = 0.0\n else:\n self.power_history[dt] = self.pulses_to_kWs(\n getattr(message, \"pulses%d\" % (i,)).value, 3600\n )\n # Cleanup history for more than 2 day's ago\n if len(self.power_history.keys()) > 48:\n for dt in list(self.power_history.keys()):\n if (dt + self.stick.timezone_delta - timedelta(hours=1)).date() < (\n datetime.now().today().date() - timedelta(days=1)\n ):\n del self.power_history[dt]\n # Recalculate power use counters\n last_hour_usage = 0\n today_power = 0\n yesterday_power = 0\n for dt in self.power_history:\n if (dt + self.stick.timezone_delta) == datetime.now().today().replace(\n minute=0, second=0, microsecond=0\n ):\n last_hour_usage = self.power_history[dt]\n if (\n dt + self.stick.timezone_delta - timedelta(hours=1)\n ).date() == datetime.now().today().date():\n today_power += self.power_history[dt]\n if (dt + self.stick.timezone_delta - timedelta(hours=1)).date() == (\n datetime.now().today().date() - timedelta(days=1)\n ):\n yesterday_power += self.power_history[dt]\n if self.power_consumption_prev_hour != last_hour_usage:\n self.power_consumption_prev_hour = last_hour_usage\n self.do_callback(SENSOR_POWER_CONSUMPTION_PREVIOUS_HOUR[\"id\"])\n if self.power_consumption_today != today_power:\n self.power_consumption_today = today_power\n self.do_callback(SENSOR_POWER_CONSUMPTION_TODAY[\"id\"])\n if self.power_consumption_yesterday != yesterday_power:\n self.power_consumption_yesterday = yesterday_power\n self.do_callback(SENSOR_POWER_CONSUMPTION_YESTERDAY[\"id\"])", "def get_data_from_api():\n s_date = datetime.now()\n uid = uuid.uuid1()\n start_time = time.time()\n cfl.logging.info(f'new session num {uid} was created at {s_date} ')\n\n functions_dict = {'graph' : get_stock_graph_in_range, 'holder': get_stock_holder, 'balance': get_balance,\n 'cash_flow': get_cash_flow, 'earning': get_earning}\n\n answers = inquirer.prompt(questions)\n symbol = answers.get(\"symbol\")\n\n func = functions_dict[answers.get('user_option')]\n\n if answers.get(\"user_option\") == \"graph\" :\n answer_graph = inquirer.prompt(question_graph)\n start_date = answer_graph.get(\"start_date\")\n end_date = answer_graph.get(\"end_date\")\n period = answer_graph.get(\"period\")\n e_date = datetime.now()\n end_time = time.time()\n print(f\"{func(symbol, start_date, end_date, period)}\\n\")\n cfl.logging.info(f' session {uid} end at : {e_date} ')\n cfl.logging.info(f'the time it took to get data from api for session {uid} is: {end_time - start_time} ')\n return\n\n else:\n end_time = time.time()\n e_date = datetime.now()\n print(f\"{func(symbol)}\\n\\n\")\n cfl.logging.info(f' session {uid} end at : {e_date} ')\n cfl.logging.info(f'the time it took to get data from api for session {uid} is: {end_time - start_time} ')\n return", "def get_data(self):\n if self.ser.in_waiting:\n data_string = self.ser.readline().decode().strip()\n if not data_string: return self.data\n self.data = [\n float(element) for element in data_string.split()\n ]\n self.ser.reset_input_buffer()\n return self.data", "def _stab_log_data(self, 
timestamp, data, logconf):\n\t\t#print strftime(\"%H:%M:%S \", gmtime())\n\t\tstr1=data['mb.distance']\n\t\tnum1=float(str1)\n\t\tnum1=30-num1\n\n\t\t#self.updateplot(num1)\n\t\t#print \"test: \",num1\n\t\t#self.databuffer.append(num1)\n\t\t#self.y[:] = self.databuffer\n\t\t#self.curve.setData(x,num1)\n\t\t#self.app.processEvents()\n\n\t\tresults.append(num1)\n\t\tresultspyqt.append(num1)\n\t\tself.x = list(range(0,len(resultspyqt)))\n\t\t\n\t\t\n\t\t\n\t\tprint \"[%d][%s]: %s\" % (timestamp, logconf.name, data)\n\t\t\n\t\t#if not data: break\n\t\tdata=self.serv_listen()\n\t\tif data>0:\n\t\t\tprint \"app: \",data\n\t\t\tif(int(data)<100):#we are in thrust\n\t\t\t\tprint \"thrust\"\n\t\t\t\tprint self.roll, self.pitch, self.yawrate, self.thrust \n\t\t\t\tself.thrust=int(data)*600\n\t\t\t\tself._cf.commander.send_setpoint(self.roll, self.pitch, self.yawrate, self.thrust)\n\t\t\t\t#time.sleep(0.1)\n\t\t\telif((int(data)>100)and(int(data)<200)):#we are in pitch\n\t\t\t\tprint roll, pitch, yawrate, thrust \n\t\t\t\tpitch=(int(data))/5-30\n\t\t\t\tself._cf.commander.send_setpoint(roll, (int(data))/5-30, yawrate, thrust)\n\t\t\t\t#time.sleep(0.1)\n\t\t\telif(int(data)>200):#we are in roll\n\t\t\t\tprint \"add roll: \",150-(int(data))*3/5\n\t\t\t\tprint roll, pitch, yawrate, thrust \n\t\t\t\troll=50-(int(data))/5\n\t\t\t\tself._cf.commander.send_setpoint(50-(int(data))/5, pitch, yawrate, thrust)\n\t\t\t\t#time.sleep(0.1) \n\t\tif data == 'Hover':\n\t\t\tprint \"app: \",data", "def result(self):\n\n # determine the X and Y sensors to plot from those sensors selected by the user.\n sensorX = bmsapp.models.Sensor.objects.get(pk=self.request_params['select_sensor_x'])\n sensorY = bmsapp.models.Sensor.objects.get(pk=self.request_params['select_sensor_y'])\n\n # determine the averaging time\n averaging_hours = float(self.request_params['averaging_time_xy'])\n\n # get the building's timezone\n tz = pytz.timezone(self.timezone)\n\n # determine the start and end time for selecting records\n st_ts, end_ts = self.get_ts_range()\n\n # get the dividing date, if there is one\n div_datestring = self.request_params['div_date']\n div_dt = tz.localize(parser.parse(div_datestring)) if len(div_datestring) else None\n\n\n # The list that will hold each series\n series = []\n\n # get the X and Y sensor records and perform the requested averaging\n dfX = self.reading_db.dataframeForOneID(sensorX.sensor_id, st_ts, end_ts, tz)\n dfY = self.reading_db.dataframeForOneID(sensorY.sensor_id, st_ts, end_ts, tz)\n\n if not dfX.empty and not dfY.empty: # both sensors have some data, so proceed to average the data points\n \n dfX = bmsapp.data_util.resample_timeseries(dfX,averaging_hours)\n dfX.rename(columns = {'val':'X'}, inplace = True)\n\n dfY = bmsapp.data_util.resample_timeseries(dfY,averaging_hours)\n dfY.rename(columns = {'val':'Y','ts':'tsY'}, inplace = True)\n\n # Join the X and Y values for the overlapping time intervals and make\n # a list of points.\n df_all = dfX.join(dfY, how='inner') # inner join does intersection of timestamps\n\n # make sure there are matched records before continuing\n if len(df_all):\n\n # add a point name column to be used in the tooltip.\n df_all['name'] = df_all.index.strftime('%a %m/%d/%y %H:%M')\n\n # add a column identifying whether point is in occupied or unoccupied period.\n resolution = self.occupied_resolution()\n if (self.schedule is None) or (resolution is None):\n # no schedule or data doesn't lend itself to classifying\n # consider all points to be occupied\n df_all['occupied'] = 
1\n else:\n df_all['occupied'] = [self.schedule.is_occupied(ts, resolution=resolution) for ts in df_all.ts]\n\n # Set up the parameters for the different series of data\n # Required Info is (starting datetime, ending datetime, occupied status (0 or 1), series name, \n # series color, series symbol, series radius, series zindex).\n now_dt = datetime.now()\n if div_dt:\n # A dividing date was provided by the user.\n div_dt = div_dt.replace(tzinfo=None) # needs to be naive\n ser_params = ( (datetime(1970,1,1), div_dt, 1, 'Prior to %s' % div_datestring, '#2f7ed8', 'circle', 4.5),\n (datetime(1970,1,1), div_dt, 0, 'Prior to %s, Unoccupied' % div_datestring, '#2f7ed8', 'triangle-up', 3),\n (div_dt, now_dt, 1, '%s and beyond' % div_datestring, '#FF0000', 'circle', 4.5),\n (div_dt, now_dt, 0, '%s and beyond, Unoccupied' % div_datestring, '#FF0000', 'triangle-up', 3) )\n else:\n # Divide data by how recent it is.\n ser_params = ( (now_dt - timedelta(days=1), now_dt, 1, 'Last 24 Hours', '#FF0000', 'circle', 4.5),\n (now_dt - timedelta(days=1), now_dt, 0, 'Last 24 Hours, Unoccupied', '#FF0000', 'triangle-up', 3),\n (now_dt - timedelta(days=7), now_dt - timedelta(days=1), 1, 'Last 7 Days', '#00CC00', 'circle', 4.5),\n (now_dt - timedelta(days=7), now_dt - timedelta(days=1), 0, 'Last 7 Days, Unoccupied', '#00CC00', 'triangle-up', 3),\n (datetime(1970,1,1), now_dt - timedelta(days=7), 1, '7+ Days Old', '#2f7ed8', 'circle', 4.5),\n (datetime(1970,1,1), now_dt - timedelta(days=7), 0, '7+ Days Old, Unoccupied', '#2f7ed8', 'triangle-up', 3),\n )\n\n for t_start, t_end, occup, ser_name, ser_color, ser_symbol, radius in reversed(ser_params):\n mask = (df_all.index >= t_start) & (df_all.index < t_end) & (df_all.occupied==occup)\n if mask.max():\n series.append( {'x': np.char.mod('%.4g',df_all[mask].X.values).astype(float).tolist(),\n 'y': np.char.mod('%.4g',df_all[mask].Y.values).astype(float).tolist(),\n 'text': df_all[mask].name.values.tolist(),\n 'type': 'scatter',\n 'mode': 'markers', \n 'name': ser_name,\n 'marker': { 'color': ser_color,\n 'symbol': ser_symbol,\n 'size': radius * 2\n }\n } )\n\n # create the X and Y axis labels and the series\n x_label = '%s, %s' % (sensorX.title, sensorX.unit.label)\n y_label = '%s, %s' % (sensorY.title, sensorY.unit.label)\n\n opt = self.get_chart_options('plotly')\n opt['data'] = series\n opt['layout']['title'] = sensorY.title + \" vs. 
\" + sensorX.title\n opt['layout']['xaxis']['title'] = x_label\n opt['layout']['yaxis']['title'] = y_label\n opt['layout']['legend']['traceorder'] = 'reversed'\n\n html = basechart.chart_config.chart_container_html(opt['layout']['title'])\n\n return {'html': html, 'objects': [('plotly', opt)]}", "def realtimestreaming(self, **kwargs):\n url_path = 'realtimestreaming'\n self.logger.debug(f\"Get Realtime Streaming report data\")\n body = self._make_body(kwargs)\n return self._common_post(request_path=url_path, body=body)", "def _send_data(self):\n \n # Do not send more than 100 datasets each time (totally arbitrary)\n MAX_DATA_SETS_PER_POST = 100\n data_to_send = self._data_buffer[0:MAX_DATA_SETS_PER_POST]\n data_to_keep = self._data_buffer[MAX_DATA_SETS_PER_POST:]\n\n # Prepare data string with the values in data buffer\n now = time.time()\n data_string = '[' \n for (timestamp, data) in data_to_send:\n data_string += '['\n data_string += str(round(timestamp-now,2))\n for sample in data:\n data_string += ','\n data_string += str(sample)\n data_string += '],'\n # Remove trailing comma and close bracket\n data_string = data_string[0:-1]+']'\n\n self._log.debug(\"Data string: \" + data_string)\n \n # Prepare URL string of the form\n # 'http://domain.tld/emoncms/input/bulk.json?apikey=\n # 12345&data=[[-10,10,1806],[-5,10,1806],[0,10,1806]]'\n url_string = self._settings['protocol'] + self._settings['domain'] + \\\n self._settings['path'] + \"/input/bulk_json?apikey=\" + \\\n self._settings['apikey'] + \"&data=\" + data_string\n self._log.debug(\"URL string: \" + url_string)\n\n # Send data to server\n self._log.info(\"Sending to \" + \n self._settings['domain'] + self._settings['path'])\n try:\n result = urllib2.urlopen(url_string, timeout=60)\n except urllib2.HTTPError as e:\n self._log.warning(\"Couldn't send to server, HTTPError: \" + \n str(e.code))\n except urllib2.URLError as e:\n self._log.warning(\"Couldn't send to server, URLError: \" + \n str(e.reason))\n except httplib.HTTPException:\n self._log.warning(\"Couldn't send to server, HTTPException\")\n except Exception:\n import traceback\n self._log.warning(\"Couldn't send to server, Exception: \" + \n traceback.format_exc())\n else:\n if (result.readline() == 'ok'):\n self._log.debug(\"Send ok\")\n # Send ok -> empty buffer\n self._data_buffer = data_to_keep\n return True\n else:\n self._log.warning(\"Send failure\")", "def giveHistoricalData(stockName):\n now = datetime.datetime.fromtimestamp(getTime())\n fiveDaysAgo = datetime.datetime.fromtimestamp(\n getTime() - daysToSeconds(5)\n )\n\n resp = json.dumps(\n getHistoricalData(stockName, fiveDaysAgo)\n )\n return resp", "async def last_read(self):\n try:\n asyncio.set_event_loop(self.loop)\n asyncio.get_event_loop().create_task(self.browse())\n await self.browse()\n\n # parse the return reads and extract the most recent one\n # (i.e. 
last not None)\n jsonResponse = json.loads(self.raw_data)\n lastRead = None\n for read in jsonResponse['reads']:\n if read['value'] is None:\n break\n lastRead = read\n _LOGGER.debug(\"lastRead = %s\", lastRead)\n\n self.startTime = lastRead['startTime']\n self.endTime = lastRead['endTime']\n self.last_read_val = lastRead['value']\n self.unit_of_measurement = jsonResponse['unit']\n\n _LOGGER.debug(\"last read = %s %s %s %s\", self.startTime, self.endTime, self.last_read_val, self.unit_of_measurement)\n\n return self.startTime, self.endTime, self.last_read_val, self.unit_of_measurement\n except:\n raise MeterError(\"Error requesting meter data\")", "def GetData(self):\r\n if self.Error == False:\r\n Extra = {}\r\n try:\r\n result = {}\r\n temp = self.ScrapeMainWebpage()\r\n if temp != None:\r\n result.update(temp)\r\n temp = self.ScrapeParameters1Webpage()\r\n if temp != None:\r\n result.update(temp)\r\n temp = self.ScrapeParameters2Webpage()\r\n if temp != None:\r\n result.update(temp)\r\n temp = self.ScrapeStatusWebpage()\r\n if temp != None:\r\n result.update(temp)\r\n sqlArray = {}\r\n sqlArray[self.deviceDescr] = {}\r\n sqlArray[self.deviceDescr][self.devNumber] = {}\r\n sqlArray[self.deviceDescr][self.devNumber][\"General\"] = result\r\n sqlArray[self.deviceDescr][self.devNumber][\"_ExtractInfo\"] = {}\r\n sqlArray[self.deviceDescr][self.devNumber][\"_ExtractInfo\"][\"ExtractTime\"] = time.time()\r\n sqlArray[\"ReadError\"] = False \r\n return sqlArray\r\n \r\n except Exception as e: \r\n self.log.printError(\"ERROR in Retreiving Seatel VSAT Data,%s Module Error\" % sys._getframe().f_code.co_name) \r\n self.log.printError( str(e))\r\n self.Error = True\r\n Extra[\"ReadError\"] = True\r\n return Extra\r\n else:\r\n self.log.printWarning(\"%s skipped due to previous failure\" % sys._getframe().f_code.co_name)\r\n return None", "def get_raw_data(self, dataset):\n\t\tprint(\"Getting raw data for\", dataset)\n\t\tself.raw_documents[dataset] = []\n\t\tself.Y[dataset] = []\n\n\t\twith open(self.jsons[dataset]) as handle:\n\t\t\tfor line in handle:\n\t\t\t\tjson_obj = json.loads(line)\n\t\t\t\tstars = json_obj[\"stars\"]\n\t\t\t\tif stars != 3:\n\t\t\t\t\tself.raw_documents[dataset].append(json_obj[\"text\"])\n\t\t\t\t\tself.Y[dataset].append(int(stars > 3))", "def request_realtime_info(self):\n self.socket_datastream.sendto(b\"!r\", self.ip_port_arduino_datastream)\n self.socket_datastream.sendto(b\"!s\", self.ip_port_arduino_datastream)", "def update_data():\n values = temp_serial_placeholder()\n time = current_time_milli() - __start\n points = [ [time, values[0]], [time, values[1]] ]\n __data.append(points)\n return points", "def process_data(self, data):\n parsed = json.loads(data)\n current = parsed[\"current\"]\n future = parsed[\"hourly\"]\n current_res = self.get_columns(current)\n self.database_conn(\"current\", -1, current_res)\n for hour, data in enumerate(future):\n current_res = self.get_columns(data)\n self.database_conn(\"current\", hour, current_res)", "def get_data(stream_url, count, is_graph):\n data = requests.get(stream_url + '/Data' + '?count=' + str(count) + '&Startindex=1970-01-01T00:00:00Z', headers=headers)\n Values = []\n for event in data.json():\n Values.append(event['Value'])\n if is_graph:\n terminalplot.plot(range(len(Values)), Values)\n else:\n print(Values)\n input(\"Press any key to continue...\")\n return", "def _plot_data(self, data):\r\n # only send data if xqueue exists\r\n if self.capa_system.xqueue is None:\r\n return {'success': False, 'message': 
'Cannot connect to the queue'}\r\n\r\n # pull relevant info out of get\r\n response = data['submission']\r\n\r\n # construct xqueue headers\r\n qinterface = self.capa_system.xqueue['interface']\r\n qtime = datetime.utcnow().strftime(xqueue_interface.dateformat)\r\n callback_url = self.capa_system.xqueue['construct_callback']('ungraded_response')\r\n anonymous_student_id = self.capa_system.anonymous_student_id\r\n # TODO: Why is this using self.capa_system.seed when we have self.seed???\r\n queuekey = xqueue_interface.make_hashkey(str(self.capa_system.seed) + qtime +\r\n anonymous_student_id +\r\n self.input_id)\r\n xheader = xqueue_interface.make_xheader(\r\n lms_callback_url=callback_url,\r\n lms_key=queuekey,\r\n queue_name=self.queuename)\r\n\r\n # construct xqueue body\r\n student_info = {\r\n 'anonymous_student_id': anonymous_student_id,\r\n 'submission_time': qtime\r\n }\r\n contents = {\r\n 'grader_payload': self.plot_payload,\r\n 'student_info': json.dumps(student_info),\r\n 'student_response': response\r\n }\r\n\r\n (error, msg) = qinterface.send_to_queue(header=xheader,\r\n body=json.dumps(contents))\r\n # save the input state if successful\r\n if error == 0:\r\n self.input_state['queuekey'] = queuekey\r\n self.input_state['queuestate'] = 'queued'\r\n self.input_state['queuetime'] = time.time()\r\n\r\n return {'success': error == 0, 'message': msg}", "def test_get_measurement_history(self):\n device = DeviceFactory(node=Node.objects.first(), external_id='123', type__code=SecureDeviceType.SRT321,\n device_param__type__code=SecureDeviceParameterType.MEASURED_TEMPERATURE)\n d_id_1 = device.external_id\n\n now_loc = datetime.datetime.now(bst)\n ts_loc = now_loc - datetime.timedelta(seconds=30)\n ts_str = ts_loc.strftime('%Y-%m-%dT%H:%M:%S')\n\n data = self.create_secure_server_push_data(d_id_1, ts_str)\n\n SecureClient.process_push_data(data)\n time.sleep(.5)\n\n # get newer timestamp\n ts_str = now_loc.strftime('%Y-%m-%dT%H:%M:%S')\n data = self.create_secure_server_push_data(d_id_1, ts_str, value=\"23.5\")\n\n SecureClient.process_push_data(data)\n\n token = Token.objects.get(user__username=email)\n device_param = device.parameters.first()\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)\n url = reverse('api:device_measurements', kwargs={'device_parameter_id': device_param.id})\n\n time.sleep(.5)\n\n response = client.get(url, format='json')\n\n self.assertTrue(response.status_code == 200)\n self.assertTrue(len(response.data) >= 2)", "def get_com_data(self):\n self.form_url_str()\n if self.__print_url: print self.com_data_full_url\n self.download_json()\n self.get_datalist_fr_json()", "def measurements():\n print(\"server received request for precipitation data...\")\n return jsonify(measurements_data)", "def read_live_data(wearable_port):\r\n IMU1_num = []\r\n IMU2_num = []\r\n IMU3_num = []\r\n\r\n try:\r\n wearable = serial.Serial(wearable_port, baudrate=115200, timeout=5)\r\n #arduino = serial.Serial(arduino_port, timeout=1)\r\n # Delay for 2 seconds to wait for serial port to be ready.\r\n print(\"Waiting 2 seconds for serial to be ready.\")\r\n time.sleep(2)\r\n except Exception as e:\r\n print(e)\r\n print('Please check the port')\r\n return\r\n\r\n input(\"Press Enter to continue...\")\r\n str(wearable.write(bytes(33)))\r\n # Open file to store the data; filename includes date and time; format: data-YYYYMMDDHHmmss.csv\r\n filename = \"data-\" + str(dt.datetime.now().strftime(\"%Y%m%d%H%M%S\")) + \".csv\"\r\n filenamplot = \"plot-\" + 
str(dt.datetime.now().strftime(\"%Y%m%d%H%M%S\")) + \".png\"\r\n print(\"Opening %s\" % filename)\r\n f = open(filename, \"a+\")\r\n # f.write(\"power,rpm\\n\")\r\n count = 1000\r\n # Get data and continuously yield Power and RPM as integers\r\n\r\n while (count >0):\r\n count = count -1\r\n #if arduino.in_waiting > 0:\r\n wearable.flushInput()\r\n\r\n '''\r\n arduino_output = arduino.readline().decode(\"utf_8\", \"strict\")\r\n print(\"Distance: %s\" % arduino_output)\r\n f.writelines(\"%s\" % arduino_output)\r\n if arduino_output == \"Hard Stop\\r\\n\":\r\n break\r\n arduino_output = arduino_output.replace(\"\\r\\n\", \"\")\r\n Distance.append(int(float(arduino_output)))\r\n '''\r\n\r\n try:\r\n data = wearable.readline().decode(\"utf_8\", \"strict\")\r\n data = data.replace(\"\\r\\n\", \"\\n\").split()\r\n IMU1= data[2].replace(\"\\n\", \"\")\r\n IMU1_num.append(int(IMU1))\r\n IMU2 = data[3].replace(\"\\n\", \"\")\r\n IMU2_num.append(int(IMU2))\r\n IMU3 = data[4].replace(\"\\n\", \"\")\r\n IMU3_num.append(int(IMU3))\r\n print(\"IMU1: %s\\t IMU2: %s\\t IMU3: %s\\t\" % (IMU1, IMU2, IMU3))\r\n f.writelines(\"%s,%s,%s,%s\\n\" % (IMU1, IMU2, IMU3))\r\n yield int(IMU1), int(IMU2), int(IMU3)\r\n except Exception as e:\r\n print('error')\r\n f.writelines(\"Error\\n\")\r\n\r\n print('Program ended.')\r\n t = numpy.linspace(1, len(IMU1_num), len(IMU1_num))\r\n fig, (ax1) = plt.subplots(nrows=1, ncols=1, figsize=(16.0, 9.0)) # create figure & 1 axis\r\n ax1.plot(t, IMU1_num, t, IMU2_num,t, IMU3_num)\r\n ax1.set_title('IMU')\r\n ax1.legend(('IMU1', 'IMU2', 'IMU3'))\r\n # manager = plt.get_current_fig_manager()\r\n # manager.resize(*manager.window.maxsize())\r\n fig.savefig(filenamplot)\r\n plt.show()\r\n\r\n f.close()\r\n #arduino.close()\r\n wearable.close()", "def loop(self) -> None:\n while True:\n # Sleep before running code to ensure that the sensor is\n # initialized on first run, as per the specifications.\n sleep(config.SLEEP)\n\n self.setup_quiet_hours()\n\n if self.quiet_setup:\n if self.in_quiet_hours():\n if config.DC_QH:\n self.sensor.close()\n self.sleep_quiet_hours()\n continue\n\n # In the case that quiet hours were established during first run\n # and removed from the endpoint afterwards, the sensor may not\n # be in the open state. Because serial.Serial.open() may raise\n # an exception if the sensor is already open, just check prior.\n if not self.sensor.isOpen():\n self.sensor.open()\n\n config.LOGGER.info('Woke up after sleeping. 
Running loop()')\n self.data = []\n for _ in range(10):\n datum = self.sensor.read()\n self.data.append(datum)\n\n for pm, start in self._SLICES.items():\n # Might be necessary to give the endpoint some time\n # between responses\n sleep(10)\n reading = self.read_data_from_bytes(start)\n aq_dict = air_quality.AQS[pm].get_range(reading)\n\n sections = [\n {\n 'type': 'text',\n 'color': aq_dict['color'],\n 'value': f'Quality: {aq_dict[\"label\"]}'\n },\n {\n 'type': 'gauge',\n 'color': [aq_dict['color']],\n 'range': [aq_dict['lower'], aq_dict['upper']],\n 'value': reading,\n },\n {\n 'type': 'gauge',\n 'color': air_quality.COLORS,\n 'range': air_quality.AQS[pm].get_all_ranges(),\n 'value': reading,\n }\n ]\n\n data = {\n 'module': f'photo-dash-sds011-pm{pm}',\n 'title': f'Air Quality - PM{pm}',\n 'sections': sections,\n }\n\n try:\n r = requests.put(config.ENDPOINT, json=data)\n except Exception as e: # Catching broad Exceptions for now\n config.LOGGER.error(e)\n config.LOGGER.info(r.status_code)", "def get_data(last):\n Table = \"ServerRoom\"\n filter = \"\"\n if last == \"lastone\":\n data = request_meteodata(\"SELECT * from `ServerRoom` ORDER BY id DESC LIMIT 1 \")\n if len(data) == 0:\n return [SensorData(datetime.datetime.now(), 0, 0)]\n res = []\n for d in data:\n res.append(SensorData(d[1], d[2], d[3]))\n return res\n if last != \"All\":\n limit = datetime.datetime.now().astimezone(utz)\n if last == \"24hours\":\n limit -= datetime.timedelta(hours=24)\n else:\n limit = limit.replace(hour=0, minute=0, second=0, microsecond=0)\n if last == \"3days\":\n limit -= datetime.timedelta(days=3)\n elif last == \"7days\":\n limit -= datetime.timedelta(days=7)\n elif last == \"month\":\n limit = limit.replace(day=1)\n elif last == \"30days\":\n limit -= datetime.timedelta(days=30)\n elif last == \"year\":\n limit = limit.replace(day=1, month=1)\n filter = \" WHERE `date` > '\" + str(limit) + \"'\"\n order = \" ORDER BY `date` ASC\"\n req = \"SELECT * FROM `\" + Table + \"`\" + filter + order\n data = request_meteodata(req)\n if len(data) == 0:\n print(\"no data: get all\")\n req = \"SELECT * FROM `\" + Table + \"`\" + order\n data = request_meteodata(req)\n res = []\n for d in data:\n res.append(SensorData(d[1], d[2], d[3]))\n return res", "def pull_data(stop_event):\r\n logger = logging.getLogger(__name__)\r\n\r\n # List of current formats supported\r\n currency_list = ['https://www.bitstamp.net/api/v2/ticker_hour/btceur', 'https://www.bitstamp.net/api/v2/ticker_hour/btcusd',\r\n 'https://www.bitstamp.net/api/v2/ticker_hour/ethusd', 'https://www.bitstamp.net/api/v2/ticker_hour/etheur']\r\n\r\n # Loop until told otherwise!\r\n while not stop_event.is_set():\r\n for currency in currency_list:\r\n res = requests.get(currency)\r\n try:\r\n res.raise_for_status()\r\n except requests.exceptions.HTTPError as e:\r\n # Not 200\r\n logger.error(str(e))\r\n continue\r\n\r\n # Get the end characters to dertermine the type e.g. 
btceur, ethusd, etc...\r\n currency_type = (currency.rpartition('/')[-1])\r\n logger.info('The Curreny type: ' + currency_type)\r\n\r\n if currency_type == 'btceur':\r\n table = 'btceur'\r\n elif currency_type == 'btcusd':\r\n table = 'btcusd'\r\n elif currency_type == 'ethusd':\r\n table = 'ethusd'\r\n elif currency_type == 'etheur':\r\n table = 'etheur'\r\n else:\r\n table = None\r\n\r\n # Extract Data and Fields\r\n data = res.json()\r\n field_list = data.keys()\r\n logger.info(field_list)\r\n value_list = data.values()\r\n logger.info(value_list)\r\n\r\n # Write to DB\r\n db_commit(table, field_list, value_list)\r\n # Cannot make more than 600 requests per 10 minutes or they will ban your IP address.\r\n # Will in time get real time using their websocket API.\r\n time.sleep(5)", "def loadData(self):\r\n self.samplerate = self.app.samplerate\r\n self.sensors = self.app.sensors\r\n self.sensorMask = self.app.sensorMask\r\n self.measurements = self.app.measurements\r\n\r\n # Get min and max data points\r\n for sens in self.sensor_ids:\r\n try:\r\n for i in range(1,self.measurements):\r\n if float(self.app.data[i][sens].text) < self.sensor_range[0]:\r\n self.sensor_range[0] = float(self.app.data[i][sens].text)\r\n elif float(self.app.data[i][sens].text) > self.sensor_range[1]:\r\n self.sensor_range[1] = float(self.app.data[i][sens].text)\r\n except:\r\n print(self.app.data)\r\n \r\n # Set x scale from 0 to end of track\r\n self.scalex = [0,self.measurements]\r\n## self.scalex = [0,self.w/2]\r\n # Set y scale to maximum sensor measurement\r\n self.setScaleY(self.sensor_range[0], self.sensor_range[1])", "def get_data(self):\n\n self.set_query_string()\n self.realtime_data = super().get_data()\n self.set_coordinate()\n return self.realtime_data", "def get_json(self):\r\n self.get_recordrange()\r\n [\r\n self.json_data.setter(n, self.get_data(\"json\", x))\r\n for n, x in enumerate(tqdm(self.iterlist))\r\n ]", "def getDataParsed():\r\n serialConsole.flush()\r\n rawData = serialConsole.readline().decode(\"utf-8\").rstrip()\r\n parsedJson = json.loads(rawData)\r\n return parsedJson", "def test():\n temp_data = fetch_temp_data(\n (\"https://opendata-download-metobs.smhi.se/api/version/\" +\n \"latest/parameter/1/station/52350/period/latest-day/data.json\"))\n data = temp_series(temp_data)\n print(data)", "def plot(accessToken, collection):\n \n plt.xlabel('Date/Time')\n plt.ylabel('Sensor Value')\n plt.title(\"Sensors Monitor\")\n \n # to save png files\n i = 0\n \n # set interactive mode on\n plt.ion()\n \n # set figure to full screen\n mng = plt.get_current_fig_manager()\n mng.full_screen_toggle()\n\n while True:\n jsondata = getJsonData(accessToken)\n if jsondata:\n #limit date string\n jsondata[DATE] = jsondata[DATE][8:13]\n appendJsonData(jsondata, collection)\n \n # clear figure\n plt.clf()\n \n # limit samples to be viewed\n if (len(collection[DATE]) > SAMPLE_SIZE_LIMIT):\n plt.xticks(range(SAMPLE_SIZE_LIMIT), collection[DATE][-SAMPLE_SIZE_LIMIT:])\n plt.plot(collection[SENSOR1][-SAMPLE_SIZE_LIMIT:], 'k', label='sensor 1')\n plt.plot(collection[SENSOR2][-SAMPLE_SIZE_LIMIT:], 'b', label='sensor 2')\n plt.plot(collection[SENSOR3][-SAMPLE_SIZE_LIMIT:], 'g', label='sensor 3')\n plt.plot(collection[SENSOR4][-SAMPLE_SIZE_LIMIT:], 'r', label='sensor 4')\n else:\n plt.xticks(range(len(collection[DATE])), collection[DATE])\n plt.plot(collection[SENSOR1], 'k', label='sensor 1')\n plt.plot(collection[SENSOR2], 'b', label='sensor 2')\n plt.plot(collection[SENSOR3], 'g', label='sensor 3')\n 
plt.plot(collection[SENSOR4], 'r', label='sensor 4')\n \n plt.legend(loc='upper left')\n plt.show()\n \n # Take a screenshot on Gnome desktop\n if os.environ.get(\"XDG_MENU_PREFIX\").startswith(\"gnome\"):\n os.system(\"gnome-screenshot -f screenshot{}.png\".format(i))\n i = i+1\n \n #plt.pause(1)\n plt.pause(60*60) # one hour\n else:\n print(str(datetime.datetime.now()) + \" Empty json data\")", "def getSamples(self,limit=None):\n theseReadings = self.readings\n if limit:\n theseReadings = theseReadings[:limit]\n return [x.asJSON() for x in theseReadings]", "def readData(self, duration_s, toV=True, maxAI=1024, maxV=5.0,\n updateFunc=None, nPntsUpdate=1, trigger=[]):\n isDone = False\n nLines = 0\n empty = numpy.array([])\n \n if self.rate_ms >= 0:\n nPnts = round(duration_s *1000.0 /self.rate_ms)\n print(\"{0:.3f} s duration = {1} samples\".format(duration_s, nPnts))\n else: \n nPnts = 100\n print(\"Sample rate invalid, {0} samples will be recorded\".format(nPnts))\n\n if not(self.isOpen): \n print(\"ERROR: Link not open\")\n return (-1, empty, empty, empty)\n \n # Create empty arrays for the data\n #\n np_data_0 = numpy.zeros([nPnts], dtype=float)\n np_data_1 = numpy.zeros([nPnts], dtype=float)\n np_data_t = numpy.zeros([nPnts], dtype=float)\n np_dt_ms = numpy.zeros([nPnts], dtype=float)\n \n # Attempt to read data\n #\n while not(isDone):\n # Read a line\n #\n (errC, parts) = self.__readLine()\n if not(errC == 0):\n return (errC, empty, empty, empty)\n \n else: \n np_data_0[nLines] = float(parts[2])\n np_data_1[nLines] = float(parts[3])\n np_data_t[nLines] = float(parts[0])/1000.0\n np_dt_ms[nLines] = float(parts[1]) /1000.0\n \"\"\"\n print(\"{0:.3f} {1} {2}\".format(int(parts[0])/1000.0, \n int(parts[2]), int(parts[3])))\n \"\"\"\n # Update plot of data, if requested\n #\n if updateFunc and ((nLines % nPntsUpdate) == 0):\n updateFunc(nLines, np_data_t[nLines], np_data_0[nLines], \n np_data_1[nLines])\n \n if nLines < (nPnts-1):\n sys.stdout.write(\"\\r{0:.0f}% {1:.3f} s: {2:.3f} {3:.3f} ...\"\n .format(nLines/float(nPnts) *100, \n np_data_t[nLines]/1000.0,\n np_data_0[nLines], np_data_1[nLines]))\n else: \n sys.stdout.write(\"\\r100% done\" +\" \"*40 +\"\\n\")\n \n nLines += 1\n isDone = (nLines == nPnts)\n \n print(\"SUCCESS\") \n print(\"{0} data points recorded\".format(nLines))\n print(\"Rate = {0:.3f} +/- {1:.3f} ms\".format(numpy.mean(np_dt_ms), \n numpy.std(np_dt_ms)))\n\n if toV: \n np_data_0 = np_data_0 /float(maxAI -1) *maxV\n np_data_1 = np_data_1 /float(maxAI -1) *maxV\n \n return (0, np_data_t, np_data_0, np_data_1)", "def collect_data(self):\n self.lines = []\n\n while True:\n self._process_serial_data()", "def update(self):\r\n mc = self._motor_controllers[0]\r\n lj = self._labjacks[0]\r\n r = None\r\n\r\n lj._finished = False\r\n errors = 0\r\n missed = 0\r\n # Read from Queue until there is no data.\r\n try:\r\n # get([block[, timeout]])¶\r\n # Remove and return an item from the queue. If optional\r\n # args block is true and timeout is None (the default),\r\n # block if necessary until an item is available. If timeout\r\n # is a positive number, it blocks at most timeout seconds\r\n # and raises the Empty exception if no item was available\r\n # within that time. 
Otherwise (block is false), return an\r\n # item if one is immediately available, else raise the\r\n # Empty exception (timeout is ignored in that case).\r\n # Pull results out of the Queue in a blocking manner.\r\n result = lj.raw_data.get(False) # (True, 0.0001)\r\n if result[\"errors\"] != 0:\r\n errors += result[\"errors\"]\r\n missed += result[\"missed\"]\r\n logging.warning(\r\n \"Total Errors: {}, Total Missed: \"\r\n \"{}\".format(errors, missed),\r\n extra=self.extra)\r\n # Convert the raw bytes (result['result']) to voltage data.\r\n r = lj.device.processStreamData(result['result'])\r\n lj.raw_data.task_done()\r\n lj.raw_data_ready.clear()\r\n except lj.raw_data.Empty:\r\n if lj._finished:\r\n logging.info(\"Done reading from the Queue.\", extra=self.extra)\r\n else:\r\n logging.error(\"Queue is empty. Stopping...\", extra=self.extra)\r\n lj._finished = True\r\n except Exception as e:\r\n logging.error(\"{}: {}\".format(e), extra=self.extra)\r\n lj._finished = True\r\n\r\n with lj.raw_data.mutex:\r\n lj.raw_data.queue.clear()\r\n current_time = self._times[-1] + self._dt\r\n self._times.append(current_time)\r\n self._data[\"time (s)\"] = self._times\r\n\r\n for s in lj.sensors():\r\n name = '{} {} ({})'.format(s.parent().name,\r\n s.name,\r\n s.signal.units)\r\n if r is not None:\r\n # Get the average, make sure the length of the list\r\n # is a float for float division.\r\n voltage = (\r\n (sum(r[AIN.upper() + str(s.channel)]) /\r\n float(len(r[AIN.upper() + str(s.channel)])))\r\n )\r\n # convert to physical quantity\r\n v = Q_(np.asarray([voltage]), 'V')\r\n if s.name == 'p1':\r\n quantity = convert_pressure_px119(\r\n v, r_shunt=Q_(470.0, 'ohm'),\r\n value_range=Q_(np.array([0, 30]), 'psi'), zero=Q_(1.375, 'kPa')\r\n ).magnitude[0]\r\n elif s.name == 'p2':\r\n quantity = convert_pressure_px119(\r\n v, r_shunt=Q_(470.0, 'ohm'),\r\n value_range=Q_(np.array([0, 30]), 'psi'), zero=Q_(1.7, 'kPa')\r\n ).magnitude[0]\r\n elif s.name == 'p3':\r\n quantity = convert_pressure_px119(\r\n v, r_shunt=Q_(470.0, 'ohm'),\r\n value_range=Q_(np.array([0, 30]), 'psi'), zero=Q_(1.85, 'kPa')\r\n ).magnitude[0]\r\n elif s.name == 'T1':\r\n # logging.debug(\"T sensor voltage {}\".format(v[0]), extra=self.extra)\r\n quantity = convert_temperature_rtd(\r\n v, v_in=Q_(2.5, 'V'),\r\n r1=Q_(100., 'ohm'), filename='pt100.txt', zero=Q_(0.29, 'degC')\r\n ).magnitude[0]\r\n elif s.name == 'T2':\r\n # logging.debug(\"T sensor voltage {}\".format(v[0]), extra=self.extra)\r\n quantity = convert_temperature_rtd(\r\n v, v_in=Q_(2.5, 'V'),\r\n r1=Q_(100., 'ohm'), filename='pt100.txt', zero=Q_(-0.05, 'degC')\r\n ).magnitude[0]\r\n elif s.name == 'T3':\r\n # logging.debug(\"T sensor voltage {}\".format(v[0]), extra=self.extra)\r\n quantity = convert_temperature_rtd(\r\n v, v_in=Q_(2.5, 'V'),\r\n r1=Q_(100., 'ohm'), filename='pt100.txt'\r\n ).magnitude[0]\r\n elif s.name == 'T4':\r\n # logging.debug(\"T sensor voltage {}\".format(v[0]), extra=self.extra)\r\n quantity = convert_temperature_rtd(\r\n v, v_in=Q_(2.5, 'V'),\r\n r1=Q_(100., 'ohm'), filename='pt100.txt'\r\n ).magnitude[0]\r\n elif s.name == 'frate': # flow rate\r\n # logging.debug(\"magmeter voltage {}\".format(v[0]), extra=self.extra)\r\n quantity = convert_flow_rate_magmeter(\r\n v, r_shunt=Q_(465.2, 'ohm'),\r\n value_range=Q_(np.array([0, 80]), 'l/min')\r\n ).magnitude[0]\r\n if quantity < 0.0:\r\n quantity = 0.0\r\n else:\r\n quantity = voltage\r\n s.signal.field.setText('{:3.1f}'.format(quantity))\r\n self._data[name].append(quantity)\r\n\r\n for i, m 
in enumerate(mc.children()):\r\n if m is not None:\r\n mc.r_lock.acquire()\r\n for s in m.sensors():\r\n value = s.read()\r\n name = '{} {} ({})'.format(s.parent().name,\r\n s.name,\r\n s.signal.units)\r\n s.signal.field.setText('{:3.2f}'.format(value))\r\n self._data[name].append(value)\r\n mc.r_lock.release()\r\n d = {}\r\n for k, v in self._data.items():\r\n d[k] = '{:3.10f}'.format(v[-1])\r\n self._csv_writer.writerow(d)\r\n self._line_plot.setData(self._times, self._data[self._signal])", "async def _get_data(self):\n coros = []\n results = []\n for series_ids in self.series_ids:\n response = self._post(data={\"series_id\": series_ids})\n coros.append(response)\n if len(coros) == 5: # throttle at 5\n _ = await asyncio.gather(*coros)\n results.extend(_)\n coros = [] # Reset accumulator\n if coros:\n results.extend(await asyncio.gather(*coros))\n\n return filter(None, results)", "def sensor_history(self, sensor_name, start_time_sec, end_time_sec,\n include_value_ts=False, timeout_sec=0):\n\n if timeout_sec != 0:\n self._logger.warn(\n \"timeout_sec is no longer supported. Default tornado timeout is used\")\n\n params = {\n 'sensor': sensor_name,\n 'start_time': start_time_sec,\n 'end_time': end_time_sec,\n 'limit': MAX_SAMPLES_PER_HISTORY_QUERY,\n 'include_value_time': include_value_ts\n }\n\n url = url_concat(\n (yield self.get_sitemap())['historic_sensor_values'] + '/query', params)\n self._logger.debug(\"Sensor history request: %s\", url)\n response = yield self._http_client.fetch(url)\n data_json = json.loads(response.body)\n if 'data' not in data_json:\n raise SensorHistoryRequestError(\"Error requesting sensor history: {}\"\n .format(response.body))\n data = []\n for item in data_json['data']:\n if 'value_time' in item:\n sample = SensorSampleValueTime(item['sample_time'],\n item['value_time'],\n item['value'],\n item['status'])\n else:\n sample = SensorSample(item['sample_time'],\n item['value'],\n item['status'])\n data.append(sample)\n result = sorted(data, key=_sort_by_sample_time)\n raise tornado.gen.Return(result)", "def get_data_logic():\r\n global input_exchange\r\n global input_symbols\r\n global all_symbols\r\n global input_timeframe\r\n\r\n # create exchange connection\r\n exchange = Exchange(input_exchange)\r\n\r\n # perform check that exchange can grab price data\r\n if exchange.connection.has['fetchOHLCV']:\r\n\r\n # user ticked 'All Symbols?', so includes all symbols in\r\n # exchange_tickers.py for the particular exchange\r\n if all_symbols:\r\n symbol_list = SymbolList(symbols='auto', exchange=exchange)\r\n # user didn't tick 'All Symbols?', so create unpopulated symbol list\r\n else:\r\n symbol_list = SymbolList(exchange=exchange)\r\n # add all symbols user inputted\r\n for s in input_symbols:\r\n symbol_list.input_symbol(s)\r\n\r\n # get auto timeframe and check it is valid\r\n timeframe = Timeframe(timeframe=input_timeframe, exchange=exchange)\r\n while not timeframe.check_timeframe():\r\n timeframe.input_timeframe() # default to asking for input\r\n\r\n print(f\"Pulling data on the {timeframe.tf} timeframe for...\")\r\n print(symbol_list.symbols)\r\n\r\n # get current time in UTC in milliseconds\r\n now = datetime.now().astimezone(pytz.timezone('UTC'))\r\n now = int(now.timestamp()*1000)\r\n\r\n # loop over each symbol and pull new data\r\n for sym in symbol_list.symbols:\r\n # create csv filename and path\r\n file_sym = sym.replace('/', '')\r\n file_sym = file_sym.replace('-', '')\r\n filename = f\"{exchange.name}_{file_sym}_{timeframe.tf}.csv\" # generate 
filename from given information\r\n csv_path = f\"{exchange.name}/{timeframe.tf}/{filename}\"\r\n\r\n # get most recent price data and append it to existing data\r\n # (if it exists)\r\n price_data = PriceData(exchange=exchange, tf=timeframe.tf,\r\n sym=sym, now=now, path=csv_path)\r\n\r\n # check if price data csv already exists\r\n if price_data.exists():\r\n price_data.get_current()\r\n # get new data as far back as possible if csv does not exist\r\n else:\r\n price_data.get_new()\r\n\r\n # keep updating price_data until current time\r\n price_data.update()\r\n\r\n # write to csv\r\n price_data.write()\r\n\r\n print(\"Finished writing files!\")", "def main():\n data_file = 'shrec_timer.json'\n\n if len(sys.argv) == 2:\n generate_data(data_file, sys.argv[1])\n\n plot_data(data_file)", "def startDataAcq(self):\r\n\t\tglobal payload, control, output_settings, serials, datfiles\r\n\t\t# INITIALIZE THE OUTPUT FOLDER STRUCTURE\r\n\t\tcheck_dir(output_settings['folder'], output_settings['cruise'], payload)\r\n\t\tconfirm_dir(output_settings['folder'], output_settings['cruise'])\r\n\t\t# FIND THE START TIME\r\n\t\toutput_settings['start_time'] = init_time()\r\n\t\t# PRINT THE START TIME\r\n\t\tprint_spacer()\r\n\t\tprint 'Local Time: ', time.ctime(output_settings['start_time'])\r\n\t\tprint 'UTC: ', time.asctime(time.gmtime(output_settings['start_time']))\r\n\t\t\r\n\t\t# LOOP THROUGH THE SCIENTIFIC PAYLOAD\r\n\t\tfor k in payload.keys():\r\n\t\t\ttry:\r\n\t\t\t\tif serials[k].isOpen():\r\n\t\t\t\t\tclose_serial(serials[k])\r\n\t\t\texcept KeyError:\r\n\t\t\t\tprint ' '\r\n\t\t\t\t# print 'Serial port connected to '+k+' was not previously open.'\r\n\t\t\t# open the serial port\r\n\t\t\tserials[k] = init_serial(payload[k])\r\n\t\t\tif serials[k].isOpen():\t\t\t\t\r\n\t\t\t\t# print the serial info\r\n\t\t\t\tprint 'Receiving data from '+k\r\n\t\t\t\t# initialize the data file\r\n\t\t\t\tdatfiles[k] = init_datafile(output_settings, payload[k])\r\n\t\t\t\t# read one line because the first one after opening a port is usually gibberish\r\n\t\t\t\tline = serials[k].readline()\r\n\t\t\telse: \r\n\t\t\t\tprint 'Unable to connect to serial port '+payload[k]['port']+' connected to '+k\r\n\t\t\t# pause get everything setup\r\n\t\t\ttime.sleep(1)\r\n\t\t# start the loop \r\n\t\tcontrol.combine()", "async def get_api_data(self) -> dict:\n if not self.bos.is_set():\n self.lit = False\n return self.messages\n try:\n # get all data and split it up\n all_data = await self.send_api_cmd(\"devs+temps+fans\")\n devs_raw = all_data['devs'][0]\n temps_raw = all_data['temps'][0]\n fans_raw = all_data['fans'][0]\n\n # parse temperature data\n temps_data = {}\n for board in range(len(temps_raw['TEMPS'])):\n temps_data[f\"board_{temps_raw['TEMPS'][board]['ID']}\"] = {}\n temps_data[f\"board_{temps_raw['TEMPS'][board]['ID']}\"][\"Board\"] = temps_raw['TEMPS'][board]['Board']\n temps_data[f\"board_{temps_raw['TEMPS'][board]['ID']}\"][\"Chip\"] = temps_raw['TEMPS'][board]['Chip']\n\n # parse individual board and chip temperature data\n for board in temps_data.keys():\n if \"Board\" not in temps_data[board].keys():\n temps_data[board][\"Board\"] = 0\n if \"Chip\" not in temps_data[board].keys():\n temps_data[board][\"Chip\"] = 0\n\n # parse hashrate data\n hr_data = {}\n for board in range(len(devs_raw['DEVS'])):\n hr_data[f\"board_{devs_raw['DEVS'][board]['ID']}\"] = {}\n hr_data[f\"board_{devs_raw['DEVS'][board]['ID']}\"][\"HR\"] = round(\n devs_raw['DEVS'][board]['MHS 5s'] / 1000000,\n 2)\n\n # parse fan data\n fans_data 
= {}\n for fan in range(len(fans_raw['FANS'])):\n fans_data[f\"fan_{fans_raw['FANS'][fan]['ID']}\"] = {}\n fans_data[f\"fan_{fans_raw['FANS'][fan]['ID']}\"]['RPM'] = fans_raw['FANS'][fan]['RPM']\n\n # set the miner data\n miner_data = {'IP': self.ip, \"Light\": \"show\", 'Fans': fans_data, 'HR': hr_data, 'Temps': temps_data}\n\n # save stats for later\n self.stats = miner_data\n\n # return stats\n return miner_data\n except:\n # if it fails, return install data\n # usually fails on getting None from API\n self.lit = False\n data = self.messages\n data['Light'] = \"show\"\n return data", "def run(self):\n\n if self.transport == 'any':\n devs = kromek.discover()\n else:\n devs = kromek.discover(self.transport)\n\n print('Discovered %s' % devs)\n\n if len(devs) <= 0:\n return\n\n filtered = []\n\n for dev in devs:\n if self.device == 'all' or dev[0] in self.device:\n filtered.append(dev)\n\n devs = filtered\n if len(devs) <= 0:\n return\n\n done_devices = set()\n try:\n while self.running:\n print(\"Plot_manager.run: getting data\")\n with kromek.Controller(devs, self.interval) as controller:\n for reading in controller.read():\n if self.create_structures:\n self.total = np.array(reading[4])\n self.lst = np.array([reading[4]])\n self.create_structures = False\n else:\n self.total += np.array(reading[4])\n self.lst = np.concatenate(\n (self.lst, [np.array(reading[4])]))\n serial = reading[0]\n dev_count = reading[1]\n if serial not in done_devices:\n this_start, this_end = self.get_interval(\n time.time() - self.interval)\n\n self.handle_spectra(\n this_start, this_end, reading[4])\n if dev_count >= self.count > 0:\n done_devices.add(serial)\n controller.stop_collector(serial)\n if len(done_devices) >= len(devs):\n break\n except KeyboardInterrupt:\n self.vprint(1, '\\nKeyboardInterrupt: stopping Manager run')\n self.takedown()\n except SystemExit:\n self.vprint(1, '\\nSystemExit: taking down Manager')\n self.takedown()", "def receive_data(self):\n while True:\n\n data = self.recv_end()\n\n if len(data) > 0:\n\n try:\n self.current_data = json.loads(data)\n self.current_3D_points = base64.b64decode(self.current_data['points'])\n self.is_new_data = True\n\n except:\n # print('cannot load DepthCamera data')\n pass", "def get_data(self):\n logging.info(\n \"[IEXIntervalDataSource] started: fetching every %d seconds\",\n self._interval,\n )\n\n def _get(self):\n data = self._iex_source(**self._iex_source_kwargs)\n if data:\n should_update = True\n if self._should_hash:\n hashed = str_to_float(json.dumps(data, sort_keys=True))\n should_update = hashed != self._last_hash\n if should_update:\n self._last_hash = hashed\n if should_update:\n if self._data_cleaner:\n data = self._data_cleaner(data)\n self.table.update(data)\n \n callback = tornado.ioloop.PeriodicCallback(callback=partial(_get, self), callback_time=self._interval)\n callback.start()", "def log_data(self):\n\n assert self.tello is not None\n self.tello.subscribe(self.tello.EVENT_LOG_DATA, self.log_handler)\n self.tello.subscribe(self.tello.EVENT_FLIGHT_DATA, self.log_handler)\n self.tello.subscribe(self.tello.EVENT_FILE_RECEIVED, self.log_handler)", "def update_live_data(n, last_time, id1, id2, power):\n if power:\n raise PreventUpdate\n\n timer_start = perf_counter()\n # 1 sec delay so server has time to add live data\n end_time = datetime.now(timezone.utc) - timedelta(seconds=1)\n\n # Initialization and lag prevention\n if last_time is None or end_time - strptime_fix(last_time) > timedelta(seconds=3):\n logging.warning('Falling behind! 
Start %s End %s', last_time, end_time)\n return dash.no_update, dash.no_update, end_time.isoformat(), dash.no_update\n\n # Query data from SMIP\n logging.info(f'start_time {last_time} end_time {end_time}')\n timer_query_start = perf_counter()\n r = conn.get_data(last_time, end_time.isoformat(),\n [id1, id2], timeout=1)\n timer_query_end = perf_counter()\n response_json: dict = r.json()\n logging.debug(response_json.keys())\n if 'errors' in response_json:\n logging.error(response_json)\n raise Exception()\n data = response_json['data']['getRawHistoryDataWithSampling']\n logging.info('Got %s responses in %s seconds', len(\n data), timer_query_end - timer_query_start)\n\n # Used for measuring performance\n start_processing = perf_counter()\n\n # Unpack data\n def unpack(id: int):\n \"\"\"Unpacks return data into time and value lists\"\"\"\n id = int(id)\n time_list = [i['ts'] for i in data if int(i['id']) == id]\n val_list = [i['floatvalue'] for i in data if int(i['id']) == id]\n # SMIP always returns one entry before the start time for each ID, we don't need this\n if len(time_list) < 2 or len(val_list) < 2:\n return dash.no_update\n time_list.pop(0)\n val_list.pop(0)\n # Measure sampling rate\n rate = nan\n if len(time_list) > 1:\n rate = (strptime_fix(time_list[1])\n - strptime_fix(time_list[0])).total_seconds()\n return {'time_list': time_list, 'val_list': val_list, 'rate': rate}\n\n # Used for measuring performance\n data_processed = perf_counter()\n logging.info('Total %s Query %s Processing %s', data_processed - timer_start, timer_query_end - timer_query_start,\n data_processed - start_processing)\n\n return unpack(id1), unpack(id2), end_time.isoformat(), \\\n [f'Last updated {end_time.astimezone()},',\n html.Br(),\n f'received {len(data)} samples in {round(data_processed - timer_start, 3)} seconds']", "def on_timer(self):\n self.read_serial_data()\n # self.update_monitor()", "def _after_connect(self):\r\n _debug('GUISignalGenerator: _after_connect()')\r\n # Update the controls\r\n self.button_sweep.enable()\r\n self.button_send_list.enable()\r\n self.button_reset.enable()\r\n \r\n # Update the RF button.\r\n rf_on = self.api.get_output()\r\n if rf_on == None: self.button_rf.set_checked(True, block_events=True).enable()\r\n else: self.button_rf.set_checked(rf_on, block_events=True).enable()\r\n \r\n # Update the combo; we block first just in case the value doesn't \"change\"\r\n if self.api == None: self.label_instrument_name.set_text('Simulation')\r\n else:\r\n if self.api.get_mode() == 'Fixed': self.combo_mode.set_value(0, block_events=True).enable()\r\n else: self.combo_mode.set_value(1, block_events=True).enable()\r\n self._combo_mode_changed()\r\n \r\n # Update the list plot\r\n self.query_list()", "def get_last_data(request):\n\n return_list = []\n if request.POST:\n try:\n conn = psycopg2.connect(dbname=DB_NAME,\n user=DB_USER,\n password=DB_PASSWORD,\n host=DB_HOST)\n cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)\n\n cur.execute(\"SELECT sn, time, icsmain, icsz1, icsz2, icsz3, icsboiler, t1, t2, t3, t4, t5, t6, t7,\"\n \"rt1, rt2, rt3, endswitch, weather FROM devicedata \"\n \"WHERE sn='BRANDON_SN_US' \"\n \"ORDER BY id DESC LIMIT 5\")\n\n get_device_data = cur.fetchall()\n\n cur.close()\n conn.close()\n\n for info in get_device_data:\n item_dict = {\"sn\": info[0], \"time\": str(info[1]), \"icsmain\": info[2], \"icsz1\": info[3], \"icsz2\": info[4],\n \"icsz3\": info[5], \"icsboiler\": info[6], \"t1\": info[7], \"t2\": info[8], \"t3\": info[9],\n \"t4\": 
info[10], \"t5\": info[11], \"t6\": info[12], \"t7\": info[13], \"rt1\": info[14], \"rt2\": info[15],\n \"rt3\": info[16], \"endswitch\": info[17]}\n return_list.append(item_dict)\n except:\n pass\n\n return JsonResponse(return_list, safe=False)", "def handle_spectra(self, this_start, this_end, spectra):\n\n self.rt_plot.add_data(self.rt_plot.queue, spectra, self.maxspectra)\n\n if self.plot:\n\n '''\n Plot the data.\n '''\n self.plot_waterfall(1)\n self.plot_spectrum(2)\n # self.plot_fitter()\n\n '''\n Uncomment 3 lines below to plot the spectra fitter plots.\n '''\n else:\n self.data_handler.main(\n self.datalog, self.calibrationlog, spectra, this_start, this_end)", "def data_received(self, data):\n self.log.debug('data_received: {!r}'.format(data))\n self._last_received = datetime.datetime.now()\n for byte in (bytes([value]) for value in data):\n\n try:\n self.stream.feed_byte(byte)\n except (ValueError, AssertionError):\n e_type, e_value, _ = sys.exc_info()\n map(self.log.warn,\n traceback.format_exception_only(e_type, e_value))\n continue\n\n if self.stream.is_oob:\n continue\n\n # self.reader.feed_byte()\n self.shell.feed_byte(byte)", "def query_list(self):\r\n self.plot_list.clear()\r\n self.settings.send_to_databox_header(self.plot_list)\r\n \r\n self.label_list_status.set_text('Getting frequencies and powers.')\r\n self.window.process_events()\r\n \r\n fs = self.api.get_list_frequencies()\r\n ps = self.api.get_list_powers()\r\n if fs == None or ps == None: return\r\n \r\n if not len(fs) == len(ps):\r\n print(\"ERROR query_list(): List lengths do not match. len(fs)=\"+str(len(fs))+' len(ps)='+str(len(ps)) )\r\n \r\n N = len(fs)\r\n self.plot_list['n'] = _n.linspace(0, N-1, N)\r\n self.plot_list['f_Hz'] = fs\r\n self.plot_list['P_dBm'] = ps\r\n \r\n self.label_list_status.set_text(str(N) + ' points in list memory')\r\n self.plot_list.plot()\r\n self.button_send_list.disable()\r\n self.window.process_events()", "def get_history():\n return response_texts_to_entries(make_post_request(HISTORY_API, data={\"k\": config[\"api_key\"]}))", "def start(self):\n record_list_count = 0\n item_count = 0\n output_list = []\n record_in_range = False\n\n if not self.trim:\n print PROMPT + \"Getting Voltage readings that arrived at the server from device \" + \\\n self.name,\n if self.start_time is not None:\n print(\"after around UTC \" + date.strftime(self.start_time, \"%Y-%m-%d %H:%M\")),\n if self.end_time is not None:\n print(\"and before around UTC \" + date.strftime(self.end_time, \"%Y-%m-%d %H:%M\")),\n stdout.write('\\n')\n print PROMPT + \"V_BAT_OK, V_IN, V_PRIMARY (all in milliVolts)\"\n\n # Set some default start and end times if they are None\n if self.start_time is None:\n self.start_time = datetime.strptime('Jan 1 1970', '%b %d %Y').replace(tzinfo=utc)\n if self.end_time is None:\n self.end_time = datetime.strptime('Jan 1 2038', '%b %d %Y').replace(tzinfo=utc)\n\n # Create some dummy object IDs from the dates/times to use with PyMongo\n object_id_start_time = ObjectId.from_datetime(self.start_time)\n object_id_end_time = ObjectId.from_datetime(self.end_time)\n\n # Retrieve all the records that match the given name in the order they were created\n record_list = self.collection.find({\"n\": self.name, \\\n \"_id\": {\"$gte\": object_id_start_time, \\\n \"$lte\": object_id_end_time}}). 
\\\n sort([[\"_id\", 1]])\n for record in record_list:\n record_in_range = record[\"_id\"].generation_time > self.start_time\n if record_in_range:\n record_list_count += 1\n # Find the index in the record, just to be sure it's a record\n if \"i\" in record:\n # Find the report items in the record\n if \"r\" in record:\n r_list = record[\"r\"]\n # Go through the list\n for r_item in r_list:\n # See if there's a voltages item in it\n if \"vlt\" in r_item:\n voltages_item = r_item[\"vlt\"]\n item_count += 1\n # Decode the voltages and add them to our list\n voltages_struct = self.get_voltages_item(voltages_item)\n output_list.append(voltages_struct)\n # Having got everything, sort it in time order\n output_list.sort(key=sort_func)\n # Now print it\n for item in output_list:\n time_string = datetime.utcfromtimestamp(item.timestamp) \\\n .strftime(\"%Y-%m-%d_%H:%M:%S\")\n print(\"%s %d %d %d\" % (time_string, item.v_bat_ok_mv,\n item.v_in_mv, item.v_primary_mv))\n if not self.trim:\n print(PROMPT + \"%r record(s) returned containing %r voltage reading(s)\"\n % (record_list_count, item_count))", "def slot_history_changed(self, _sender, _data):\r\n last_candle = self.history.last_candle()\r\n if last_candle:\r\n self.client.history_last_candle = last_candle.tim", "def __flight_data_handler(self, event, sender, data):\n self.battery = data.battery_percentage\n self.fly_mode = data.fly_mode\n self.throw_fly_timer = data.throw_fly_timer\n self.throw_ongoing = data.throw_fly_timer > 0\n\n if self.prev_flight_data != str(data):\n print(data)\n self.prev_flight_data = str(data)\n self.flight_data = data\n\n if self.is_flying != data.em_sky:\n self.is_flying = data.em_sky\n log.debug(f\"FLYING : {self.is_flying}\")\n if not self.is_flying:\n self.reset()\n else:\n if self.tracking_after_takeoff:\n log.info(\"Tracking on after takeoff\")\n self.toggle_tracking(True)\n\n # if self.write_header_log:\n # self.write_header_log = False\n # self.log_file_log.write(f\"{data.format_cvs_header()}\\n\")\n # self.log_file_log.write(f\"{data.format_cvs(0)}\\n\")", "def get_data(self):\n return self.read_sample_win()", "def listen(self):\n\n if not self.key_data:\n self.key_data = {}\n for i in range(1024):\n self.key_data[i] = False\n\n if not self.axis_data:\n self.axis_data = {}\n for i in range(self.controller.get_numaxes()):\n self.axis_data[i] = 0.0\n\n if not self.button_data:\n self.button_data = {}\n for i in range(self.controller.get_numbuttons()):\n self.button_data[i] = False\n\n if not self.hat_data:\n self.hat_data = {}\n for i in range(self.controller.get_numhats()):\n self.hat_data[i] = (0, 0)\n\n debug_toggle = True\n print_state_toggle = True\n\n # These parameters define how frequesnt speed setting sent over serial to arduino\n speed_threshold = 10.0 # sets update threshold\n speed_step = 1 # sets acceleration\n speed_delay = 0.01 # delay per 1 step in sec\n\n mode_switch = \"j\" # control mode: k - keyboard, j - joystick\n\n # Parameters for keyboard control mode\n speed = 0.0\n speed_current = 0\n direction = \"r\" # r - release, f - forward, b - backward\n direction_current = \"r\"\n\n # Parameters for joystick control mode\n speed_l = 0\n speed_r = 0\n prev_speed_l = 0\n prev_speed_r = 0\n prev_btn = False\n\n while True:\n prev = self.axis_data\n for event in pygame.event.get():\n if event.type == pygame.KEYDOWN:\n self.key_data[event.key] = True\n elif event.type == pygame.KEYUP:\n self.key_data[event.key] = False\n if event.type == pygame.JOYAXISMOTION:\n self.axis_data[event.axis] = 
round(event.value,2)\n elif event.type == pygame.JOYBUTTONDOWN:\n self.button_data[event.button] = True\n elif event.type == pygame.JOYBUTTONUP:\n self.button_data[event.button] = False\n elif event.type == pygame.JOYHATMOTION:\n self.hat_data[event.hat] = event.value\n\n # check for exit command\n if self.button_data[9] or self.key_data[pygame.QUIT] or self.key_data[pygame.K_ESCAPE]:\n pygame.quit()\n break\n\n # toggle debug\n if self.key_data[pygame.K_d]:\n if debug_toggle:\n print(\"Toggle debug\")\n self.ser.write(b'd')\n debug_toggle = False\n else:\n debug_toggle = True\n\n # print out motors status\n if self.key_data[pygame.K_p]:\n if print_state_toggle:\n self.ser.write(b'p')\n if self.ser.in_waiting:\n print (self.ser.readline())\n print_state_toggle = False\n else:\n print_state_toggle = True\n\n if self.key_data[pygame.K_1] and mode_switch != \"k\":\n mode_switch = \"k\"\n\n if self.key_data[pygame.K_2] and mode_switch != \"j\":\n print(\"Joystick mode: ON\")\n mode_switch = \"j\"\n\n if mode_switch == \"k\": # keyboard control mode\n # accelearte forward\n if self.key_data[pygame.K_a] and direction != \"r\":\n if speed < 255.0:\n speed = speed + speed_step\n sleep(speed_delay)\n # accelerate backward\n if self.key_data[pygame.K_z] and direction != \"r\":\n if speed > 0.0:\n speed = speed - speed_step\n sleep(speed_delay)\n\n if self.key_data[pygame.K_UP] and direction != \"f\":\n direction = \"f\"\n if self.key_data[pygame.K_DOWN] and direction != \"b\":\n direction = \"b\"\n if self.key_data[pygame.K_UP] == False and direction == \"f\":\n direction = \"r\"\n if self.key_data[pygame.K_DOWN] == False and direction == \"b\":\n direction = \"r\"\n\n if math.fabs(speed - speed_current) > speed_threshold or direction != direction_current:\n # print(\"{0}, {1}, {2}, {3}\".format(speed, speed_current, direction, direction_current))\n direction_current = direction\n if direction == \"r\":\n speed = 0.0\n speed_current = int(speed)\n str_r = \"sr\" + direction_current + str(speed_current) + \"e\"\n str_l = \"sl\" + direction_current + str(speed_current) + \"e\"\n print(str_l)\n print(str_r)\n self.ser.write(str_r.encode())\n self.ser.write(str_l.encode())\n\n if(self.key_data[pygame.K_LEFT]):\n str_rf = \"srf\" + str(speed_current) + \"e\"\n self.ser.write(str_rf.encode())\n str_lf = \"slf\" + str(int(speed_current*0.9)) + \"e\"\n self.ser.write(str_lf.encode())\n elif(self.key_data[pygame.K_RIGHT]):\n str_rb = \"srf\" + str(int(speed_current*0.9)) + \"e\"\n self.ser.write(str_rb.encode())\n str_lb = \"slf\" + str(speed_current) + \"e\"\n self.ser.write(str_lb.encode())\n\n if (self.key_data[pygame.K_UP] == False and self.key_data[pygame.K_DOWN] == False) and (self.key_data[pygame.K_a] == False and self.key_data[pygame.K_z] == False):\n speed = 0\n speed_current = speed\n direction = \"r\"\n direction_current = direction\n self.ser.write(b'srze')\n self.ser.write(b'slze')\n\n if mode_switch == \"j\": # joystick control mode\n if self.ser.in_waiting:\n data = str(self.ser.readline().strip())\n data = data[2 :len(data)-1]\n print(data)\n #self.aio.send('Team Hacky Slackers', data)\n\n prev_speed_l = speed_l\n prev_speed_r = speed_r\n speed_threshold = 1\n\n #simplified linear mapping for controller\n speed_l = int((self.axis_data[0]*(-50)) + 90)\n speed_r = int(math.fabs(self.axis_data[3]*255))\n #print(self.axis_data)\n #print(\"curr_l: {0}, perv_l: {1}, curr_r:{2}, perv_r:{3}\".format(speed_l, prev_speed_l, speed_r,prev_speed_r))\n\n if self.axis_data[0] < -0.05 and math.fabs(speed_l - 
prev_speed_l) > speed_threshold:\n str_lf = \"slf\" + str(speed_l) + \"e\"\n self.ser.write(str_lf.encode())\n elif self.axis_data[0] > 0.05 and math.fabs(speed_l - prev_speed_l) > speed_threshold:\n str_lb = \"slb\" + str(speed_l) + \"e\"\n self.ser.write(str_lb.encode())\n\n\n if self.axis_data[3] < -0.03 and math.fabs(speed_r - prev_speed_r) > speed_threshold:\n str_rf = \"srf\" + str(speed_r) + \"e\"\n self.ser.write(str_rf.encode())\n elif self.axis_data[3] > 0.03 and math.fabs(speed_r - prev_speed_r) > speed_threshold:\n str_rb = \"srb\" + str(speed_r) + \"e\"\n self.ser.write(str_rb.encode())\n\n if ( self.axis_data[0] >= -0.05 and self.axis_data[0] <= 0.05 ) and ( self.axis_data[3] >= -0.05 and self.axis_data[3] <= 0.05 ):\n speed_l = 90\n speed_r = 0\n self.ser.write(b'srze')\n self.ser.write(b'slze')\n\n #Logic to call RFID scan only once per click of R1 button\n # if(prev_btn != self.button_data[5]):\n # prev_btn = self.button_data[5]\n # if self.button_data[5] :\n # print(\"Scanning for RFID Card\")\n # self.ser.write(\"i\".encode())\n\n # clear()\n # pprint.pprint(self.button_data)\n # pprint.pprint(self.axis_data)\n # pprint.pprint(self.hat_data)", "def receive_data():\n\n while True:\n try:\n bytes_to_read = ser.readline()\n print(bytes_to_read)\n data = json.loads(bytes_to_read.decode('utf-8'))\n distance = data['distance']\n print(f'distance: {distance}')\n except Exception as e:\n print(f'Error in reading bytes from the \\'duino: {e}')", "def sendPreviousDataPoints(self):\n if self.ioLoopInst is not None:\n cmd = {'cmd': 'setDataPoints', 'value': self.dataPoints}\n self._sendMessageToWeb(cmd)\n else:\n print(\"sendPreviousDataPoints: \" + self.dataPoints)" ]
[ "0.6700092", "0.6360538", "0.6166242", "0.60464233", "0.6038091", "0.6023963", "0.60063523", "0.5908595", "0.58683914", "0.5861097", "0.58507204", "0.5833523", "0.5832018", "0.57664955", "0.57027274", "0.5638211", "0.5628629", "0.5615999", "0.56110036", "0.56060934", "0.5573338", "0.55372125", "0.55178803", "0.5506934", "0.5479551", "0.54564303", "0.54483074", "0.5427582", "0.54211545", "0.5408876", "0.5407351", "0.5375614", "0.53665334", "0.5360664", "0.5356425", "0.53510857", "0.5341443", "0.5334029", "0.5332098", "0.5315797", "0.531547", "0.5304051", "0.53018165", "0.5300674", "0.52904433", "0.5290254", "0.52779025", "0.5276737", "0.5276123", "0.52648497", "0.5262615", "0.5249989", "0.5248876", "0.52415985", "0.523876", "0.5231435", "0.52294254", "0.52247196", "0.5218119", "0.5216528", "0.5214594", "0.52132654", "0.52109796", "0.52079827", "0.5203675", "0.520273", "0.5189451", "0.51790315", "0.517195", "0.51679075", "0.516474", "0.51528597", "0.5150831", "0.51491565", "0.5148176", "0.5146634", "0.5140949", "0.5130911", "0.51291615", "0.5126091", "0.5125773", "0.5115986", "0.5114233", "0.5110002", "0.5108117", "0.51050806", "0.5104406", "0.5102758", "0.5102146", "0.51020974", "0.50982267", "0.50956583", "0.50918645", "0.50890255", "0.5088444", "0.5086157", "0.50858295", "0.50841296", "0.50827307", "0.5082223" ]
0.6273694
2
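
The negatives above embed a pygame/pyserial tele-operation loop whose motor commands are plain ASCII strings written to the serial port. As a reading aid only (not part of the dataset row), here is a minimal Python sketch of that command format as it can be inferred from the snippet — 's' + side ('r'/'l') + direction ('f' forward, 'b' backward, 'z' stop) + optional speed + 'e'. The helper name, port and baud rate below are assumptions, since the firmware side of the protocol is not shown.

    def motor_command(side: str, direction: str, speed: int = 0) -> bytes:
        # Builds one command in the inferred 's<side><direction>[speed]e' format,
        # e.g. b'srf120e' (right motor forward at 120) or b'slze' (left motor stop).
        if direction == "z":
            return f"s{side}ze".encode()
        speed = max(0, min(255, int(speed)))  # speeds in the loop above stay within 0-255
        return f"s{side}{direction}{speed}e".encode()

    assert motor_command("r", "f", 120) == b"srf120e"
    assert motor_command("l", "z") == b"slze"

    # Hypothetical usage with pyserial (port name and baud rate are placeholders):
    # import serial
    # ser = serial.Serial("/dev/ttyACM0", 115200)
    # ser.write(motor_command("r", "f", 120))
    # ser.write(motor_command("l", "z"))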
Determines if a given datetime.datetime is aware.
def is_aware(value): return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_aware(value: datetime) -> bool:\n\n return value.utcoffset() is not None", "def test_make_datetime_aware(settings):\n # Set the TIME_ZONE in the settings.\n settings.TIME_ZONE = \"America/New_York\"\n\n # Calling make_datetime_aware() returns a timezone-aware datetime referring\n # to the moment from the naive_datetime_obj, in the appropriate time zone.\n naive_datetime_str = \"2018-01-01T20:00:00\"\n expected_datetime_obj = make_aware(\n datetime(year=2018, month=1, day=1, hour=20, minute=0, second=0),\n timezone=pytz.timezone(\"America/New_York\"),\n )\n assert make_datetime_aware(naive_datetime_str) == expected_datetime_obj\n\n # Calling make_datetime_aware() for non-datetime strings returns None.\n dt_str = \"\"\n aware_dt = make_datetime_aware(dt_str)\n assert aware_dt == None\n dt_str = None\n aware_dt = make_datetime_aware(dt_str)\n assert aware_dt == None", "def _is_timezone_aware(value):\n return value.utcoffset() is not None", "def valid_datetime(dt):\n if isinstance(dt.tzinfo, tzinfo) and not datetime_ambiguous(dt):\n return True\n return False", "def is_datetime(self) -> bool:\n return False", "def is_datetime(self):\n answer = self._call('is_datetime')\n return answer.yes", "def test_freeze_with_timezone_aware_datetime_in_non_utc():\n utc_now = datetime.datetime.utcnow()\n assert utc_now.tzinfo is None\n assert utc_now == datetime.datetime(1970, 1, 1, 4)", "def check_dt_consistency(date_dt):\n\n # https://en.wikipedia.org/wiki/Tz_database\n # https://www.iana.org/time-zones\n \n if date_dt.tzinfo is None:\n return True\n else:\n \n # This check is quite heavy but there is apparently no other way to do it.\n if date_dt.utcoffset() != dt_from_s(s_from_dt(date_dt), tz=date_dt.tzinfo).utcoffset():\n return False\n else:\n return True", "def make_aware(value: datetime, timezone=None, is_dst=None) -> datetime:\n\n if timezone is None:\n timezone = get_current_timezone()\n\n if hasattr(timezone, \"localize\"):\n # This method is available for pytz time zones.\n return timezone.localize(value, is_dst=is_dst)\n else:\n # Check that we won't overwrite the timezone of an aware datetime.\n if is_aware(value):\n raise ValueError(\"make_aware expects a naive datetime, got %s\" % value)\n # This may be wrong around DST changes!\n return value.replace(tzinfo=timezone)", "def correct_datetime(record_datetime):\n assert record_datetime.date() == datetime.now(timezone.utc).date()", "def _uses_datetimeblock(dtype: Union[np.dtype, ExtensionDtype]) -> bool:\n vtype = dtype.type\n return issubclass(vtype, np.datetime64)", "def test_freeze_with_timezone_aware_datetime_in_utc():\n utc_now = datetime.datetime.utcnow()\n assert utc_now.tzinfo is None", "def is_after(self, dt: datetime) -> bool:\n return self.target_time >= make_tz_aware(dt)", "def in_between_datetime(now, start, end):\n return start <= now <= end", "def isNaive(self, date):\n return not self.isAware(date)", "def has_datetime_type(obj: _std_typing.Any) -> bool:\n return obj.dtype == sc.DType.datetime64", "def is_date(dt):\n return isinstance(dt, datetime.date) and not isinstance(dt, datetime.datetime)", "def is_naive(value: datetime) -> bool:\n\n return value.utcoffset() is None", "def datetime_has_tz(dt):\n\n if type(dt) != datetime.datetime:\n raise TypeError(f\"dt must be type datetime.datetime, not {type(dt)}\")\n\n return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None", "def in_datetime_interval(when, *, start=None, end=None):\n when = as_ref_datetime(when) # This is not allowed to be None, but could be str 
and we need datetimes to compare.\n start = start and as_ref_datetime(start)\n end = end and as_ref_datetime(end)\n return (not start or start <= when) and (not end or end >= when)", "async def datetime(self, aware=False) -> dt.datetime:\n if aware is True:\n now = await self.AD.sched.get_now()\n return now.astimezone(self.AD.tz)\n else:\n return await self.AD.sched.get_now_naive()", "def check_date(created_at, start, end):\n x = get_date(created_at)\n return x <= end and x >= start", "def test_enlighten_dtime(self):\n\n est = pytz.timezone(\"EST\")\n aware_dtime = datetime.datetime(\n year=1985, month=11, day=15,\n hour=6, minute=0,\n tzinfo=est)\n\n enlightened_dtime = enlighten_dtime(aware_dtime)\n # The tzinfo should be untouched.\n self.assertIs(aware_dtime.tzinfo, enlightened_dtime.tzinfo)\n\n # This is a naive object, but has UTC values for hour.\n utcnow = datetime.datetime.now()\n # No tzinfo was present, so that is replaced. hour should be the same.\n enlightened_utcnow = enlighten_dtime(utcnow)\n self.assertEqual(enlightened_utcnow.hour, utcnow.hour)\n self.assertIs(enlightened_utcnow.tzinfo, UTC_TZINFO)", "def office_is_open_on_datetime(iso_datetime):\n is_open = False\n d_time = datetime.fromisoformat(iso_datetime)\n d_date = date(d_time.year, d_time.month, d_time.day)\n schedule = AppointmentService.APPOINTMENT_SCHEDULE.get(d_date.weekday(), {})\n if schedule:\n begin_time = datetime.combine(d_date, schedule['begin'])\n end_time = datetime.combine(d_date, schedule['end'])\n if begin_time <= d_time <= end_time:\n is_open = True\n\n return is_open", "def ensure_datetime(ob: AnyDatetime) -> datetime.datetime:\n if isinstance(ob, datetime.datetime):\n return ob\n date = cast(datetime.date, ob)\n time = cast(datetime.time, ob)\n if isinstance(ob, datetime.date):\n time = datetime.time()\n if isinstance(ob, datetime.time):\n date = datetime.date(1900, 1, 1)\n return datetime.datetime.combine(date, time)", "def make_tz_aware(local_dt):\n aware_dt = timezone('US/Eastern').localize(local_dt)\n return aware_dt", "def _is_date_in_range(self, date):\n date_obj = datetime.strptime(date.split('T')[0], '%Y-%m-%d')\n \"\"\"When running under delta feed mode, we need to consider only those vulns which were\n updated between the given offset date and today's date.\"\"\"\n return self.today > date_obj >= self.start_day", "def omniscient_datetime(*args):\n d = original_datetime(*args)\n if settings.USE_TZ:\n d = timezone.make_aware(d, timezone.utc)\n return d", "def validDateTime( dateTime ):\n try:\n datetime.strptime( dateTime, \"%Y-%m-%dT%H:%M:%S.%fZ\" )\n return True\n except ValueError:\n return False", "def adjust_icms_v1_datetime(dt_val: dt.datetime) -> dt.datetime:\n\n if timezone.is_aware(dt_val):\n raise ValueError(f\"Unable to adjust an aware datetime value: {dt_val}\")\n\n # ICMS V1 datetime values are created using this:\n # https://docs.oracle.com/database/121/SQLRF/functions207.htm#SQLRF06124\n # Therefore replace the naive datetime with the correct timezone\n aware_dt = dt_val.replace(tzinfo=UK_TZ)\n\n # Return a datetime that has been offset to UTC\n utc_dt = aware_dt.astimezone(dt.timezone.utc)\n\n return utc_dt", "def timestamp_aware(dt):\n if dt.tzinfo is None:\n dt = dt.replace(tzinfo=tz.tzlocal()) # new object\n return dt", "def get_timezone_aware_datetime(datetime):\n if not datetime:\n return None\n local = timezone.get_current_timezone()\n return (local.localize(parse(datetime), is_dst=None)).astimezone(timezone.utc)", "def aired(self):\n # TODO: timezone\n airdatetime = 
self.airdatetime\n if airdatetime:\n return datetime.now() >= airdatetime + timedelta(minutes=self.series.runtime)\n else:\n return False", "def is_in_advent() -> bool:\n # Run the code from the 1st to the 24th\n return datetime.now(EST).day in range(1, 25) and datetime.now(EST).month == 12", "def enforce_timezone(self, value):\n try:\n tz = timezone._active.value\n if (self.default_timezone is not None) and not timezone.is_aware(value):\n return timezone.make_aware(value, tz)\n return value\n except AttributeError:\n return super().enforce_timezone(value)", "def almost_same_datetime(dt1, dt2, allowed_delta=timedelta(minutes=1)):\r\n return abs(dt1 - dt2) < allowed_delta", "def test_datetime(self):\n diff = self.machine_date - self.actual_date < datetime.timedelta(0, 20, 0)", "def is_datetime(s: Union[str, int, float]):\n if is_number(s):\n return False\n\n try:\n parse_datetime(s)\n return True\n except Exception:\n return False", "def date_make_timezone_aware(datetime_object, timezone_string=None):\n if timezone_string:\n # make the date timezone aware using the given timezone_string\n timezone_object = pytz_timezone_object(timezone_string)\n timezone_aware_datetime_object = timezone_object.localize(datetime_object)\n else:\n # make the date timezone aware using the timezone of the current system\n timezone_aware_datetime_object = datetime_object.astimezone()\n\n return timezone_aware_datetime_object", "def is_interpretable(self):\n return bool(self.as_date() or self.as_time())", "def test_paid_at(self):\n\n self.assertIsInstance(self.obj.paid_at, datetime)", "def check_date(date):\n import datetime\n correctDate = None\n date = str(date)\n \n if (len(date)!=8):\n return False\n year = int(date[0:4])\n month = int(date[4:6])\n day = int(date[6:8])\n try:\n datetime.datetime(year,month,day)\n correctDate = True\n except ValueError:\n correctDate = False\n return correctDate", "def could_be_datetime(val, fmt):\n\n if val == None or fmt == None:\n return False\n\n if isinstance(val, datetime):\n return True\n\n if isinstance(val, (str, unicode)):\n if Record.is_empty_str(val) or Record.is_empty_str(fmt):\n return False\n\n try:\n d = datetime.strptime(val, fmt)\n if not isinstance(d, datetime):\n raise ValueError\n else:\n return True\n except Exception as e:\n logging.error(e)\n return False\n\n #otherwise\n return False", "def has_time(self):\n return isinstance(self._start, datetime.datetime)", "def is_in_the_future(dt):\n if dt > datetime.now(pytz.utc):\n return True\n return False", "def is_available_at(self, datetime):\n for booking in self.booking_set.all():\n if booking.schedule_start <= datetime < booking.schedule_end and not booking.is_cancelled():\n return False\n return True", "def test_update_at(self):\n self.assertIsInstance(self.obj.update_at, datetime)", "def test_update_at(self):\n self.assertIsInstance(self.obj.update_at, datetime)", "def test_update_at(self):\n self.assertIsInstance(self.obj.update_at, datetime)", "def tz_aware(value: datetime) -> datetime:\n if settings.USE_TZ:\n value = value.replace(tzinfo=timezone.utc)\n\n return value", "def test_missing_report_datetime(self):\n self.assertEqual(datetime.datetime.min, self.__uft.datetime('raise'))", "def is_starttrimester(today):\n if isinstance(today, datetime):\n if today.day == 1 and today.month == 1:\n return True\n elif today.day == 1 and today.month == 4:\n return True\n elif today.day == 1 and today.month == 7:\n return True\n elif today.day == 1 and today.month == 10:\n return True\n return False\n else:\n 
raise Exception(\"{} is not a datetime instance\".format(today))", "def datetime_checkinput(year, month, day):\n try:\n datetime.datetime(year, month, day)\n except:\n raise Invaliddatetimeinput\n return 0", "def tz_aware(dt: datetime, default: tzinfo = tzutc()) -> datetime:\n if dt.tzinfo is None:\n dt = dt.replace(tzinfo=default)\n return dt", "def test_14_digit_datetime_detection(self):\n obj = awstats_reader.awstats_datetime('20091130165230')\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsDateTime))", "def is_valid_date_offset(user_date, offset):\n return", "def skip_or_run_datetime_test(func):\n\n return skip_or_run_test_pcall_require(func, 'datetime',\n 'does not support datetime type')", "def is_auto_assigned_date_column(column):\n return (\n (\n isinstance(column.type, sa.DateTime) or\n isinstance(column.type, sa.Date)\n )\n and\n (\n column.default or\n column.server_default or\n column.onupdate or\n column.server_onupdate\n )\n )", "def simulatedate_checkinput(start, end):\n start_year, start_month, start_day = parse_string_datetime(start)\n end_year, end_month, end_day = parse_string_datetime(end)\n if datetime_checkinput(start_year, start_month, start_day) == 0 and datetime_checkinput(end_year, end_month, end_day) == 0:\n start_time = datetime.datetime(start_year, start_month, start_day)\n end_time = datetime.datetime(end_year, end_month, end_day)\n if start_time < end_time:\n return 0\n else:\n raise Invaliddatetimeinput", "def test_format_builtin(self):\n with self.settings(TIME_ZONE='UTC'):\n datetime = aware_datetime(2015, 1, 1, 5, 7)\n assert_equal(format_datetime(datetime, 'time'), '05:07 UTC')", "def is_soon(dt, window):\r\n soon = (utcnow() + datetime.timedelta(seconds=window))\r\n return normalize_time(dt) <= soon", "def is_soon(dt, window):\n soon = (utcnow() + datetime.timedelta(seconds=window))\n return normalize_time(dt) <= soon", "def _test_df_datetime(self, df):\n date_raw = df['DateListed'].iloc[0] # e.g. 
'2016-01-07 00:00:00'\n first_date_time = datetime.strptime(date_raw, \"%Y-%m-%d %H:%M:%S\")\n assert first_date_time >= datetime(2016, 1, 1, 0, 0) and \\\n first_date_time < datetime(2017, 1, 1, 0, 0)", "def is_available(iso_datetime):\n d_time = datetime.fromisoformat(AppointmentService.closest_half(iso_datetime))\n av_slots = AppointmentService.get_available_slots(iso_datetime)\n is_available = (d_time in av_slots)\n\n return is_available", "def acceptable(self):\n now = datetime.datetime.now()\n origin = datetime.datetime.combine(self.date, datetime.time.min)\n start = origin + datetime.timedelta(hours=6)\n end = origin + datetime.timedelta(days=1)\n morning = end + datetime.timedelta(hours=6)\n if now < origin or now > morning:\n return 0\n if now >= end or now <= start:\n return 1\n return 3", "def validate_date(start_date_str, end_date_str):\n # NOTE: datetime.fromisoformat() New in version Python 3.7\n\n try:\n start_date_iso = datetime.fromisoformat(start_date_str)\n end_date_iso = datetime.fromisoformat(end_date_str)\n return True\n except ValueError as date_error:\n print(f\"Incorrect data format, datetime should be in ISO format, {date_error}\")\n return False\n if start_date_iso > end_date_iso:\n return False\n\n return True", "def test_datetime_with_naive_duedate_only_fails(self):\n # setup\n specify_wd = self.project.activity('Specify wheel diameter')\n\n # save old values\n old_start, old_due = datetime.strptime(specify_wd._json_data.get('start_date'), ISOFORMAT), \\\n datetime.strptime(specify_wd._json_data.get('due_date'), ISOFORMAT)\n naive_duedate = datetime(2017, 6, 5, 5, 0, 0)\n with warnings.catch_warnings(record=False) as w:\n warnings.simplefilter(\"ignore\")\n specify_wd.edit(due_date=naive_duedate)\n\n # teardown\n with warnings.catch_warnings(record=False) as w:\n warnings.simplefilter(\"ignore\")\n specify_wd.edit(due_date=old_due)", "def compare_datetime(self_datetime, other_datetime):\n # pylint: disable=superfluous-parens\n if (isinstance(self_datetime and other_datetime, (datetime, type(None)))):\n return (\n (self_datetime == other_datetime\n if all(str(_.time()) != \"00:00:00\"\n for _ in [self_datetime, other_datetime])\n else self_datetime.date() == other_datetime.date())\n if self_datetime and other_datetime\n else self_datetime == other_datetime)\n else:\n Representation.attrs_values_types_error(\n self_attr=self_datetime, other_attr=other_datetime,\n expected_types=(datetime.__name__, type(None).__name__))", "def are_all_datetimes(values: List[Union[str, int, float]]):\n for value in values:\n if not is_datetime(value):\n return False\n return True", "def isdt(self):\n return self.Units.isreftime and self._subarray.dtype == _dtype_object", "def test_date_change_fails_on_naive_datetime(self):\n event = Event(\n Guild(12345),\n 'Some title',\n datetime(2020, 10, 10, 10, 10, tzinfo=utc),\n 'Some description')\n with self.assertRaises(ValueError):\n event.date = datetime(2020, 10, 10, 12, 10)", "async def test_has_group_address_localtime(self):\n xknx = XKNX()\n self.datetime = DateTime(\n xknx,\n \"TestDateTime\",\n group_address=\"1/2/3\",\n group_address_state=\"1/2/4\",\n localtime=True,\n )\n assert self.datetime.has_group_address(GroupAddress(\"1/2/3\"))\n # group_address_state ignored when using localtime\n assert not self.datetime.has_group_address(GroupAddress(\"1/2/4\"))", "def _is_ready(self, as_of):\n if self.is_one_off():\n return self.initial_billing_cycle.date_range.lower <= as_of\n else:\n return True", "def 
date_temporal_paradox_free(self):\n valid_date = True\n new_val = self.date_edit.text()\n datetime_object = datetime.strptime(new_val, \"%Y-%m-%d\")\n\n if datetime_object > datetime.now():\n valid_date = False\n return valid_date", "def is_outdated(self):\n today = datetime.datetime.today()\n day = datetime.datetime.combine(self.date, self.start_time)\n return day <= today", "def test_8_digit_date_detection(self):\n obj = awstats_reader.awstats_datetime('20091130')\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsDate))", "def test_created_at(self):\n self.assertIsInstance(self.obj.created_at, datetime)", "def test_created_at(self):\n self.assertIsInstance(self.obj.created_at, datetime)", "def test_created_at(self):\n self.assertIsInstance(self.obj.created_at, datetime)", "def test_datetime_from(self):\n dt = sync.datetime_from('2012-09-09T00:00:00') # EDT\n self.assertEqual(2012, dt.year)\n self.assertEqual(9, dt.month)\n self.assertEqual(10, dt.day)\n self.assertEqual(3, dt.hour)\n self.assertEqual(59, dt.minute)\n self.assertEqual(59, dt.second)\n self.assertEqual(dt.tzname(), 'UTC')\n\n dt = sync.datetime_from('2012-12-09T00:00:00') # EST\n self.assertEqual(2012, dt.year)\n self.assertEqual(12, dt.month)\n self.assertEqual(10, dt.day)\n self.assertEqual(4, dt.hour)\n self.assertEqual(59, dt.minute)\n self.assertEqual(59, dt.second)\n self.assertEqual(dt.tzname(), 'UTC')", "def is_primary_time(time_string):\n return ':00' in time_string or ':30' in time_string", "def is_dst ():\n\n x = datetime(datetime.now().year, 1, 1, 0, 0, 0, tzinfo=pytz.timezone(timezonename)) # Jan 1 of this year\n y = datetime.now(pytz.timezone(timezonename))\n\n # if DST is in effect, their offsets will be different\n return not (y.utcoffset() == x.utcoffset())", "def test_validate(self):\n # Instances of datetime.datetime simply pass through as-is.\n self.assertEquals(self.dt_when,\n self.TDTT.validate_when(self.dt_when))\n\n # Date/time in string form should be in ISO-8601 format.\n self.assertEquals(self.dt_when,\n self.TDTT.validate_when(self.txt_when))\n\n self.assertEquals(None,\n self.TDTT.validate_when(self.NOT_DATE_AND_TIME))\n\n encoded = {'when': self.txt_when, 'unused': 'ignored'}\n decoded_props = {'when': self.dt_when}\n self.check_validate(encoded, decoded_props, self.TDTT.validate)", "def CheckTransactionDate(trans_dt):\n return ConvertTransactionDateToEpochGM(trans_dt) > 0", "def check_dates(dates):\n for date in dates:\n if type(date) != datetime.datetime:\n raise TypeError('Input date, %s, not datetime object' % date)", "def is_date_dtype(df, col_name):\n dtype = df.dtypes[col_name]\n return np.issubdtype(dtype, np.datetime64) or np.issubdtype(dtype, np.timedelta64)", "def test_datetime_with_tzinfo_provides_correct_offset(self):\n # setup\n specify_wd = self.project.activity('Specify wheel diameter')\n # save old values\n old_start, old_due = datetime.strptime(specify_wd._json_data.get('start_date'), ISOFORMAT), \\\n datetime.strptime(specify_wd._json_data.get('due_date'), ISOFORMAT)\n\n tz = pytz.timezone('Europe/Amsterdam')\n tzaware_due = tz.localize(datetime(2017, 7, 1))\n tzaware_start = tz.localize(datetime(2017, 6, 30, 0, 0, 0))\n\n specify_wd.edit(start_date=tzaware_start)\n self.assertTrue(specify_wd._json_data['start_date'], tzaware_start.isoformat(sep='T'))\n self.assertRegexpMatches(specify_wd._json_data['start_date'], r'^.*(\\+02:00|\\+01:00)$')\n\n specify_wd.edit(due_date=tzaware_due)\n self.assertTrue(specify_wd._json_data['due_date'], tzaware_due.isoformat(sep='T'))\n 
self.assertRegexpMatches(specify_wd._json_data['due_date'], r'^.*(\\+02:00|\\+01:00)$')\n\n # teardown\n with warnings.catch_warnings(record=False) as w:\n warnings.simplefilter(\"ignore\")\n specify_wd.edit(start_date=old_start, due_date=old_due)", "def insure_localisation(dt, timezone=str(get_localzone())):\n\n if isinstance(dt, datetime):\n tz = pytz.timezone(timezone)\n if dt.tzinfo is None:\n dt = tz.localize(dt)\n return dt\n elif isinstance(dt, date):\n return dt\n else:\n raise TypeError('\"date\" or \"datetime\" object expected, not {!r}.'.format(dt.__class__.__name__))", "def to_aware_datetime(ts: Timestamp):\n return utc.localize(ts.ToDatetime())", "def opened_at(self, datetime: datetime) -> None:", "def in_window(window, date):\n win = datetime.timedelta(window)\n if date == None:\n return False\n date = date.replace(tzinfo=None)\n delta = UTCNOW - date\n return delta <= win", "def test_interpret_datetime():\n timestamps = [\n \"2019-01-01 01:01:01\",\n \"2019-01-01 01:01:01.000001\",\n \"2019-01-01T01:01:01Z\",\n \"2019-01-01T01:01:01.000001Z\",\n \"2019-01-01_01:01:01.000001\",\n \"2019-01-01_01-01-01-000000\",\n ]\n\n for timestamp in timestamps:\n dt = interpret_datetime(timestamp)\n assert isinstance(dt, datetime)\n if \".\" in timestamp:\n assert dt == datetime(2019, 1, 1, 1, 1, 1, 1)\n else:\n assert dt == datetime(2019, 1, 1, 1, 1, 1)", "def validate_datetime(self, current_date):\n valid_minute = None\n valid_hour = None\n MIN_HOUR = 0\n MAX_HOUR = 23\n MIN_MINUTE = 0\n MAX_MINUTE = 59\n TIME_SEPARATOR = u':'\n\n hour, minute = current_date.split(TIME_SEPARATOR)\n\n try:\n if ((MIN_HOUR <= int(hour) <= MAX_HOUR) and\n (MIN_MINUTE <= int(minute) <= MAX_MINUTE)):\n valid_minute = int(minute)\n valid_hour = int(hour)\n except ValueError as e:\n logging.error(u'Given current time is invalid %s', e)\n\n valid_datetime = {u'hour': valid_hour, u'minute': valid_minute}\n\n return valid_datetime", "def contact_now(date: str) -> bool:\n\n time_date = string_to_datetime(date)\n return date_is_today(time_date) or date_is_in_past(time_date)", "def _valid_day(self, date_find):\n try:\n datetime.strptime(date_find, settings.TIME_FORMAT)\n valid = True\n except ValueError:\n valid = False\n return valid", "def make_tz_aware(time_string):\n naive_dt = datetime.datetime.strptime(time_string.strip(), '%m/%d/%Y')\n aware_dt = pytz.timezone('Asia/Manila').localize(naive_dt)\n return aware_dt.astimezone(pytz.UTC)", "def is_opening(self):\n now = timezone.now()\n return self.start_date.date() >= now.date()", "def __eq__(self, t):\n if not isinstance(t, DateTime):\n return False\n return (self._micros, self._tz) == (t._micros, t._tz)", "def valid_until(self) -> datetime:\n return self._valid_until", "def match(self, dt):\n raise NotImplemented" ]
[ "0.7423152", "0.6706246", "0.656491", "0.65267324", "0.6137775", "0.60896856", "0.6019291", "0.59568536", "0.57755363", "0.57708514", "0.5679119", "0.5660917", "0.56022364", "0.55941117", "0.5527686", "0.55162454", "0.54947275", "0.5432787", "0.5420078", "0.5379313", "0.5340649", "0.5322148", "0.5298153", "0.5293154", "0.52929807", "0.52778995", "0.52714974", "0.5259795", "0.5222237", "0.5187491", "0.5122782", "0.5093069", "0.50930434", "0.50884366", "0.50757617", "0.5070136", "0.50533664", "0.50315714", "0.50046873", "0.4992302", "0.49912512", "0.49733782", "0.49728695", "0.4943792", "0.49412808", "0.49187753", "0.49183458", "0.49183458", "0.49183458", "0.4893785", "0.48789874", "0.48788083", "0.4866971", "0.48667616", "0.4854196", "0.4842199", "0.48249328", "0.48244613", "0.4812351", "0.48117363", "0.48114142", "0.48085472", "0.48025632", "0.47908926", "0.47836158", "0.47832185", "0.47677326", "0.4736424", "0.4735857", "0.47207585", "0.4705436", "0.47045606", "0.4693998", "0.46798086", "0.4678321", "0.4671177", "0.4665842", "0.4665842", "0.4665842", "0.46651402", "0.46594054", "0.46457368", "0.46456045", "0.4627478", "0.46231112", "0.46202415", "0.46185857", "0.461694", "0.46167815", "0.46130997", "0.46123692", "0.46101803", "0.46082205", "0.46035638", "0.4602636", "0.45974898", "0.45961443", "0.45913842", "0.45883387", "0.45868957" ]
0.6891435
1
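
For reference, the document retrieved for this row is a one-line awareness check: a datetime is aware when it carries a tzinfo that actually yields a non-None utcoffset. A minimal, standard-library-only usage sketch — the function body is copied from the row above, the example datetimes are made up:

    from datetime import datetime, timezone

    def is_aware(value):
        # Same check as the document above: tzinfo present and yielding an offset.
        return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None

    print(is_aware(datetime(2020, 1, 1)))                       # False -- naive
    print(is_aware(datetime(2020, 1, 1, tzinfo=timezone.utc)))  # True  -- aware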
For date and time values, shows how many seconds, minutes or hours ago the value was compared to the current timestamp and returns the representing string.
def naturaltime(value): if not isinstance(value, date): # datetime is a subclass of date return value now = datetime.now(utc if is_aware(value) else None) if value < now: delta = now - value if delta.days != 0: return 'hace %(delta)s' % {'delta': defaultfilters.timesince(value)} elif delta.seconds == 0: return 'ahora' elif delta.seconds < 60: return u'hace %(count)s segundos' % {'count': delta.seconds} elif delta.seconds // 60 < 60: count = delta.seconds // 60 return u'hace %(count)s minutos' % {'count': count} else: count = delta.seconds // 60 // 60 return u'hace %(count)s horas' % {'count': count}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def humanize_ts(timestamp=False):\n now = datetime.now()\n diff = now - datetime.fromtimestamp(timestamp)\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return ''\n\n if day_diff == 0:\n if second_diff < 10:\n return \"just now\"\n if second_diff < 60:\n return str(int(second_diff)) + \" seconds ago\"\n if second_diff < 120:\n return \"a minute ago\"\n if second_diff < 3600:\n return str(int(second_diff / 60)) + \" minutes ago\"\n if second_diff < 7200:\n return \"an hour ago\"\n if second_diff < 86400:\n return str(int(second_diff / 3600)) + \" hours ago\"\n if day_diff == 1:\n return \"Yesterday\"\n if day_diff < 7:\n return str(day_diff) + \" days ago\"\n if day_diff < 31:\n return str(int(day_diff / 7)) + \" weeks ago\"\n if day_diff < 365:\n return str(int(day_diff / 30)) + \" months ago\"\n return str(int(day_diff / 365)) + \" years ago\"", "def pretty_date(time=False):\n now = datetime.now()\n if type(time) is int:\n diff = now - datetime.fromtimestamp(time)\n elif isinstance(time, datetime):\n diff = now - time\n elif not time:\n diff = now - now\n else:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return ''\n\n if day_diff == 0:\n if second_diff < 10:\n return \"just now\"\n if second_diff < 60:\n return str(int(round(second_diff, 0))) + \" seconds ago\"\n if second_diff < 120:\n return \"a minute ago\"\n if second_diff < 3600:\n return str(int(round(second_diff / 60, 0))) + \" minutes ago\"\n if second_diff < 7200:\n return \"an hour ago\"\n if second_diff < 86400:\n return str(int(round(second_diff / 3600, 0))) + \" hours ago\"\n if day_diff == 1:\n return \"Yesterday\"\n if day_diff < 7:\n return str(int(round(day_diff, 0))) + \" days ago\"\n if day_diff < 31:\n return str(int(round(day_diff / 7, 0))) + \" weeks ago\"\n if day_diff < 365:\n return str(int(round(day_diff / 30, 0))) + \" months ago\"\n return str(int(round(day_diff / 365, 0))) + \" years ago\"", "def ago(self):\n return human(self.timestamp/1000.0, precision=1, abbreviate=True)", "def time_since(timestamp=None):\n rstr = \"\"\n if not timestamp or not isinstance(timestamp, datetime.datetime):\n return rstr\n\n now = timezone.now()\n timediff = now - timestamp\n days = timediff.days\n weeks = days//7\n months = days//30\n minutes = timediff.seconds % 3600 // 60\n seconds = timediff.seconds % 3600 % 60\n hours = minutes // 60\n\n if days > 365:\n return \"> a year\"\n if months > 0:\n if months == 1:\n tstr = \"month\"\n else:\n tstr = \"months\"\n rstr = rstr + \"%s %s\" % (months, tstr)\n return rstr\n if weeks > 0:\n if weeks == 1:\n tstr = \"week\"\n else:\n tstr = \"weeks\"\n rstr = rstr + \"%s %s\" % (weeks, tstr)\n return rstr\n if days > 0:\n if days == 1:\n tstr = \"day\"\n else:\n tstr = \"days\"\n rstr = rstr + \"%s %s\" % (days, tstr)\n return rstr\n elif hours > 0:\n if hours == 1:\n tstr = \"hour\"\n else:\n tstr = \"hours\"\n rstr = rstr + \"%s %s\" % (hours, tstr)\n return rstr\n elif minutes > 0:\n if minutes == 1:\n tstr = \"min\"\n else:\n tstr = \"mins\"\n rstr = rstr + \"%s %s\" % (minutes, tstr)\n return rstr\n elif seconds > 0:\n if seconds == 1:\n tstr = \"sec\"\n else:\n tstr = \"secs\"\n rstr = rstr + \"%s %s\" % (seconds, tstr)\n return rstr\n else:\n return \"Now\"", "def relativeTime(date):\n diff = datetime.utcnow() - date\n\n if diff.days > 7 or diff.days < 0:\n return date.ctime()\n elif diff.days == 1:\n return '1 day ago'\n elif diff.days > 1:\n return '%d days ago' % diff.days\n elif diff.seconds <= 
1:\n return 'just now'\n elif diff.seconds < 60:\n return '%d seconds ago' % diff.seconds\n elif diff.seconds < (60 * 2):\n return '1 minute ago'\n elif diff.seconds < (60 * 60):\n return '%d minutes ago' % (diff.seconds / 60)\n elif diff.seconds < (60 * 60 * 2):\n return '1 hour ago'\n else:\n return '%d hours ago' % (diff.seconds / (60 * 60))", "def time_since_as_text(time=False):\n now = datetime.now(timezone.utc)\n if type(time) is int:\n diff = now - datetime.fromtimestamp(time)\n elif isinstance(time,datetime):\n diff = now - time\n elif not time:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return ''\n\n if day_diff == 0:\n if second_diff < 10:\n return \"nyss\"\n if second_diff < 60:\n return str(second_diff) + \" sekunder sedan\"\n if second_diff < 120:\n return \"a minute ago\"\n if second_diff < 3600:\n return str(floor(second_diff / 60)) + \" minuter sedan\"\n if second_diff < 7200:\n return \"en timme sedan\"\n if second_diff < 86400:\n return str(floor(second_diff / 3600)) + \" timmar sedan\"\n if day_diff == 1:\n return \"Igår\"\n if day_diff < 7:\n return str(day_diff) + \" dagar sedan\"\n if day_diff < 31:\n return str(floor(day_diff / 7)) + \" veckor sedan\"\n if day_diff < 365:\n return str(floor(day_diff / 30)) + \" månder sedan\"\n return str(day_diff / 365) + \" år sedan\"", "def pretty_date(time=False):\n from datetime import datetime\n now = datetime.now()\n if type(time) is int:\n diff = now - datetime.fromtimestamp(time)\n elif isinstance(time,datetime):\n diff = now - time \n elif not time:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return ''\n\n if day_diff == 0:\n if second_diff < 10:\n return \"just now\"\n if second_diff < 60:\n return str(second_diff) + \" seconds ago\"\n if second_diff < 120:\n return \"a minute ago\"\n if second_diff < 3600:\n return str( second_diff / 60 ) + \" minutes ago\"\n if second_diff < 7200:\n return \"an hour ago\"\n if second_diff < 86400:\n return str( second_diff / 3600 ) + \" hours ago\"\n if day_diff == 1:\n return \"Yesterday\"\n if day_diff < 7:\n return str(day_diff) + \" days ago\"\n if day_diff < 31:\n return str(day_diff/7) + \" weeks ago\"\n if day_diff < 365:\n return str(day_diff/30) + \" months ago\"\n return str(day_diff/365) + \" years ago\"", "def pretty_date(time=False):\n from datetime import datetime\n\n now = datetime.now()\n if type(time) is int:\n diff = now - datetime.fromtimestamp(time)\n elif isinstance(time, datetime):\n diff = now - time\n elif not time:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return \"\"\n\n if day_diff == 0:\n if second_diff < 10:\n return \"just now\"\n if second_diff < 60:\n return str(second_diff) + \" seconds ago\"\n if second_diff < 120:\n return \"a minute ago\"\n if second_diff < 3600:\n return str(second_diff / 60) + \" minutes ago\"\n if second_diff < 7200:\n return \"an hour ago\"\n if second_diff < 86400:\n return str(second_diff / 3600) + \" hours ago\"\n if day_diff == 1:\n return \"Yesterday\"\n if day_diff < 7:\n return str(day_diff) + \" days ago\"\n if day_diff < 31:\n return str(day_diff / 7) + \" weeks ago\"\n if day_diff < 365:\n return str(day_diff / 30) + \" months ago\"\n return str(day_diff / 365) + \" years ago\"", "def pretty_date(time=False):\r\n from datetime import datetime\r\n import dateutil.parser\r\n now = datetime.now()\r\n if type(time) is str or type(time) is unicode:\r\n time = 
dateutil.parser.parse(time)\r\n if type(time) is int:\r\n diff = now - datetime.fromtimestamp(time)\r\n elif isinstance(time, datetime):\r\n diff = now - time\r\n elif not time:\r\n diff = now - now\r\n second_diff = diff.seconds\r\n day_diff = diff.days\r\n\r\n if day_diff < 0:\r\n return ''\r\n\r\n if day_diff == 0:\r\n if second_diff < 10:\r\n return \"just now\"\r\n if second_diff < 60:\r\n return str(second_diff) + \" seconds ago\"\r\n if second_diff < 120:\r\n return \"a minute ago\"\r\n if second_diff < 3600:\r\n return ' '.join([str(second_diff / 60), \"minutes ago\"])\r\n if second_diff < 7200:\r\n return \"an hour ago\"\r\n if second_diff < 86400:\r\n return ' '.join([str(second_diff / 3600), \"hours ago\"])\r\n if day_diff == 1:\r\n return \"Yesterday\"\r\n if day_diff < 7:\r\n return ' '.join([str(day_diff), \"days ago\"])\r\n if day_diff < 31:\r\n return ' '.join([str(day_diff / 7), \"weeks ago\"])\r\n if day_diff < 60:\r\n return ' '.join([str(day_diff / 30), \"month ago\"])\r\n if day_diff < 365:\r\n return ' '.join([str(day_diff / 30), \"months ago\"])\r\n if day_diff < (365 * 2):\r\n return ' '.join([str(day_diff / 365), \"year ago\"])\r\n return ' '.join([str(day_diff / 365), \"years ago\"])", "def timeago(time=False):\n\n return arrow.get(time).humanize()", "def relative_datetime(self):\n now = datetime.now(timezone.utc)\n created_at = self.created_at.astimezone(timezone.utc)\n\n delta = humanize.naturaldelta(abs(created_at - now))\n tense = \"from now\" if now < created_at else \"ago\"\n\n return f\"{delta} {tense}\"", "def naturaltime(value):\n try:\n value = datetime.datetime(value.year, value.month, value.day, value.hour, value.minute, value.second)\n except AttributeError:\n return value\n except ValueError:\n return value\n\n if getattr(value, 'tzinfo', None):\n now = datetime.datetime.now(LocalTimezone(value))\n else:\n now = datetime.datetime.now()\n now = now - timedelta(0, 0, now.microsecond)\n if value < now:\n delta = now - value\n if delta.days != 0:\n return pgettext(\n 'naturaltime', '%(delta)s ago'\n ) % {'delta': defaultfilters.timesince(value)}\n elif delta.seconds == 0:\n return _(u'now')\n elif delta.seconds < 60:\n return ungettext(\n u'a second ago', u'%(count)s seconds ago', delta.seconds\n ) % {'count': delta.seconds}\n elif delta.seconds // 60 < 60:\n count = delta.seconds // 60\n return ungettext(\n u'a minute ago', u'%(count)s minutes ago', count\n ) % {'count': count}\n else:\n count = delta.seconds // 60 // 60\n return ungettext(\n u'an hour ago', u'%(count)s hours ago', count\n ) % {'count': count}\n else:\n delta = value - now\n if delta.days != 0:\n return pgettext(\n 'naturaltime', '%(delta)s from now'\n ) % {'delta': defaultfilters.timeuntil(value)}\n elif delta.seconds == 0:\n return _(u'now')\n elif delta.seconds < 60:\n return ungettext(\n u'a second from now', u'%(count)s seconds from now', delta.seconds\n ) % {'count': delta.seconds}\n elif delta.seconds // 60 < 60:\n count = delta.seconds // 60\n return ungettext(\n u'a minute from now', u'%(count)s minutes from now', count\n ) % {'count': count}\n else:\n count = delta.seconds // 60 // 60\n return ungettext(\n u'an hour from now', u'%(count)s hours from now', count\n ) % {'count': count}", "def howLongAgo(time=False):\n now = timezone.now()\n if type(time) is int:\n diff = now - datetime.fromtimestamp(time)\n elif isinstance(time,datetime):\n diff = now - time\n elif not time:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return 
''\n\n if day_diff == 0:\n if second_diff < 10:\n return \"genau jetzt\"\n if second_diff < 60:\n return \"vor \" + str(second_diff) + \" Sek.\"\n if second_diff < 120:\n return \"vor einer Min.\"\n if second_diff < 3600:\n return \"vor \" + str( second_diff / 60 ) + \" Min.\"\n if second_diff < 7200:\n return \"vor einer St.\"\n if second_diff < 86400:\n return \"vor \" + str( second_diff / 3600 ) + \" St.\"\n if day_diff == 1:\n return \"Gestern\"\n if day_diff < 7:\n return \"vor \" + str(day_diff) + \" Tagen\"\n if day_diff < 31:\n return \"vor \" + str(day_diff/7) + \" Wochen\"\n if day_diff < 365:\n return \"vor \" + str(day_diff/30) + \" Monaten\"\n return \"vor \" + str(day_diff/365) + \" Jahren\"", "def get_formatted_time() -> datetime.strftime:\n\t\n\tnow = datetime.now() # time now\n\thalf_hour = (now - timedelta(minutes = 30)) # time 30 min ago\n\t# returns half hour ago to accommodate for failed checks\n\t# (bc twint behaves as if none found if check failed)\n\tcurrent_time = half_hour.strftime(\"%Y-%m-%d %H:%M:%S\")\n\treturn current_time", "def time_now() -> str:\n return datetime_to_str(datetime_now())", "def pretty_date(date: datetime):\n if not isinstance(date, datetime) or date > NOW:\n raise ValueError('pretty_date() only accepts datetime objects in the past')\n diff = NOW - date\n seconds = int(diff.total_seconds())\n minutes = seconds // 60\n hours = minutes // 60\n # This doesn't _feel_ very pythonic…\n if seconds < 10:\n return 'just now'\n if seconds < 60:\n return f'{seconds} seconds ago'\n if minutes < 2:\n return 'a minute ago'\n if minutes < 60:\n return f'{minutes} minutes ago'\n if hours < 2:\n return 'an hour ago'\n if hours < 24:\n return f'{hours} hours ago'\n if hours < 48:\n return 'yesterday'\n return date.strftime('%m/%d/%y')", "def pretty_date(time=False):\n now = datetime.datetime.utcnow()\n if type(time) is int:\n diff = now - datetime.datetime.fromtimestamp(time)\n elif isinstance(time, datetime.datetime):\n diff = now - time\n elif not time:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n day_diff *= -1\n second_diff *= -1\n if day_diff < 1:\n if second_diff < 10:\n return ugettext('imminently')\n if second_diff < 60:\n return ungettext('{n} second from now', '{n} seconds from now', second_diff).format(n=second_diff)\n if second_diff < 120:\n return ugettext('in a minute')\n if second_diff < 3600:\n return ungettext('{n} minute from now', '{n} minutes from now', second_diff / 60).format(n=second_diff / 60)\n if second_diff < 7200:\n return ugettext('in an hour')\n if second_diff < 86400:\n return ungettext('{n} hour from now', '{n} hours from now', second_diff / 3600).format(n=second_diff / 3600)\n if day_diff == 1:\n return ugettext('tomorrow')\n if day_diff < 7:\n return ungettext('{n} day from now', '{n} days from now', day_diff).format(n=day_diff)\n if day_diff < 31:\n return ungettext('{n} week from now', '{n} weeks from now', day_diff / 7).format(n=day_diff / 7)\n if day_diff < 365:\n return ungettext('{n} month from now', '{n} months from now', day_diff / 30).format(n=day_diff / 30)\n return ungettext('{n} year from now', '{n} years from now', day_diff / 365).format(n=day_diff / 365)\n\n if day_diff == 0:\n if second_diff < 10:\n return ugettext('just now')\n if second_diff < 60:\n return ungettext('{n} second ago', '{n} seconds ago', second_diff).format(n=second_diff)\n if second_diff < 120:\n return ugettext('a minute ago')\n if second_diff < 3600:\n return ungettext('{n} minute ago', '{n} 
minutes ago', second_diff / 60).format(n=second_diff / 60)\n if second_diff < 7200:\n return ugettext('an hour ago')\n if second_diff < 86400:\n return ungettext('{n} hour ago', '{n} hours ago', second_diff / 3600).format(n=second_diff / 3600)\n if day_diff == 1:\n return ugettext('yesterday')\n if day_diff < 7:\n return ungettext('{n} day ago', '{n} days ago', day_diff).format(n=day_diff)\n if day_diff < 31:\n return ungettext('{n} week ago', '{n} weeks ago', day_diff / 7).format(n=day_diff / 7)\n if day_diff < 365:\n return ungettext('{n} month ago', '{n} months ago', day_diff / 30).format(n=day_diff / 30)\n return ungettext('{n} year ago', '{n} years ago', day_diff / 365).format(n=day_diff / 365)", "def get_time(self):\n return ''", "async def humanize_time(self, value):\n if value is None:\n return \"None\"\n return str(datetime.timedelta(seconds=value))", "def shorttimesince(value, arg=None):\r\n from django.utils.timesince import timesince\r\n if not value:\r\n return u''\r\n if arg:\r\n return calculate_shorttimesince(arg, value)\r\n return calculate_shorttimesince(value)", "def get_date():\n return (datetime.now() - TIMEDELTA).isoformat()", "def formatted_time() -> datetime.datetime:\r\n return datetime.datetime.now()", "def get_current_timestamp_str(self):\n return str(time.mktime(datetime.datetime.now().timetuple()))", "def nowStr(time=None):\n if time is None:\n time = datetime.now().time()\n if time.minute < 10:\n return time.strftime(\"%H ноль %m\")\n else:\n return time.strftime(\"%H %M\")", "def human_date(self, date):\n return timeago.format(date)", "def str_current_time():\n return strftime(\"%Y_%m_%d_%H_%M_%S_%Z\", gmtime())", "def get_now_time():\r\n return '[' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') + ']: '", "def get_elapsed_timestamp(self) -> str:\n t = self.elapsed_time\n minutes = int(t / 60)\n seconds = int(t - (60 * minutes))\n millis = int(100 * (t - int(t)))\n return '{:>02d}:{:>02d}.{:<02d}'.format(minutes, seconds, millis)", "def time_created(self) -> str:\n return pulumi.get(self, \"time_created\")", "def time_created(self) -> str:\n return pulumi.get(self, \"time_created\")", "def time_created(self) -> str:\n return pulumi.get(self, \"time_created\")", "def time( self, mess, args):\n return str(datetime.datetime.now())", "def time( self, mess, args):\n return str(datetime.datetime.now())", "def now_short(_format=\"%Y%m%d-%H%M%S\"):\n timeString = time.strftime(_format, time.localtime()) + \"\\t\"\n return timeString", "def since(self):\n return str(datetime.datetime.now() - self._refreshed_at)", "def get_answer_time(self):\n sec = (self.updated_at - self.created_at).total_seconds()\n return f'{int((sec / 60) % 60):02d}:{int(sec):02d}'", "def get_time():\n ct = time.time()\n lt = time.gmtime(ct)\n msec = int((ct - int(ct)) * 1000)\n return f'{time.strftime(DATE_FMT, lt)}.{msec:0>3}'", "def now(time):\n a = datetime.fromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S')\n return a", "def now(time):\n a = datetime.fromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S')\n return a", "def create_human_readable_timestamp(self, now):\n hrdatetime = now.strftime('%c')\n return hrdatetime", "def timestr():\n return dt.strftime(dt.now(),'%H:%M:%S')", "def adjustedCompareValue(self, value):\n if value.startswith('now'):\n return repr(GenTime())\n return value", "def age(self) -> str:\n tdelta = dt.now() - self.created_timestamp\n if tdelta.days >= 548: # enough to round it up to 2 years\n return f'about {tdelta.days/365:.0f} years'\n elif tdelta.days >= 345: # 
enough to round it up to 1 year (so it doesn't report '12 months')\n return f'about a year'\n elif tdelta.days > 45: # beyond 1 month (after rounding)\n return f'about {tdelta.days/30:.0f} months'\n elif tdelta.days > 24: # enough to round it up to 1 month (so it doesn't report '4 weeks')\n return f'about a month'\n elif tdelta.days > 7:\n # round to nearest half, dropping '.0' when whole\n return f'{round((tdelta.days/7)*2)/2:g} weeks'\n elif tdelta.days == 7:\n return 'a week'\n elif tdelta.days > 1:\n return f'{tdelta.days} days'\n elif tdelta.days == 1:\n return f'a day'\n # break it down into parts of a day\n hours = tdelta.seconds // 3600\n if hours > 1:\n return f'{hours:.0f} hours'\n elif hours == 1:\n return f'an hour'\n minutes = tdelta.seconds % 3600 / 60\n if minutes > 1:\n return f'{minutes:.0f} minutes'\n elif minutes == 1:\n return f'a minute'\n return 'moments'", "def time_since(dt, default=\"just now\"):\n\t\n\tnow = datetime.utcnow()\n\tdiff = now - dt\n\t\n\tperiods = (\n\t\t(diff.days / 365, \"year\", \"years\"),\n\t\t(diff.days / 30, \"month\", \"months\"),\n\t\t(diff.days / 7, \"week\", \"weeks\"),\n\t\t(diff.days, \"day\", \"days\"),\n\t\t(diff.seconds / 3600, \"hour\", \"hours\"),\n\t\t(diff.seconds / 60, \"minute\", \"minutes\"),\n\t\t(diff.seconds, \"second\", \"seconds\"),\n\t)\n\n\tfor period, singular, plural in periods:\n\t\tif period:\n\t\t\treturn \"%d %s ago\" % (period, singular if period == 1 else plural)\n\n\treturn default", "def get_formatted_duration(self, prev_time):\n duration = time() - prev_time\n if duration < 60:\n unit = 's'\n elif duration < 3600:\n duration /= 60\n unit = 'm'\n else:\n duration /= 3600\n unit = 'h'\n return self.format_num(duration) + unit", "def now(self):\n return time.strftime(r'[%d/%b/%Y:%H:%M:%S]')", "def _time_delta_from_info(info):\n now = datetime.datetime.now()\n then = info.start_time\n return str(now.replace(microsecond=0) - then.replace(microsecond=0))", "def __str__(self):\n return \"({0}:{1}:{2})\".format(self.hours, self.minutes, self.seconds)", "def now_short(_format=\"%Y%m%d-%H%M%S\"):\n return time.strftime(_format, time.localtime()) + \"\\t\"", "def _time_str(self):\n try:\n if not self._time:\n raise ValueError\n format_ = '%a, %d %b %Y %H:%M:%S'\n return datetime.fromtimestamp(float(self._time)).strftime(format_)\n except ValueError:\n return plastic_date()", "def _getUpTime(self):\n diff = (datetime.datetime.now() - self._startTime).__str__()\n return diff[:diff.find('.')]", "def get_time_string(self):\n return f\"{self.year} {self.month:02} \" \\\n f\"{self.start_day:02} {self.start_hour:02} 00 {self.get_duration():6}\"", "def get_timestamp():\n now, s=get_date()\n return (now, \"%s%s%s%s\" % (s, str(now.hour).zfill(2), str(now.minute).zfill(2), str(now.second).zfill(2)))", "def pretty_date(time=None):\n now = datetime.now()\n if type(time) is int:\n diff = now - datetime.fromtimestamp(time)\n elif isinstance(time, datetime):\n diff = now - time\n elif not time:\n diff = now - now\n second_diff = diff.seconds\n day_diff = diff.days\n\n if day_diff < 0:\n return ''\n\n if day_diff == 0:\n if second_diff < 10:\n return \"刚刚\"\n if second_diff < 60:\n return str(second_diff) + \" 秒前\"\n if second_diff < 120:\n return \"1一分钟前\"\n if second_diff < 3600:\n return str(second_diff / 60) + \" 分钟前\"\n if second_diff < 7200:\n return \"1小时前\"\n if second_diff < 86400:\n return str(second_diff / 3600) + \" 小时前\"\n if day_diff == 1:\n return \"昨天\"\n if day_diff < 7:\n return str(day_diff) + \" 天前\"\n if day_diff < 
31:\n return str(day_diff / 7) + \" 周前\"\n if day_diff < 365:\n return str(day_diff / 30) + \" 个月前\"\n return str(day_diff / 365) + \" 年前\"", "def time_elapsed(sec):\n if sec < 60:\n return str(sec) + \" sec\"\n elif sec < (60 * 60):\n return str(sec / 60) + \" min\"\n else:\n return str(sec / (60 * 60)) + \" hr\"", "def __get_timestamp() -> str:\n return str(datetime.now().astimezone())", "def __get_timestamp() -> str:\n return str(datetime.now().astimezone())", "def timesince(dt, default=\"just now\"):\n\n now = datetime.datetime.now()\n diff = now - dt\n \n periods = (\n (diff.days / 365, \"year\", \"years\"),\n (diff.days / 30, \"month\", \"months\"),\n (diff.days / 7, \"week\", \"weeks\"),\n (diff.days, \"day\", \"days\"),\n (diff.seconds / 3600, \"hour\", \"hours\"),\n (diff.seconds / 60, \"minute\", \"minutes\"),\n (diff.seconds, \"second\", \"seconds\"),\n )\n\n for period, singular, plural in periods:\n \n if period:\n return \"%d %s ago\" % (period, singular if period == 1 else plural)\n\n return default", "def ts(self, which='now'):\n if which == 'now':\n return '%d%02d%02dT%02d%02d%02d' % self.date['now'][0:6]\n else:\n return '%05dT%02d%02d%02d' % (self.date['cc'][0], self.date['cc'][1], self.date['cc'][2], self.date['cc'][3])", "def getFormattedText(self):\r\n h = \"00\"\r\n m = \"00\"\r\n s = \"00\"\r\n if(self.seconds < 10):\r\n s = \"0\" + str(self.seconds)\r\n else:\r\n s = str(self.seconds)\r\n\r\n if(self.minutes < 10):\r\n m = \"0\" + str(self.minutes)\r\n else:\r\n m = str(self.minutes)\r\n\r\n if(self.hours < 10):\r\n h = \"0\" + str(self.hours)\r\n else:\r\n h = str(self.hours)\r\n\r\n return h + \":\" + m + \":\" + s", "def time_now():\n cur_time = str(datetime.now().strftime(\"%d-%m-%Y %H:%M:%S\"))\n return cur_time", "def time():\n return datetime.datetime.now().strftime(\"%Y%m%dT%H%M%SZ\")", "def __str__(self):\n return str(self.elapsed) + ' sec'", "def get_time(self):\n return \"%02u:%02u:%02u (%d)\" % self.rtc.datetime()[4:8]", "def time_str(num):\n if num > 3600:\n return \"%0.2f hrs\" % (num / 3600)\n elif num > 60:\n return \"%0.2f mins\" % (num / 60)\n else:\n return \"%d seconds\" % num", "def now():\n now = datetime.datetime.now()\n return \"%04d-%02d-%02d %02d:%02d:%02d.%03d\" % ( now.year, now.month,now.day,\n now.hour,now.minute,now.second,int(now.microsecond/1e3))", "def _get_time_since_tell_send(tell):\n tell_time_sent = int(tell[3])\n\n current_time = int(time.time())\n\n dt1 = datetime.fromtimestamp(tell_time_sent)\n dt2 = datetime.fromtimestamp(current_time)\n rd = dateutil.relativedelta.relativedelta(dt2, dt1)\n\n out = ''\n\n if rd.days == 1:\n out += f'{rd.days} day, '\n elif rd.days != 0:\n out += f'{rd.days} days, '\n\n if rd.hours == 1:\n out += f'{rd.hours} hour, '\n elif rd.hours != 0:\n out += f'{rd.hours} hours, '\n\n if rd.minutes == 1:\n out += f'{rd.minutes} minute and '\n elif rd.minutes != 0:\n out += f'{rd.minutes} minutes and '\n\n if rd.seconds == 1:\n out += f'{rd.seconds} second ago'\n elif rd.seconds != 0:\n out += f'{rd.seconds} seconds ago'\n elif current_time - tell_time_sent == 0:\n out = 'just now'\n\n return out", "def timestamp_now():\n return datetime.now().strftime(\"%A, %B %d, %Y, %I:%M %p\")", "def time_str(self):\n return datetime.now().strftime('%c')", "def current_time(cls) -> float:", "def get_timestamp():\n return datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M\")", "def _get_inner_time(self):\n timestamp = self.last_two_timestamps[0]\n if not timestamp:\n return '0000-00-00 00:00:00'\n return 
str(timestamp)", "def current_time():\n now = datetime.datetime.now()\n time = now.strftime(\"%Y-%m-%d %H:%M:%S:%f\")\n return time", "def timestamp():\n my_date_object = datetime.utcnow()\n my_date_string = my_date_object.strftime('%d-%m-%Y %H:%M:%S')\n return my_date_string", "def time_stamper() :\n\treturn datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")", "def _get_timestamp():\n return str(int(time.time()))", "def get_now():\r\n now = dt.datetime.now()\r\n now_str = now.strftime(\"%d/%m %H:%M\")\r\n return now_str", "def get_time():\n\n time_format = \"%Y-%m-%d %H:%M:%S\"\n now = str(datetime.datetime.now().strftime(time_format))\n\n return now", "def getTimeString():\n\tfrom time import strftime\n\treturn strftime(\"%d-%m-%Y__%H-%M-%S\")", "def get_now():\n right_now = datetime.datetime.now()\n return (\"%04d%02d%02d-%02d:%02d:%02d\"\n % (right_now.year, right_now.month, right_now.day,\n right_now.hour, right_now.minute, right_now.second))", "def get_time_stamp_str() -> str:\n return datetime.datetime.now().strftime(DateFormat)", "def format_time(self, created):\n return time.strftime(\n '%Y-%m-%d %H:%M:%S',\n time.localtime(created)\n )", "def datetime_timeplotxml(self):\n if self.time:\n return self.date.strftime(\"%b %d %Y\") + \" \" + self.time.strftime(\"%H:%M:%S\") + \" GMT\"\n else:\n return self.date.strftime(\"%b %d %Y\") + \" \" + \"00:00:00\" + \" GMT\"", "def get_time_string(time):\r\n mins = time // 60\r\n secs = time % 60\r\n time_string = ''\r\n\r\n if mins < 10:\r\n time_string += ' '\r\n elif mins < 100:\r\n time_string += ' '\r\n\r\n time_string += '%dm ' % mins\r\n\r\n if secs < 10:\r\n time_string += ' '\r\n\r\n time_string += '%ds' % secs\r\n\r\n return time_string", "def timeStamp():\n import time\n return str(time.strftime(\"%a %d %b %Y %I:%M:%S %p\"))", "def time_hack(self):\n now = datetime.datetime.now()\n monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',\n 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']\n month = monthnames[now.month - 1].capitalize()\n return ('[%02d/%s/%04d:%02d:%02d:%02d.%06d]' %\n (now.day, month, now.year, now.hour, now.minute, now.second, now.microsecond))", "def get_now():\n\treturn datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")", "def adjustedCompareValue(self, value):\n if value.startswith('now'):\n return repr(GenDate())\n return value", "def get_current_datetime_string ( ) :\n return get_current_datetime( ).strftime( \"%Y%m%d-%H%M%S\" )", "def get_time(self):\n return time.strftime(\"%d/%m/%y %M:%H:%S\", self.time)", "def timestamp():\n return datetime.now().strftime(\"%Y%m%dT%H%M%S\")", "def timestamp():\n return datetime.now().strftime('%H:%M:%S %m-%d')", "def get_timestamp():\n return time.strftime('%Y-%m-%d %H:%M:%S')", "def pending_time_descriptive(self):\n return get_time_descriptive(self.pending_time.seconds)", "def timestamp():\n tmptz = time.timezone\n sign_str = '+'\n if tmptz > 0:\n sign_str = '-'\n tmptz_hours = int(tmptz / 3600)\n\n return str(\"%s%s%02d:%02d\" % (time.strftime(\"%Y-%m-%dT%H:%M:%S\", time.localtime()), sign_str, abs(tmptz_hours),\n int(tmptz / 60 - tmptz_hours * 60)))", "def currentTime():\n return strftime(\"%H:%M:%S\", time.localtime())", "def render_date_time_with_relative_into(into, date_time, add_ago):\n into.append(format(date_time, DATETIME_FORMAT_CODE))\n \n into.append(' [*')\n into.append(elapsed_time(date_time))\n if add_ago:\n into.append(' ago')\n into.append('*]')\n \n return into", "def human_datetime(date_time):\n current_datetime = datetime.datetime.now()\n delta = 
str(current_datetime - date_time)\n if delta.find(',') > 0:\n days, hours = delta.split(',')\n days = int(days.split()[0].strip())\n hours, minutes = hours.split(':')[0:2]\n else:\n hours, minutes = delta.split(':')[0:2]\n days = 0\n days, hours, minutes = int(days), int(hours), int(minutes)\n datelets = []\n years, months, xdays = None, None, None\n plural = lambda x: 's' if x != 1 else ''\n if days >= 365:\n years = int(days / 365)\n datelets.append('%d year%s' % (years, plural(years)))\n days = days % 365\n if days >= 30 and days < 365:\n months = int(days / 30)\n datelets.append('%d month%s' % (months, plural(months)))\n days = days % 30\n if not years and days > 0 and days < 30:\n xdays = days\n datelets.append('%d day%s' % (xdays, plural(xdays)))\n if not (months or years) and hours != 0:\n datelets.append('%d hour%s' % (hours, plural(hours)))\n if not (xdays or months or years):\n datelets.append('%d minute%s' % (minutes, plural(minutes)))\n return ', '.join(datelets) + ' ago.'", "def timesince_limited(d):\n today = datetime.datetime.now()\n delta = datetime.timedelta\n interval = today - d\n if today.strftime('%Y-%m-%d') == d.strftime('%Y-%m-%d'):\n if interval < delta(days=0, hours=1):\n return timesince(d) + ' ago '\n else:\n return d.strftime('%H:%M')\n else:\n return d", "def SAgeDdt(ddt):\n if ddt.days < 0:\n return \"in the future?\"\n months = int(ddt.days*12/365)\n years = int(ddt.days/365)\n if years >= 1:\n return \"%d year%s ago\" % (years, SPlural(years))\n if months >= 3:\n return \"%d months ago\" % months \n if ddt.days == 1:\n return \"yesterday\"\n if ddt.days > 1:\n return \"%d days ago\" % ddt.days\n hrs = int(ddt.seconds/60/60)\n if hrs >= 1:\n return \"%d hour%s ago\" % (hrs, SPlural(hrs))\n minutes = round(ddt.seconds/60)\n if minutes < 1:\n return \"seconds ago\"\n return \"%d minute%s ago\" % (minutes, SPlural(minutes))" ]
[ "0.7112443", "0.70603174", "0.6994772", "0.6923336", "0.69147646", "0.689994", "0.68880904", "0.68262726", "0.6814668", "0.6745085", "0.67083603", "0.6695963", "0.66525495", "0.6583381", "0.6518557", "0.65105355", "0.6455629", "0.63705647", "0.62662446", "0.6263275", "0.625927", "0.6259118", "0.62478876", "0.6245946", "0.6245096", "0.6238159", "0.6220305", "0.6217721", "0.62121916", "0.62121916", "0.62121916", "0.6211421", "0.6211421", "0.62045765", "0.6173178", "0.6170328", "0.61701286", "0.61687005", "0.61687005", "0.61621976", "0.6161123", "0.6160084", "0.61595446", "0.61437213", "0.61397415", "0.6136463", "0.6123348", "0.61174184", "0.6113688", "0.61133593", "0.6101396", "0.6098509", "0.60966706", "0.6095897", "0.6092564", "0.6090794", "0.6090794", "0.6086942", "0.60807735", "0.60782504", "0.6073077", "0.6054249", "0.6049769", "0.6042392", "0.6037919", "0.60333496", "0.6026184", "0.6024505", "0.6022521", "0.6010516", "0.60090345", "0.60078967", "0.6006256", "0.60046566", "0.59818894", "0.59792125", "0.5977696", "0.59538424", "0.595124", "0.5946832", "0.5943667", "0.59374386", "0.59322894", "0.59168607", "0.59125215", "0.5912224", "0.59083045", "0.59076345", "0.590649", "0.59053975", "0.59019303", "0.5898147", "0.58885205", "0.5880376", "0.58761483", "0.58718795", "0.58677995", "0.5866482", "0.586327", "0.5855926" ]
0.64102644
17
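
The naturaltime document in this last row depends on Django helpers (defaultfilters.timesince, utc, is_aware) and returns Spanish phrases. As an illustration only, here is a dependency-free sketch of the same "seconds/minutes/hours ago" bucketing, using just the standard library and English wording; the function and argument names are made up for the example and it is not a drop-in replacement for the document above.

    from datetime import datetime, timezone

    def natural_time(value, now=None):
        # Mirrors the bucketing of the document above: exact seconds under a minute,
        # whole minutes under an hour, whole hours under a day, otherwise day counts.
        now = now or datetime.now(timezone.utc if value.tzinfo else None)
        if value >= now:
            return value  # like the document above, only past values get a phrase
        seconds = int((now - value).total_seconds())
        if seconds == 0:
            return "now"
        if seconds < 60:
            return f"{seconds} seconds ago"
        if seconds < 3600:
            return f"{seconds // 60} minutes ago"
        if seconds < 86400:
            return f"{seconds // 3600} hours ago"
        return f"{seconds // 86400} days ago"

    # Example: 2.5 hours in the past -> "2 hours ago"
    print(natural_time(datetime(2024, 1, 1), now=datetime(2024, 1, 1, 2, 30)))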