query (stringlengths 9 to 9.05k) | document (stringlengths 10 to 222k) | metadata (dict) | negatives (listlengths 30) | negative_scores (listlengths 30) | document_score (stringlengths 4 to 10) | document_rank (stringclasses, 2 values)
---|---|---|---|---|---|---
Return a sudoku matrix | def get_sudoku_matrix(self):
return self.sudoku_matrix | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_sudoku(self)->list:\n grid = [[None for x in range(9)] for row in range(9)]\n for row in range(0,9):\n for column in range(0,9):\n if row <= 2 and column <=2:\n grid[row][column] = cell.Cell(0)\n elif row <= 2 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(1)\n elif row <= 2 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(2)\n elif 3 <= row <= 5 and column <= 2:\n grid[row][column] = cell.Cell(3)\n elif 3 <= row <= 5 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(4)\n elif 3 <= row <= 5 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(5)\n elif 6 <= row <= 8 and column <= 2:\n grid[row][column] = cell.Cell(6)\n elif 6 <= row <= 8 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(7)\n elif 6 <= row <= 8 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(8)\n return grid",
"def make_sudoku(size):\r\n def mutate_list_1(lst, size):\r\n \"\"\"Helper function for removing part of a list from the beginning and add it to the end.\"\"\"\r\n count = 0\r\n while count < size:\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n count += 1\r\n return lst\r\n\r\n def mutate_list_2(lst):\r\n \"\"\"Helper function for removing element from the beginning of a list and add it to the end.\"\"\"\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n return lst\r\n\r\n count = 0\r\n matrix_length = size ** 2 # define a size of matrix\r\n matrix = [[] * matrix_length] # create an empty matrix\r\n matrix[0] = range(1, matrix_length + 1) # set a first row to a range from 1 to size ** 2\r\n while count < matrix_length - 1:\r\n l = matrix[count][:] # create a new list object that is a copy of previous row in a matrix\r\n if (count + 1) % size == 0: # check if a row in inner square of a matrix\r\n l = matrix[count - (size-1)][:] # if it is, l set to the first row of previous square\r\n matrix.append(mutate_list_2(l))\r\n else:\r\n matrix.append(mutate_list_1(l, size)) # mutate l and add it to the matrix\r\n count += 1\r\n\r\n\r\n return matrix",
"def solve_sudoku(sudoku):\n # Define the solution matrix that represents the sudoku puzzle\n solution = Matrix(9, 9, 1, 9)\n\n # Set up the model\n model = Model()\n\n # Set the constraints for the filled in cells\n for i in xrange(0, 9):\n for j in xrange(0, 9):\n if sudoku[i, j] > 0:\n model.add(solution[i, j] == int(sudoku[i, j]))\n\n # Add the constraint that all rows need to be different\n model.add([AllDiff(x) for x in solution.row])\n # Add the constraint that all columns need to be different\n model.add([AllDiff(y) for y in solution.col])\n \n # Add the constraint that all cells need to be different\n for i in xrange(0, 3):\n for j in xrange(0, 3):\n # Generate the constraint for each cell\n # x goes over the rows in each cell\n # y goes over the columns in each cell\n model.add(AllDiff(\n [solution[x, y] for x in xrange(i*3, (i+1)*3) for y in xrange(j*3, (j+1)*3)]))\n\n # Load a solver and solve the problem\n solver = model.load('MiniSat')\n solver.solve()\n return solution",
"def get_sudoku_matrix_solved(self):\n return self.sudoku_matrix_solved",
"def sudoku_solver(m):\n square_sides = int(sqrt(len(m)))\n dicts = initialize_dicts(m, square_sides)\n dicts, square_coords = populate_dicts(m, square_sides, dicts)\n dicts = get_missing(dicts)\n candidates = get_candidates(m, dicts, square_coords)\n m, candidates = scan_sudoku(m, dicts, square_coords, candidates)\n single_candidates = single_candidate(candidates, square_coords, dicts)\n m, candidates = fill_fit(m, dicts, square_coords, single_candidates=single_candidates)\n candidates = get_candidates(m, dicts, square_coords)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=2)\n candidates, naked_sets = remove_naked_sets_from_candidates(candidates, naked_sets_fields_row, naked_sets_fields_cols)\n candidates = get_candidates(m, dicts, square_coords, naked_sets)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=3)\n return m",
"def set_up_matrix():\n matrix= []\n row= \"1 9 3 4 5\"\n row= to_int(row)\n matrix.append(row)\n row= \"2 30 4 5 6\"\n row= to_int(row)\n matrix.append(row)\n row= \"3 8 5 6 7\"\n row= to_int(row)\n matrix.append(row)\n row= \"4 5 6 7 8\"\n row= to_int(row)\n matrix.append(row)\n row= \"5 6 7 8 9\"\n row= to_int(row)\n matrix.append(row)\n return matrix",
"def get_table(self):\n \n # During testing, this'll speed the process update\n \n row = 0\n while row < 9:\n sudoku_row = input(\"Please enter the contents of row {}, using 0 to represent blanks:\".format(row+1))\n if len(sudoku_row) == 9:\n column = 0\n while column < 9:\n number_in_box = int(sudoku_row[column])\n self.table[row][column] = number_in_box\n column += 1\n row += 1\n else:\n print(\"You can only enter 9 numbers. Not letters. Not more. Not fewer. 9 numbers.\")",
"def print_sudoku_matrix(self):\n row_list = 'ABCDEFGHI'\n\n print \" 1 2 3 4 5 6 7 8 9 \"\n for i in range(9):\n if i % 3 == 0:\n print \" +-------+-------+-------+\"\n var = row_list[i] + \" \"\n for j in range(9):\n if j % 3 == 0:\n var += \"| \"\n if self.sudoku_matrix[i][j].get_cell_value() == 0:\n var += \".\"\n else:\n var += str(self.sudoku_matrix[i][j].get_cell_value())\n var += \" \"\n print var + \"|\"\n print \" +-------+-------+-------+ \\n\"",
"def getSudoku(puzzleNumber=None):\n inital = SudokuGrid.SudokuGrid()\n current = SudokuGrid.SudokuGrid()\n solution = SudokuGrid.SudokuGrid()\n \n inital.createGrid(27, puzzleNumber)\n current.createGrid(27, puzzleNumber)\n solution.createGrid(81, puzzleNumber)\n\n return inital, current, solution",
"def make_matrix():\n row, col = [int(x) for x in input().split()]\n island = [[int(x) for x in input().split()] for _ in range(row)]\n return row, col, island",
"def solveSudoku(self, board: List[List[str]]) -> None:\n def getLocs(board):#初始化,获取需要填充的位置,记录为一个栈\n locs = []\n for row in range(9):\n for col in range(9):\n if board[row][col] == '.':\n locs.append((row, col))\n return locs\n\n def getMaps(board):#定义三个字典,跟踪9行、9列和9块的已填充数字,采用数据结构为defaultdict\n from collections import defaultdict as dd\n rowMap = [dd(int) for _ in range(9)]\n colMap = [dd(int) for _ in range(9)]\n blockMap = [dd(int) for _ in range(9)]\n for row in range(9):\n for col in range(9):\n if board[row][col] != '.':\n num = int(board[row][col])\n rowMap[row][num] += 1\n colMap[col][num] += 1\n bolckIndex = int(row/3)*3+int(col/3)\n blockMap[bolckIndex][num] += 1\n return rowMap, colMap, blockMap\n\n def fillBoard(board, locs):#递归填充剩余的数独空位置\n if not locs:\n return True\n row, col = locs.pop()#弹出一个待填充位置\n bolckIndex = int(row/3)*3+int(col/3)\n found = False\n for num in range(1, 10):\n if found:\n break\n if not rowMap[row][num] and not colMap[col][num] and not blockMap[bolckIndex][num]:\n ##如果当前行、当前列和当前块均不存在该数字,则将数字更新到相应行、列、块,并尝试填充\n rowMap[row][num] = 1\n colMap[col][num] = 1\n blockMap[bolckIndex][num] = 1\n board[row][col] = str(num)\n found = fillBoard(board, locs)#递归到下一层填充\n rowMap[row][num] = 0##状态回溯,将填充的位置清空\n colMap[col][num] = 0\n blockMap[bolckIndex][num] = 0\n if not found:##如果本轮都无法求解,则回溯到初始状态,继续从前面再填充\n locs.append((row, col))\n board[row][col] = '.'\n return found\n\n rowMap, colMap, blockMap = getMaps(board)\n locs = getLocs(board)\n fillBoard(board, locs)",
"def parse_sudokus():\n # Open the url with the sudokus for the challenge\n data = urllib2.urlopen('https://projecteuler.net/project/resources/p096_sudoku.txt')\n sudokus = [] # List to hold all sudokus\n current_sudoku = None # Current sudoku we are building\n current_sudoku_row = 0 # Current line of the current sudoku we are building\n for line in data:\n # Check if the line is the start of a new sudoku\n result = re.match(r'(Grid \\d\\d)', line.strip())\n if not result is None:\n # New sudoku\n current_sudoku = np.zeros((9,9), dtype=np.int8)\n current_sudoku_row = 0\n # store the new sudoku\n sudokus.append(current_sudoku)\n else:\n # Get the numbers\n result = re.match(r'(\\d{9})', line.strip())\n col_string = result.groups()[0]\n # Fill up sudoku\n for col in xrange(0, 9):\n current_sudoku[current_sudoku_row, col] = int(col_string[col])\n current_sudoku_row += 1\n return sudokus",
"def makeMatrix():\n listOfChars = []\n for ascii in range(32, 128):\n listOfChars.append(chr(ascii))\n random.shuffle(listOfChars)\n matrix = Grid(8, 12)\n i = 0\n for row in range(matrix.getHeight()):\n for column in range(matrix.getWidth()):\n matrix[row][column] = listOfChars[i]\n i += 1\n return matrix",
"def read_sudokus():\n with open(\"sudoku.txt\", \"r\") as f:\n lines = f.readlines()\n sudoku_strs = []\n for line in lines:\n if line[0] == 'G':\n sudoku_strs.append(\"\")\n else:\n sudoku_strs[-1] += line.replace(\"\", \" \")[1:]\n sudokus = []\n for sudoku_str in sudoku_strs:\n sudokus.append(np.fromstring(sudoku_str, sep=' ',\n dtype=np.int).reshape((9, 9)))\n return sudokus",
"def create_matrix(self):\n import random\n random.seed()\n #pecas_disponiveis = [\"escudo\", \"espada\", \"espada_dupla\", \"machadinha\", \"adaga\", \"punhais\"]\n pecas_disponiveis = [\"escudo\", \"punhais\", \"espada_dupla\", \"machadinha\", \"adaga\"]\n x_start, y_start = 10, self.running.top_bar\n x, y = x_start, y_start\n anterior_esq = [None] * self.running.linhas\n anterior_acima = None\n\n for i in range(self.running.colunas):\n coluna = []\n for j in range(self.running.linhas):\n possiveis_escolhas = pecas_disponiveis.copy()\n if possiveis_escolhas.count(anterior_esq[j]) > 0: possiveis_escolhas.remove(anterior_esq[j])\n if possiveis_escolhas.count(anterior_acima) > 0 : possiveis_escolhas.remove(anterior_acima)\n \n e_type = random.choice(possiveis_escolhas)\n tile = Tile(self.game, x, y, e_type)\n coluna.append(tile)\n self.running.game_images.append(tile.game_image)\n y += self.running.y_space\n anterior_esq[j] = e_type\n anterior_acima = e_type\n self.running.tabuleiro.append(coluna)\n x += self.running.x_space\n y = y_start\n return",
"def solveSudoku(self, board: List[List[str]]) -> None:\n def dfs(idx):\n if idx == len(blankIdx):\n return True\n else:\n i, j = blankIdx[idx]\n for num in rg:\n num += 1\n if (num not in rows[i] and\n num not in cols[j] and\n num not in boxs[i//3][j//3]):\n board[i][j]=str(num)\n rows[i].add(num)\n cols[j].add(num)\n boxs[i//3][j//3].add(num)\n if dfs(idx+1):\n return True\n board[i][j] = blank\n rows[i].remove(num)\n cols[j].remove(num)\n boxs[i//3][j//3].remove(num)\n \n rg,blank = range(9), \".\"\n rows = [set() for _ in rg]\n cols = [set() for _ in rg]\n boxs = [[set() for _ in range(3)] for j in range(3)]\n blankIdx = list()\n for i in rg:\n for j in rg:\n if board[i][j]!=blank:\n ele = int(board[i][j])\n rows[i].add(ele)\n cols[j].add(ele)\n boxs[i//3][j//3].add(ele)\n else:\n blankIdx.append((i,j))\n dfs(0)",
"def make_board():\n return [[0 for i in range(8)] for i in range(8)]",
"def solveSudoku(board):\n # represents all numbers in a specific row, col, box\n # format: if (5,9) is in rows, that means row 5 contains digit 9\n\t\t# format: if (3, 2) is in cols, that means col 3 contains digit 2\n\t\t# format: if (0,2,8) is in boxes, that means box (0,2) contains 8\n\t\t# cellsToFill is a stack that holds all the (i,j) cells we need to fill\n rows, cols, boxes = set(), set(), set()\n cellsToFill = []\n m, n = len(board), len(board[0])\n \n def initDataSets():\n for i in range(m):\n for j in range(n):\n char = board[i][j]\n if char == '.':\n cellsToFill.append((i,j))\n else:\n addToDataSets((i, char), (j, char), (i//3, j//3, char))\n\n def addToDataSets(curRow, curCol, curBox):\n rows.add(curRow)\n cols.add(curCol)\n boxes.add(curBox)\n \n def removeFromDataSets(curRow, curCol, curBox):\n rows.remove(curRow)\n cols.remove(curCol)\n boxes.remove(curBox)\n \n def backtrack():\n if not cellsToFill:\n return True\n \n i, j = cellsToFill.pop()\n for char in '123456789':\n # check if the number is already in a row/col/box, if it is then skip to the next number\n curRow, curCol, curBox = (i, char), (j, char), (i//3, j//3, char)\n if curRow in rows or curCol in cols or curBox in boxes: continue\n \n # if not, add the number to the row/col/box\n addToDataSets(curRow, curCol, curBox)\n board[i][j] = char\n \n # start the recursive call for inserting the next number\n if (backtrack()):\n return True\n \n # backtrack wasn't successful, remove the number from the row/col/box\n removeFromDataSets(curRow, curCol, curBox)\n board[i][j] = '.'\n \n cellsToFill.append((i,j))\n return False\n \n initDataSets()\n print(board)\n backtrack()",
"def make_matrix(rows, columns):\n\tmatrix = []\n\tfor row in range(rows):\n\t\tmatrix += [[0] * columns]\n\t\t\n\treturn matrix",
"def generate_s_matrix(number: int):\n matrix_zero = np.ones((number, number))\n matrix_zero[1:-1, 1:-1] = 0\n return matrix_zero",
"def generate_sudoku(self):\n\n # randomly generate the first row \n random_order_number = [x for x in range(1, 10)]\n random.shuffle(random_order_number)\n for x in range(9):\n value = random_order_number[x]\n this_cell = self.grid[0][x]\n this_cell.value = value\n self.remove_value(this_cell, 0, x, value)\n\n row = 1\n column = 0\n while row <9 and column < 9:\n time.sleep(0.05)\n # search for options\n # should only be done once for each cell\n this_cell = self.grid[row][column]\n if this_cell.options == None:\n this_cell.options = self.find_options(row, column, this_cell.grid)\n\n if not this_cell.options:\n # backtrace should only happen when there is no options for this cell\n row, column = self.backtrace(this_cell, row, column)\n\n else:\n # case 3: the number has options and the number returned from the cell is valid\n if this_cell.value != None:\n self.add_value(this_cell, row, column)\n this_cell.get_value_from_options()\n # when you switch the value for a value from the option, put the current value back into the row\n self.remove_value(this_cell, row, column, this_cell.value)\n if column == 8:\n row += 1\n column = 0\n else:\n column += 1\n try:\n self.print_detail(this_cell, row, column)\n except IndexError:\n pass",
"def solveSudoku(self, board: 'List[List[str]]') -> 'None':\n\n select = '.'\n row_set = []\n col_set = []\n arr_set = []\n\n for row in range(9):\n for col in range(9):\n if col == 0:\n row_set.append(set('123456789'))\n if row == 0:\n col_set.append(set('123456789'))\n if row % 3 == 0 and col % 3 == 0:\n arr_set.append(set('123456789'))\n\n if board[row][col].isdigit():\n row_set[row].remove(board[row][col])\n col_set[col].remove(board[row][col])\n arr_index = (row - row % 3) + col // 3\n arr_set[arr_index].remove(board[row][col])",
"def sudoku(puzzle):\n search_manager = SearchManager(DepthFirstStateStream(SudokoState(puzzle)))\n return search_manager.resolution()",
"def reducer(sudoku_grid):\n for i in range(9):\n sudoku_grid = reduce_row(i,sudoku_grid)\n sudoku_grid = reduce_col(i,sudoku_grid)\n sudoku_grid = reduce_sub(i,sudoku_grid)\n return sudoku_grid",
"def solveSudoku(self, board: List[List[str]]) -> None:\n\n def deepCopy(src, tar):\n n = len(src)\n for i in range(n):\n for j in range(n):\n tar[i][j] = src[i][j]\n\n def getNums(board, x, y):\n used_nums_x = []\n used_nums_y = []\n used_nums_square = []\n for i in range(n):\n if board[i][y] != '.':\n used_nums_y.append(board[i][y])\n for j in range(n):\n if board[x][j] != '.':\n used_nums_x.append(board[x][j])\n\n x1 = (x // 3) * 3\n x2 = ((x // 3) + 1) * 3 - 1\n y1 = (y // 3) * 3\n y2 = ((y // 3) + 1) * 3 - 1\n\n for i in range(x1, x2 + 1):\n for j in range(y1, y2 + 1):\n if board[i][j] != '.':\n used_nums_square.append(board[i][j])\n\n used_nums = set(used_nums_x + used_nums_y + used_nums_square)\n nums = set([str(i) for i in range(1, 10)]) - used_nums\n return nums\n\n def helper(board, points, result):\n n = len(board)\n if len(points) == 0:\n deepCopy(board, result)\n return\n\n x, y = points[-1]\n nums = getNums(board, x, y)\n for num in nums:\n board[x][y] = num\n points.pop()\n helper(board, points, result)\n points.append((x, y))\n board[x][y] = '.'\n\n n = len(board)\n points = [(i, j) for i in range(n) for j in range(n) if board[i][j] == '.']\n result = [['0'] * n for _ in range(n)]\n helper(board, points, result)\n deepCopy(result, board)",
"def get_stain_matrix(I):",
"def solveSudoku(self, board: List[List[str]]) -> None:\n # initialize the hashmaps\n for row in range(self.size):\n for col in range(self.size):\n value = board[row][col]\n if value != '.':\n self.rows[row].add(value)\n self.cols[col].add(value)\n self.cells[self.cell_idx(row, col)].add(value)\n \n # start backtracking at the first field\n self.backtrack(board, 0)\n return board",
"def solveSudoku(self, board: List[List[str]]) -> None:\n row = collections.defaultdict(set)\n col = collections.defaultdict(set)\n block = collections.defaultdict(set)\n pos = []\n\n for i in range(9):\n for j in range(9):\n if board[i][j] != '.':\n row[i].add(board[i][j])\n col[j].add(board[i][j])\n block[i // 3 * 3 + j // 3].add(board[i][j])\n else:\n pos.append((i, j))\n\n self.dfs(board, pos, row, col, block)\n return",
"def sudoku_solver(board):\n row, col= find_empty(board)\n if row == -1 and col == -1:\n return True\n for i in range(1, 10):\n if valid(board, row, col, i):\n board[row][col] = i\n if sudoku_solver(board):\n return True\n board[row][col] = 0\n return False",
"def sudoku_clauses():\n res = []\n # for all cells, ensure that the each cell:\n for i in range(1, 10):\n for j in range(1, 10):\n # denotes (at least) one of the 9 digits (1 clause)\n res.append([v(i, j, d) for d in range(1, 10)])\n # does not denote two different digits at once (36 clauses)\n for d in range(1, 10):\n for dp in range(d + 1, 10):\n res.append([-v(i, j, d), -v(i, j, dp)])\n\n def valid(cells):\n # Append 324 clauses, corresponding to 9 cells, to the result.\n # The 9 cells are represented by a list tuples. The new clauses\n # ensure that the cells contain distinct values.\n for i, xi in enumerate(cells):\n for j, xj in enumerate(cells):\n if i < j:\n for d in range(1, 10):\n res.append([-v(xi[0], xi[1], d), -v(xj[0], xj[1], d)])\n\n # ensure rows and columns have distinct values\n for i in range(1, 10):\n valid([(i, j) for j in range(1, 10)])\n valid([(j, i) for j in range(1, 10)])\n # ensure 3x3 sub-grids \"regions\" have distinct values\n for i in 1, 4, 7:\n for j in 1, 4 ,7:\n valid([(i + k % 3, j + k // 3) for k in range(9)])\n\n assert len(res) == 81 * (1 + 36) + 27 * 324\n return res"
]
| [
"0.7414889",
"0.73049223",
"0.6996909",
"0.69424963",
"0.6888324",
"0.6766059",
"0.670372",
"0.6643427",
"0.65220934",
"0.65055496",
"0.6481994",
"0.64216775",
"0.6404281",
"0.63746357",
"0.63286084",
"0.6325936",
"0.6321227",
"0.62994",
"0.6287115",
"0.6272716",
"0.6261372",
"0.62520665",
"0.6201039",
"0.6194734",
"0.61843735",
"0.61732984",
"0.6135698",
"0.61223483",
"0.6120934",
"0.6120501"
]
| 0.79606533 | 0 |
Return a sudoku matrix solved | def get_sudoku_matrix_solved(self):
return self.sudoku_matrix_solved | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def solve_sudoku(sudoku):\n # Define the solution matrix that represents the sudoku puzzle\n solution = Matrix(9, 9, 1, 9)\n\n # Set up the model\n model = Model()\n\n # Set the constraints for the filled in cells\n for i in xrange(0, 9):\n for j in xrange(0, 9):\n if sudoku[i, j] > 0:\n model.add(solution[i, j] == int(sudoku[i, j]))\n\n # Add the constraint that all rows need to be different\n model.add([AllDiff(x) for x in solution.row])\n # Add the constraint that all columns need to be different\n model.add([AllDiff(y) for y in solution.col])\n \n # Add the constraint that all cells need to be different\n for i in xrange(0, 3):\n for j in xrange(0, 3):\n # Generate the constraint for each cell\n # x goes over the rows in each cell\n # y goes over the columns in each cell\n model.add(AllDiff(\n [solution[x, y] for x in xrange(i*3, (i+1)*3) for y in xrange(j*3, (j+1)*3)]))\n\n # Load a solver and solve the problem\n solver = model.load('MiniSat')\n solver.solve()\n return solution",
"def get_sudoku_matrix(self):\n return self.sudoku_matrix",
"def sudoku_solver(m):\n square_sides = int(sqrt(len(m)))\n dicts = initialize_dicts(m, square_sides)\n dicts, square_coords = populate_dicts(m, square_sides, dicts)\n dicts = get_missing(dicts)\n candidates = get_candidates(m, dicts, square_coords)\n m, candidates = scan_sudoku(m, dicts, square_coords, candidates)\n single_candidates = single_candidate(candidates, square_coords, dicts)\n m, candidates = fill_fit(m, dicts, square_coords, single_candidates=single_candidates)\n candidates = get_candidates(m, dicts, square_coords)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=2)\n candidates, naked_sets = remove_naked_sets_from_candidates(candidates, naked_sets_fields_row, naked_sets_fields_cols)\n candidates = get_candidates(m, dicts, square_coords, naked_sets)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=3)\n return m",
"def solve(self):\n if not self.solvable:\n print('Suduko not Solvable')\n return False\n res=self.back(0, 0)\n # if self.a[0][0]!=0:\n # res=self.back(0, 1)\n # else:\n # for i in range(1, 10):\n # self.a[0][0]=i\n # res=self.back(0, 1)\n # if res:\n # break\n if res:\n self.check_if_solvable()\n print(\"Sudoku Solved!\")\n print(self.a)\n return self.a\n else: print(\"Not Solvable\")\n return False",
"def make_sudoku(size):\r\n def mutate_list_1(lst, size):\r\n \"\"\"Helper function for removing part of a list from the beginning and add it to the end.\"\"\"\r\n count = 0\r\n while count < size:\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n count += 1\r\n return lst\r\n\r\n def mutate_list_2(lst):\r\n \"\"\"Helper function for removing element from the beginning of a list and add it to the end.\"\"\"\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n return lst\r\n\r\n count = 0\r\n matrix_length = size ** 2 # define a size of matrix\r\n matrix = [[] * matrix_length] # create an empty matrix\r\n matrix[0] = range(1, matrix_length + 1) # set a first row to a range from 1 to size ** 2\r\n while count < matrix_length - 1:\r\n l = matrix[count][:] # create a new list object that is a copy of previous row in a matrix\r\n if (count + 1) % size == 0: # check if a row in inner square of a matrix\r\n l = matrix[count - (size-1)][:] # if it is, l set to the first row of previous square\r\n matrix.append(mutate_list_2(l))\r\n else:\r\n matrix.append(mutate_list_1(l, size)) # mutate l and add it to the matrix\r\n count += 1\r\n\r\n\r\n return matrix",
"def solveSudoku(self, board: List[List[str]]) -> None:\n def getLocs(board):#初始化,获取需要填充的位置,记录为一个栈\n locs = []\n for row in range(9):\n for col in range(9):\n if board[row][col] == '.':\n locs.append((row, col))\n return locs\n\n def getMaps(board):#定义三个字典,跟踪9行、9列和9块的已填充数字,采用数据结构为defaultdict\n from collections import defaultdict as dd\n rowMap = [dd(int) for _ in range(9)]\n colMap = [dd(int) for _ in range(9)]\n blockMap = [dd(int) for _ in range(9)]\n for row in range(9):\n for col in range(9):\n if board[row][col] != '.':\n num = int(board[row][col])\n rowMap[row][num] += 1\n colMap[col][num] += 1\n bolckIndex = int(row/3)*3+int(col/3)\n blockMap[bolckIndex][num] += 1\n return rowMap, colMap, blockMap\n\n def fillBoard(board, locs):#递归填充剩余的数独空位置\n if not locs:\n return True\n row, col = locs.pop()#弹出一个待填充位置\n bolckIndex = int(row/3)*3+int(col/3)\n found = False\n for num in range(1, 10):\n if found:\n break\n if not rowMap[row][num] and not colMap[col][num] and not blockMap[bolckIndex][num]:\n ##如果当前行、当前列和当前块均不存在该数字,则将数字更新到相应行、列、块,并尝试填充\n rowMap[row][num] = 1\n colMap[col][num] = 1\n blockMap[bolckIndex][num] = 1\n board[row][col] = str(num)\n found = fillBoard(board, locs)#递归到下一层填充\n rowMap[row][num] = 0##状态回溯,将填充的位置清空\n colMap[col][num] = 0\n blockMap[bolckIndex][num] = 0\n if not found:##如果本轮都无法求解,则回溯到初始状态,继续从前面再填充\n locs.append((row, col))\n board[row][col] = '.'\n return found\n\n rowMap, colMap, blockMap = getMaps(board)\n locs = getLocs(board)\n fillBoard(board, locs)",
"def sudoku(puzzle):\n positions = all_pos(puzzle)\n if solve(puzzle, positions, 0):\n return puzzle\n return None",
"def solveSudoku(self, board) -> None:\n # Get size of board\n n = len(board)\n \n # Initialise Hashmaps\n rowMap, colMap, boxMap = {}, {}, {}\n \n # Create set for each index in row, col and box hashmaps\n for i in range(n):\n \n rowMap[i] = set()\n colMap[i] = set()\n boxMap[i] = set()\n\n # Add values to board\n for i in range(n):\n for j in range(n):\n \n # Get value on board\n val = board[i][j]\n valBoxId = self.getBoxId(i,j)\n \n # Insert to respective hashmaps\n if val != \".\":\n rowMap[i].add(val)\n colMap[j].add(val)\n boxMap[valBoxId].add(val)\n \n # Perform backtracking\n self.solveBacktrack(board, rowMap, colMap, boxMap, 0, 0)\n\n return board",
"def solveSudoku(self, board: List[List[str]]) -> None:\n\n def deepCopy(src, tar):\n n = len(src)\n for i in range(n):\n for j in range(n):\n tar[i][j] = src[i][j]\n\n def getNums(board, x, y):\n used_nums_x = []\n used_nums_y = []\n used_nums_square = []\n for i in range(n):\n if board[i][y] != '.':\n used_nums_y.append(board[i][y])\n for j in range(n):\n if board[x][j] != '.':\n used_nums_x.append(board[x][j])\n\n x1 = (x // 3) * 3\n x2 = ((x // 3) + 1) * 3 - 1\n y1 = (y // 3) * 3\n y2 = ((y // 3) + 1) * 3 - 1\n\n for i in range(x1, x2 + 1):\n for j in range(y1, y2 + 1):\n if board[i][j] != '.':\n used_nums_square.append(board[i][j])\n\n used_nums = set(used_nums_x + used_nums_y + used_nums_square)\n nums = set([str(i) for i in range(1, 10)]) - used_nums\n return nums\n\n def helper(board, points, result):\n n = len(board)\n if len(points) == 0:\n deepCopy(board, result)\n return\n\n x, y = points[-1]\n nums = getNums(board, x, y)\n for num in nums:\n board[x][y] = num\n points.pop()\n helper(board, points, result)\n points.append((x, y))\n board[x][y] = '.'\n\n n = len(board)\n points = [(i, j) for i in range(n) for j in range(n) if board[i][j] == '.']\n result = [['0'] * n for _ in range(n)]\n helper(board, points, result)\n deepCopy(result, board)",
"def sudoku(puzzle):\n search_manager = SearchManager(DepthFirstStateStream(SudokoState(puzzle)))\n return search_manager.resolution()",
"def solveSudoku(self, board: List[List[str]]) -> None:\n # initialize the hashmaps\n for row in range(self.size):\n for col in range(self.size):\n value = board[row][col]\n if value != '.':\n self.rows[row].add(value)\n self.cols[col].add(value)\n self.cells[self.cell_idx(row, col)].add(value)\n \n # start backtracking at the first field\n self.backtrack(board, 0)\n return board",
"def sudoku_solver(board):\n row, col= find_empty(board)\n if row == -1 and col == -1:\n return True\n for i in range(1, 10):\n if valid(board, row, col, i):\n board[row][col] = i\n if sudoku_solver(board):\n return True\n board[row][col] = 0\n return False",
"def solve_with_bruteforce(grid):\n\n res = check_sudoku(grid)\n if res is None or res is False:\n return res\n \n for row in range(0, 9):\n for col in range(0, 9):\n if grid[row][col] == 0:\n for n in range(1,10):\n grid[row][col] = n\n solution = solve_with_bruteforce(grid)\n if solution is False:\n grid[row][col] = 0\n else:\n return solution\n return False\n return grid",
"def solveSudoku(self, board: List[List[str]]) -> None:\n def dfs(idx):\n if idx == len(blankIdx):\n return True\n else:\n i, j = blankIdx[idx]\n for num in rg:\n num += 1\n if (num not in rows[i] and\n num not in cols[j] and\n num not in boxs[i//3][j//3]):\n board[i][j]=str(num)\n rows[i].add(num)\n cols[j].add(num)\n boxs[i//3][j//3].add(num)\n if dfs(idx+1):\n return True\n board[i][j] = blank\n rows[i].remove(num)\n cols[j].remove(num)\n boxs[i//3][j//3].remove(num)\n \n rg,blank = range(9), \".\"\n rows = [set() for _ in rg]\n cols = [set() for _ in rg]\n boxs = [[set() for _ in range(3)] for j in range(3)]\n blankIdx = list()\n for i in rg:\n for j in rg:\n if board[i][j]!=blank:\n ele = int(board[i][j])\n rows[i].add(ele)\n cols[j].add(ele)\n boxs[i//3][j//3].add(ele)\n else:\n blankIdx.append((i,j))\n dfs(0)",
"def create_sudoku(self)->list:\n grid = [[None for x in range(9)] for row in range(9)]\n for row in range(0,9):\n for column in range(0,9):\n if row <= 2 and column <=2:\n grid[row][column] = cell.Cell(0)\n elif row <= 2 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(1)\n elif row <= 2 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(2)\n elif 3 <= row <= 5 and column <= 2:\n grid[row][column] = cell.Cell(3)\n elif 3 <= row <= 5 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(4)\n elif 3 <= row <= 5 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(5)\n elif 6 <= row <= 8 and column <= 2:\n grid[row][column] = cell.Cell(6)\n elif 6 <= row <= 8 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(7)\n elif 6 <= row <= 8 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(8)\n return grid",
"def solveSudoku(board):\n # represents all numbers in a specific row, col, box\n # format: if (5,9) is in rows, that means row 5 contains digit 9\n\t\t# format: if (3, 2) is in cols, that means col 3 contains digit 2\n\t\t# format: if (0,2,8) is in boxes, that means box (0,2) contains 8\n\t\t# cellsToFill is a stack that holds all the (i,j) cells we need to fill\n rows, cols, boxes = set(), set(), set()\n cellsToFill = []\n m, n = len(board), len(board[0])\n \n def initDataSets():\n for i in range(m):\n for j in range(n):\n char = board[i][j]\n if char == '.':\n cellsToFill.append((i,j))\n else:\n addToDataSets((i, char), (j, char), (i//3, j//3, char))\n\n def addToDataSets(curRow, curCol, curBox):\n rows.add(curRow)\n cols.add(curCol)\n boxes.add(curBox)\n \n def removeFromDataSets(curRow, curCol, curBox):\n rows.remove(curRow)\n cols.remove(curCol)\n boxes.remove(curBox)\n \n def backtrack():\n if not cellsToFill:\n return True\n \n i, j = cellsToFill.pop()\n for char in '123456789':\n # check if the number is already in a row/col/box, if it is then skip to the next number\n curRow, curCol, curBox = (i, char), (j, char), (i//3, j//3, char)\n if curRow in rows or curCol in cols or curBox in boxes: continue\n \n # if not, add the number to the row/col/box\n addToDataSets(curRow, curCol, curBox)\n board[i][j] = char\n \n # start the recursive call for inserting the next number\n if (backtrack()):\n return True\n \n # backtrack wasn't successful, remove the number from the row/col/box\n removeFromDataSets(curRow, curCol, curBox)\n board[i][j] = '.'\n \n cellsToFill.append((i,j))\n return False\n \n initDataSets()\n print(board)\n backtrack()",
"def solve(self) -> None:\n sudoku = Sudoku(self.get_data())\n solver = SudokuSolver(sudoku)\n validation = solver.validate_sudoku()\n if validation == 1:\n solver.main_sequence()\n self.get_result(solver)\n elif validation == -1:\n self.status_bar.config(text='This sudoku array contains invalid digits.', fg='red')\n return None",
"def solveSudoku(self, board: 'List[List[str]]') -> 'None':\n\n select = '.'\n row_set = []\n col_set = []\n arr_set = []\n\n for row in range(9):\n for col in range(9):\n if col == 0:\n row_set.append(set('123456789'))\n if row == 0:\n col_set.append(set('123456789'))\n if row % 3 == 0 and col % 3 == 0:\n arr_set.append(set('123456789'))\n\n if board[row][col].isdigit():\n row_set[row].remove(board[row][col])\n col_set[col].remove(board[row][col])\n arr_index = (row - row % 3) + col // 3\n arr_set[arr_index].remove(board[row][col])",
"def solveSudoku(self, board: List[List[str]]) -> None:\n self.board = board\n self.boardx = self.init_board(board)\n self.num_dict_origin = {'1':1, '2':1, '3':1, '4':1, '5':1, '6':1, '7':1, '8':1, '9':1}\n \n row, col = -1, -1\n while row < 9-1:\n row += 1\n col = -1\n while col < 9-1:\n col += 1\n if self.boardx[row][col][-1] == False: # 跳过给定值的位置\n continue\n if self.add_tree(row, col): # 如果可以继续分支,则继续\n continue\n else: # 否则,返回分叉点,剪枝\n result = self.back_fork(row, col)\n if result is None: # 无解情况\n print('无解')\n return\n else: # 返回分叉点\n row, col = result\n self.boardx[row][col].pop(0)\n \n self.fill_board() # 填充棋盘",
"def solveSudoku(self, board: List[List[str]]) -> None:\n n19 = set(list('123456789'))\n conn = defaultdict(set)\n center = [(i,j) for i in {1,4,7} for j in {1,4,7}]\n def get_conn(i,j):\n for x in range(0, 9):\n conn[(i,j)].add((x,j))\n conn[(i,j)].add((i,x))\n for ci, cj in center:\n if abs(i-ci)<=1 and abs(j-cj)<=1:\n for ii in range(-1,2):\n for jj in range(-1,2):\n ni, nj = ci + ii, cj + jj\n conn[(i,j)].add((ni, nj))\n break\n conn[(i,j)].discard((i,j))\n\n\n for i in range(9):\n for j in range(9):\n get_conn(i,j)\n\n def get_avail(i, j):\n choices = set(n19)\n for ni, nj in conn[(i,j)]:\n choices.discard(board[ni][nj])\n return choices\n\n to_fill = set()\n for i, row in enumerate(board):\n for j, v in enumerate(row):\n if v == '.':\n to_fill.add((i,j))\n\n def solve():\n if not to_fill:\n return True\n min_avail = n19\n ci, cj = None, None\n for i, j in to_fill:\n val = get_avail(i,j)\n if not val:\n return False\n if len(val) < len(min_avail):\n min_avail = val\n ci, cj = i, j\n to_fill.discard((ci, cj))\n for x in min_avail:\n board[ci][cj] = x\n if solve():\n return True\n board[ci][cj] = '.'\n to_fill.add((ci, cj))\n return False\n print(solve())",
"def solveSudoku(self, board: List[List[str]]) -> None:\n row = collections.defaultdict(set)\n col = collections.defaultdict(set)\n block = collections.defaultdict(set)\n pos = []\n\n for i in range(9):\n for j in range(9):\n if board[i][j] != '.':\n row[i].add(board[i][j])\n col[j].add(board[i][j])\n block[i // 3 * 3 + j // 3].add(board[i][j])\n else:\n pos.append((i, j))\n\n self.dfs(board, pos, row, col, block)\n return",
"def solveSudoku(self, board: List[List[str]]) -> None:\n\n def test(row, column, value) -> bool:\n for i in range(9):\n if board[i][column] == str(value):\n return False\n for j in range(9):\n if board[row][j] == str(value):\n return False\n startR = math.floor(row / 3)\n startC = math.floor(column / 3)\n for p in range(3):\n for q in range(3):\n if board[startR * 3 + p][startC * 3 + q] == str(value):\n return False\n return True\n\n i, j = 0, 0\n fixed = [[int] * 2]\n while i < 9:\n j = 0\n while j < 9:\n # 预记录所有预设值\n if board[i][j] != '.' and [i, j] not in fixed:\n fixed.append([i, j])\n # 在预设值位置\n elif [i, j] in fixed:\n None\n # 不在预设位置\n else:\n isFind = False\n # 从0到9进行尝试\n for k in range(1, 10):\n if test(i, j, k):\n board[i][j] = str(k)\n isFind = True\n break\n # 本次没有找到,退回非固定上一列,且上一列数字加一,\n if not isFind:\n while i >= 0:\n if [i, j] not in fixed:\n board[i][j] = \".\"\n # 回退一列\n j -= 1\n # 如果回退至第一列,换上一行继续\n if j < 0:\n i -= 1\n j = 8\n # 回退至起点\n if i < 0:\n break\n\n # 已经尝试该位置所有可能性,或者说明是预设位置\n if board[i][j] == '9' or [i, j] in fixed:\n continue\n # 否则该空位值加一,继续探索\n else:\n start = int(board[i][j]) + 1\n isFindBack = False\n for k in range(start, 10):\n if test(i, j, k):\n board[i][j] = str(k)\n isFindBack = True\n break\n if isFindBack:\n break\n j += 1\n i += 1\n print(board)",
"def solveSudoku(self, board: List[List[str]]) -> None:\n size=len(board)\n sqr_size=int(math.sqrt(size))\n \n def insert_into_board(i,j,element):\n board[i][j]=element\n update_row_and_column(i,j)\n update_sqr(i,j)\n \n #updating columns and rows after interting an element into a cell so the columns can't use it anymore\n #return the list of updated cell by this change so we can update them back inside the recursive back track function\n def update_row_and_column(i,j):\n list_updated=[]\n for k in range(size):\n if type(board[i][k])!=str and board[i][j] in board[i][k] :\n list_updated.append((i,k))\n board[i][k].discard(board[i][j])\n if type(board[k][j])!=str and board[i][j] in board[k][j] :\n list_updated.append((k,j))\n board[k][j].discard(board[i][j])\n return list_updated\n \n #updating columns and rows after interting an element into a cell so the columns can't use it anymore\n #return the list of updated cell by this change so we can update them back inside the recursive back track function\n def update_sqr(i,j):\n list_updated=[]\n sqr_i=sqr_size*int(i/sqr_size)\n sqr_j=sqr_size*int(j/sqr_size)\n for k in range(sqr_size):\n for l in range(sqr_size):\n if type(board[sqr_i+k][sqr_j+l])!=str and board[i][j] in board[sqr_i+k][sqr_j+l]:\n list_updated.append((sqr_i+k,sqr_j+l))\n board[sqr_i+k][sqr_j+l].discard(board[i][j])\n return list_updated\n \n def scan():\n for i in range(size):\n for j in range(size):\n if type(board[i][j])!=str and len(board[i][j])==1:\n insert_into_board(i,j,list(board[i][j])[0])\n \n def check_to_continue():\n for i in range(size):\n for j in range(size):\n if len(board[i][j])==0:\n return False\n return True\n \n def check_is_finished():\n for i in range(size):\n for j in range(size):\n if type(board[i][j])!=str:\n return False\n return True\n \n list_not_filled=[]\n \n def solve_backtrack():\n if check_is_finished():\n return True\n if not check_to_continue():\n return False\n (i,j)=list_not_filled.pop()\n if type(board[i][j])!=str:\n temp=board[i][j]\n for el in temp:\n board[i][j]=el\n index_row_column=update_row_and_column(i,j)\n index_sqr=update_sqr(i,j)\n check=solve_backtrack()\n if check:\n return True\n board[i][j]=temp\n for (o,p) in index_row_column:\n board[o][p].add(el)\n for (o,p) in index_sqr:\n board[o][p].add(el)\n list_not_filled.append((i,j))\n else:\n return solve_backtrack()\n return False\n \n \n #initializing the board ans updating none cells to a list of potential elements\n for i in range(size):\n for j in range(size):\n if board[i][j]=='.':\n board[i][j]=set([str(d) for d in range(1,size+1)])\n \n #updating the rows and columns and smal sqrs for inital elements\n for i in range(size):\n for j in range(size):\n if type(board[i][j])==str:\n update_row_and_column(i,j)\n update_sqr(i,j)\n \n #scaning to solve for simple cases in the start\n #We solve this to reduce the number of iteration in the back track function \n for i in range(size*size):\n scan()\n \n #updating list_not_filled for backtrack\n for i in range(size):\n for j in range(size):\n if type(board[i][j])!=str:\n list_not_filled.append((i,j))\n \n # starting backtrack after initial process\n solve_backtrack()",
"def sudoku_clauses():\n res = []\n # for all cells, ensure that the each cell:\n for i in range(1, 10):\n for j in range(1, 10):\n # denotes (at least) one of the 9 digits (1 clause)\n res.append([v(i, j, d) for d in range(1, 10)])\n # does not denote two different digits at once (36 clauses)\n for d in range(1, 10):\n for dp in range(d + 1, 10):\n res.append([-v(i, j, d), -v(i, j, dp)])\n\n def valid(cells):\n # Append 324 clauses, corresponding to 9 cells, to the result.\n # The 9 cells are represented by a list tuples. The new clauses\n # ensure that the cells contain distinct values.\n for i, xi in enumerate(cells):\n for j, xj in enumerate(cells):\n if i < j:\n for d in range(1, 10):\n res.append([-v(xi[0], xi[1], d), -v(xj[0], xj[1], d)])\n\n # ensure rows and columns have distinct values\n for i in range(1, 10):\n valid([(i, j) for j in range(1, 10)])\n valid([(j, i) for j in range(1, 10)])\n # ensure 3x3 sub-grids \"regions\" have distinct values\n for i in 1, 4, 7:\n for j in 1, 4 ,7:\n valid([(i + k % 3, j + k // 3) for k in range(9)])\n\n assert len(res) == 81 * (1 + 36) + 27 * 324\n return res",
"def solve_sudoku(self, grid_basic_format):\n raise NotImplementedError(\"Solve sudoku method not implemented in Base Class\")",
"def sudoku_solver(filename):\n with open(filename, \"r\") as f:\n lines = f.read().splitlines()\n\n # format grid\n grid = []\n for line in lines:\n row = []\n for char in line.split(\" \"):\n row += [char if char == \"x\" else int(char)]\n grid.append(row)\n\n solution, flag = solve(grid)\n if flag:\n # display solution\n for row in solution:\n print(\" \" + str(row))\n else:\n print(\"Unsolvable\")",
"def parse_sudokus():\n # Open the url with the sudokus for the challenge\n data = urllib2.urlopen('https://projecteuler.net/project/resources/p096_sudoku.txt')\n sudokus = [] # List to hold all sudokus\n current_sudoku = None # Current sudoku we are building\n current_sudoku_row = 0 # Current line of the current sudoku we are building\n for line in data:\n # Check if the line is the start of a new sudoku\n result = re.match(r'(Grid \\d\\d)', line.strip())\n if not result is None:\n # New sudoku\n current_sudoku = np.zeros((9,9), dtype=np.int8)\n current_sudoku_row = 0\n # store the new sudoku\n sudokus.append(current_sudoku)\n else:\n # Get the numbers\n result = re.match(r'(\\d{9})', line.strip())\n col_string = result.groups()[0]\n # Fill up sudoku\n for col in xrange(0, 9):\n current_sudoku[current_sudoku_row, col] = int(col_string[col])\n current_sudoku_row += 1\n return sudokus",
"def solveSudoku(self, board):\n self.back_track(board)\n print(board)",
"def solveSudoku(self, board: List[List[str]]) -> None:\n self.helper(board, 0, 0)",
"def solve_puzzle(grid):\n solutions = []\n if not grid.valid():\n return solutions\n # Backtracking, iterating over (first) smallest list of candidates for empty vertices\n candidates = grid.candidate_map()\n min_number_of_candidates = min([9] + [len(candidates[ln][rw]) for ln in range(9) for rw in range(9) if grid.grid[ln][rw] is None])\n for (line, row) in [(ln, rw) for ln in range(9) for rw in range(9)]:\n if grid.grid[line][row] is None and len(candidates[line][row]) == min_number_of_candidates:\n for guess in candidates[line][row]:\n grid.grid[line][row] = guess\n for solution in solve_puzzle(grid):\n solutions.append(solution)\n grid.grid[line][row] = None\n break\n else:\n solutions.append(Sudoku(grid.__str__()))\n return solutions"
]
| [
"0.77757585",
"0.77223897",
"0.7591495",
"0.70577496",
"0.7051063",
"0.69597095",
"0.6938633",
"0.6924226",
"0.68827355",
"0.6863994",
"0.68432903",
"0.6843008",
"0.68340075",
"0.68323416",
"0.68165493",
"0.67729443",
"0.6767388",
"0.67517245",
"0.66757387",
"0.6663423",
"0.66260785",
"0.6589537",
"0.6569112",
"0.6566724",
"0.65546745",
"0.6553933",
"0.6549203",
"0.65346605",
"0.65259916",
"0.6525783"
]
| 0.7767136 | 1 |
Method that compare if two SudokuMatrix object are equals | def __eq__(self, other_sudoku_matrix):
equals = False
for row in range(9):
for col in range(9):
if int(self.get_cell(row, col).get_cell_value()) == int(
other_sudoku_matrix.get_cell(row, col).get_cell_value()):
equals = True
else:
return False
return equals | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __eq__(self, other):\n if not issubclass(type(other), Matrix):\n return False\n\n if self.rows != other.rows or self.columns != other.columns:\n return False\n\n return self.data == other.data",
"def __eq__(self, other):\r\n return self.id_map == other.id_map and self.matrix == other.matrix\\\r\n and self.size == other.size",
"def __eq__(self, other):\n if not isinstance(other, Matrix):\n return NotImplemented\n\n if self.num_cols != other.num_cols:\n return False\n\n if self.num_rows != other.num_rows:\n return False\n\n for i in xrange(self.num_rows):\n for j in xrange(self.num_cols):\n if self[i, j] != other[i, j]:\n return False\n\n return True",
"def __eq__(self, other):\n for row in range( self.n ):\n if self.board[row] != other.board[row]:\n return False\n return True",
"def __eq__(self, other):\n if isinstance(other, Matrix):\n return self.data == other.data\n else:\n raise TypeError('other is not a Matrix')",
"def __eq__(self, other):\n for i in range(len(self.puzzle)):\n for j in range(len(self.puzzle[0])):\n if(self.puzzle[i][j] != other.puzzle[i][j]):\n return False\n return True",
"def all_equal(self, mtrx):\n if not isinstance(mtrx, Matrix):\n return False\n if not (self.m == mtrx.m and self.n == mtrx.n):\n return False\n if not type(self) == type(mtrx):\n return False\n for i in range(self.m):\n for j in range(self.n):\n if self[i, j] != mtrx[i, j]:\n return False\n return True",
"def __eq__(self, other):\n return type(self) == type(other) and \\\n self.row == other.row and \\\n self.column == other.column",
"def similar(self, other):\r\n if self.rows == other.rows and self.columns == other.columns:\r\n return True\r\n else:\r\n return False",
"def IsEqual(*args, **kwargs):\n return _gdi_.GraphicsMatrix_IsEqual(*args, **kwargs)",
"def __eq__(self, other):\n return (type(self) == type(other) and\n self.n == other.n and self.m == other.m and\n self.from_grid == other.from_grid and\n self.to_grid == other.to_grid)",
"def is_identical(self, other):\n return (self.compounddatatype == other.compounddatatype and\n self.min_row == other.min_row and\n self.max_row == other.max_row)",
"def __eq__(self, other) -> None:\n\t\tfor k, v in enumerate(self.board):\n\t\t\tif v != other.board[k]:\n\t\t\t\treturn False\n\t\treturn True",
"def __eq__(self, other) -> bool:\r\n if isinstance(other, Square):\r\n if (self.board, self.file, self.rank) == (\r\n other.board, other.file, other.rank):\r\n return True\r\n \r\n return False",
"def __eq__(self, other):\n if self.rows != other.rows or self.cols != other.cols:\n return False\n for i in range(self.rows):\n for j in range(self.cols):\n # Need isclose (Python >= 3.5) for float precision\n if not math.isclose(self[i, j], other[i, j]):\n return False\n return True",
"def __eq__(self, other):\n return (type(self) == type(other) and\n self.from_grid == other.from_grid and\n self.to_grid == other.to_grid and\n self.m == other.m and\n self.n == other.n)",
"def __eq__(self, other):\n h1 = [item for row in self.arr for item in row]\n h2 = [item for row in other.arr for item in row]\n for i in range(self.board_size * self.board_size):\n if h1[i] != h2[i]:\n return False\n return True",
"def __eq__(self, other):\n # Check the number of rows and columns\n if self.getNumRows() != other.getNumRows() or\\\n self.getNumCols() != other.getNumCols():\n return False\n\n # Check items\n for i in range(1, self.getNumRows() + 1):\n for j in range(1, self.getNumCols() + 1):\n if self.getItem(i, j) != other.getItem(i, j):\n return False\n\n return True",
"def equal_size(self, other):\n if not isinstance(other, Matrix):\n raise ValueError(\"Can only compare two matrices\")\n return other.m == self.m and other.n == self.n",
"def is_equal(self, a, b):\n return a.X[0] == b.X[0]",
"def _equal_values(self, val1, val2):\n if self._is_supported_matrix(val1):\n if self._is_supported_matrix(val2):\n\n serial_string1 = self._serialize_matrix(val1)\n serial_string2 = self._serialize_matrix(val2)\n\n return serial_string1 == serial_string2\n else:\n return False\n else:\n return super(SparseParameter, self)._equal_values(val1, val2)",
"def __eq__(self, other):\n rows_same = self.row_start == other.row_start and self.row_end == other.row_end\n cols_same = self.col_start == other.col_start and self.col_end == other.col_end\n\n return rows_same and cols_same",
"def __eq__(self, other):\n return np.all(self.grid == other.grid) and np.all(self.pos == other.pos)",
"def __eq__(self, other):\n return other and self.cells == other.cells",
"def board_equals(board, newboard):\n return (newboard == board).all()",
"def board_equals(board, newboard):\n return (newboard == board).all()",
"def board_equals(board, newboard):\n return (newboard == board).all()",
"def __eq__(self, matrix: 'MatrixBoolean') -> bool:\n\t\tif not isinstance(matrix, MatrixBoolean) or \\\n\t\t\tself.dimM != matrix.dimM or self.dimN != matrix.dimN or self.matrix != matrix.matrix:\n\t\t\treturn False\n\t\treturn True",
"def __eq__(self, other):\n try:\n return self.row == other.row and self.col == other.col\n except AttributeError: # Can also take a tuple (row, col)\n return self.row == other[0] and self.col == other[1]",
"def __eq__(self,rkm):\n K1=np.vstack([self.A,self.b])\n K2=np.vstack([rkm.A,rkm.b])\n if K1.shape!=K2.shape:\n return False\n else:\n return (np.vstack([self.A,self.b])==np.vstack([rkm.A,rkm.b])).all()"
]
| [
"0.76117563",
"0.74743104",
"0.7445452",
"0.74154025",
"0.73595923",
"0.7285003",
"0.72529274",
"0.71656305",
"0.71012837",
"0.7078154",
"0.7041093",
"0.7036643",
"0.7032445",
"0.70261645",
"0.697155",
"0.6970848",
"0.69523555",
"0.69491196",
"0.693337",
"0.6912804",
"0.68703324",
"0.6866897",
"0.68548524",
"0.68491393",
"0.68304884",
"0.68304884",
"0.68304884",
"0.6825131",
"0.68040884",
"0.6799828"
]
| 0.8493141 | 0 |
Create HYPER_FILE from SCHEMA_FILE and load with metadata query results | def create_extract():
with open(SCHEMA_FILE, "r") as f:
SCHEMA = yaml.safe_load(f)
with open(TOKEN_FILE, "r") as f:
TOKEN = yaml.safe_load(f)
hc = HyperCreator(SCHEMA, HYPER_FILE)
ts = Tableau(TOKEN["server"], TOKEN["site"], TOKEN["name"], TOKEN["value"])
for table in SCHEMA["tables"]:
with open(f"{CONTENT_MANAGEMENT}/{table['query']}", "r") as f:
query = f.read()
data = ts.query_metadata(query)
data_map = getattr(GraphQL, table["name"])(data)
hc.populate_extract(table["name"], data_map) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _create_schema(self, cypher_file):\n if len(self.graph.nodes) > 0:\n msg = \"Cypher file specified but the graph is not empty. Aborting.\"\n raise ValueError(msg)\n cyp = open(cypher_file, 'r').read()\n self.graph.run(cyp)",
"def schema_load(filename):\n print(uc.schema_load(filename))",
"def import_schemas_from_file():\n with open('./tblSchemas') as schemas_file:\n schemas = {}\n for line in schemas_file:\n line = line.split()\n if len(line) == 0: continue\n if line[0] == 'tblname':\n tbl_name = line[1]\n schemas[tbl_name] = []\n else:\n schemas[tbl_name].append(line)\n return schemas",
"def _generate_schema_from_datafiles(datasets):\n\n schema = {}\n for dataset in datasets:\n schema.update(dataset.native_schema)\n\n return schema",
"def create_tabular_resources(self, file, skip_lines, encoding):\n engine = Engine()\n self.encoding = encoding\n engine.encoding = encoding\n table_val = Table(str(file), header_rows=skip_lines)\n table = engine.auto_create_table(table_val, filename=file, make=False)\n clean_table = table.__dict__\n resource_dict = {}\n path_to_table = os.path.basename(clean_table[\"name\"])\n print(\"Processing... {file_name}\".format(file_name=path_to_table))\n r_name = os.path.splitext(path_to_table)[0].lower()\n resource_dict[\"name\"] = clean_table_name(r_name)\n resource_dict[\"path\"] = path_to_table\n resource_dict[\"schema\"] = {}\n resource_dict[\"dialect\"] = {\"delimiter\": \",\"}\n resource_dict[\"schema\"][\"fields\"] = []\n for cname, ctuple in clean_table[\"columns\"]:\n if len(ctuple) >= 2:\n if ctuple[0] == \"char\":\n # char sizes need quotes\n char_size = \"{a}\".format(a=ctuple[1])\n resource_dict[\"schema\"][\"fields\"].append({\n \"name\": cname,\n \"type\": ctuple[0],\n \"size\": char_size\n })\n else:\n resource_dict[\"schema\"][\"fields\"].append({\n \"name\": cname,\n \"type\": ctuple[0],\n \"size\": ctuple[1]\n })\n else:\n resource_dict[\"schema\"][\"fields\"].append({\n \"name\": cname,\n \"type\": ctuple[0]\n })\n resource_dict[\"url\"] = \"fill\"\n return resource_dict",
"def __json_schema_generator(file):\n try:\n data = json.load(file)\n metadata_set = set()\n try:\n for datum in data['meta']['view']['columns']:\n metadata_set.add(datum['name'])\n except Exception as e:\n metadata_set.clear()\n for datum in data:\n if isinstance(datum, str):\n metadata_set.add(datum)\n else:\n for datum_property in datum:\n metadata_set.add(str(datum_property))\n\n metadata_list = list(metadata_set)\n # assumes list of objects with sparsse data\n # OR\n # for data_property in data[0]:\n # metadata_list.append(data_property)\n # assumes list of objects and that first entry has full list of properties\n\n return SchemaGenerator.__build_schema(metadata_list)\n except Exception as e:\n logging.error('Failed to parse json file into schema: ' + str(e))\n raise FailedCreatingSchemaException(\"Failed to create schema from json file.\")",
"def create_metadata_shell_for_csv(csv_file_path: str) -> str:\n metadata_file = f\"{csv_file_path}-metadata.json\"\n if path.exists(metadata_file):\n raise Exception(f\"Metadata file {metadata_file} already exists.\")\n if not path.exists(csv_file_path):\n raise Exception(f\"CSV file {csv_file_path} does not exist.\")\n\n label = map_file_path_to_label(csv_file_path)\n concept_scheme_uri = generate_concept_scheme_root_uri(label)\n\n # Just inserting basic structure at this point as already exists in standard files. Additional metadata will be\n # added as the script continues to run.\n metadata = {\n \"@context\": \"http://www.w3.org/ns/csvw\",\n \"@id\": concept_scheme_uri,\n \"url\": csv_file_path,\n \"rdfs:label\": label,\n \"dc:title\": label,\n \"tableSchema\": {\n \"columns\": [],\n },\n \"prov:hadDerivation\": {\n \"@id\": concept_scheme_uri,\n \"@type\": [\n \"skos:ConceptScheme\",\n f\"{pmdcat_base_uri}DatasetContents\"\n ]\n }\n }\n\n table_schema: Dict = metadata[\"tableSchema\"]\n columns: List[Dict] = table_schema[\"columns\"]\n\n with open(csv_file_path, newline=\"\") as csv_file:\n reader = csv.reader(csv_file, delimiter=\",\", quotechar=\"\\\"\")\n column_names: List[str] = next(reader)\n\n for column_name in column_names:\n column = generate_schema_for_column(column_name, concept_scheme_uri)\n columns.append(column)\n\n columns.append({\n \"virtual\": True,\n \"propertyUrl\": \"rdf:type\",\n \"valueUrl\": \"skos:Concept\"\n })\n columns.append({\n \"virtual\": True,\n \"propertyUrl\": \"skos:inScheme\",\n \"valueUrl\": concept_scheme_uri\n })\n\n if \"notation\" in [c.lower() for c in column_names]:\n override(table_schema, {\n \"primaryKey\": \"notation\",\n \"aboutUrl\": concept_scheme_uri + \"/{notation}\"\n })\n else:\n print(\"WARNING: could not determine primary key. As a result, `aboutUrl` property is not specified and \" +\n \"so each row will not have a true URI. This is basically required. Manual configuration required.\")\n\n with open(metadata_file, 'w+') as file:\n file.write(json.dumps(metadata, indent=4))\n\n return str(metadata_file)",
"def create_schema(self, schema: str):\n return",
"def load_schema_dataset(self, dataset_raw):\r\n\r\n self._dataset_raw = dataset_raw\r\n return self\r\n # self._parse_schemas_raw()\r\n # print(schemas)\r",
"def generate_vegalite_schema_wrapper(schema_file):\n # TODO: generate simple tests for each wrapper\n with open(schema_file) as f:\n rootschema = json.load(f)\n contents = [HEADER,\n \"from altair.utils.schemapi import SchemaBase, Undefined\",\n LOAD_SCHEMA.format(schemafile='vega-lite-schema.json')]\n contents.append(schema_class('Root', schema=rootschema,\n schemarepr=CodeSnippet('load_schema()')))\n for name in rootschema['definitions']:\n defschema = {'$ref': '#/definitions/' + name}\n defschema_repr = {'$ref': '#/definitions/' + name}\n\n contents.append(schema_class(get_valid_identifier(name),\n schema=defschema, schemarepr=defschema_repr,\n rootschema=rootschema,\n rootschemarepr=CodeSnippet(\"Root._schema\")))\n contents.append('') # end with newline\n return '\\n'.join(contents)",
"def createTable(self):\n ## reading the source file\n\n \n ## building the hive script\n\n ## creating the metastore table by executing the Hive script on the remote machine (SSH)",
"def load_schema_for_modelling():\n filename = \"modelling_schema.csv\"\n folder = os.path.abspath(os.path.dirname(__file__))\n path = os.path.join(folder, filename)\n return pd.read_csv(path).set_index('table_name')",
"def mongoalchemy(schema_file, output=\"-\"):\n schema = read_yaml(schema_file)\n with open_output_stream(output) as f:\n print(Template(models_template).render(schema=schema), f)",
"def create_schema(conn, schemapath):\n with open(schemapath, 'r') as f:\n sql = f.read()\n with conn.cursor() as curs:\n curs.execute(sql)",
"def load_metadata(self):\n self.meta[\"user_tables\"] = pd.read_sql(self.SQL[\"User Tables\"], self.engine)\n self.meta[\"all_tables\"] = pd.read_sql(self.SQL[\"All Tables\"], self.engine)\n self.meta[\"all_databases\"] = pd.read_sql(self.SQL[\"All Databases\"], self.engine)",
"def generate_bq_schema(self, file_name, schema_file_name=None):\n if not schema_file_name:\n schema_file_name = f'{self.directory}/schema_temp.json'\n os.system(f\"generate-schema --keep_nulls < {file_name} > {schema_file_name}\")\n\n schema = open(schema_file_name, 'r').read()\n\n os.remove(schema_file_name)\n\n return json.loads(schema)",
"def main(input_file, output):\n path = pathlib.Path(input_file)\n click.echo(\n click.style(f\"Read a datapackage: \", fg=\"green\")\n + click.style(f\"{path}\", fg=\"green\", bold=True)\n )\n package = datapackage.Package(str(path))\n header = jinja2.Template(TEMPLATE_SQL_HEADER).render(\n now=datetime.datetime.now(), tables=package.resource_names\n )\n output.write(header)\n template = jinja2.Template(TEMPLATE_SQL_CREATE)\n for r in package.resources:\n s = r.schema\n click.echo(\n click.style(f\"Resource \", fg=\"blue\")\n + click.style(f\"{r.name}\", fg=\"blue\", bold=True)\n + click.style(f\" has \", fg=\"blue\")\n + click.style(f\"{len(s.fields)}\", fg=\"blue\", bold=True)\n + click.style(f\" fields\", fg=\"blue\")\n )\n path = None\n if r.local:\n path = r.source\n output.write(\n template.render(\n name=r.name, title=r.descriptor.get(\"title\"), fields=s.fields, path=path\n )\n )\n output.write(\"\\n\")",
"def create(self):\n c = self.cursor()\n byte_schema = pkgutil.get_data(__package__, 'schema.sql')\n c.executescript(byte_schema.decode('utf-8'))\n self.commit()",
"def create_schemas():\n\n # TEXT: the field is indexed, analyzed. By default it is not stored.\n # phrase=False does not allow to search for phrases.\n # sortable=True allows to sort the indexed values\n # ID: the file is indexed, without being analyzed.\n # STORED: the file is saved but not indexed.\n\n pub_schema = Schema(\n pubtype=TEXT(stored=True),\n key=STORED,\n author=TEXT(stored=True),\n title=TEXT(stored=True),\n pages=STORED,\n year=TEXT(stored=True),\n journal=STORED,\n volume=STORED,\n number=STORED,\n url=STORED,\n ee=STORED,\n crossref=ID(stored=True),\n )\n\n ven_schema = Schema(\n pubtype=STORED,\n key=ID(stored=True),\n author=STORED,\n title=TEXT(stored=True),\n journal=STORED,\n publisher=TEXT(stored=True),\n url=STORED,\n ee=STORED,\n year=STORED,\n isbn=STORED,\n )\n\n return pub_schema, ven_schema",
"def __load_handler(self):\n with open(self.path) as file:\n for line in file:\n if line.startswith(\"\"\"# TABLE: \"\"\"):\n self.columndefinition = (line.strip('\\n')\n .replace(\"\"\"# TABLE: \"\"\", ''))\n self.tablename = self.name.replace('.', '_')\n self.tablename = self.tablename.replace('-', '_')\n self.md5_tablename = (hashlib.md5(self.tablename)\n .hexdigest()[:30])\n for columnelement in self.columndefinition.split(','):\n column = columnelement.split(':')[0].strip()\n self.columnnames.append(column)\n\n self.is_mime_handler = True",
"def __init__(self, schema, input_files, output_path):\n self.schema = schema\n self.input_files = input_files\n self.output_path = output_path",
"def _create_schema(self):\n self._conn.executescript(self._db_schema)",
"def load_schema(self, schema_file):\n with open(schema_file) as fp:\n for line in io.lines_in(fp):\n parts = line.strip().split('\\t')\n if len(parts) != 3:\n raise ValueError('invalid type declaration %r' % line.strip())\n self.declare_relation(parts[0], parts[1], parts[2])",
"def init_from_file(filename, verbose=False):\n\n SalesShare._data.clear()\n SalesShare._calibration_data.clear()\n\n if verbose:\n omega_log.logwrite('\\nInitializing database from %s...' % filename)\n\n input_template_name = __name__\n input_template_version = 0.13\n input_template_columns = {'market_class_id', 'start_year', 'annual_vmt',\n 'price_amortization_period', 'share_weight', 'discount_rate',\n 'o_m_costs', 'average_occupancy', 'logit_exponent_mu'\n }\n\n template_errors = validate_template_version_info(filename, input_template_name, input_template_version,\n verbose=verbose)\n\n if not template_errors:\n # read in the data portion of the input file\n df = pd.read_csv(filename, skiprows=1)\n\n template_errors = validate_template_column_names(filename, input_template_columns, df.columns,\n verbose=verbose)\n\n if not template_errors:\n validation_dict = {'market_class_id': omega_globals.options.MarketClass.market_classes}\n\n template_errors += validate_dataframe_columns(df, validation_dict, filename)\n\n if not template_errors:\n SalesShare._data = df.set_index(['market_class_id', 'start_year']).sort_index().to_dict(orient='index')\n\n for mc in df['market_class_id'].unique():\n SalesShare._data[mc] = {'start_year': np.array(df['start_year'].loc[df['market_class_id'] == mc])}\n\n return template_errors",
"def create_schema():\n schema = Schema(idx=ID(stored=True),\n data=STORED,\n body=TEXT(analyzer=StemmingAnalyzer()),\n )\n print(\"schema creation successful\")\n return schema",
"def get_meta(filename):\n with fiona.open(filename) as collection:\n return collection.meta",
"def _create_schema(self, *, remote_operation: bool = False):\n uid = random_string()\n file_path = self.DATADIR.joinpath(f'{uid}.npy')\n m = open_memmap(file_path,\n mode='w+',\n dtype=self.schema_dtype,\n shape=(COLLECTION_SIZE, *self.schema_shape))\n self.wFp[uid] = m\n self.w_uid = uid\n self.hIdx = 0\n\n process_dir = self.REMOTEDIR if remote_operation else self.STAGEDIR\n Path(process_dir, f'{uid}.npy').touch()",
"def __xlsx_schema_generator(file):\n try:\n # Loads the temporary file into a workbook.\n workbook = openpyxl.load_workbook(file)\n\n # Gets the name of all the sheets in the workbook.\n sheet_names = workbook.sheetnames\n \n # The first row on the first sheet is then added into a list.\n metadata_list = list()\n for cell in workbook[sheet_names[0]][1]:\n metadata_list.append(str(cell.value))\n return SchemaGenerator.__build_schema(metadata_list)\n except Exception as e:\n logging.error('Failed to parse xlsx file into schema: ' + str(e))\n raise FailedCreatingSchemaException(\"Failed to create schema from xlsx file.\")",
"def createSchema(schema):\n return \"CREATE SCHEMA \\\"{name}\\\";\\n\".format(name = schema.name)",
"def assemble(metadata_file):\n\n def read(file):\n with open(file) as yaml:\n return load(yaml.read())\n\n def add_name(info):\n info['name'] = slugify(info['title'], separator='_')\n return info\n\n def get_files(filetype):\n filename = metadata_file.replace('metadata', filetype)\n folder = dirname(metadata_file)\n schema_files_pattern = join(folder, filename)\n return glob(schema_files_pattern)\n\n descriptor = add_name(read(metadata_file))\n resources = [add_name(read(file)) for file in get_files('resource')]\n model = get_files('model')\n\n descriptor['resources'] = resources\n if model and len(model) == 1:\n descriptor['model'] = model.pop()\n\n return DataPackage(descriptor)"
]
| [
"0.59176916",
"0.59136146",
"0.5844736",
"0.57159436",
"0.5710448",
"0.56559134",
"0.5615624",
"0.55509704",
"0.55457634",
"0.5543642",
"0.5458917",
"0.54457504",
"0.5437242",
"0.5436896",
"0.5408508",
"0.54038095",
"0.53769493",
"0.53524965",
"0.5347491",
"0.53469646",
"0.5343799",
"0.5342964",
"0.53184795",
"0.5317697",
"0.5293802",
"0.52895045",
"0.527094",
"0.52652377",
"0.5265107",
"0.52624637"
]
| 0.6245237 | 0 |
Replace current extract in TDSX_FILE with HYPER_FILE | def update_datasource():
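    # Open the packaged .tdsx datasource and swap its embedded extract for HYPER_FILE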
ds = Datasource(TDSX_FILE)
ds.replace_extract(HYPER_FILE) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hxlreplace():\n run_script(hxlreplace_main)",
"def replace_extract(self, file_path):\n tmp_file = shutil.copy2(self.path, \"tmpzip\")\n with ZipFile(tmp_file) as src, ZipFile(self.path, \"w\") as dst:\n for src_info in src.infolist():\n _, src_tail = path.split(src_info.filename)\n _, file_tail = path.split(file_path)\n if src_tail == file_tail:\n dst.write(file_path, src_info.filename)\n else:\n with src.open(src_info) as src_file:\n dst.writestr(src_info, src_file.read())\n\n remove(tmp_file)",
"def marker_replace_template(in_file, out_file, old, new):\n replace_file = open(in_file, \"r\")\n template_content = replace_file.read()\n result_content = template_content.replace(old, new)\n replace_file = open(out_file, \"w\")\n replace_file.write(result_content)\n replace_file.close()",
"def change_content(options):\n call_command('''grep -r -l -- '%(patrn)s' . | tr '\\\\n' '\\\\0' | xargs -0 sed -i \"s/%(patrn)s/%(repl)s/g\"''', options)",
"def target_test_file_content():\n return 'initial content'",
"def updateTemplateFile(self, source, placeHolder, value):\n source_file = open(source).read()\n source_file = source_file.replace(placeHolder, value)\n updated_file = open(source, 'w')\n updated_file.write(source_file)\n updated_file.close()",
"def process_tempita(fromfile):\n if not fromfile.endswith('.in'):\n raise ValueError(\"Unexpected extension: %s\" % fromfile)\n\n from_filename = tempita.Template.from_filename\n template = from_filename(fromfile,\n encoding=sys.getdefaultencoding()) \n\n content = template.substitute()\n\n outfile = os.path.splitext(fromfile)[0]\n with open(outfile, 'w') as f:\n f.write(content)",
"def replace_with_file_contents(fname):\n try:\n with open(os.path.expanduser(fname[0])) as source_file:\n result = source_file.read()\n except IOError:\n result = '< %s' % fname[0] # wasn't a file after all\n\n # TODO: IF pyparsing input parser logic gets fixed to support empty file, add support to get from paste buffer\n return result",
"def substitute(var_list, data, template_location, save_location):\r\n with open(template_location) as template_file:\r\n template_text = template_file.read()\r\n\r\n row_number = 1\r\n for row in data:\r\n new_text = template_text\r\n for var, sub in zip(var_list, row):\r\n new_text = new_text.replace(\"<\" + var + \">\", sub)\r\n with open(os.path.join(save_location, f\"{row_number}.txt\"), \"w\") as out_file:\r\n out_file.write(new_text)\r\n row_number += 1",
"def _change_file(file):\n\n with fileinput.FileInput(file, inplace=True, backup='.bak') as f:\n for index, line in enumerate(f):\n if index == 13:\n print(line.replace(line, line[15:]), end='')\n else:\n print(line.replace(line, line), end='')",
"def do_single_file_preprocess(pdf_file):",
"def update_file(filename, items):\n # TODO: Implement something in the templates to denote whether the value\n # being replaced is an XML attribute or a value. Perhaps move to dyanmic\n # XML tree building rather than string replacement.\n should_escape = filename.endswith('addon.xml')\n\n with open(filename, 'r') as inp:\n text = inp.read()\n\n for key, val in items.items():\n if should_escape:\n val = saxutils.quoteattr(val)\n text = text.replace('{%s}' % key, val)\n output = text\n\n with open(filename, 'w') as out:\n out.write(output)",
"def replace(file,original_text,replacement_text):\n with open(file, \"rt\") as fin:\n with open(str(file+\"temp\"), \"wt\") as fout:\n for line in fin:\n fout.write(line.replace(original_text,replacement_text))\n os.rename(str(file+\"temp\"),file)\n return",
"def fix_xfer_syntax(filename):\n\n current_syntax = get_xfer_syntax(filename)\n if (current_syntax == '1.2.840.10008.1.2.1' or \n current_syntax is None or\n current_syntax == '1.2.840.10008.1.2'):\n return (filename, filename)\n else:\n print(current_syntax)\n\n new_filename = tempfile.mktemp(prefix='iffpy')\n\n subprocess.run([\"gdcmconv\",\n \"-w\",\n \"-i\", filename,\n \"-o\", new_filename])\n\n if os.path.exists(new_filename):\n print(f\"Successfully converted file: {new_filename}\")\n return (new_filename, f\"decompressed;{filename}\")\n else:\n print(f\"Looks like this one failed: {new_filename}\")\n return (None, None)",
"def process_tempita(source_name):\n if source_name.endswith(\"pyx.in\"):\n with open(source_name, \"r\", encoding=\"utf-8\") as templated:\n pyx_template = templated.read()\n pyx = Tempita.sub(pyx_template)\n pyx_filename = source_name[:-3]\n with open(pyx_filename, \"w\", encoding=\"utf-8\") as pyx_file:\n pyx_file.write(pyx)\n file_stats = os.stat(source_name)\n try:\n os.utime(\n pyx_filename,\n ns=(file_stats.st_atime_ns, file_stats.st_mtime_ns),\n )\n except AttributeError:\n os.utime(pyx_filename, (file_stats.st_atime, file_stats.st_mtime))\n source_name = pyx_filename\n return source_name",
"def template_replace(template, replace_map, result):\n # Read content of source file.\n with open(template) as fp:\n lines = fp.readlines()\n # Replace placeholders.\n for key, value in list(replace_map.items()):\n for i, line in enumerate(lines):\n # Ignore VHDL comments\n if not line.strip().startswith('--'):\n lines[i] = line.replace(key, value)\n # Write content to destination file.\n with open(result, 'w') as fp:\n fp.write(''.join(lines))",
"def repl_file(self, dir, file, dirkey, filekey, txtkey):\n startloc = os.path.join(self.loc, dir, file)\n newdir = self.dictreplace(dir, dirkey)\n newfile = self.dictreplace(file, filekey)\n enddir = os.path.join(self.loc, newdir)\n endloc = os.path.join(enddir, newfile)\n if not os.path.exists(enddir):\n os.makedirs(enddir)\n if startloc != endloc:\n print(\"Reading \" + startloc)\n print(\"Writing \" + endloc)\n self.replace_all_vals(startloc, endloc, txtkey)",
"def rewrite_xdmf_files(metadata):\n # NT could be done by function.rename('desired name','label') in FEniCS, applied to functions in GeneralProblem\n os.chdir(metadata['dir'])\n for f in os.listdir('.'):\n if f.endswith('xdmf'):\n name = f[5:-5]\n print('Rewriting file: %-40s new vector name:' % f, name)\n os.rename(f, 'temp')\n try:\n reader = open('temp', 'r')\n writer = open(f, 'w')\n for line in reader:\n if re.search(regex, line):\n s = line.split('\\\"')\n newline = line.replace(s[1], name)\n else:\n newline = line\n writer.write(newline)\n reader.close()\n writer.close()\n except IOError:\n print('IOError:', f)\n try:\n os.remove('temp')\n except OSError:\n print('temp file already removed')",
"def substitute_string_in_tstest_file(file_name, replacements):\n lines = []\n infile = codecs.open(file_name, 'r', encoding='utf-16')\n for line in infile:\n for src, target in replacements.iteritems():\n line = line.replace(src, target)\n lines.append(line)\n infile.close()\n\n outfile = codecs.open(file_name, 'w', encoding='utf-16')\n outfile.writelines(lines)\n outfile.close()",
"def fix_page_content(filename, content):\n return JournalStaticPage(filename, content).body",
"def remove_dtd_information(fulltext_file):\n # Remove DTD line\n cleaned_lines = []\n for line in codecs.open(fulltext_file, \"r\", \"utf-8\"):\n line = RE_ARTICLE_DTD.sub('', line)\n cleaned_lines.append(line)\n\n # Overwrite file\n new_file = os.path.splitext(fulltext_file)[0] + \"_cleaned.xml\"\n fulltext_file = codecs.open(new_file, 'w', \"utf-8\")\n fulltext_file.writelines(cleaned_lines)\n fulltext_file.close()\n return new_file",
"def preprocess (self, filecontents):\n\t\treturn filecontents",
"def process(self, zip_processor):\n for filename in os.listdir(zip_processor.temp_dir):\n with open(zip_processor._full_filename(filename)) as file:\n contents = file.read()\n contents = contents.replace(self.search_string, self.replace_string)\n with open(zip_processor._full_filename(filename), \"w\") as file:\n file.write(contents)",
"def set_temp_file(self):\n\n index = self.filename.rfind('/') + 1\n self.temp_filename = self.filename[:index] + \"tmp_\" + self.filename[index:]",
"def sanitizeXML(filename):\n #we have to remove all illegal characters from crossref xml\n full_path = os.path.abspath(filename)\n path, filename = os.path.split(full_path)\n with open(full_path, 'r') as in_file:\n with open(os.path.join(path,\"tmp\"+filename), 'w') as out_file:\n for line in in_file:\n out_file.write(line.replace(r'&', r'&'))\n os.remove(full_path)\n os.rename(os.path.join(path, \"tmp\"+filename), os.path.join(path, filename))\n \n return full_path",
"def test_replacement1(engine_contents, engine_locations):\n file_name = 'Triangle.java.xml'\n new_contents = copy.deepcopy(engine_contents)\n new_locations = copy.deepcopy(engine_locations)\n target1 = (file_name, 'expr_stmt', 3)\n assert not XmlEngine.do_replace(engine_contents, engine_locations, new_contents, new_locations, target1, target1)",
"def __get_packed_xwalk_app_template(self, dest_dir):\n input_file = urllib2.urlopen(self.updated_url)\n contents = input_file.read()\n input_file.close()\n file_path = os.path.join(dest_dir, self.file_name)\n if os.path.isfile(file_path):\n os.remove(file_path)\n file_dir = dest_dir + '/' + self.file_name.split('.tar.gz')[0]\n if os.path.exists(file_dir):\n shutil.rmtree(file_dir)\n output_file = open(file_path, 'w')\n output_file.write(contents)\n output_file.close()",
"def update_page(temp, fileDict, fileName, index=False):\r\n temp.seek(0)\r\n soup = BeautifulSoup(temp.read())\r\n update_file_urls(soup, fileDict, index)\r\n update_css_urls(soup, fileDict, index)\r\n update_image_urls(soup, fileDict, index)\r\n update_page_urls(soup, fileDict, index)\r\n strip_script(soup)\r\n write_page(soup, fileName)",
"def originalIncrementalUpdate(self, pdffilename):\n original = ReadBinaryFile(pdffilename)\n fPDF = open(self.filename, 'wb')\n if sys.version_info[0] == 2:\n fPDF.write(original)\n else:\n fPDF.write(bytes(original, 'ascii'))\n fPDF.close()\n startxrefs = re.findall(r'startxref\\s+(\\d+)', original)\n if startxrefs == []:\n return None, None, None\n oMatch = re.search(r'trailer\\s+', original[int(startxrefs[-1]):])\n if oMatch == None:\n return None, None, None\n positionDictionaryTrailer = oMatch.end() + int(startxrefs[-1])\n dictionaryTrailer = self.MatchDictionary(original[positionDictionaryTrailer:])\n if dictionaryTrailer == None:\n return None, None, None\n oDictionaryTrailer = cDictionary(dictionaryTrailer)\n idRoot = oDictionaryTrailer.GetID('Root')\n if idRoot == None:\n return None, None, None\n oMatch = re.search(r'\\s+%d\\s+0\\s+obj\\s+' % idRoot, original)\n if oMatch == None:\n return None, None, None\n dictionaryRoot = self.MatchDictionary(original[oMatch.end():])\n if dictionaryRoot == None:\n return None, None, None\n oDictionaryRoot = cDictionary(dictionaryRoot)\n return oDictionaryTrailer, oDictionaryRoot, int(startxrefs[-1])",
"def update_template():\n\n # Open, and read, the template file\n with open(\"template.html\", \"r\") as f:\n soup = BeautifulSoup(f.read(), features=\"html5lib\")\n\n # Add the plots in the correct places\n for div in soup.find_all(\"div\", class_=\"plot\"):\n with open(div[\"src\"], \"r\") as f:\n plot = BeautifulSoup(f.read(), features=\"html5lib\")\n div.replace_with(plot.html.body.div)\n\n # Write the finished report to document.html\n with open(\"document.html\", \"w\") as f:\n f.write(soup.prettify())"
]
| [
"0.56128925",
"0.52839655",
"0.51912695",
"0.51302993",
"0.5060519",
"0.5014787",
"0.4933431",
"0.48355338",
"0.48326963",
"0.48252073",
"0.4821674",
"0.4805997",
"0.4800545",
"0.4786376",
"0.47716513",
"0.47544852",
"0.4748637",
"0.4746096",
"0.47163612",
"0.46886548",
"0.4668823",
"0.46605507",
"0.46514738",
"0.4640115",
"0.46397448",
"0.46322072",
"0.46283427",
"0.46254048",
"0.46174398",
"0.46107575"
]
| 0.6904954 | 0 |
Gets the current desired count of the specified ECS Service. | def get_desired_count(cluster_name, service_name):
response = ecs_client.describe_services(
cluster=cluster_name, services=[service_name],
)
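    # describe_services returns a list of matching services; take desiredCount from the first (and only) match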
for service in response["services"]:
return service["desiredCount"]
raise Exception(
f"desiredCount not found for cluster: {cluster_name} service: {service_name}"
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def service_count(self) -> str:\n return pulumi.get(self, \"service_count\")",
"def retrieve_num_instances(service):\n instance_counts = service[\"instance-counts\"]\n return instance_counts[\"healthy-instances\"] + instance_counts[\"unhealthy-instances\"]",
"def eventcount(self):\n return self.serviceinstance_set.aggregate(Count('service__category', distinct=True))['service__category__count']",
"def get_new_service_num(route53_zone, service_name):\n\n # Match records belonging to the service for particular service and\n # environment.\n match_regex = \"(?<={})\\d+(?=\\.{}\\.?)\" \\\n .format(service_name, route53_zone.name)\n\n # Initialize with 0 because we want 1-indexing\n service_nums = [0]\n for record in route53_zone.get_records():\n match = re.search(match_regex, record.name)\n if match:\n service_num = int(match.group(0))\n service_nums.append(service_num)\n\n return max(service_nums) + 1",
"def count(cls, client) :\n try :\n obj = nshttpprofile()\n option_ = options()\n option_.count = True\n response = obj.get_resources(client, option_)\n if response :\n return response[0].__dict__['___count']\n return 0\n except Exception as e :\n raise e",
"def count(cls, client) :\n\t\ttry :\n\t\t\tobj = bfdsession()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e",
"def __len__(self, context=None):\n if context is not None:\n context = self._repair_context(context)\n uri = self.rest_services[\"size\"]\n payload=dict()\n if context:\n context = context.n3()\n payload[\"context\"] = context\n r = requests.get(uri, params = payload)\n return int(r.text)",
"def service_ranking(self) -> ConfigNodePropertyInteger:\n return self._service_ranking",
"def get_count(self, cf_name, key, start='', finish='', keyspace_name=None):\n return self._Get_Count(\n cf_name=cf_name, key=key, start=start, finish=finish,\n keyspace_name=keyspace_name)",
"def count(self) -> int:\n return pulumi.get(self, \"count\")",
"def count(cls, client) :\n\t\ttry :\n\t\t\tobj = lbprofile()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e",
"def count(cls, client) :\n\t\ttry :\n\t\t\tobj = lsntransportprofile()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e",
"def count(cls, client) :\n\t\ttry :\n\t\t\tobj = appfwlearningsettings()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e",
"def count(options=None):\n if options is None:\n return requests.get(\"/count\")\n else:\n return requests.get(\"/count\", options)",
"def get_service_status(self, service_params={}):\n return {}",
"def service(self):\n payload = {\n 'time_zone': self.timezone,\n 'query': self._service_key,\n 'include[]': 'escalation_policies'\n }\n r = self._get_url(payload, 'services')\n return r['services'][0]",
"def GetCount(self):\n return self._server.get_count()",
"def GetCount(name):\n counter = StrongCounter.get_or_insert(key_name=name)\n return counter.count",
"def get_entity_contracts_count():\n url = 'http://www.base.gov.pt/base2/rest/contratos?adjudicatariaid=%d' \\\n '&sort(-id)' % entity.base_id\n\n response = requests.get(url, headers={'Range': 'items=0-24'})\n\n results_range = response.headers['content-range']\n _, count = results_range.split('/')\n\n return int(count)",
"def service_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"service_id\")",
"def service_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"service_id\")",
"def count(self):\n return self.get_count()",
"def get_count(self, asset=None):\n if asset is None or 'pc:count' not in asset.properties:\n return self.item.properties.get('pc:count')\n else:\n return asset.properties.get('pc:count')",
"def get_count(self):\n return self.count",
"def get_count(self):\n return self.count",
"def cfCount(self, key, item):\n params = [key, item]\n\n return self.execute_command(self.CF_COUNT, *params)",
"def service_id(self) -> str:\n return pulumi.get(self, \"service_id\")",
"def get_count(cls):\n total = 0\n for counter in SimpleCounterShard.objects.all():\n total += counter.count\n return total",
"def get_count(self):\r\n return self.count",
"def _get_service_version(service):\n\n return int(service.split(':')[4])"
]
| [
"0.74331915",
"0.6553271",
"0.6328588",
"0.5801972",
"0.57700175",
"0.5674347",
"0.5674174",
"0.5630112",
"0.5587021",
"0.553864",
"0.55281293",
"0.54909825",
"0.5474824",
"0.5431807",
"0.54288244",
"0.5426716",
"0.54190874",
"0.54064065",
"0.5393235",
"0.5391078",
"0.5391078",
"0.53768516",
"0.5374896",
"0.53696436",
"0.53696436",
"0.5356902",
"0.5353227",
"0.53519464",
"0.5347084",
"0.53460646"
]
| 0.78649575 | 0 |
Split the blocks if their size is too large | def divide_block(env, blc):
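    # Walk the block boundaries in blc and add intermediate boundaries so no block exceeds env.size_block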
t = 0
size = env.size_block
    div = np.zeros(int(blc[-1] // size) + blc.shape[0])
print(div.shape)
index = 0
for k in blc:
while k - t > size:
t += size
if k - t > size:
div[index] = t
index += 1
div[index] = k
index += 1
t = k
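    # keep only the boundaries that were actually filled in (drop the zero padding)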
div = div[np.where(div)]
return (div) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def GetScaleBlocks(width):\n\n rord=numpy.log10(abs(width)/2.0)\n nrord=rord % 1\n\n if nrord < numpy.log10(2):\n spc=0.2*pow(10,numpy.floor(rord))\n smallspc=spc\n bigspc=5*spc\n newspc=[0,smallspc,smallspc*2,smallspc*3,smallspc*4,smallspc*5]\n elif nrord < numpy.log10(5):\n spc=0.5*pow(10,numpy.floor(rord))\n smallspc=spc\n bigspc=5*spc\n newspc=[0,smallspc,smallspc*2,smallspc*3,smallspc*4]\n else:\n spc=pow(10,numpy.floor(rord))\n smallspc=spc\n bigspc=spc*5\n newspc=[0,smallspc,smallspc*2,smallspc*3,smallspc*4,smallspc*5]\n\n if len(newspc) == 5:\n #labels=['0',None,\"%g\" % smallspc*2,None,\"%g\" % (smallspc*4)]\n labels=['0',None,None,None,\"%g\" % (smallspc*4)]\n else:\n labels=['0',None,None,None,None,\"%g\" % (smallspc*5)]\n\n temp_max=newspc[len(newspc)-1]\n start=temp_max\n for temp in numpy.arange(start,width-bigspc/2,bigspc):\n temp_max=temp_max+bigspc\n newspc.append(temp_max)\n labels.append(\"%g\" % temp_max)\n\n #start=temp_max\n #for temp in Numeric.arange(start,width-smallspc/2,smallspc):\n # labels.append(None)\n # temp_max=temp_max+smallspc \n # newspc.append(temp_max) \n\n return (numpy.array(newspc,numpy.float32),labels)",
"def mover_bm_izquierda(self):\n self.nueva_posicion_posible_parte_superior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] - 1,\n self.casilla[1]],\n [self.vertice_1[0] - self.velocidad,self.vertice_1[1]], \n [self.vertice_1[0] - 5 - 5, self.vertice_1[1]])\n self.nueva_posicion_posible_parte_inferior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] - 1,\n self.casilla[1] + 1],\n [self.vertice_3[0] - self.velocidad,self.vertice_3[1]],\n [self.vertice_3[0] - 5,self.vertice_3[1]]) \n if self.nueva_posicion_posible_parte_superior[0] != 1 and self.nueva_posicion_posible_parte_inferior[0] != 1:\n self.x -= self.velocidad * (self.x >= 15)\n self.posicion = [self.x,self.posicion[1]]\n self.casilla = [self.casilla[0] - self.nueva_posicion_posible_parte_superior[1] *(self.nueva_posicion_posible_parte_inferior[0] != 1) * (self.nueva_posicion_posible_parte_superior[0] != 1), self.casilla[1]]\n self.redefinir_vertices()",
"def mezclar_bolsa(self):",
"def afficher(self):\n bordRect = (self.pos[0]-5, self.pos[1]-5, self.dim[0]+5, self.dim[1]+5)\n Fond = pygame.draw.rect(self.ecran.surface, self.ecran.couleur, bordRect, 0) # Efface le precedant text\n\n rang = 0\n verif = \"\"\n compteur = 0\n self.lignes = []\n if self.txt == \"\": self.txt = \" \"\n \n while verif != self.txt:\n verif =\"\"\n rang += self.correction(self.txt[rang:], compteur)\n compteur += 1\n for k in self.lignes:\n verif += k.txt\n\n for compteur in range(len(self.lignes)):\n self.lignes[compteur].afficher()\n\n self.dim = (self.dim[0], self.hLigne*(compteur+1)) # +1 -> Boucle for\n \n pygame.display.flip()",
"def reemplaza_tildes(palabra):",
"def getChunks():",
"def line_pack(plateau, num_lig, debut, sens):\n if check_room(plateau,num_lig,debut) == False or (sens != 1 and sens != 0): # si plateau n est pas valide ou sens != 1 et 0\n return \"Erreur !\"\n if sens == 1:\n i = debut # tasser a partie de i\n while i <3:\n set_value(plateau, num_lig, i, get_value(plateau, num_lig, i+1)) # remplacer la valeur avec la valeur suivant\n i += 1\n set_value(plateau, num_lig, i, 0)\n else:\n i = debut\n while i > 0:\n set_value(plateau, num_lig, i, get_value(plateau, num_lig, i-1)) # remplacer la valeur avec la valeur precedente\n i -= 1\n set_value(plateau, num_lig, i, 0)",
"def calculatePieces(self):\n pass;",
"def ubicar_fragata():\n tamano = Fragata.tamano #se importa el tamano del barco desde su clase\n cantidad = Fragata.cantidad #se importa la cantidad de barcos desde su clase\n orientacion = orientaciones[(randint(0, 1))] #elige aleatoriamente el index de la tupla orientaciones = (\"Vertical\", \"Horizontal\")\n seguir_coordenadas = True\n while seguir_coordenadas:\n mal_ubicado = \"no\"\n if orientacion == \"Vertical\":\n #se eligen random las filas y las columnas\n coor_fila = randint(1, numero_filas)\n coor_columna = randint (1, numero_columnas)\n while (coor_fila + tamano) > 10: #como su orientacion es vertical la fila incial del barco mas su tamano (2) no puede ser mayor que 10 porque se saldria del mapa\n coor_fila = randint(1,numero_filas)\n ubicacion = (coor_fila, coor_columna)\n lista_temporal.append(ubicacion) #lista donde se ubicaran temporalmente las ubicaciones de los barcos\n while len(lista_temporal) < tamano: #sacar las posiciones restantes \n coor_fila += 1\n ubicacion = (coor_fila, coor_columna)\n lista_temporal.append(ubicacion)\n for x in lista_ubicacion_barco:\n for y in lista_temporal:\n if x == y:\n mal_ubicado = \"si\" #si alguna coordenada de este barco coincide con la de otro la variable mal ubicado sera \"si\" \n elif (y[0] == x[0] or (y[0]+1) == x[0] or (y[0]-1) == x[0]) and ((y[1]) == x[1] or (y[1]+1) == x[1] or (y[1]- 1) == x[1]): #validacion para que no se ubique el barco al lado o diagonalmente contiguo a otro\n mal_ubicado = \"si\" #si esta validacion sucede la variable mal ubicado sera \"si\" \n if orientacion == \"Horizontal\":\n #se eligen random las filas y las columnas\n coor_fila = randint(1, numero_filas)\n coor_columna = randint(1, numero_columnas)\n while (coor_columna + tamano) > 10: #como su orientacion es horizontal la columna incial del barco mas su tamano (2) no puede ser mayor que 10 porque se saldria del mapa\n coor_columna = randint(1, numero_columnas)\n ubicacion = (coor_fila, coor_columna)\n lista_temporal.append(ubicacion) #lista donde se ubicaran temporalmente las ubicaciones de los barcos\n while len(lista_temporal) < tamano: #sacar las posiciones restantes \n coor_columna += 1\n ubicacion = (coor_fila, coor_columna)\n lista_temporal.append(ubicacion)\n for x in lista_ubicacion_barco:\n for y in lista_temporal:\n if x == y:\n mal_ubicado = \"si\" #si alguna coordenada de este barco coincide con la de otro la variable mal ubicado sera \"si\" \n elif (y[0] == x[0] or (y[0]+1) == x[0] or (y[0]-1) == x[0]) and ((y[1]) == x[1] or (y[1]+1) == x[1] or (y[1]- 1) == x[1]): #validacion para que no se ubique el barco al lado o diagonalmente contiguo a otro\n mal_ubicado = \"si\" #si esta validacion sucede la variable mal ubicado sera \"si\" \n if mal_ubicado == \"si\": #si la variable mal ubicado es \"si\" se repetira el proceso otra vez\n seguir_coordenadas = True\n lista_temporal.clear()\n elif mal_ubicado == \"no\": #si la variable es \"no\" se continuara el proceso\n for x in lista_temporal:\n lista_ubicacion_barco.append(x) #se agregan las posiciones a la lista general\n coordenadas_fragata.append(x) \n lista_temporal.clear() #se limpia la lista temporal para usarla en el otro barco\n seguir_coordenadas = False",
"def _divide_pattern(self, page_size):\n\n tmp_chart = self.floss_num_chart.copy()\n floss_size = (float(len(tmp_chart[0])), float(len(tmp_chart)))\n chart_size = (int(math.ceil(floss_size[0]/page_size[0])), int(math.ceil(floss_size[1]/page_size[1])))\n num_patterns = chart_size[0] * chart_size[1]\n divided_patterns = []\n\n #TEST CODE\n print(\"chart_size = \", chart_size)\n print(\"num_patterns = \", num_patterns)\n\n while len(tmp_chart):\n divided_rows = tmp_chart[:60]\n while len(divided_rows[0]):\n templist = []\n for row in divided_rows:\n templist.append(row[:60])\n del row[:60] # Note, decrease this number compared to number above to have repeated rows in table break\n divided_patterns.append(templist)\n del tmp_chart[:60]\n\n return divided_patterns",
"def check_removal(self, multiplier=1):\n children = self.ids.grid.children\n groups = []\n points = 0\n\n # Recursively check all children and creates groups with them\n for i, child in enumerate(children):\n if not child.visited:\n groups.append(self.recursive_check(i))\n\n # Reset visit status for the next pass\n for child in children:\n child.visited = False\n\n # Get the groups that contain more than 3 blocks of the same colour, calculate points and let new blocks fall\n high_groups = [x for x in groups if len(x) > 3]\n for g in high_groups:\n # I sort the blocks by reversed id, this helps in the implementation of how blocks fall\n # If this was unsorted, a block might get the colour of the block above that actually should get removed\n g.sort(reverse=True)\n points += multiplier * len(g)\n multiplier += 1\n for button_id in g:\n self.fall(button_id)\n if len(high_groups) > 0:\n return self.check_removal(multiplier) + points\n else:\n return 0",
"def split_necessity(self):\n return max(self._color_var_rel) * self.n_pix\n # return reduce(int.__mul__, (l-u for u,l in self.bounds)) * self.n_pix",
"def get_mvts(self, plateau):\n if self.type == \"p\": #Pion\n if self.color == \"w\":\n diags = [[self.x-1, self.y+1],[self.x+1, self.y+1]] #Mouvements possibles de diagonales\n faces = [[self.x, self.y+1]] #Mouvements possibles de face\n if not self.moved: #Si le pion n'a pas encore bougé de la partie\n faces.append([self.x, self.y+2])\n else:\n diags = [[self.x-1, self.y-1], [self.x+1, self.y-1]]\n faces = [[self.x, self.y-1]] #Mouvements possibles de \n if not self.moved:\n faces.append([self.x, self.y-2])\n pos = [] #Position de déplacement validées\n for d in diags:\n if verif_case(d[0], d[1]): #Si la case est sur le plateau \n pion = plateau.get_pion(d[0],d[1])\n if pion != None and pion.color != self.color: #Si il y a un pion ennemi\n pos.append(d)\n for f in faces: \n if verif_case(f[0],f[1]):\n pion = plateau.get_pion(f[0], f[1])\n if pion == None: #Si il n'y a pas de pion\n pos.append(f)\n return pos\n elif self.type == \"t\": #Tour\n pos = []\n dir = [[1,0],[-1,0],[0,1],[0,-1]] #4 directions possibles\n for d in dir:\n x,y = self.x+d[0],self.y+d[1] #Projection de position\n while verif_case(x,y): #Tant que (x, y) est sur le plateau\n pion = plateau.get_pion(x, y)\n if pion != None: #Si il y a un pion\n if pion.color != self.color: #Si il n'est pas allié\n pos.append([x,y])\n break\n pos.append([x,y])\n x += d[0]\n y += d[1]\n return pos\n elif self.type == \"c\": #Cavalier\n l = [-2,-1,1,2]\n mvts = [[x,y] for x in l for y in l if abs(x)!=abs(y)]\n pos = []\n for m in mvts:\n x = self.x + m[0]\n y = self.y + m[1]\n if verif_case(x,y):\n pion = plateau.get_pion(x, y)\n if pion == None or pion.color != self.color:\n pos.append([x, y])\n return pos\n elif self.type == \"f\": #Fou\n dir = [[1,1],[-1,1],[-1,-1],[1,-1]]\n pos = []\n for d in dir:\n x,y = self.x+d[0],self.y+d[1]\n while verif_case(x,y):\n pion = plateau.get_pion(x, y)\n if pion != None:\n if pion.color != self.color:\n pos.append([x,y])\n break\n pos.append([x,y])\n x += d[0]\n y += d[1]\n return pos\n elif self.type == \"k\": #Roi\n mvts = [[1,0],[-1,1],[0,-1],[-1,-1],[-1,0],[-1,1],[0,1],[1,1]] #4 mouvements possibles\n pos = []\n for m in mvts:\n x = self.x + m[0]\n y = self.y + m[1]\n if verif_case(x, y):\n pion = plateau.get_pion(x, y)\n if pion == None or pion.color != self.color:\n pos.append([self.x + m[0], self.y + m[1]])\n return pos\n elif self.type == \"q\": #Dame\n pos = []\n dir = [[1,0],[1,-1],[0,-1],[-1,-1],[-1,0],[-1,1],[0,1],[1,1]]\n for d in dir:\n x,y = self.x+d[0],self.y+d[1]\n while verif_case(x,y):\n pion = plateau.get_pion(x, y)\n if pion != None:\n if pion.color != joueur:\n pos.append([x,y])\n break\n pos.append([x,y])\n x += d[0]\n y += d[1]\n return pos",
"def bloqueio_de_bifurcacao_4(tab,jog): \r\n if len(bifurcacao_3(tab,-1*jog)) == 1 :\r\n return bifurcacao_3(tab,-1*jog)[0]\r\n else:\r\n for i in range(1,4):\r\n if obter_coluna(tab,i).count(jog)==1:\r\n col = obter_coluna(tab,i)\r\n for j in range(3):\r\n if col[j]==0:\r\n pos1=3*j+i\r\n newtab = marcar_posicao(tab, jog, pos1)\r\n if len(bifurcacao_3(newtab,-1*jog)) == 0:\r\n return pos1\r\n \r\n if obter_linha(tab,i).count(jog)==1:\r\n linha = obter_linha(tab,i)\r\n for j in range(3):\r\n if linha[j]==0:\r\n pos1=j+1+3*(i-1)\r\n newtab = marcar_posicao(tab, jog, pos1)\r\n if len(bifurcacao_3(newtab,-1*jog)) == 0:\r\n return pos1\r\n \r\n if i < 3 and obter_diagonal(tab,i).count(jog)==1:\r\n diagonal = obter_diagonal(tab,i)\r\n for j in range(3):\r\n if i==1:\r\n if diagonal[j]==0:\r\n pos1=4*j+i\r\n newtab = marcar_posicao(tab, jog, pos1)\r\n if len(bifurcacao_3(newtab,-1*jog)) == 0:\r\n return pos1\r\n else:\r\n if diagonal[j]==0:\r\n pos1=7-2*j\r\n newtab = marcar_posicao(tab, jog, pos1)\r\n if len(bifurcacao_3(newtab,-1*jog)) == 0:\r\n return pos1",
"def split(self,i):\n alpha = 0.6\n eps = 2.6\n\n if self.n > self.maxn-3:\n print \"cannot refine any further\"\n return False\n \n # The son \n self.m[i] = self.m[i] / 4.0\n #self.h[i] = self.h[i] * alpha\n\n # Daughter 1\n self.r[self.n] = self.r[i] + eps*np.array([0,1])\n self.m[self.n] = self.m[i] \n self.v[self.n] = self.v[i]\n \n # Daughter 2\n self.r[self.n+1] = self.r[i] + eps*np.array([0.866025,-0.5])\n self.m[self.n+1] = self.m[i] \n self.v[self.n+1] = self.v[i]\n \n # Daughter 3\n self.r[self.n+2] = self.r[i] + eps*np.array([-0.866025,-0.5])\n self.m[self.n+2] = self.m[i] \n self.v[self.n+2] = self.v[i]\n \n self.n = self.n+3\n #print \"There are now \",self.n,\"particles\"\n return True",
"def tick(self, iteration):\n frame = self.cam.get_current_fram()\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n # on recupaire la zone qui on les zone de couleur\n mask = cv2.inRange(hsv, self.greenLower, self.greenUpper)\n # on fait une erosion et dilation pour supprimer les petit pixel blanc qui peut être pris pour la balle\n mask = cv2.erode(mask, None, iterations=iteration)\n mask = cv2.dilate(mask, None, iterations=iteration)\n\n cv2.imshow(\"mask\", mask)\n\n # Recherche de contour pour savoir ou est la balle\n cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]\n center = None\n if len(cnts) > 0:\n # on charche le contour le plus grand (celui de la balle)\n c = max(cnts, key=cv2.contourArea)\n ((x, y), radius) = cv2.minEnclosingCircle(c)\n moment_cont = cv2.moments(c)\n # on recupaire le centre de la balle\n center = (int(moment_cont[\"m10\"] / moment_cont[\"m00\"]), int(moment_cont[\"m01\"] / moment_cont[\"m00\"]))\n # on controle la taille du rayon de la balle\n if radius > 30:\n # Puis on dessin la boulle\n cv2.circle(frame, (int(x), int(y)), int(radius), (0, 255, 255), 2)\n cv2.circle(frame, center, 5, (0, 0, 255), -1)\n return center, frame",
"def mover_bm_derecha(self):\n self.nueva_posicion_posible_parte_superior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] + 1,\n self.casilla[1]],\n [self.vertice_2[0] + self.velocidad ,\n self.vertice_2[1]],\n [self.vertice_1[0] + 5, self.vertice_1[1]])\n self.nueva_posicion_posible_parte_inferior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] + 1,\n self.casilla[1] + 1],\n [self.vertice_4[0] + self.velocidad,\n self.vertice_4[1]],\n self.vertice_1)\n if self.nueva_posicion_posible_parte_superior[0] != 1 and self.nueva_posicion_posible_parte_inferior[0] != 1:\n self.x += self.velocidad * (self.x <= 655)\n self.posicion = [self.x,self.posicion[1]]\n self.casilla = [self.casilla[0] + self.nueva_posicion_posible_parte_superior[1], self.casilla[1]]\n self.redefinir_vertices()",
"def disaggregate_chunk(self, test_mains):\n raise NotImplementedError()",
"def mover_bm_arriba(self):\n self.nueva_posicion_posible_parte_superior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0],\n self.casilla[1] - 1],\n [self.vertice_1[0] ,self.vertice_1[1] - self.velocidad],\n [self.vertice_1[0], self.vertice_1[1] - 5 -5])\n self.nueva_posicion_posible_parte_inferior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] + 1,\n self.casilla[1] - 1],\n [self.vertice_2[0] ,self.vertice_2[1] - self.velocidad],\n [self.vertice_1[0],self.vertice_1[1]])\n self.y -= self.velocidad * (self.y >= 15) *(self.nueva_posicion_posible_parte_inferior[0] != 1) * (self.nueva_posicion_posible_parte_superior[0] != 1)\n self.posicion = [self.posicion[0],self.y]\n self.casilla = [self.casilla[0],self.casilla[1] - self.nueva_posicion_posible_parte_superior[1] * (self.nueva_posicion_posible_parte_inferior[0] != 1) * (self.nueva_posicion_posible_parte_superior[0] != 1)]\n self.redefinir_vertices()",
"def homogeneite_type(list_allele, list_hauteur, log):\n log = log + \"Normalisation des données..........................\\n\"\n iteration = 2\n Allele = []\n Hauteur = []\n Allele.append(list_allele[0])\n Allele.append(list_allele[1])\n Hauteur.append(list_hauteur[0])\n Hauteur.append(list_hauteur[1])\n if len(list_allele) > 32:\n iteration = 3\n Allele.append(list_allele[2])\n Hauteur.append(list_hauteur[2])\n for i in range(iteration, len(list_allele)):\n Al = []\n Ht = []\n for j in range(3):\n Al.append(float(list_allele[i][j]))\n Ht.append(float(list_hauteur[i][j]))\n Allele.append(Al)\n Hauteur.append(Ht)\n log = log + \"Normalisation effectuée..............................\\n\"\n return Allele, Hauteur, log",
"def vol_rameaux(x): \r\n return sum([vol_rameau_cat(x, cat) for cat in ['small', 'medium', 'large']])",
"def glow_boundary(bound):\n assert bound < 4\n global layout\n temp = len(layout) - 1\n for i in range(bound, bound + len_square(bound)):\n for j in range(bound, bound + len_square(bound)): # TODO: assign this to a variable\t\n layout[i][j] = 1",
"def cleanup(self):\n self.subpixel, self.pixel = self.stepup(self.subpixel, self.pixel, AxisDistance.pixelsize)\n self.pixel, self.tile = self.stepup(self.pixel, self.tile, AxisDistance.tilesize)",
"def remove_subdivison(self):\n temp_sub_vertices = []\n for index in range(0, len(self.subdivision_list) - 1, 4):\n v0 = Vec3d(0, 0, 0, 0)\n v1 = Vec3d(0, 0, 0, 0)\n v2 = Vec3d(0, 0, 0, 0)\n\n v0.x = self.subdivision_list[index + 1][0].x\n v0.y = self.subdivision_list[index + 1][0].y\n v0.z = self.subdivision_list[index + 1][0].z\n v0.w = self.subdivision_list[index + 1][0].w\n\n v1.x = self.subdivision_list[index + 2][0].x\n v1.y = self.subdivision_list[index + 2][0].y\n v1.z = self.subdivision_list[index + 2][0].z\n v1.w = self.subdivision_list[index + 2][0].w\n\n v2.x = self.subdivision_list[index + 3][0].x\n v2.y = self.subdivision_list[index + 3][0].y\n v2.z = self.subdivision_list[index + 3][0].z\n v2.w = self.subdivision_list[index + 3][0].w\n\n temp_sub_vertices.append([v0, v1, v2])\n\n self.subdivision_list = temp_sub_vertices",
"def affiche_cartes():\n for i in range(nbjoueurs):\n fenetre.blit(dos_carte,(920+i*145,100)) # soit 920,1065,1210,1355 pour 4 joueurs",
"def podziel(self):\n def fraktal(dlugosc, alpha, poziom):\n \"\"\"Metoda wyznaczajaca fraktal.\n\n Metoda ta przyjmuje dlugosc, kat oraz poziom drzewa.\n Na bazie podanych parametrow wylicza fraktal z podanych w zadaniu wzorow.\n Zwraca liste zawierajaca punkX oraz punktY fraktalu.\n \"\"\"\n#obliczanie punktow punktu Abis dla kazdego poziomu galezi\n x = float(self.p2[0] + self.dlugosc * cos(alpha))\n y = float(self.p2[1] + self.dlugosc * sin(alpha))\n return [round(x), round(y)]\n\n#petla przechodzaca po wszystkich poziomach drzewa\n while self.tmp <= self.poziom:\n#obliczanie grubosci, dlugosci galezi oraz kolorowanie jej\n self.grubosc = float((2 * self.grubosc + 1) / 3)\n self.dlugosc = float((2 * self.dlugosc) / 3)\n self.kolor += 6\n\n #sprawdzenie czy kolor nie wyszedl po za skale maksymalnej wartosci\n if self.kolor > 255:\n self.kolor = 255\n\n#rozbicie obliczen na poziom 1 i wyzej\n#Abis jest to punkt prawy dla kazdej galezi\n#B jest to punkt srodkowy dla kazdej galezi\n#C jest to punkt srodkowy dla kazdej galezi\n\n#obliczenia dla pierwszego poziomu\n if self.tmp < 2:\n#obliczenie fraktalu, prawa galaz dla kazdej galezi\n#podstawienie obliczonych wartosci z punktu Abis do pozostalych wedlug podanych wzorow\n Abis = fraktal(self.dlugosc, self.alpha, self.poziom)\n B = [round(self.p2[0]), round(Abis[1])]\n C = [round(-Abis[0] + 2 * self.p2[0]), round(Abis[1])]\n\n#zwiekszenie poziomu drzewa o jeden\n self.tmp += 1\n\n#tutaj nastepuje zwrocenie obiektow typu Branch z nowo obliczonymi wartosciami\n return [Branch(self.p2, Abis, self.dlugosc, self.grubosc, self.kolor, self.alpha, self.tmp),\n Branch(self.p2, B, self.dlugosc, self.grubosc, self.kolor, self.alpha, self.tmp),\n Branch(self.p2, C, self.dlugosc, self.grubosc, self.kolor, self.alpha, self.tmp)]\n#obliczenia poziomow wyzej niz pierwszy\n else:\n#obliczanie kata dla punktu prawego\n self.zetprim = randint(-1, 1) * randint(1, self.s)\n self.beta = self.alpha + self.zetprim\n\n#obliczanie kata dla punktu srodkowego\n self.zetbis = randint(-1, 1) * randint(1, self.s)\n self.gamma = self.alpha + self.zetbis\n\n#obliczanie kata dla punktu lewego\n self.zetter = randint(-1, 1) * randint(1, self.s)\n self.teta = self.alpha + self.zetter\n\n#obliczenie fraktalu, prawa galaz dla kazdej galezi\n#podstawienie obliczonych wartosci z punktu Abis do pozostalych wedlug podanych wzorow\n Abis = fraktal(self.dlugosc, self.beta, self.poziom)\n B = [round(self.p2[0]), round(Abis[1])]\n C = [round(-Abis[0] + 2 * self.p2[0]), round(Abis[1])]\n\n#zwiekszenie poziomu drzewa o jeden\n self.tmp += 1\n\n#tutaj nastepuje zwrocenie obiektow typu Branch z nowo obliczonymi wartosciami\n return [Branch(self.p2, Abis, self.dlugosc, self.grubosc, self.kolor, self.beta, self.tmp),\n Branch(self.p2, B, self.dlugosc, self.grubosc, self.kolor, self.gamma, self.tmp),\n Branch(self.p2, C, self.dlugosc, self.grubosc, self.kolor, self.teta, self.tmp)]",
"def browse_block(t_env, a, blc, ti, div):\n\t### a supprimer ###\n\tb = a.copy()\n\t###################\n\n\tprint('parcours', blc.shape[0])\n\t#parcours des blocs\n\tb_save = 0\n\tfor k in range(blc.shape[0]):\n\t\tprint('entre block', k)\n\t\tl = small_time(t_env.al, k, t_env.size)\n\t\texploration = np.zeros(l.shape[0])\n\t\tbij = small_bij(t_env.big_bij, k, t_env.size)\n\t\tb_save = bij.copy()\n\t\tbeta_ij = small_bij(t_env.big_beta, k, t_env.size)\n\t\tbij_bool = np.zeros(bij.shape, dtype = bool)\n\t\tbol = 0\n\t\tbij2 = bij\n\t\twhile is_explored(exploration, bij_bool):\n\t\t\tb_prec = bij2.copy()\n\t\t\tbol = part_aij(t_env, bij, a, exploration, bij_bool, l, beta_ij, div, k, b, b_save, b_prec)",
"def vertical_core(block,cut,laser):\r\n\r\n\tlayers = int(block[\"thickness\"]/laser[\"z_spacing\"])\r\n\tangle = math.radians(laser[\"kerf_angle\"]/2)\r\n\ttaper = math.tan(angle) * laser[\"z_spacing\"]\r\n\r\n\tu = math.tan(2 * angle) * (block[\"thickness\"] + laser[\"z_final_overshoot\"])\r\n\tz_0 = block[\"thickness\"]*math.cos(angle) + math.sin(angle)*((cut[\"final_dimension_y\"])/2 - block[\"origin_y\"] + u)\r\n\tz_1 = block[\"thickness\"]*math.cos(angle) + math.sin(angle)*((cut[\"final_dimension_x\"])/2 + block[\"origin_x\"] + u)\r\n\tz_2 = block[\"thickness\"]*math.cos(angle) + math.sin(angle)*((cut[\"final_dimension_y\"])/2 + block[\"origin_y\"] + u)\r\n\tz_3 = block[\"thickness\"]*math.cos(angle) + math.sin(angle)*((cut[\"final_dimension_x\"])/2 - block[\"origin_x\"] + u)\r\n\t\r\n\tcutlist = []\r\n\tcutlist.append([\"a_abs\", f\"{math.degrees(angle):.6f}\"])\r\n\tcutlist.append([\"c_abs\", str(block[\"physical_rotation\"])])\r\n\tcutlist.append([\"z_abs\", f\"{z_0:.6f}\"])\r\n\r\n\ty_start_wide = ((u + cut[\"final_dimension_x\"]/2)* math.cos(angle) \r\n\t\t\t\t - block[\"thickness\"]*math.sin(angle) \r\n\t\t\t\t - u/math.cos(angle))\r\n\ty_start_length = ((u + cut[\"final_dimension_y\"]/2)* math.cos(angle) \r\n\t\t\t\t - block[\"thickness\"]*math.sin(angle) \r\n\t\t\t\t - u/math.cos(angle))\r\n\r\n\tdepth_cut = (block[\"thickness\"] + laser[\"z_final_overshoot\"]) * math.cos(angle)/math.cos(2*angle)\r\n\r\n\tcut1 = json.loads(line(block[\"width\"]/2 - block[\"origin_x\"],y_start_length - block[\"origin_y\"],-block[\"width\"]/2 - block[\"origin_x\"],y_start_length - block[\"origin_y\"],depth_cut,laser))\r\n\r\n\tcut2 = json.loads(line(block[\"length\"]/2 + block[\"origin_y\"],y_start_wide - block[\"origin_x\"],-block[\"length\"]/2 + block[\"origin_y\"],y_start_wide - block[\"origin_x\"],depth_cut,laser))\r\n\r\n\tcut3 = json.loads(line(block[\"width\"]/2 + block[\"origin_x\"],y_start_length + block[\"origin_y\"],-block[\"width\"]/2 + block[\"origin_x\"],y_start_length + block[\"origin_y\"],depth_cut,laser))\r\n\r\n\tcut4 = json.loads(line(block[\"length\"]/2 - block[\"origin_y\"],y_start_wide + block[\"origin_x\"],-block[\"length\"]/2 - block[\"origin_y\"],y_start_wide + block[\"origin_x\"],depth_cut,laser))\r\n\r\n\t#cut1 = json.loads(line(block[\"width\"]/2,y_start_length,-block[\"width\"]/2,y_start_length,depth_cut,laser))\r\n\r\n\t#cut2 = json.loads(line(block[\"length\"]/2,y_start_wide,-cut[\"final_dimension_y\"]/2,y_start_wide,depth_cut,laser))\r\n\r\n\t#cut3 = json.loads(line(block[\"width\"]/2,y_start_length,-cut[\"final_dimension_x\"]/2,y_start_length,depth_cut,laser))\r\n\r\n\t#cut4 = json.loads(line(cut[\"final_dimension_y\"]/2,y_start_wide,-cut[\"final_dimension_y\"]/2,y_start_wide,depth_cut,laser))\r\n\r\n\tcutlist = (cutlist + cut1\r\n\t + [[\"c_rel\", \"90\"],[\"z_abs\", f\"{z_1:.6f}\"],] \r\n\t + cut2\r\n\t + [[\"c_rel\", \"90\"],[\"z_abs\", f\"{z_2:.6f}\"]] \r\n\t\t\t\t\t + cut3 \r\n\t\t\t\t\t + [[\"z_abs\", f\"{z_3:.6f}\"],[\"c_rel\", \"90\"]] \r\n\t\t\t\t\t + cut4)\r\n\r\n\tcutlist.insert(0, [\"set_trigger4\", \"1\", \"0\", \"7\", \"8\", \"45\"])\r\n\tcutlist.append([\"stop_trigger\"])\r\n\r\n\treturn json.dumps(cutlist)",
"def bndy_plasma(self):\n self.ne[0], self.ne[-1] = 1e11, 1e11\n self.ni[0], self.ni[-1] = 1e11, 1e11\n self.nn[0], self.nn[-1] = 1e11, 1e11\n self.Te[0], self.Te[-1] = 0.1, 0.1\n self.Ti[0], self.Ti[-1] = 0.01, 0.01\n # self.coll_em[0], self.coll_em[-1] = 1e5, 1e5\n # self.coll_im[0], self.coll_im[-1] = 1e5, 1e5",
"def _area(self):\n self.area = 0.0\n for sail in self.sails:\n self.area += sail.area"
]
| [
"0.5597102",
"0.5440409",
"0.5413998",
"0.5407248",
"0.53982776",
"0.53873897",
"0.53426784",
"0.53319544",
"0.5323098",
"0.52931225",
"0.52336675",
"0.52102226",
"0.52063864",
"0.52051187",
"0.519937",
"0.5191247",
"0.5171056",
"0.51698107",
"0.51631874",
"0.5141693",
"0.5135437",
"0.5119076",
"0.50996387",
"0.5092363",
"0.5092042",
"0.50907695",
"0.50901055",
"0.50676584",
"0.50608945",
"0.5051149"
]
| 0.5479173 | 1 |
Convert a word to a tuple of character ids using char_id | def word2tuple(word, char_id):
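    # Look up each character's id; characters missing from char_id get the out-of-vocabulary id len(char_id)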
tup = []
for c in word:
if str(c) in char_id:
tup.append(char_id[c])
else:
tup.append(len(char_id))
return tuple(tup) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def line_2_words(wordid_list, id2word):\n word_list = []\n for word_id in wordid_list:\n word_list.append(id2word[word_id])\n return word_list",
"def create_word(char_list):",
"def word_to_tuple(word):\n # since strings are sequences of letters\n # `sorted` will automatically convert a string\n # to a list, then sort it\n word = tuple(sorted(word))\n return word",
"def extract_letters( idx1, idx2, text ):\r\n return text[idx1] + text[idx2]",
"def get_pairs(self, word: List[str]) -> List[Tuple[str, str]]:\n pairs: List[Tuple[str, str]] = []\n prev_char = word[0]\n for char in word[1:]:\n pairs.append((prev_char, char))\n prev_char = char\n return pairs",
"def get_new_key(key, word):\n return (key[1], word)",
"def get_vocab(shi):\n \n # 构建字与id的相互映射\n id2char = dict(enumerate(set(''.join(shi))))\n char2id = {j:i for i,j in id2char.items()}\n print(f'length {len(id2char)}')\n\n \n return id2char, char2id",
"def zip_letters(xl, yl, dxl, dyl, rl, word):\n return (\n ([pl.pop(0) if pl else None for pl in (xl, yl, dxl, dyl, rl)], char)\n for char in word)",
"def map_word(word, charmap):\n return [charmap[c] for c in word]",
"def process_data(words,puncts,word_to_id):\n\tids = []\n\tp_ids = []\n\tfor i in range(len(words)):\n\t\tids.append(word_to_id[words[i]])\n\t\tp_ids.append(punct_to_id[puncts[i]])\n\treturn ids,p_ids",
"def test_find_word2(self):\n self.assertEqual(find_word2('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word2('ABSENT'), [])\n self.assertEqual(find_word2('PW'), [(1, 7), (3, 7), (0, 8)])",
"def find_pairs(words): \n pass",
"def get_pairs(word):\r\n pairs = set()\r\n prev_char = word[0]\r\n for char in word[1:]:\r\n pairs.add((prev_char, char))\r\n prev_char = char\r\n return pairs",
"def pass_word(word, model, input_embedding, char2idx, device, use_head=True):\n inp = torch.LongTensor([char2idx['START']] + [char2idx[c] for c in word]).to(device)\n inp = pack([inp])\n out, hidden = model(input_embedding.unsqueeze(0), inp, use_head=use_head)\n return out, hidden",
"def char_mapping(sentences, lower):\n chars = [[x[0].lower() if lower else x[0] for x in s] for s in sentences]\n dico = create_dico(chars)\n dico[\"<PAD>\"] = 10000001\n dico['<UNK>'] = 10000000\n char_to_id, id_to_char = create_mapping(dico)\n print(\"Found %i unique words (%i in total)\" % (\n len(dico), sum(len(x) for x in chars)\n ))\n return dico, char_to_id, id_to_char",
"def word_ids_to_words(data, id_to_word):\n return [id_to_word[i] for i in data]",
"def get_pairs(word):\n pairs = set()\n prev_char = word[0]\n for char in word[1:]:\n pairs.add((prev_char, char))\n prev_char = char\n return pairs",
"def get_pairs(word):\n pairs = set()\n prev_char = word[0]\n for char in word[1:]:\n pairs.add((prev_char, char))\n prev_char = char\n return pairs",
"def edit_distance2(self, word: str) -> tuple:\n #TODO: Should be obsolete now\n return set(edit2 for edit in self.edit_distance1(word) for edit2\n in self.edit_distance1(edit))",
"def process_pair(words: tuple) -> Optional[tuple]:\n\n # Replace all reflexive forms\n to_replace = [\"[se]\", \"|se|\", \"[-și]\", \"[o]\", \"|-și|\", \"|și|\", \"[-i]\", \"[i]\", \"[și]\", \"a \"]\n raw_line = \" \".join(words)\n for sub in to_replace:\n raw_line = raw_line.replace(sub, \"\")\n\n # Replace multiple spaces, strip beginning / ending spaces\n processed_line = re.sub('\\s{2,}', ' ', raw_line).strip()\n\n words = processed_line.split(' ')\n\n # Return the new tuple\n # Or the empty string if the words are the same or contain each other, or ar capital nouns\n if len(words) != 2:\n return None\n if words[1] in words[0] or words[0] in words[1]:\n return None\n if words[1][0].isupper() or words[0][0].isupper():\n return None\n return tuple(words)",
"def make_idx2word():\n idx2word = {}\n d = train_data.shared['word2idx']\n for word, idx in d.items():\n print(word)\n idx2word[idx] = word\n if config.use_glove_for_unk:\n d2 = train_data.shared['new_word2idx']\n for word, idx in d2.items():\n print(word)\n idx2word[idx+len(d)] = word\n return idx2word",
"def unigram_pairs(id1, id2):\n uni_pairs = []\n for word1 in id1.tolist():\n for word2 in id2.tolist():\n uni_pairs.append([word1, word2])\n # print(\"uni_pair is {}\".format(uni_pairs))\n # print(\"the # of uni pair is {}\".format(len(uni_pairs)))\n return uni_pairs",
"def arrange_trigram(t, word):\n return t[1:] + (word,) # returns new tuple",
"def getWordKey(word):\n # BEGIN_YOUR_ANSWER (our solution is 1 lines of code, but don't worry if you deviate from this)\n return len(word), word\n # END_YOUR_ANSWER",
"def translate(word1, key, word2):\n key = dict(zip(word1, key))\n return ''.join(key[sym] for sym in word2)",
"def words_to_word_ids(data, word_to_id):\n # if isinstance(data[0], six.string_types):\n # print(type(data[0]))\n # # exit()\n # print(data[0])\n # print(word_to_id)\n # return [word_to_id[str(word)] for word in data]\n # else:\n return [word_to_id[word] for word in data]\n\n # if isinstance(data[0], str):\n # # print('is a string object')\n # return [word_to_id[word] for word in data]\n # else:#if isinstance(s, bytes):\n # # print('is a unicode object')\n # # print(data[0])\n # return [word_to_id[str(word)] f",
"def sub_word(word):\n bytes = [(word >> i & 0xff) for i in (24, 16, 8, 0)]\n return create_word([(s_box[bytes[i]]) for i in range(4)])",
"def char_mapping(sentences):\n chars = [\"\".join([w[0] for w in s]) for s in sentences]\n dico = create_dico(chars)\n dico['<PAD>'] = 10000000\n # dico[';'] = 0\n char_to_id, id_to_char = create_mapping(dico)\n print(\"Found %i unique characters\" % len(dico))\n return dico, char_to_id, id_to_char",
"def wordoftuples_to_tupleofwords(wordoftuples):\n if not equal(len(t) for t in wordoftuples):\n raise ValueError(\"Not all entries of input have the same length.\")\n def remove_empty_letters(word):\n return [letter for letter in word if letter is not None]\n return tuple(remove_empty_letters(word)\n for word in zip(*wordoftuples))",
"def co_gram(sa, sb):\n gram = [(wa, wb) for wa in sa for wb in sb]\n return gram"
]
| [
"0.6389353",
"0.6234126",
"0.6034827",
"0.59308624",
"0.58816594",
"0.5818911",
"0.5749407",
"0.57084274",
"0.5694023",
"0.56378114",
"0.5637573",
"0.5630778",
"0.5624193",
"0.55718493",
"0.5571338",
"0.5556244",
"0.55537975",
"0.55537975",
"0.55444837",
"0.5500204",
"0.5498021",
"0.5489969",
"0.5478406",
"0.54766464",
"0.54606205",
"0.5452561",
"0.5447667",
"0.5423576",
"0.5405533",
"0.53930485"
]
| 0.77017784 | 0 |
We are going to split the domain out of the URL, look up the IP address, and then gather both the IP whois and the domain name registration info | def get_whois(doc):
#extract domain info
domain = tldextract.extract(doc['url']).registered_domain
hostname = doc['url'].split('/')[2]
doc['hostname'] = hostname
doc['ip'] = ''
doc['whois'] = {}
try:
#lookup ip address
doc['ip'] = socket.gethostbyname(hostname)
except:
syslog.syslog('[*] Failed to get ip addr for %s' % hostname)
print('[*] Failed to get ip addr for %s' % hostname)
return doc
#now let's look up the ip whois
try:
doc['whois']['nets'] = IPWhois(doc['ip']).lookup()['nets']
except:
syslog.syslog('[*] Failed to get ip whois for %s' % doc['ip'])
print('[*] Failed to get ip whois for %s' % doc['ip'])
#now let's try to get the domain name registrar
try:
doc['whois']['registrar'] = whois.query(domain).registrar
except:
syslog.syslog('[*] Failed to get registrar info for %s' % domain)
print('[*] Failed to get registrar info for %s' % domain)
return doc | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def WhoisLocation(url):\n location=[]\n location_str_list=[]\n try: # first try this\n #trying with pythonwhois to see if the location exists\n obj=pythonwhois.get_whois(url)\n for key in obj['contacts']['registrant']:\n location.append(obj['contacts']['registrant'][key])\n #turn the list into a string with a space between them\n str=' '.join(location)\n location_str_list.append(str)\n except TypeError:\n pass\n except Exception:# dealing wit hthe rest of the socket errros and whois.parser.PywhoisError\n pass\n if len(location_str_list)==0:\n try: # if the first try doesn't work try this\n w = whois.whois(url)\n #if any of these are none we get a TypeError so to get rid of that we check if they are none or not to deal wit hthat issue\n if whois.whois(url)[\"address\"]==None:\n pass\n else:\n location.append(whois.whois(url)[\"address\"])\n if whois.whois(url)[\"city\"]==None:\n pass\n else:\n location.append(whois.whois(url)[\"city\"])\n if whois.whois(url)[\"state\"]==None:\n pass\n else:\n location.append(whois.whois(url)[\"state\"])\n if whois.whois(url)[\"zipcode\"]==None:\n pass\n else:\n location.append(whois.whois(url)[\"zipcode\"])\n if whois.whois(url)[\"country\"]==None:\n pass\n else:\n location.append(whois.whois(url)[\"country\"])\n\n #turn the list into a string\n str=' '.join(location)\n location_str_list.append(str)\n except Exception:# dealing with socket errors\n pass\n except KeyError:\n pass\n except TypeError:# there were maybe multiple addresses and my code coudln't figure that out\n print(\"multiple\")\n pass\n except whois.parser.PywhoisError:\n pass\n return(location_str_list)",
"def getDomain(self):\n # ui = UrlInfo(url)\n # urlBytes = [ord(i) for i in url]\n host = self.url[self.host_head:self.host_tail]\n domain = self.url[self.domain_head:self.domain_tail]\n\n # domain = url[ui.getDomainHead():ui.getDomainTail()]\n m = re.match(self.ipUrlPattern, host)\n if m:\n domain = m.group(1)\n return domain",
"def __resolve_domain(self, domain=''):\n _ip = []\n if self.__is_ip_address(domain):\n # print hostname + \" is IP address\"\n _ip.append(domain)\n return _ip\n r = dns.resolver.get_default_resolver()\n r.nameservers = ['8.8.8.8']\n #answers = dns.resolver.query(hostname, 'A')\n try:\n answers = r.query(domain, 'A')\n for rdata in answers:\n # print rdata.address\n _ip.append(rdata.address)\n except dns.resolver.NoAnswer:\n print \"no answer\"\n\n if domain.find(\"www.\") != 0:\n domain = \"www.\" + domain\n # print \"querying \" + hostname\n try:\n answers = dns.resolver.query(domain, 'A')\n for rdata in answers:\n # print rdata.address\n _ip.append(rdata.address)\n except dns.resolver.NoAnswer:\n print \"no answer\"\n # print(\"processed %s, it has %d ips.\" % (hostname, len(_ip)))\n\n return list(set(_ip))",
"def run_whois(self,domain):\n try:\n who = whois.whois(domain)\n results = {}\n # Check if info was returned before proceeding because sometimes records are protected\n if who.registrar:\n results['domain_name'] = who.domain_name\n results['registrar'] = who.registrar\n results['expiration_date'] = who.expiration_date\n results['registrant'] = who.name\n results['org'] = who.org\n results['admin_email'] = who.emails[0]\n results['tech_email'] = who.emails[1]\n results['address'] = \"{}, {}{}, {}, {}\".format(who.address,who.city,who.zipcode,who.state,who.country)\n results['dnssec'] = who.dnssec\n else:\n click.secho(\"[*] WHOIS record for {} came back empty. You might try looking at dnsstuff.com.\".format(domain),fg=\"yellow\")\n return results\n except Exception as error:\n click.secho(\"[!] The WHOIS lookup for {} failed!\".format(domain),fg=\"red\")\n click.secho(\"L.. Details: {}\".format(error),fg=\"red\")",
"def findwhois_server(self, buf, hostname, query):\n nhost = None\n match = re.compile('Domain Name: {}\\s*.*?Whois Server: (.*?)\\s'.format(query), flags=re.IGNORECASE | re.DOTALL).search(buf)\n if match:\n nhost = match.groups()[0]\n # if the whois address is domain.tld/something then\n # s.connect((hostname, 43)) does not work\n if nhost.count('/') > 0:\n nhost = None\n elif hostname == NICClient.ANICHOST:\n for nichost in NICClient.ip_whois:\n if buf.find(nichost) != -1:\n nhost = nichost\n break\n return nhost",
"def resolve_ip(self, url):\n ext = tldextract.extract(url)\n if ext.subdomain:\n # ToDo: possibly check for exceptions\n return socket.gethostbyname(\n ext.subdomain + \".\" + ext.registered_domain\n )\n else:\n return socket.gethostbyname(ext.registered_domain)",
"def run_whoxy_whois(self,domain):\n if self.whoxy_api_key:\n try:\n results = requests.get(self.whoxy_api_endpoint.format(self.whoxy_api_key,domain),timeout=self.requests_timeout).json()\n if results['status'] == 1:\n whois_results = self.parse_whoxy_results(results)\n return whois_results\n else:\n click.secho(\"[*] WhoXY returned status code 0, error/no results, for WHOIS lookup on {}.\".format(domain),fg=\"yellow\")\n except requests.exceptions.Timeout:\n click.secho(\"\\n[!] The connection to WhoXY timed out!\",fg=\"red\")\n except requests.exceptions.TooManyRedirects:\n click.secho(\"\\n[!] The connection to WhoXY encountered too many redirects!\",fg=\"red\")\n except requests.exceptions.RequestException as error:\n click.secho(\"[!] Error connecting to WhoXY for WHOIS on {}!\".format(domain),fg=\"red\")\n click.secho(\"L.. Details: {}\".format(error),fg=\"red\")",
"def whois(self, domain_or_ip):\n return self.apiquery('/v1/{}/whois/'.format(domain_or_ip))",
"def extract_domains(self, resp):\n return",
"def getHostInfo():",
"def getDomain(url):\n domain = string.replace(url,\"https://www.\",\"\")\n domain = string.replace(domain,\"http://www.\",\"\")\n domain = string.replace(domain,\"http://\",\"\")\n domain = string.replace(domain,\".com/\",\"\")\n domain = string.replace(domain,\".com\",\"\")\n return domain",
"def rdap_domain_lookup(url: str, http_client: Optional[Any] = None) -> PyWhoIs:\n whois = PyWhoIs._rdap_domain_from_url(url, http_client)\n return whois",
"def split_addr(self, a):\n a = a.replace('http://', '')\n a = a.replace('https://', '')\n\n addr = tlde.extract(a)\n is_ip = tlde.tldextract.looks_like_ip(addr.domain)\n if is_ip:\n ip = addr.domain\n path_and_params = a[a.index(ip)+len(ip):].split('?')\n path = path_and_params[0]\n if len(path_and_params) > 1:\n params = path_and_params[1:]\n else:\n params = ''\n return {'ip': ip, 't3': None, 't2': None, 'path': path, 'params': params, 'url/ip': 'ip'}\n else:\n t3 = addr.subdomain\n t2 = addr.registered_domain\n path_and_params = a[a.index(addr.fqdn)+len(addr.fqdn):].split('?')\n path = path_and_params[0]\n if len(path_and_params) > 1:\n params = path_and_params[1:]\n else:\n params = ''\n return {'t3': t3, 't2': t2, 'ip': None, 'path': path, 'params': params, 'url/ip': 'url'}",
"def get_whois(ip_address):\n # search the RIPE Database for the given IP\n res = IPWhois(ip_address).lookup_rdap(rate_limit_timeout=30)\n\n # get the country of the IP Address\n countries = get_countries()\n net = res['network']\n country = countries[net['country']]\n company_name = net['name']\n\n # get the name of the company behind the IP Address\n # clean the names of the company from Provider description\n # since sometimes it is saved in RIPE as 'Provider Name + Company Name'\n for regex, n in settings.COMPANY_REGEXES:\n m = re.match(regex, company_name)\n if m:\n company_name = m.group(n)\n break\n\n logger.debug('Country: {} / Possible Entity Name: {}'.format(country, company_name))\n\n # get the Address of the Company; in rare cases this is an actual company name and not the\n # one found in the description field\n try:\n full_address = res['objects'][res['entities'][0]]['contact']['address'][0]['value']\n except KeyError:\n full_address = ''\n\n address = full_address.split('\\n')[0]\n full_address = full_address.replace('\\n', ' ')\n\n return country, company_name, address, full_address",
"def get_whois_info(self):\n msg = self._get_whois_data(self.dest_ip)\n country_code, originAS = self._get_as_num(msg)\n if country_code is None:\n country_code = \"Not found\"\n if originAS is None:\n originAS = \"Not found\"\n return msg, country_code, originAS",
"def get_domain_ip_via_sni(self, path_tracefile, domain):\n packets = self.get_client_hello_packets(path_tracefile)\n for packet in packets:\n servername = self.get_client_hello_servername(packet)\n if servername == domain:\n ip = packet.getlayer(IP).dst\n return ip\n return -1",
"def _domain_whois_record(domain, ti_prov):\n dom_record = pd.DataFrame()\n whois_result = whois(domain)\n if whois_result.domain_name is not None:\n # Create domain record from whois data\n dom_record = pd.DataFrame(\n {\n \"Domain\": [domain],\n \"Name\": [whois_result.get(\"name\", None)],\n \"Org\": [whois_result.get(\"org\", None)],\n \"DNSSec\": [whois_result.get(\"dnssec\", None)],\n \"City\": [whois_result.get(\"city\", None)],\n \"State\": [whois_result.get(\"state\", None)],\n \"Country\": [whois_result.get(\"country\", None)],\n \"Registrar\": [whois_result.get(\"registrar\", None)],\n \"Status\": [whois_result.get(\"status\", None)],\n \"Created\": [whois_result.get(\"creation_date\", None)],\n \"Expiration\": [whois_result.get(\"expiration_date\", None)],\n \"Last Updated\": [whois_result.get(\"updated_date\", None)],\n \"Name Servers\": [whois_result.get(\"name_servers\", None)],\n }\n )\n ns_domains = []\n\n # Identity domains popularity with Open Page Rank\n if \"OPR\" in ti_prov.loaded_providers:\n page_rank = ti_prov.result_to_df(\n ti_prov.lookup_ioc(domain, providers=[\"OPR\"])\n )\n if page_rank[\"RawResult\"][0]:\n page_rank_score = page_rank[\"RawResult\"][0][\"response\"][0][\n \"page_rank_integer\"\n ]\n else:\n page_rank_score = 0\n dom_record[\"Page Rank\"] = [page_rank_score]\n else:\n nb_markdown(\"OPR TI provider needed to calculate Page Rank score.\")\n dom_record[\"Page Rank\"] = [\"Not known - OPR provider needed\"]\n\n # Get a list of subdomains for the domain\n if \"VirusTotal\" in ti_prov.loaded_providers:\n url_ti = ti_prov.result_to_df(\n ti_prov.lookup_ioc(domain, providers=[\"VirusTotal\"])\n )\n try:\n sub_doms = url_ti[\"RawResult\"][0][\"subdomains\"]\n except (TypeError, KeyError):\n sub_doms = \"None found\"\n dom_record[\"Sub Domains\"] = [sub_doms]\n else:\n nb_markdown(\"VT TI provider needed to get sub-domains.\")\n dom_record[\"Page Rank\"] = [\"Not known - OPR provider needed\"]\n\n # Work out domain entropy to identity possible DGA\n dom_ent = entropy(domain)\n dom_record[\"Domain Name Entropy\"] = [dom_ent]\n\n # Remove duplicate Name Server records\n for server in whois_result[\"name_servers\"]:\n _, ns_domain, ns_tld = tldextract.extract(server)\n ns_dom = ns_domain.lower() + \".\" + ns_tld.lower()\n if domain not in ns_domains:\n ns_domains.append(ns_dom)\n return dom_record",
"def domain_command():\n # 1. Get input host from Demisto\n domain = demisto.args().get('domain')\n # 2. Get the host reputation from SlashNext API\n response = domain_lookup(domain=domain)\n if response.get('errorNo') != 0:\n return\n # 3. Parse and format the response\n dbot_score_cont, domain_cont = get_dbot_std_context(\n domain, 'Domain', response.get('threatData').get('verdict'), response.get('threatData').get('threatType'))\n\n snx_ioc_cont = get_snx_host_ioc_context(domain, 'Domain', response.get('threatData'))\n\n ec = {\n 'SlashNext.Domain(val.Value === obj.Value)': snx_ioc_cont,\n 'DBotScore': dbot_score_cont,\n 'Domain': domain_cont\n }\n\n domain = domain.encode('idna')\n\n title = 'SlashNext Phishing Incident Response - Domain Lookup\\n' \\\n '##### domain = {}'.format(domain.decode())\n\n md = tableToMarkdown(\n title,\n snx_ioc_cont,\n ['Value',\n 'Type',\n 'Verdict',\n 'ThreatStatus',\n 'ThreatName',\n 'ThreatType',\n 'FirstSeen',\n 'LastSeen']\n )\n\n return_outputs(md, ec, snx_ioc_cont)",
"def whois_parsed(self, domain):\n return self.apiquery('/v1/{}/whois/parsed/'.format(domain))",
"async def aio_rdap_domain_lookup(url: str, http_client: Optional[Any] = None) -> PyWhoIs:\n whois = await PyWhoIs._aio_rdap_domain_from_url(url, http_client)\n return whois",
"def get_domain(self, response):\n parts = urllib.parse.urlparse(response.url)\n domain = parts.netloc\n return domain",
"def domain_lookup(domain):\n # Create the required data dictionary for Host/Reputation\n api_data = {\n 'host': domain\n }\n response = http_request(endpoint=HOST_REPUTE_API, data=api_data)\n\n if response.get('errorNo') != 0:\n return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))\n\n return response",
"def _resolve_url_to_ip_and_netloc(self, url):\n netloc = urlparse.urlsplit(url).netloc\n url_base = netloc.split(':')[0] if ':' in netloc else netloc\n return gethostbyname(url_base), url_base",
"def getFeatures(url, label, w):\r\n result = []\r\n url = str(url)\r\n \r\n #add the url to feature set\r\n result.append(url)\r\n \r\n #parse the URL and extract the domain information\r\n path = urlparse(url)\r\n ext = tldextract.extract(url)\r\n \r\n #counting number of dots in subdomain \r\n result.append(countdots(ext.subdomain))\r\n \r\n #checking hyphen in domain \r\n result.append(CountSoftHyphen(path.netloc))\r\n \r\n #length of URL \r\n result.append(length(url))\r\n \r\n #checking @ in the url \r\n result.append(CountAt(path.netloc))\r\n \r\n #checking presence of double slash \r\n result.append(CountDSlash(path.path))\r\n \r\n #Count number of subdir \r\n result.append(countSubDir(path.path))\r\n \r\n #number of sub domain \r\n result.append(countSubDomain(ext.subdomain))\r\n \r\n #length of domain name \r\n path2 = urlparse(url_format(url))\r\n result.append(len(path2.netloc))\r\n \r\n #count number of queries \r\n result.append(len(path.query))\r\n \r\n #Adding domain information\r\n \r\n #if IP address is being used as a URL \r\n result.append(containsip(ext.domain))\r\n \r\n #presence of Suspicious_TLD\r\n result.append(1 if ext.suffix in Suspicious_TLD else 0)\r\n \r\n #Get domain information by asking whois\r\n avg_month_time=365.2425/12.0\r\n \r\n #calculate creation age in months\r\n \r\n if w.creation_date == None or type(w.creation_date) is str :\r\n result.append(-1)\r\n \r\n else:\r\n if(type(w.creation_date) is list): \r\n create_date=w.creation_date[-1]\r\n else:\r\n create_date=w.creation_date\r\n\r\n if(type(create_date) is datetime.datetime):\r\n today_date=datetime.datetime.now()\r\n create_age_in_mon=((today_date - create_date).days)/avg_month_time\r\n create_age_in_mon=round(create_age_in_mon)\r\n result.append(create_age_in_mon)\r\n \r\n else:\r\n result.append(-1)\r\n \r\n #calculate expiry age in months\r\n \r\n if(w.expiration_date==None or type(w.expiration_date) is str):\r\n result.append(-1)\r\n else:\r\n if(type(w.expiration_date) is list):\r\n expiry_date=w.expiration_date[-1]\r\n else:\r\n expiry_date=w.expiration_date\r\n if(type(expiry_date) is datetime.datetime):\r\n today_date=datetime.datetime.now()\r\n expiry_age_in_mon=((expiry_date - today_date).days)/avg_month_time\r\n expiry_age_in_mon=round(expiry_age_in_mon)\r\n\r\n # appending in months Appended to the Vector\r\n result.append(expiry_age_in_mon)\r\n else:\r\n # expiry date error so append -1\r\n result.append(-1)\r\n\r\n #find the age of last update\r\n \r\n if(w.updated_date==None or type(w.updated_date) is str):\r\n result.append(-1)\r\n else:\r\n if(type(w.updated_date) is list):\r\n update_date=w.updated_date[-1]\r\n else:\r\n update_date=w.updated_date\r\n if(type(update_date) is datetime.datetime):\r\n today_date=datetime.datetime.now()\r\n update_age_in_days=((today_date - update_date).days)\r\n result.append(update_age_in_days)\r\n # appending updated age in days\r\n else:\r\n result.append(-1)\r\n\r\n \r\n #find the country that is hosting this domain\r\n if(w.country == None):\r\n result.append(\"None\")\r\n else:\r\n if isinstance(w.country,str):\r\n result.append(w['country'])\r\n else:\r\n result.append(w['country'][0])\r\n if get_ext(path.path) == '':\r\n result.append(\"None\")\r\n else:\r\n result.append(get_ext(path.path))\r\n result.append(str(label))\r\n return result",
"def extractDomain(self, url):\n domain = ''\n pattern = re.compile(r'http[s]?://([^/]+)/', re.U | re.M)\n url_match = pattern.search(url)\n if(url_match and url_match.lastindex > 0):\n domain = url_match.group(1)\n\n return domain",
"def get_ip_info(ip_addr):\n\n ip_info = {}\n fields = ['range', 'name', 'country', 'description', 'emails']\n\n try:\n info = ipwhois.IPWhois(ip_addr).lookup_whois()\n\n for field in fields:\n value = info['nets'][0].get(field, 'N/A')\n ip_info[field] = value\n\n except ipwhois.BaseIpwhoisException as ip_err:\n ip_info['error'] = 'Unable to get IP details ({0})'.format(ip_err)\n\n return ip_info",
"def info(self):\n\n return self.call(method='getDomain', args=[self.domainname])",
"def domain_info(self, domain):\n endpoint = '/Domain/Info'\n\n params = {\n 'Domain' : domain\n }\n\n response = self.__perform_get_request(endpoint, params)\n \n if response.status_code == 200:\n parsed_response = response.json()\n return parsed_response",
"def domainlist_reversewhois(self, response):\n data = response.json()\n for domain in data['response']['domains']:\n yield(domain.lower())",
"def get_site_ip(domain: str = None) -> str:\n try:\n return socket.gethostbyname(domain)\n except Exception as ex:\n return 'error'"
]
| [
"0.68971765",
"0.68036586",
"0.67154217",
"0.666049",
"0.6616179",
"0.6454138",
"0.6347413",
"0.6310537",
"0.6232186",
"0.6190189",
"0.61824244",
"0.615423",
"0.6112629",
"0.60812247",
"0.60718006",
"0.60692465",
"0.6044799",
"0.6040898",
"0.6024612",
"0.5985132",
"0.59559214",
"0.59323",
"0.5903551",
"0.58985037",
"0.5849487",
"0.58459485",
"0.5842356",
"0.5817558",
"0.5817338",
"0.58171314"
]
| 0.6846455 | 1 |
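A minimal sketch of the lookup flow used in the get_whois entry above, assuming the tldextract, ipwhois and whois packages are installed; the example.com URL is purely illustrative, and note that current ipwhois releases expose lookup_whois() in place of the older lookup() call used in the entry:

import socket
import tldextract
import whois
from ipwhois import IPWhois

url = 'http://example.com/index.html'            # hypothetical target URL
hostname = url.split('/')[2]                     # netloc part of the URL
domain = tldextract.extract(url).registered_domain

ip = socket.gethostbyname(hostname)              # resolve the host to an IP
nets = IPWhois(ip).lookup_whois()['nets']        # IP whois network records
registrar = whois.query(domain).registrar        # registrar from domain whois
print(hostname, ip, registrar)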
We are going to use a headless browser to hit the target homepage and enumerate all of the other URLs that we hit, the cookies, and the page source | def interrogate_homepage(doc):
socket.setdefaulttimeout(30)
doc['browser'] = {}
#empty page
empty = u'<html><head></head><body></body></html>'
#set the path to our compiled phantomjs
phantomjs = '/phantom_bin/bin/phantomjs'
#set server args to ignore certificate errors
serv_arg = ['--ignore-ssl-errors=true']
ua = ('Mozilla/4.0 (compatible; MSIE 6.01; Windows NT 6.0)')
driver = webdriver.PhantomJS(phantomjs,
service_args=serv_arg,
desired_capabilities={
'phantomjs.page.settings.userAgent' : ua })
#driver.set_page_load_timeout(10)
try:
#going to add a little sleep here, just to make sure phantomjs has finished loading up...
time.sleep(1)
driver.get(doc['url'])
#add the page source to doc
src = driver.page_source
#let's check if the page is 'blank'; this usually means there is no website
if src == empty:
print('[*] Received an empty page for url %s' % (doc['url']))
#first we are going to see if we hit it over ssl, if so try over http
if 'https' in doc['url']:
newurl = doc['url'].replace('https', 'http')
#if it doesn't have https (so assume http) but some ssl data was returned, try the https page
if 'https' not in doc['url'] and 'ssl' in doc:
newurl = doc['url'].replace('http', 'https')
print('[*] Trying url %s' % newurl)
driver.get(newurl)
src = driver.page_source
if src != empty:
doc['url'] = newurl
doc['browser']['src'] = src
log = json.loads(driver.get_log('har')[0]['message'])
#let's get every url we requested
tmp = []
urls = []
for entry in log['log']['entries']:
tmp.append(entry['request']['url'])
#quick dedup
urls = list(set(tmp))
doc['browser']['urls'] = urls
#final check to see if our page is empty
if doc['browser']['src'] == empty:
doc['browser'].pop('src')
return doc
except:
print('[*] Something went wrong browsing %s, falling back to requests' % doc['url'])
try:
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.812.0 Safari/535.1'}
res = requests.get(doc['url'], headers=headers, verify=False)
doc['browser']['src'] = res.content
return doc
except:
print('[*] Failed to get home page with requests for %s, giving up' % doc['url'])
doc.pop('browser')
return doc | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def visit_homepage(url):\n response = requests.get(url, timeout=10)\n soup = BeautifulSoup(response.content, 'html.parser')\n return soup",
"async def main():\n #launching the browser in headless mode\n browser = await launch({'headless': True})\n page = await browser.newPage()\n #removing the timeout\n page.setDefaultNavigationTimeout(100000)\n #adding the stealth mode to be undetected\n await stealth(page)\n global userAgent\n userAgent = await page.evaluate('navigator.userAgent')\n #capture the response of every request and save the ones we want\n page.on('response', lambda response: asyncio.ensure_future(interceptResponse(response)))\n await page.goto(urlChallenge)\n await page.waitFor(1000)\n #scroll down to trigger the requests to get video data\n for _ in range(5):\n await page.evaluate(\"\"\"{window.scrollBy(0, document.body.scrollHeight);}\"\"\")\n await page.waitFor(1000)\n await page.waitFor(3000)\n await browser.close()",
"def get_all_headlines_from_chrome_2(site,URL_exclusions):\r\n headlines = []\r\n #Initial URL to pass to return search:\r\n URL = f'https://www.google.co.uk/search?as_q=&as_epq=irish+travellers&as_oq=&as_eq=&as_nlo=&as_nhi=&lr=&cr=&as_qdr=all&as_sitesearch={site}&as_occt=any&safe=active&as_filetype=&tbs='\r\n n = 0\r\n while n < 10:\r\n n += 1\r\n driver = launch_chrome()\r\n try:\r\n return_search(URL,driver)\r\n except:\r\n continue\r\n sleep_time = np.random.random() * np.random.randint(1,6) \r\n time.sleep(sleep_time) #Slow down to avoid bot detection\r\n timeout = 0\r\n start = time.time()\r\n while timeout < 120:\r\n try:\r\n page_headlines = get_headlines_from_one_page(driver,site,URL_exclusions)\r\n break\r\n except:\r\n end = time.time()\r\n timeout = end - start\r\n for headline in page_headlines:\r\n headlines.append(headline)\r\n try:\r\n next_button = driver.find_element_by_id('pnnext')\r\n URL = next_button.get_attribute('href') #Pass new URL to return_search()\r\n except NoSuchElementException:\r\n driver.quit() #Quit driver if can't find next button \r\n break\r\n driver.quit() #Quit driver each iteration to avoid triggering recaptcha.\r\n return headlines",
"def _get_scrape_urls():\n\n driver = webdriver.PhantomJS(service_args=['--ignore-ssl-errors=true'])\n driver.get(_base_url)\n list_data = []\n\n while True:\n time.sleep(3)\n data = driver.find_elements_by_xpath(\"//td[@class='ms-vb']\")\n _generate_entities(data)\n try:\n next_a = driver.find_element_by_xpath(\"//img[@src='/_layouts/images/next.gif']/parent::a\")\n next_a.click()\n except NoSuchElementException:\n break\n driver.close()\n return list_data",
"def get_all_headlines_from_firefox_2(site,URL_exclusions):\r\n headlines = []\r\n #Initial URL to pass to return search:\r\n URL = f'https://www.google.co.uk/search?as_q=&as_epq=irish+travellers&as_oq=&as_eq=&as_nlo=&as_nhi=&lr=&cr=&as_qdr=all&as_sitesearch={site}&as_occt=any&safe=active&as_filetype=&tbs='\r\n n = 0\r\n while n < 10:\r\n n += 1\r\n driver = launch_firefox()\r\n try:\r\n return_search(URL,driver)\r\n except:\r\n continue\r\n sleep_time = np.random.random() * np.random.randint(1,6) \r\n time.sleep(sleep_time) #Slow down to avoid bot detection\r\n timeout = 0\r\n start = time.time()\r\n while timeout < 120:\r\n try:\r\n page_headlines = get_headlines_from_one_page(driver,site,URL_exclusions)\r\n break\r\n except:\r\n end = time.time()\r\n timeout = end - start\r\n for headline in page_headlines:\r\n headlines.append(headline)\r\n try:\r\n next_button = driver.find_element_by_id('pnnext')\r\n URL = next_button.get_attribute('href') #Pass new URL to return_search()\r\n except NoSuchElementException:\r\n driver.quit() #Quit driver if can't find next button \r\n break\r\n driver.quit() #Quit driver each iteration to avoid triggering recaptcha.\r\n return headlines",
"def get_all_headlines_from_firefox(site,URL_exclusions):\r\n headlines = []\r\n #Initial URL to pass to return search:\r\n URL = f'https://www.google.co.uk/search?hl=en&as_q=&as_epq=&as_oq=travellers&as_eq=quarantine+travel+train+flight+tourist+archive+airport+covid+coronavirus+hotel+holiday+honeymoon&as_nlo=&as_nhi=&lr=&cr=&as_qdr=all&as_sitesearch={site}&as_occt=title&safe=active&as_filetype=&tbs='\r\n n = 0\r\n while n < 10:\r\n n += 1\r\n driver = launch_firefox()\r\n try:\r\n return_search(URL,driver)\r\n except:\r\n continue\r\n sleep_time = np.random.random() * np.random.randint(1,6) \r\n time.sleep(sleep_time) #Slow down to avoid bot detection\r\n timeout = 0\r\n start = time.time()\r\n while timeout < 120:\r\n try:\r\n page_headlines = get_headlines_from_one_page(driver,site,URL_exclusions)\r\n break\r\n except:\r\n end = time.time()\r\n timeout = end - start\r\n for headline in page_headlines:\r\n headlines.append(headline)\r\n try:\r\n next_button = driver.find_element_by_id('pnnext')\r\n URL = next_button.get_attribute('href') #Pass new URL to return_search()\r\n except NoSuchElementException:\r\n driver.quit()\r\n break\r\n driver.quit() #Quit driver each iteration to avoid triggering recaptcha.\r\n return headlines",
"def get_all_headlines_from_chrome(site,URL_exclusions):\r\n headlines = []\r\n #Initial URL to pass to return search:\r\n URL = f'https://www.google.co.uk/search?hl=en&as_q=&as_epq=&as_oq=travellers&as_eq=quarantine+travel+train+flight+tourist+archive+airport+covid+coronavirus+hotel+holiday+honeymoon&as_nlo=&as_nhi=&lr=&cr=&as_qdr=all&as_sitesearch={site}&as_occt=title&safe=active&as_filetype=&tbs='\r\n n = 0\r\n while n < 10:\r\n n += 1\r\n driver = launch_chrome()\r\n try:\r\n return_search(URL,driver)\r\n except:\r\n continue\r\n sleep_time = np.random.random() * np.random.randint(1,6) \r\n time.sleep(sleep_time) #Slow down to avoid bot detection\r\n timeout = 0\r\n start = time.time()\r\n while timeout < 120:\r\n try:\r\n page_headlines = get_headlines_from_one_page(driver,site,URL_exclusions)\r\n break\r\n except:\r\n end = time.time()\r\n timeout = end - start\r\n for headline in page_headlines:\r\n headlines.append(headline)\r\n try:\r\n next_button = driver.find_element_by_id('pnnext')\r\n URL = next_button.get_attribute('href') #Pass new URL to return_search()\r\n except NoSuchElementException:\r\n driver.quit() #Quit driver if can't find next button \r\n break\r\n driver.quit() #Quit driver each iteration to avoid triggering recaptcha.\r\n return headlines",
"async def main():\n #launching the browser in headless mode\n browser = await launch({'headless': True})\n page = await browser.newPage()\n #removing the timeout\n page.setDefaultNavigationTimeout(0)\n #adding the stealth mode to be undetected\n await stealth(page)\n global userAgent\n userAgent = await page.evaluate('navigator.userAgent')\n #capture the request response of every request and save the ones we want\n page.on('response', lambda response: asyncio.ensure_future(interceptResponse(response)))\n await page.goto('https://www.tiktok.com/trending/?lang=en')\n await page.waitFor(3000)\n await browser.close()",
"def fetch_urls(browser, number_publications):\n links = []\n links.extend(re.findall(\"/p/([^/]+)/\", browser.page_source))\n n_scrolls = scrolls(number_publications)\n\n for i in range(\n n_scrolls\n ): # collecting all the pictures links in order to see which ones contains location data\n print(\n Fore.WHITE +\n \"Scrolling the Instagram target profile, scraping pictures URLs ...\"\n + str(100 * i // n_scrolls) + \"% of the profile scrolled \",\n end=\"\\r\")\n browser.execute_script(\n \"window.scrollTo(0, document.body.scrollHeight)\")\n links.extend(re.findall(\"/p/([^/]+)/\", browser.page_source))\n time.sleep(\n 1\n ) # dont change this, otherwise some scrolls won't be effective and all the data won't be scrapped\n\n print(Fore.WHITE + \"\\nPictures links collected: \" + Fore.GREEN + \"OK\")\n return list(dict.fromkeys(links)) # remove duplicates",
"def test_home(self):\n self.selenium.get('{}/'.format(self.live_server_url))",
"def crawl(self) -> Dict[str, List[req.Response]]:\n loop = asyncio.get_event_loop()\n try:\n web_pages = loop.run_until_complete(\n asyncio.ensure_future(self.initialiser_crawler())\n )\n except KeyboardInterrupt:\n loop.close()\n raise KeyboardInterrupt\n return web_pages",
"async def main():\n #launching the browser in headless mode\n browser = await launch({'headless': True})\n page = await browser.newPage()\n #removing the timeout\n page.setDefaultNavigationTimeout(40000)\n #adding the stealth mode to be undetected\n await stealth(page)\n global userAgent\n userAgent = await page.evaluate('navigator.userAgent')\n #capture the url of every request and save the ones we want\n page.on('request', lambda request: checkUrl(request.url,browser))\n await page.goto('https://www.tiktok.com/trending/?lang=en')\n await page.waitFor(2000)\n #scroll down to trigger the second request to get trending video data\n await page.evaluate(\"\"\"{window.scrollBy(0, document.body.scrollHeight);}\"\"\")\n await page.waitFor(2000)\n await browser.close()",
"def session_relevance():\r\n\r\n # instantiation a session object in the first step\r\n session = requests.Session()\r\n\r\n # interface 1\r\n response = session.get(base_url + '/cookies/set/user/tom')\r\n print(response.text)\r\n\r\n # get the cookie in the same domain\r\n response = session.get(base_url + '/cookies')\r\n print(response.cookies)\r\n print(response.text)\r\n # In fact this is the rule you must obey in the test website\r\n # response = session.get(base_url + '/cookies/set/user/george')\r\n # print(response.status_code)\r\n # print(response.text)\r",
"def crawler(self):\n\n\t\tfor page in range(self.first_page, self.last_page+1):\n\t\t\tprint(\"\\nCrawling Page \" + str(page))\n\t\t\tpage_url = self.site_url + \"?page=\" + str(page) +\\\n\t\t\t \"&index=prod_all_products_term_optimization\"\n\t\t\t\n\t\t\tself.scrape_features(page_url)",
"def run(self):\n\n for url in self.urls:\n try:\n # Use requests to retrieve web page data\n print(url)\n response = session.get(url, ) # allow_redirects=True)\n\n if response.status_code != 200:\n print('Failed to retrieve page, URL: {0}, error: {1}\\n'.format(url, response.status_code))\n return\n\n # Get web page data from HTML response\n content = get_json_data(response.text)\n\n # Compile data into dictionary to be used for reporting\n summary_data = generate_report(content)\n\n # Generate/print report\n print_report(summary_data)\n\n except Exception as error:\n print('Scraper failed to run for URL {0}, error: {1}, {2}\\n'.format(\n url, type(error).__name__, error\n ))\n\n # time.sleep(1) # for load concerns",
"def readBrowserHistory():\n history_db = os.path.expanduser(\n '~') + \"/Library/Application Support/Google/Chrome/Default/history\"\n # copy history_db to workaround Chrome history permissions\n copy_db = os.path.expanduser('~') + \"/History\"\n copyfile(history_db, copy_db)\n c = sqlite3.connect(copy_db)\n cursor = c.cursor()\n select_statement = \"SELECT urls.url FROM urls, visits WHERE urls.id = visits.url;\"\n cursor.execute(select_statement)\n results = cursor.fetchall()\n c.close()\n sites = set()\n for result in results:\n sites.add(parse(result[0]))\n return sites",
"def scrape_page(url):\n print(f\"{get_time()} [SELENIUM] Page rendering started.\")\n browser = webdriver.Firefox(executable_path=f\"{dir_path}/geckodriver.exe\",\n options=get_firefox_options(),\n firefox_profile=get_firefox_profile(),\n service_log_path=os.devnull)\n try:\n browser.get(url)\n body = browser.page_source\n\n links = browser.find_elements(By.XPATH, '//a[@href]')\n\n links_bs4 = BeautifulSoup(body, \"lxml\").find_all(\"a\")\n links_bs4 = list(filter(lambda x: x.get(\"href\") is not None, links_bs4))\n\n # Delete all the old crawler links of the page.\n db_delete_all_page_links(url=url)\n\n for i, link in enumerate(links):\n try:\n href = add_scheme(link.get_attribute(\"href\"))\n text = strip_html_tags(link.get_attribute(\"innerHTML\"))\n x_position = str(link.location.get('x'))\n y_position = str(link.location.get('y'))\n # True if the element is contained in a list container.\n parents = [parent.name for parent in links_bs4[i].parents]\n in_list = int(\"li\" in parents)\n in_nav = int(\"nav\" in parents)\n\n # Skip PDF files.\n if href[-3:] in [\"pdf\", \"jpg\", \"png\"]:\n continue\n\n # If the link links to the same page, discard it.\n hash_position = href.find(\"/#\")\n if href[:hash_position] == url or len(text) == 0:\n continue\n\n except StaleElementReferenceException:\n continue\n # Update link in database.\n db_insert_page_link(\n page_url=url, link_url=href, link_text=text,\n x_position=x_position, y_position=y_position, in_list=in_list, in_nav=in_nav)\n\n except Exception as e:\n print(red(f\"[SELENIUM] Can't access this website: {url}\"))\n print(e)\n body = \"<html></html>\"\n\n print(f\"{get_time()} [SELENIUM] Page rendering finished.\")\n browser.quit()\n return body",
"def allocine_connect(url):\n #go to allocine page\n driver.get(url)\n #sleep until the page load\n sleep(10)\n #click on cookies button\n print(\"cookies checking\")\n cookies_check_v2()\n sleep(1)\n driver.get(url)",
"def get_headlines_from_one_page(driver,site,URL_exclusions):\r\n headlines = []\r\n links = get_links_from_one_page(driver,site,URL_exclusions)\r\n for i in range(len(links)):\r\n start = time.time()\r\n timeout = 0\r\n while timeout < 120: #Someimtes the page doesn't load. Quit the page after two minutes.\r\n try:\r\n results = driver.find_elements_by_class_name(\"g\") #Pages contained in class=\"g\" elements\r\n button = results[i].find_element_by_tag_name(\"a\") #Links under <a> tag\r\n link = button.get_attribute('href') #URL contained under 'href' \r\n if link.find(site) != -1: #Some \"g\" elements are not search results\r\n find = np.zeros(len(URL_exclusions))\r\n for j in range(len(URL_exclusions)):\r\n find[j] = bool(link.find(URL_exclusions[j]) == -1)\r\n if all(find) == True: #If no exclusion words found in UR\r\n button.click()\r\n sleep_time = np.random.random() * np.random.randint(1,6) #Sleep for random time between 1 and 5s to reduce chance of bot detection.\r\n time.sleep(sleep_time)\r\n headline = get_headline(driver)\r\n if headline != '': #Only interested if we succesfully find headline\r\n headlines.append(headline)\r\n driver.back()\r\n sleep_time = np.random.random() * np.random.randint(1,6)\r\n time.sleep(sleep_time) #Slow down to avoid bot detection\r\n break\r\n except:\r\n end = time.time()\r\n timeout = end - start\r\n if timeout >= 120:\r\n break #If results hasn't loaded after 120 seconds, we need to break the for loop\r\n return headlines",
"def browse(self, initial=None):\n logging.info(\"start browsing\")\n if not initial:\n initial = self.base_url\n\n self.to_browse.appendleft(initial)\n self.explored.add(initial)\n\n while self.to_browse:\n current = self.to_browse.pop()\n if not self.can_fetch(self.headers[\"User-Agent\"], current):\n logging.info(f\"forbidden: {cut_url(current)}\")\n continue\n\n logging.info(f\"downloading {cut_url(current)}\")\n content = self.download_page(url=current, timeout=self.timeout)\n time.sleep(self.browse_delay)\n\n # if download failed, push URL back to queue and\n # remove proxy from list\n if content is None:\n logging.info(\n \"pushing URL back into queue, getting new Tor session\"\n )\n self.to_browse.appendleft(current)\n self.session = self.get_session(\n max_retries=self.max_retries,\n backoff_factor=self.backoff_factor,\n retry_on=self.retry_on,\n )\n self.headers = self.choose_headers()\n continue\n\n logging.info(\"parsing page\")\n soup = self.html_parser(content)\n\n # get list of links to home details\n for child in self.get_parsable(soup):\n logging.info(f\"found to parse: {cut_url(child)}\")\n if self.explored.contains(child):\n continue\n self.explored.add(child)\n self.push_queue(child)\n\n # check if we're at the last page\n # if yes return, else get next page of listings\n if self.stop_test(soup):\n logging.info(\"reached last page to browse, stopping\")\n return\n\n for child in self.get_browsable(current):\n logging.info(f\"found to browse next {cut_url(child)}\")\n if self.explored.contains(child):\n continue\n self.explored.add(child)\n self.to_browse.append(child)",
"def __call__(self):\n self.page1() # GET web (request 101)\n\n grinder.sleep(1000)\n self.page2() # GET web (request 201)\n\n grinder.sleep(1000)\n self.page3() # GET web (request 301)\n\n grinder.sleep(1000)\n self.page4() # GET web (request 401)\n\n grinder.sleep(1000)\n self.page5() # GET web (request 501)\n\n grinder.sleep(1000)\n self.page6() # GET web (request 601)\n\n grinder.sleep(1000)\n self.page7() # GET web (request 701)\n\n grinder.sleep(1000)\n self.page8() # GET web (request 801)\n\n grinder.sleep(1000)\n self.page9() # GET web (request 901)\n\n grinder.sleep(1000)\n self.page10() # GET web (request 1001)\n\n grinder.sleep(1000)\n self.page11() # GET web (request 1101)\n\n grinder.sleep(1000)\n self.page12() # GET web (request 1201)\n\n grinder.sleep(1000)\n self.page13() # GET web (request 1301)\n\n grinder.sleep(1000)\n self.page14() # GET web (request 1401)\n\n grinder.sleep(1000)\n self.page15() # GET web (request 1501)\n\n grinder.sleep(1000)\n self.page16() # GET web (request 1601)\n\n grinder.sleep(1000)\n self.page17() # GET web (request 1701)\n\n grinder.sleep(1000)\n self.page18() # GET web (request 1801)\n\n grinder.sleep(1000)\n self.page19() # GET web (request 1901)\n\n grinder.sleep(1000)\n self.page20() # GET web (request 2001)\n\n grinder.sleep(1000)\n self.page21() # GET web (request 2101)\n\n grinder.sleep(1000)\n self.page22() # GET web (request 2201)\n\n grinder.sleep(1000)\n self.page23() # GET web (request 2301)\n\n grinder.sleep(1000)\n self.page24() # GET web (request 2401)\n\n grinder.sleep(1000)\n self.page25() # GET web (request 2501)\n\n grinder.sleep(1000)\n self.page26() # GET web (request 2601)\n\n grinder.sleep(1000)\n self.page27() # GET web (request 2701)\n\n grinder.sleep(1000)\n self.page28() # GET web (request 2801)\n\n grinder.sleep(1000)\n self.page29() # GET web (request 2901)\n\n grinder.sleep(1000)\n self.page30() # GET web (request 3001)\n\n grinder.sleep(1000)\n self.page31() # GET web (request 3101)\n\n# grinder.sleep(1000)\n# self.page32() # POST downloads (request 3201)\n\n# grinder.sleep(1000)\n# self.page33() # GET goog-malware-shavar_s_10501-10520.10501.10502-10520: (request 3301)\n\n grinder.sleep(1000)\n self.page34() # GET web (request 3401)\n\n grinder.sleep(1000)\n self.page35() # GET web (request 3501)\n# self.page36() # GET goog-malware-shavar_a_9606-9610.9606-9609.9610: (request 3601)\n\n# grinder.sleep(1000)\n# self.page37() # GET goog-phish-shavar_s_36981-36985.36981-36985.: (request 3701)\n\n# grinder.sleep(1000)\n# self.page38() # GET goog-phish-shavar_s_36986-36990.36986-36987.36988-36990: (request 3801)\n\n# grinder.sleep(1000)\n# self.page39() # GET goog-phish-shavar_a_46491-46500.46491-46499.46500: (request 3901)\n\n grinder.sleep(1000)\n self.page40() # GET web (request 4001)\n\n grinder.sleep(1000)\n self.page41() # GET web (request 4101)\n\n grinder.sleep(1000)\n self.page42() # GET web (request 4201)\n\n grinder.sleep(1000)\n self.page43() # GET web (request 4301)\n\n grinder.sleep(1000)\n self.page44() # GET web (request 4401)\n\n grinder.sleep(1000)\n self.page45() # GET web (request 4501)\n\n grinder.sleep(1000)\n self.page46() # GET web (request 4601)\n\n grinder.sleep(1000)\n self.page47() # GET web (request 4701)\n\n grinder.sleep(1000)\n self.page48() # GET web (request 4801)\n\n grinder.sleep(1000)\n self.page49() # GET web (request 4901)\n\n grinder.sleep(1000)\n self.page50() # GET web (request 5001)\n\n grinder.sleep(1000)\n self.page51() # GET web (request 5101)\n\n grinder.sleep(1000)\n 
self.page52() # GET web (request 5201)\n\n grinder.sleep(1000)\n self.page53() # GET web (request 5301)",
"async def initialiser_crawler(self) -> Dict[str, List[req.Response]]:\n web_pages = {}\n with ThreadPoolExecutor(max_workers=NUM_WORKERS) as exe:\n try:\n loop = asyncio.get_event_loop()\n tasks = [\n loop.run_in_executor(exe, self.collect_webpages, keyword)\n for keyword in self.keywords \n ]\n for res in await asyncio.gather(*tasks):\n web_pages.update(res)\n except KeyboardInterrupt:\n loop.close()\n raise KeyboardInterrupt\n return web_pages",
"def start_requests(self):\n\n yield SeleniumRequest(\n url='https://www.iayt.org/search/newsearch.asp',\n callback=self.get_iframe_document_src,\n wait_time=5\n )",
"def test_iterate_next_urls_not_html(self):\n self.mini_spider_thread.grab_url('http://example.com/iterate_next_urls/not_html_webpage') \n self.assertTrue(self.mini_spider_thread.grab_url_success)\n self.assertEqual(len(list(self.mini_spider_thread.iterate_next_urls(self.url_obj))), 0)",
"def init_home_page(self):\n rps = self.session.get(home_url, headers = BROWSER_HEADERS)\n # with open('first_get.html', 'w') as f: f.write(rps.text)\n if CAPTCHA_ELEMENT_ID in rps.text:\n # print(\"CAPTCHA ELEMENT DETECTED!\")\n return self.bypass_captcha(rps.text)\n else:\n print(\"NO CAPTCHA\")\n return True",
"def crawl_web(seed):\n tocrawl = [seed]\n crawled = []\n index = []\n while tocrawl:\n url = tocrawl.pop()\n if url not in crawled:\n content = get_page(url)\n add_page_to_index(index, url, content)\n union(tocrawl, get_all_links(content))\n return index",
"def start_requests(self):\n if not self.login_page:\n for u in self.start_urls:\n yield Request(url=u, dont_filter=True)\n\n if (self.need_selenium):\n login_cookie = self._selenium_login()\n # extract links from response to be crawled, since redirected links get\n # filtered automatically\n links = self.browser.find_elements_by_xpath('//a[@href]')\n links = [l.get_attribute(\"href\") for l in links]\n links = [self._get_full_url(l, self.browser.current_url) for l in links]\n links = [l for l in links if self._is_link_allowed(l)]\n self.start_urls = [self.browser.current_url] + links + self.start_urls\n\n for u in self.start_urls:\n req = Request(url=u, cookies=login_cookie, dont_filter=True)\n if self.login_cookie is None:\n self.login_cookie = req.cookies\n yield req\n else:\n yield Request(url=self.login_page, callback=self.login, dont_filter=True)",
"def visit_homepage(self) -> None:\n if self.home_page is not None:\n webbrowser.open(self.home_page)",
"def GetUrlFirst(self):\n self.url = \"https://www.taobao.com/\"\n self.host = \"www.taobao.com\"\n self.referer = \"https://www.taobao.com/\"\n content = self.GetContent()\n __clear__ = '<a href=.*?</a>'\n match = open(self.base_dir_url+\"url_first.html\", 'w')\n try:\n all_link = re.findall(__clear__, content, re.S)\n print \"All links of the web page is: \", len(all_link)\n self.DealUrlFirst(match, all_link)\n except:\n print \"Something wrong is happening!\"\n finally:\n match.close()\n match.close()",
"def __call__(self):\n self.page1() # GET supercars.do (requests 101-111)\n\n grinder.sleep(2117)\n self.page2() # GET cars.do (requests 201-202)\n\n grinder.sleep(1867)\n self.page3() # GET car.do (request 301)\n\n grinder.sleep(4351)\n self.page4() # GET enquire.do (requests 401-402)\n\n grinder.sleep(16341)\n self.page5() # POST enquire.do (request 501)\n\n grinder.sleep(1309)\n self.page6() # GET supercars.do (request 601)\n\n grinder.sleep(669)\n self.page7() # GET cars.do (requests 701-702)\n\n grinder.sleep(1260)\n self.page8() # GET car.do (request 801)\n\n grinder.sleep(837)\n self.page9() # GET car.do (request 901)\n\n grinder.sleep(1108)\n self.page10() # GET search.do (request 1001)\n\n grinder.sleep(3146)\n self.page11() # POST search.do (requests 1101-1102)\n\n grinder.sleep(2822)\n self.page12() # POST search.do (request 1201)\n\n grinder.sleep(1333)\n self.page13() # GET sell.do (request 1301)\n\n grinder.sleep(17417)\n self.page14() # POST sell.do (request 1401)\n\n grinder.sleep(6680)\n self.page15() # GET insurance.do (request 1501)\n\n grinder.sleep(600)\n self.page16() # GET about.do (requests 1601-1602)\n\n grinder.sleep(584)\n self.page17() # GET supercars.do (request 1701)\n\n grinder.sleep(1049)\n self.page18() # GET cars.do (requests 1801-1802)\n\n grinder.sleep(2901)\n self.page19() # GET car.do (request 1901)\n\n grinder.sleep(1441)\n self.page20() # GET car.do (request 2001)\n\n grinder.sleep(791)\n self.page21() # GET supercars.do (request 2101)\n\n grinder.sleep(1365)\n self.page22() # GET cars.do (request 2201)\n\n grinder.sleep(1067)\n self.page23() # GET supercars.do (request 2301)\n\n grinder.sleep(1284)\n self.page24() # GET cars.do (request 2401)\n\n grinder.sleep(879)\n self.page25() # GET supercars.do (request 2501)\n\n grinder.sleep(1066)\n self.page26() # GET cars.do (request 2601)\n\n grinder.sleep(974)\n self.page27() # GET supercars.do (request 2701)"
]
| [
"0.66040343",
"0.63707286",
"0.633517",
"0.62551135",
"0.625036",
"0.62199724",
"0.6212913",
"0.61651903",
"0.6112558",
"0.6048476",
"0.60329455",
"0.59965056",
"0.59907174",
"0.5961944",
"0.5914689",
"0.5893945",
"0.5886431",
"0.5857849",
"0.58555067",
"0.5851134",
"0.58385015",
"0.58328104",
"0.58254784",
"0.57537353",
"0.5752469",
"0.574143",
"0.5726095",
"0.5700235",
"0.56996477",
"0.5678881"
]
| 0.69595367 | 0 |
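A short, hedged sketch of the two mechanisms the interrogate_homepage entry above leans on: deduplicating every request URL recorded in a HAR-style log message, and the plain requests fallback when the headless browser comes back empty. The har_message argument and the User-Agent string are illustrative stand-ins, not values from the dataset:

import json
import requests

def urls_from_har(har_message):
    # dedupe every request url recorded in the HAR log entries
    log = json.loads(har_message)
    return list({entry['request']['url'] for entry in log['log']['entries']})

def fetch_fallback(url):
    # plain HTTP fetch used when the headless browser fails
    headers = {'User-Agent': 'Mozilla/5.0'}
    return requests.get(url, headers=headers, verify=False, timeout=30).text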
This will parse the Suricata eve log, pull some information out of it, and add it to our document. We are just going to get the signature name for now | def get_ids_logs(doc):
doc['ids'] = []
with open('/var/log/suricata/eve.json', 'r') as f:
for line in f:
#let's go ahead and deserialize the log entry and pull out the sig field
sig = json.loads(line)['alert']['signature']
#blergh, too lazy to suppress this in suricata or change our UA, so filter out our own scanner's alert
if sig != "ET POLICY Python-urllib/ Suspicious User Agent":
doc['ids'].append(sig)
#add a check for an empty ids list and, if so, just remove the field
if len(doc['ids']) == 0:
doc.pop('ids')
return doc
return doc | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_log(self):\n self.stack = []\n diff = self.diff(self.original, self.doc)\n entry = {\"_id\": utils.get_iuid(),\n \"doctype\": constants.DOCTYPE_LOG,\n \"docid\": self.doc[\"_id\"],\n \"diff\": diff,\n \"timestamp\": utils.get_time()}\n self.modify_log_entry(entry)\n if hasattr(flask.g, \"current_user\") and flask.g.current_user:\n entry[\"username\"] = flask.g.current_user[\"username\"]\n else:\n entry[\"username\"] = None\n if flask.has_request_context():\n entry[\"remote_addr\"] = str(flask.request.remote_addr)\n entry[\"user_agent\"] = str(flask.request.user_agent)\n else:\n entry[\"remote_addr\"] = None\n entry[\"user_agent\"] = os.path.basename(sys.argv[0])\n flask.g.db.put(entry)",
"def modsecLog2Info(singleEntry):\r\n modsec_dict = OrderedDict()\r\n a_header = singleEntry[0]\r\n if version3:\r\n e_separator = a_header[a_header.find('^---')+ 4:a_header.find('---A--')]\r\n else:\r\n e_separator = a_header[a_header.find('^--')+3:a_header.find('-A-')]\r\n itemNumber = 0\r\n itemKV = OrderedDict()\r\n try:\r\n for item in singleEntry:\r\n if item.__contains__(e_separator):\r\n itemKV[item.rstrip()[-3:-2:]] = itemNumber\r\n itemNumber+=1\r\n item_keys = list(itemKV.keys())\r\n itemKVFull = OrderedDict()\r\n for item_letter in item_keys:\r\n if item_letter in modsec_event_types:\r\n i = int(itemKV[item_letter]) + 1\r\n j = itemKV[item_keys[item_keys.index(item_letter) + 1 ] ]\r\n itemKVFull[item_letter] = singleEntry[i:j]\r\n\r\n modsec_a = itemKVFull['A'][0]\r\n modsec_b = itemKVFull['B']\r\n modsec_f = itemKVFull['F']\r\n modsec_h = itemKVFull['H']\r\n\r\n modsec_b_headers = dict(map(lambda s: [s[0:s.find(': ')],s[s.find(': ')+2:]], modsec_b[1:-1]))\r\n modsec_f_headers = dict(map(lambda s: [s, '-'] if len(s.split(': ')) == 1 else [s[0:s.find(': ')], s[s.find(': ') + 2:]], modsec_f[1:-1]))\r\n\r\n modsec_h_dict = OrderedDict()\r\n for elem in modsec_h:\r\n if elem.startswith('Message:') or elem.startswith('ModSecurity:'):\r\n if 'messages' not in modsec_h_dict:\r\n modsec_h_dict['messages'] = [elem]\r\n else:\r\n modsec_h_dict['messages'].append(elem)\r\n elif elem.startswith('Apache-Handler:'):\r\n if 'handlers_messages' not in modsec_h_dict:\r\n modsec_h_dict['handlers_messages'] = [elem]\r\n else:\r\n modsec_h_dict['handlers_messages'].append(elem)\r\n elif elem.startswith('Apache-Error:'):\r\n if 'error_messages' not in modsec_h_dict:\r\n modsec_h_dict['error_messages'] = [elem]\r\n else:\r\n modsec_h_dict['error_messages'].append(elem)\r\n elif elem.startswith('Producer:'):\r\n modsec_h_dict['producer'] = elem.split(': ')[1].strip(' .').split('; ')\r\n elif elem.startswith('Engine-Mode:'):\r\n modsec_h_dict['Engine-Mode'] = elem.split(': ')[1].strip('\"')\r\n elif elem.startswith('Server:'):\r\n modsec_h_dict['server'] = elem.split(': ')[1]\r\n elif elem.startswith('Action: '):\r\n modsec_h_dict['action'] = {}\r\n if 'ntercepted' in elem:\r\n modsec_h_dict['action']['intercepted'] = True\r\n modsec_h_dict['action']['phase'] = int(elem[elem.find('phase')+6])\r\n modsec_h_dict['action']['message'] = modsec_h_dict['messages'][-1].split('.')[1].strip()\r\n elif elem.startswith('Stopwatch2'):\r\n modsec_h_dict['stopwatch'] = {}\r\n for stopw in elem.split(' '):\r\n if '=' in stopw:\r\n modsec_h_dict['stopwatch'][stopw.split('=')[0]] = int(stopw.split('=')[1].strip(','))\r\n\r\n else:\r\n pass\r\n modsec_a_split = modsec_a.split()\r\n modsec_dict['transaction'] = {'time' : modsec_a_split[0].replace('[','') + ' ' + modsec_a_split[1].replace(']',''), 'transaction_id': modsec_a_split[2], 'remote_address' : modsec_a_split[3],\r\n 'remote_port': modsec_a_split[4], 'local_address': modsec_a_split[5], 'local_port': modsec_a_split[6] }\r\n if len(modsec_b) > 0:\r\n modsec_dict['request'] = {'request_line': modsec_b[0], 'headers': modsec_b_headers}\r\n else:\r\n modsec_dict['request'] = 'None'\r\n\r\n if len(modsec_f_headers) > 3:\r\n modsec_dict['response'] = OrderedDict()\r\n try:\r\n modsec_dict['response'] = {'protocol': modsec_f[0].split(' ')[0], 'status': modsec_f[0].split(' ')[1], 'status_text': ' '.join(modsec_f[0].split(' ')[2:]), 'headers': modsec_f_headers}\r\n except Exception as e:\r\n print('Exception at modsec_dict[\"response\"] :', e)\r\n modsec_dict['response'] = 
'None'\r\n else:\r\n modsec_dict['response'] = 'None'\r\n modsec_dict['audit_data'] = OrderedDict()\r\n modsec_dict['audit_data'] = modsec_h_dict\r\n except Exception as e:\r\n print('modsecLog2Info() error found :', e, ' when processing :', singleEntry)\r\n modsec_dict = 'ERROR'\r\n\r\n return modsec_dict",
"def parse(self):\n i = 0\n while i < len(self.__lines):\n line = self.__lines[i]\n dt = re.match(r\"(\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{1,2}:\\d{1,2})\", line)\n if not dt:\n i += 1\n continue\n log = {\n \"datetime\": dt.group()\n }\n line = line[dt.end()+1:].rstrip(\"\\n\")[::-1]\n qq_flag = line.find(\"(\")\n log[\"qq\"] = line[qq_flag-1:0:-1]\n log[\"name\"] = line[:qq_flag:-1].strip(\" \")\n i += 1\n log[\"content\"] = self.__lines[i].rstrip(\"\\n\")\n while self.__lines[i+1] != \"\\n\":\n i += 1\n log[\"content\"] += \" \" + self.__lines[i].rstrip(\"\\n\")\n self.__logs.append(log)\n i += 2",
"def parseLog(self, log):\n return 0",
"def metadata(self) -> dict:\n\n with evtx.Evtx(self.file_path) as log:\n for record in log.records():\n # Get the lxml object\n event = self.parse_record(record.lxml())\n break\n\n return {\"hostname\": event[\"Computer\"]}",
"def __parse(self):\n lines = self.file.readlines()\n name_idx = 2\n name_idx_found = False\n pathre = re.compile(r\"^[A-Z]:[\\\\/]\\w+\")\n for i in range(0, len(lines)):\n line = lines[i]\n if line.strip() != \"\": # check if line isn't empty\n if pathre.match(line):\n self.path = line.strip()\n continue\n tokens = line.split()\n time_str = tokens[0] + \" \" + tokens[1]\n try:\n time = datetime.strptime(time_str, \"%m/%d/%y %H:%M:%S\")\n except ValueError:\n raise LogParseError('Invalid log format. Date must be first \\\n token for each log event.') \n if not name_idx_found:\n name_idx = tokens.index('Monitoring')\n name_idx_found = True\n name = \"\"\n if tokens[name_idx].strip() == 'Monitoring':\n name = tokens[name_idx].lower() + \" \" + tokens[name_idx + 1].lower()\n duration = 0.0\n else:\n name = tokens[name_idx].lower()\n duration = tokens[name_idx + 1]\n self.events[name] = Event(time, name, duration)\n self.start = self.events['monitoring started']\n self.end = self.events['monitoring stopped']",
"def parse_event_attlog(self):\n uid = ''\n ver_type = -1\n date_str = ''\n if self.last_event_code == DEFS.EF_ATTLOG:\n uid = self.last_payload_data[0:9].decode('ascii').\\\n replace('\\x00', '')\n ver_type = struct.unpack('<H', self.last_payload_data[24:26])[0]\n date_str = \"20%i/%i/%i %i:%i:%i\" %\\\n tuple(self.last_payload_data[26:32])\n\n return [uid, ver_type, date_str]",
"def _readin_evtx(file):\n\tcontent = []\n\tunparsed_entries = 0\n\twith evtx.Evtx(file) as log:\n\t\tc = 0\n\t\tsources = []\n\t\tfor record in log.records():\n\t\t\tc += 1\n\t\t\t_print_progress(c)\n\t\t\ttry:\n\t\t\t\tobj = untangle.parse(record.xml())#untangle can produce an OSError on Windows, since Windows uses a different format for timestamps\n\t\t\texcept OSError:\n\t\t\t\tc -= 1\n\t\t\t\tunparsed_entries += 1\n\t\t\t\tcontinue\n\t\t\tcurr_obj = obj.Event.System\n\t\t\tdate = curr_obj.TimeCreated['SystemTime']\n\t\t\tif '.' in date:\n\t\t\t\tdate = datetime.datetime.strptime(date,\"%Y-%m-%d %H:%M:%S.%f\")\n\t\t\telse:\n\t\t\t\tdate = datetime.datetime.strptime(date,\"%Y-%m-%d %H:%M:%S\")\n\t\t\tfull_line = record.xml()\n\t\t\tif hasattr(curr_obj,'Provider'):\n\t\t\t\tsource = curr_obj.Provider['Name']\n\t\t\telse:\n\t\t\t\tsource = ''\n\t\t\tif ( (not source in sources) and (not sources == '')):\n\t\t\t\tsources.append(source)\n\t\t\tline_nr = curr_obj.EventRecordID.cdata\n\t\t\tcontent.append(logfile_entry(int(line_nr), file, curr_obj.EventID.cdata, full_line, date, curr_obj.Computer.cdata, source))\n\t\t_delete_print()\n\tif unparsed_entries > 0:\n\t\tprint('Unfortunately, {} entries could not be parsed. Please see the documentation'.format(unparsed_entries))\n\t\tprint()\n\treturn logfile(file, len(content), 'evtx', content, sources)",
"def processEventLog(log):\n pass",
"def read_linelog():",
"def __parse_logger_ramose(self):\n with open(\"ramose.log\") as l_f:\n logs = \"\".join(l_f.readlines())\n rev_list = set()\n rev_list_add = rev_list.add\n rev_list = [\n x\n for x in list(reversed(logs.splitlines()))\n if not (x in rev_list or rev_list_add(x))\n ]\n\n html = \"\"\"\n <p></p>\n <aside>\n <h4>RAMOSE API DASHBOARD</h4>\n <ul id=\"sidebar_menu\" class=\"sidebar_menu\">\"\"\"\n\n for api_url, api_dict in self.conf_doc.items():\n html += \"\"\"\n <li><a class=\"btn active\" href=\"%s\">%s</a></li>\n \"\"\" % (\n api_url,\n api_dict[\"conf_json\"][0][\"title\"],\n )\n\n html += \"\"\"\n </ul>\n </aside>\n <header class=\"dashboard\">\n <h1>API MONITORING</h1>\"\"\"\n\n for api_url, api_dict in self.conf_doc.items():\n clean_list = [l for l in rev_list if api_url in l and \"debug\" not in l]\n api_logs_list = \"\".join(\n [\n \"<p>\" + self.clean_log(l, api_url) + \"</p>\"\n for l in clean_list\n if self.clean_log(l, api_url) != \"\"\n ]\n )\n api_title = api_dict[\"conf_json\"][0][\"title\"]\n html += \"\"\"\n <div class=\"info_api\">\n <h2>%s</h2>\n <a id=\"view_doc\" href=\"%s\">VIEW DOCUMENTATION</a><br/>\n <a href=\"%s\">GO TO SPARQL ENDPOINT</a><br/>\n </div>\n <div class=\"api_calls\">\n <h4>Last calls</h4>\n <div>\n %s\n </div>\n\n </div>\n \"\"\" % (\n api_title,\n api_url,\n api_dict[\"tp\"],\n api_logs_list,\n )\n return html",
"def ngx_log_to_requestinfo(log=None):\n # pat is defined due to default nginx access.log format\n pat = (r''\n '(\\d+.\\d+.\\d+.\\d+)\\s-\\s-\\s'\n '\\[(.+)\\]\\s'\n '\"GET\\s(.+)\\s\\w+/.+\"\\s'\n '(\\d+)\\s'\n '(\\d+)\\s'\n '\"(.+)\"\\s'\n '\"(.+)\"'\n )\n if log:\n request_info = re.findall(pat, log)[0]\n if request_info:\n request_info = RequestInfo(request_info[0], request_info[1], request_info[2], request_info[3],\n request_info[4], request_info[5], request_info[6])\n return request_info",
"def carve(self, bs, dataFile, verbose=False):\n _bs = bs\n records = []\n headers = []\n\n i = 0\n # Find all occurrences of the magic string\n found = _bs.findall(evt_header.MagicString, bytealigned=False)\n readSoFarBits = 0\n for idx in found:\n _bs.pos = idx\n r = EvtRecord()\n r.setPathname(dataFile)\n r.setPosition(_bs.pos)\n\n # Read an EVT header field:\n # The algorithm here is to find the message separator \n # and use that as a basis for locating the other fields.\n # Since we split large input files, \"offset\" fields are\n # invalid. \n\n # Message length\n fieldBits = 32\n lenIdx = idx - fieldBits # Set position to idx of length\n _bs.pos = lenIdx\n recordLength = _bs.read(fieldBits).uintle\n r.setField(\"length\", recordLength)\n readSoFarBits += fieldBits\n\n # Calculate size of variable data at end of record \n varDataSize = evt_record.FixedSize - recordLength \n # When reading the size in a header\n if varDataSize < 0: \n varDataSize = 0\n\n # Reset stream position\n _bs.pos = idx\n\n # Message separator\n fieldBits = 32 \n # Check to see if we are reading past end of stream\n data = self.carveField(_bs, \"reserved\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"reserved\", data)\n\n # Record number\n fieldBits = 32 \n data = self.carveField(_bs, \"recordNumber\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"recordNumber\", data)\n\n # Date created\n fieldBits = 32 \n data = self.carveField(_bs, \"timeGenerated\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"timeGenerated\", data)\n\n # Date written\n fieldBits = 32 \n data = self.carveField(_bs, \"timeWritten\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"timeWritten\", data)\n\n # Event ID\n fieldBits = 16 \n data = self.carveField(_bs, \"eventID\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventID\", data)\n \n # Event RVA offset\n fieldBits = 16 \n data = self.carveField(_bs, \"eventRVA\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventRVA\", data)\n\n # Event type\n fieldBits = 16 \n data = self.carveField(_bs, \"eventType\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventType\", data)\n\n # Num strings\n fieldBits = 16 \n data = self.carveField(_bs, \"numStrings\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"numStrings\", data)\n\n # Category\n fieldBits = 16 \n data = self.carveField(_bs, \"eventCategory\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventCategory\", data)\n\n # Reserved flags \n fieldBits = 16 \n data = self.carveField(_bs, \"reservedFlags\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"reservedFlags\", data)\n\n # Closing record number\n fieldBits = 32 \n data = self.carveField(_bs, \"closingRecordNumber\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"closingRecordNumber\", data)\n\n # String offset\n fieldBits = 32 \n data = self.carveField(_bs, \"stringOffset\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"stringOffset\", data)\n\n # User SID length\n fieldBits = 
32\n data = self.carveField(_bs, \"userSidLength\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"userSidLength\", data)\n\n # User SID offset\n fieldBits = 32 \n data = self.carveField(_bs, \"userSidOffset\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"userSidOffset\", data)\n\n # Data length\n fieldBits = 32 \n data = self.carveField(_bs, \"dataLength\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"dataLength\", data)\n\n # Data offset\n fieldBits = 32\n data = self.carveField(_bs, \"dataOffset\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"dataOffset\", data)\n\n # Variable data\n # FIXME: dont rely on peek() to avoid reading past end of stream\n fieldBits = int(r.getField(\"length\"))\n try:\n data = _bs.peek(\"bytes\" + \":\" + str(fieldBits))\n except bitstring.ReadError:\n if verbose:\n print \"[EVT]: Unable to read EVT data field; \"\\\n \"it would be truncated\"\n break\n data = self.carveField(_bs, \"varData\", \"bytes\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"varData\", data)\n\n # SID\n # FIXME: find out why sidLength is so weird\n #sidLength = r.getField(\"userSidLength\")\n #if sidLength > 0:\n # sidOffset = r.getField(\"userSidOffset\")\n # if sidOffset <= _bs.length:\n # _bs.pos = sidOffset\n # fieldBits = sidLength\n # if readSoFarBits + fieldBits >= _bs.len:\n # fieldBits = _bs.len - _bs.pos\n # sid = _bs.read(fieldBits).uint\n # r.setField(\"sid\", sid)\n # break\n # sid = _bs.read(fieldBits).uint\n # r.setField(\"sid\", sid)\n #readSoFarBits += fieldBits\n records.append(r)\n return (headers, records)",
"def parseLog(self, log_lines):\n abstract",
"def extract_summary(self):\n metadata = {}\n\n ## document Id\n documentId = self.tree.find(\"./id\")\n documentId = documentId.attrib['root'] if documentId is not None and \"root\" in documentId.attrib else \"\"\n metadata[\"documentId\"] = documentId\n\n ## setId\n setid = self.tree.find(\"./setId\")\n setid = setid.attrib['root'] if setid is not None and \"root\" in setid.attrib else \"\"\n metadata[\"setId\"] = setid\n\n ## version number\n splversion = self.tree.find(\"./versionNumber\")\n versionNumber = \"\"\n if splversion is not None:\n if \"value\" in splversion.attrib:\n versionNumber = splversion.attrib[\"value\"]\n metadata[\"versionNumber\"] = versionNumber\n\n ## product type \n code = self.tree.find(\"./code\")\n check_if_attrib_exists = lambda x, key: x[key] if key in x else ''\n product_type = check_if_attrib_exists(code.attrib, \"displayName\")\n metadata[\"productType\"] = product_type\n\n ## title\n title_text = self.tree_et.xpath(\"./title//text()\")\n title = (\" \".join([self.strip_newline_tab(t) for t in title_text]) if len(title_text) > 0 else \"\")\n metadata[\"title\"] = title\n\n ## manufacturer\n manufacturer = self.tree.find(\"./author//representedOrganization/name\")\n if manufacturer != None and manufacturer.text != None:\n manufacturer = self.strip_newline_tab(manufacturer.text)\n else:\n manufacturer = \"\"\n metadata[\"manufacturer\"] = manufacturer\n\n ## effectivetime\n effectiveTime = self.tree_et.xpath(\"./effectiveTime/@value\")\n effectiveTime = self.__normalize_date(effectiveTime)\n\n metadata[\"effectiveTime\"] = effectiveTime\n metadata[\"publishedDate\"] = effectiveTime\n\n ## From manufacturedProduct section\n brand_name = self.tree_et.xpath(\".//manufacturedProduct//name\")\n brand_name = self.strip_newline_tab(brand_name[0].text) if len(brand_name) > 0 else \"\"\n metadata[\"drugName\"] = brand_name\n\n route = self.tree_et.xpath(\".//manufacturedProduct//formCode/@code\")\n route = self.strip_newline_tab(route[0]) if len(route) > 0 else \"\"\n metadata[\"routeOfAdministration\"] = route\n\n product_ndc = self.tree_et.xpath(\".//manufacturedProduct//code/@code\")\n product_ndc = self.strip_newline_tab(product_ndc[0]) if len(product_ndc) > 0 else \"\"\n metadata[\"ndcCode\"] = product_ndc\n\n generic_name = self.tree_et.xpath(\".//manufacturedProduct//asEntityWithGeneric//genericMedicine/name\")\n generic_name = self.strip_newline_tab(generic_name[0].text) if len(generic_name) > 0 else \"\"\n metadata[\"genericName\"] = generic_name\n\n ## dosage form\n dosage_form = self.tree_et.xpath(\".//manufacturedProduct//formCode/@displayName\")\n dosage_form = dosage_form[0] if len(dosage_form) > 0 else \"\"\n metadata[\"dosageForm\"] = dosage_form\n\n # active ingredients\n substance_name = sorted([self.strip_newline_tab(a.text) for a in\n self.tree_et.xpath(\".//.//manufacturedProduct//activeMoiety/activeMoiety/name\")])\n substance_name = \", \".join(set(substance_name))\n metadata[\"substanceName\"] = substance_name\n\n ## inactive ingredients\n inactive_ingredients = sorted([self.strip_newline_tab(inactive.text) for inactive in self.tree_et.xpath(\n \".//manufacturedProduct//inactiveIngredient/inactiveIngredientSubstance/name\")])\n\n if len(inactive_ingredients) == 0:\n inactive_ingredients = \"\"\n else:\n inactive_ingredients = \",\".join(set(inactive_ingredients))\n\n metadata[\"inactiveIngredients\"] = inactive_ingredients\n\n ## other ingredients\n ingredients = sorted([self.strip_newline_tab(ingredient.text) for ingredient in\n 
self.tree_et.xpath(\".//manufacturedProduct//ingredient/ingredientSubstance/name\")])\n\n if len(ingredients) == 0:\n ingredients = \"\"\n else:\n ingredients = \", \".join(set(ingredients))\n metadata[\"ingredients\"] = ingredients\n\n # marketing_category\n marketing_category = self.tree_et.xpath(\".//manufacturedProduct/subjectOf/approval/code/@displayName\")\n marketing_category = self.strip_newline_tab(marketing_category[0]) if len(marketing_category) > 0 else \"\"\n metadata[\"marketingCategory\"] = marketing_category\n\n # consumed in\n consumed_in = self.tree_et.xpath(\n \".//manufacturedProduct//consumedIn/substanceAdministration/routeCode/@displayName\")\n consumed_in = consumed_in[0] if len(consumed_in) > 0 else \"\"\n metadata[\"consumedIn\"] = consumed_in\n\n # revision date\n marketing_date = self.tree_et.xpath(\".//manufacturedProduct//marketingAct/effectiveTime/low/@value\")\n marketing_date = self.__normalize_date(marketing_date)\n metadata[\"marketingDate\"] = marketing_date\n\n return metadata",
"def parse_log_file(self):\n # Open log file\n log_file_data = utils.open_file(self.log_file)\n for line in log_file_data:\n algo = line.strip(\"\\n\").split(\":\")[1]\n if len(algo) > 3:\n hash_algo = algo.split(\"$\")[1]\n if hash_algo not in self.used_algo:\n self.used_algo.append(hash_algo)",
"def log_parts():\n return get_log_parts('derivaweb_start_time',\n 'derivaweb_request_guid',\n 'derivaweb_request_content_range',\n 'derivaweb_content_type')",
"def parse_header(self):",
"def _parse_audit_entry(entry):\n try:\n integralstor_action_dict = {\n \"create_alert_notification\": \"Alert notification created.\",\n \"delete_alert_notification\": \"Alert notification deleted.\",\n \"create_audit_notification\": \"Audit notification created.\",\n \"delete_audit_notification\": \"Audit notification deleted.\",\n \"update_system_datetimezone\": \"Updated system date/time/timezone\",\n \"update_manifest\": \"System manifest updated\",\n \"update_ntp_servers\": \"Updated NTP server configuration\",\n \"ntp_sync\": \"Performed manual NTP time sync\",\n 'delete_remote_monitoring_server': 'Removed remote monitoring server',\n 'update_remote_monitoring_server': 'Created/updated remote monitoring server',\n \"factory_defaults_reset\": \"Factory defaults reset\",\n \"delete_certificate\": \"Deleted a SSL certificate\",\n \"edit_aces\": \"Access control entry modified\",\n \"add_aces\": \"Access control entry created\",\n \"delete_ace\": \"Access control entry removed\",\n \"create_dir\": \"Directory created\",\n \"create_self_signed_certificate\": \"Created a self signed SSL certificate\",\n \"upload_certificate\": \"Uploaded a SSL certificate\",\n \"add_zfs_spares\": \"Spare disk(s) added to pool\",\n \"schedule_zfs_snapshot\": \"Snapshot scheduling added/modified\",\n \"remove_zfs_spare\": \"Spare disk removed from pool\",\n \"remove_zfs_quota\": \"Removed ZFS quota\",\n \"set_zfs_quota\": \"Set ZFS quota\",\n \"create_vlan\": \"Created network VLAN\",\n \"remove_vlan\": \"Removed network VLAN\",\n \"modify_local_user_gid\": \"Local user's primary group set\",\n \"modify_local_user_grp_membership\": \"Local user's group membership modified\",\n \"create_local_user\": \"Local user created\",\n \"create_local_group\": \"Local group created\",\n \"delete_local_group\": \"Local group removed\",\n \"delete_local_user\": \"Local user removed\",\n \"change_local_user_password\": \"Local user password modified\",\n \"modify_dir_owner_permissions\": \"Directory ownership/permissions modified\",\n \"modify_dir_sticky_bit\": \"Directory sticky bit modified\",\n \"modify_cifs_share\": \"CIFS share modified\",\n \"delete_cifs_share\": \"CIFS share removed\",\n \"create_cifs_share\": \"CIFS share created\",\n \"modify_samba_settings\": \"CIFS authentication settings modified\",\n \"delete_nfs_share\": \"NFS share removed\",\n \"edit_nfs_share\": \"NFS share modified\",\n \"create_nfs_share\": \"NFS share created\",\n \"create_iscsi_target\": \"ISCSI target created\",\n \"delete_iscsi_target\": \"ISCSI target removed\",\n \"create_iscsi_lun\": \"ISCSI LUN created\",\n \"delete_iscsi_lun\": \"ISCSI LUN removed\",\n \"add_iscsi_target_authentication\": \"ISCSI target authentication added\",\n \"remove_iscsi_target_authentication\": \"ISCSI target authentication removed\",\n \"add_iscsi_acl\": \"ISCSI ACL added\",\n \"remove_iscsi_acl\": \"ISCSI ACL removed\",\n \"change_service_status\": \"Service status modified\",\n \"set_interface_state\": \"Network interface state modified\",\n \"edit_interface_address\": \"Network interface address modified\",\n \"create_bond\": \"Network interface bond created\",\n \"remove_bond\": \"Network interface bond removed\",\n \"edit_hostname\": \"System hostname modified\",\n \"set_dns_nameservers\": \"DNS nameservers modified\",\n \"modify_admin_password\": \"Administrator password modified\",\n \"create_zfs_pool\": \"ZFS pool created\",\n \"expand_zfs_pool\": \"ZFS pool expanded\",\n \"import_zfs_pool\": \"ZFS pool imported\",\n \"export_zfs_pool\": \"ZFS 
pool exported\",\n \"scrub_zfs_pool\": \"ZFS pool scrub initiated\",\n \"delete_zfs_pool\": \"ZFS pool removed\",\n \"edit_zfs_slog\": \"ZFS pool write cache modified\",\n \"remove_zfs_slog\": \"ZFS pool write cache removed\",\n \"edit_zfs_l2arc\": \"ZFS pool read cache modified\",\n \"remove_zfs_l2arc\": \"ZFS pool read cache removed\",\n \"edit_zfs_dataset\": \"ZFS dataset modified\",\n \"delete_zfs_dataset\": \"ZFS dataset removed\",\n \"create_zfs_zvol\": \"ZFS block device volume created\",\n \"delete_zfs_zvol\": \"ZFS block device volume removed\",\n \"create_zfs_dataset\": \"ZFS dataset created\",\n \"create_zfs_snapshot\": \"ZFS snapshot created\",\n \"delete_zfs_snapshot\": \"ZFS snapshot removed\",\n \"rollback_zfs_snapshot\": \"ZFS snapshot rolled back\",\n \"replace_disk_offline_disk\": \"Disk replacement - old disk offlined\",\n \"replace_disk_replaced_disk\": \"Disk replacement - disk replaced successfully\",\n \"rename_zfs_snapshot\": \"ZFS snapshot renamed\",\n \"create_rsync_share\": \"Created new RSync share \",\n \"edit_rsync_share\": \"Edited RSync share \",\n \"delete_rsync_share\": \"Deleted RSync share \",\n \"remove_background_task\": \"Removed background task \",\n \"create_remote_replication\": \"Created remote replication \",\n \"modify_remote_replication\": \"Modified remote replication \",\n \"remove_remote_replication\": \"Removed remote replication \",\n \"task_fail\": \"Task failed \",\n \"task_start\": \"Task started \",\n \"task_complete\": \"Task completed \",\n \"remove_ssh_user_key\": \"Removed ssh user key \",\n \"upload_ssh_user_key\": \"Uploaded ssh user key \",\n \"remove_ssh_host_key\": \"Removed ssh host key \",\n \"upload_ssh_host_key\": \"Uploaded ssh host key \",\n }\n\n action_dict = integralstor_action_dict\n\n d = {}\n\n d['time'], err = datetime_utils.convert_from_epoch(\n entry['audit_time'], return_format='str', str_format='%c', to='local')\n if err:\n raise Exception(err)\n\n d[\"ip\"] = entry['source_ip']\n d[\"username\"] = entry['username']\n action = entry['audit_code']\n if action in action_dict:\n d[\"action\"] = action_dict[action]\n else:\n d[\"action\"] = \"Unknown\"\n d[\"action_str\"] = entry['audit_str']\n d[\"audit_id\"] = entry['audit_id']\n\n except Exception, e:\n return None, 'Error decoding audit entry: %s' % (e)\n else:\n return d, None",
"def parse_log_file(log_filename, pod, filters=None, make_dict=False, objref_dict=None):\n log = gcs_async.read(log_filename).get_result()\n if log is None:\n return {}, False if make_dict else None\n if pod:\n bold_re = regex.wordRE(pod)\n else:\n bold_re = regex.error_re\n if objref_dict is None:\n objref_dict = {}\n if make_dict and pod:\n return kubelet_parser.make_dict(log.decode('utf8', 'replace'), bold_re, objref_dict)\n else:\n return log_parser.digest(log.decode('utf8', 'replace'),\n error_re=bold_re, filters=filters, objref_dict=objref_dict)",
"def parse(self) :\n self._curname = None\n self._curattributes = None\n \n self.setVersion((ord(self._data[0]), ord(self._data[1])))\n self.setOperationId(unpack(\">H\", self._data[2:4])[0])\n self.setRequestId(unpack(\">I\", self._data[4:8])[0])\n self.position = 8\n endofattributes = self.tagvalues[\"end-of-attributes-tag\"]\n maxdelimiter = self.tagvalues[\"event_notification-attributes-tag\"]\n nulloffset = lambda : 0\n #try :\n if 1:\n tag = ord(self._data[self.position])\n while tag != endofattributes :\n self.position += 1\n name = self.tags[tag]\n if name is not None :\n func = getattr(self, name.replace(\"-\", \"_\"), nulloffset)\n self.position += func()\n if ord(self._data[self.position]) > maxdelimiter :\n self.position -= 1\n continue\n oldtag = tag\n tag = ord(self._data[self.position])\n if tag == oldtag :\n self._curattributes.append([])\n #except IndexError :\n # raise IPPError, \"Unexpected end of IPP message.\"\n \n self.data = self._data[self.position+1:]\n self.parsed = True",
"def logversion(self):\n logger.info(\"using EMANE version: %s\", emane.VERSIONSTR)",
"def parse_header(self, header):\n # \n \n # this is what the line'll look like:\n # e.g.: /logs/1\n # e.g.: /detailed/host/timestamp\n\n # get index of first slash\n first_slash = header.index('/')\n \n \n # splice the string now and remove any spaces\n requested_folder = header.strip('/')\n \n # check if it's just a slash\n if not requested_folder:\n # return a 0 meaning we want the latest log file\n return (0, 0)\n else:\n # check that it's a valid request\n detailed_request = requested_folder.split('/')\n # detailed_request should be of form /log/* where * is a number\n # two types of requests:\n # type 1: /log/* where * is a number\n # type 2: /detailed/node_name/timestamp\n # node_name: node name\n # timetamp is the timestamp of the run\n \n \n if len(detailed_request) == 2:\n # type 1 request\n # first entry is '' since there's a leading '/'\n if detailed_request[0] == 'log':\n # now get a valid number for a folder request\n try:\n log_number = int(detailed_request[1])\n except Exception, e:\n print \"Error obtaining log (request: \"+requested_folder+\")\"\n return (-1, str(e))\n else:\n return (0, log_number)\n else:\n return (-1, 'Invalid request (len 2)')\n elif len(detailed_request) == 3:\n # type 2 request\n if detailed_request[0] == 'detailed':\n nodename = detailed_request[1]\n timestamp = detailed_request[2]\n # verify that timestamp is a valid #\n try:\n timestamp_int = int(timestamp)\n except ValueError, ve:\n print 'Invalid timestamp requested, '+timestamp\n print ve\n return (-1, 'Invalid timestamp')\n else:\n # return the filepath as our response\n return (1, './detailed_logs/'+nodename+'/'+timestamp)\n \n \n else:\n return (-1, 'Invalid request (len 3)')\n \n else:\n # invalid!\n return (-1, 'Invalid detailed log request ('+str(detailed_request)+')')",
"def event_log(self):\n pass",
"def parse_log_entry(line):\n\n line_pattern = r\"^(?P<host>.*) - - \\[(?P<timestamp>.*)\\] \" \\\n \"\\\"(?P<request>.*)\\\" (?P<http_code>\\d\\d\\d) (?P<bytes>.*)$\"\n line_groups = re.match(line_pattern, line)\n request_pattern = r\"^(?P<request_method>[A-Z]*) (?P<resource>\\S+) ?.*$\"\n request_groups = re.match(request_pattern, line_groups.group('request'))\n host = line_groups.group('host')\n timestamp = line_groups.group('timestamp')\n timestamp = parse_date(line_groups.group('timestamp'))\n http_code = int(line_groups.group('http_code'))\n num_bytes = line_groups.group('bytes')\n num_bytes = 0 if num_bytes == '-' else int(num_bytes)\n if request_groups:\n request_method = request_groups.group('request_method')\n resource = request_groups.group('resource')\n else:\n request_method = None\n resource = None\n return ParsedRequest(\n host, timestamp, request_method,\n resource, http_code, num_bytes)",
"def process_entry(self,\n log_entry: str):\n elem = ET.fromstring(log_entry)\n rev = elem.attrib['revision']\n values = {}\n for sub in ['author', 'date', 'msg']:\n try:\n values[sub] = elem.find(f'./{sub}').text\n except (AttributeError, SyntaxError):\n log.warning('failed to retrieve %s in %s', sub, log_entry)\n values[sub] = None\n if values['msg']:\n values['msg'] = values['msg'].replace('\\n', ' ')\n rel_url_slash = self.relative_url + '/'\n for path_elem in elem.findall('*/path'):\n other = {}\n for sub in ['text-mods', 'kind', 'action', 'prop-mods',\n 'copyfrom-rev', 'copyfrom-path']:\n try:\n other[sub] = path_elem.attrib[sub]\n except (AttributeError, SyntaxError, KeyError):\n other[sub] = np.nan\n try:\n path = path_elem.text.replace(rel_url_slash, '')\n except (AttributeError, SyntaxError, ValueError) as err:\n log.warning(f'{err} processing rev {rev}')\n path = None\n entry = scm.LogEntry(rev, values['author'], to_date(values['date']),\n path=path, message=values['msg'],\n textmods=to_bool(other['text-mods']),\n kind=other['kind'], action=other['action'],\n propmods=to_bool(other['prop-mods']),\n copyfromrev=other['copyfrom-rev'],\n copyfrompath=other['copyfrom-path'],\n added=np.nan, removed=np.nan)\n yield entry",
"def log_builder(self, log_level, hrtimestamp, datestamp, timestamp, log_msg, tags):\n log_body = {}\n log_body[\"filename\"] = self.filename\n log_body[\"log_level\"] = log_level\n log_body[\"hrtimestamp\"] = hrtimestamp\n log_body[\"datestamp\"] = datestamp\n log_body[\"timestamp\"] = timestamp\n log_body[\"log_msg\"] = log_msg\n log_body[\"tags\"] = tags\n return log_body",
"def summarise(thislog):\n\n # Logfile name\n print(\"Summary for \" + thislog.filename() + \"\\n\")\n # Was it from CCP4i?\n if thislog.isccp4i():\n print(\"This is a CCP4i logfile\\n\")\n # Number of programs or pseudo-programs\n print(str(thislog.nfragments()) + \" logfile fragments\\n\")\n print(\"Fragments:\")\n for i in range(0, thislog.nfragments()):\n fragment = thislog.fragment(i)\n if fragment.isprogram():\n if fragment.has_attribute(\"name\"):\n print(\"\\tProgram: \" + str(fragment.name))\n else:\n print(\"\\tProgram: <no name>\")\n else:\n if fragment.isccp4i_info():\n print(\"\\tCCP4i info\")\n elif fragment.isfragment():\n print(\"\\tFragment\")\n if fragment.ntables():\n print(\"\\t\\t\" + str(fragment.ntables()) + \" tables\")\n if fragment.nkeytexts():\n print(\"\\t\\t\" + str(fragment.nkeytexts()) + \" keytexts\")\n\n print(\"\")\n # Summarise program logfile fragments\n if thislog.nprograms() > 0:\n print(str(thislog.nprograms()) + \" program logfiles\\n\")\n print(\"Programs:\")\n for i in range(0, thislog.nprograms()):\n prog = thislog.program(i)\n # Is it a CCP4 program?\n if prog.isccp4():\n # Print name, version (and CCP4 version)\n print(\n \"\\t\"\n + prog.name\n + \"\\tv\"\n + prog.version\n + \"\\t(CCP4 \"\n + prog.ccp4version\n + \")\"\n )\n else:\n # Print name and version\n if prog.has_attribute(\"name\") and prog.has_attribute(\"version\"):\n print(\"\\t\" + prog.name + \"\\t\" + prog.version)\n else:\n print(\"\\t<No name and/or version>\")\n if prog.termination():\n print(\"\\tTerminated with: \" + prog.termination_message)\n else:\n print(\"\\tNo termination message found\")\n # Keytexts\n if prog.nkeytexts():\n print(\"\\n\\t\\tKeytext messages:\")\n for j in range(0, prog.nkeytexts()):\n print(\n \"\\t\\t\"\n + str(prog.keytext(j).name())\n + ': \"'\n + str(prog.keytext(j).message())\n + '\"'\n )\n # Tables\n if prog.ntables():\n print(\"\\n\\t\\tTables:\")\n for table in prog.tables():\n print('\\t\\tTable: \"' + table.title() + '\"')\n print(\"\")\n else:\n print(\"No program logfiles found\")\n print(\"\")\n # Total set of CCP4i information messages in the file\n print(\"CCP4i messages in file:\")\n if thislog.nccp4i_info():\n for i in range(0, thislog.nccp4i_info()):\n print('\\tCCP4i info: \"' + thislog.ccp4i_info(i).message + '\"')\n else:\n print(\"\\tNo messages found\")\n print(\"\")\n # Total set of tables in the file\n print(\"Tables in file:\")\n if thislog.ntables():\n for table in thislog.tables():\n print('\\tTable: \"' + table.title() + '\" (' + str(table.nrows()) + \" rows)\")\n else:\n print(\"\\tNo tables found\")\n print(\"\")\n # Total set of keytexts in the file\n print(\"Keytext messages in file:\")\n if thislog.nkeytexts():\n for i in range(0, thislog.nkeytexts()):\n print(\n \"\\t\"\n + str(thislog.keytext(i).name())\n + ': \"'\n + thislog.keytext(i).message()\n + '\"'\n )\n else:\n print(\"\\tNo keytext messages found\")\n print(\"\")",
"def _add_filename_metadata(self, extra_metadata): \n \n # Make sure product_info section exists\n extra_metadata.setdefault('product_info', {})\n \n file_name = os.path.basename(self.fname)\n fn_comps = file_name.split(\"_\")\n \n if self.__class__ == SAFESentinel1:\n component = fn_comps[2]\n if len(component) < 4: \n resolution = 'N/A'\n else:\n resolution = component[-1]\n \n extra_metadata['product_info']['Resolution'] = resolution\n \n # Add file/scan name \n extra_metadata['product_info']['Name'] = os.path.splitext(file_name)[0]\n \n # Add Satellite and Mission from the file path\n comp_1 = fn_comps[0].upper()\n extra_metadata['platform']['Mission'] = \"Sentinel-%s\" % comp_1[1]\n extra_metadata['platform']['Satellite'] = \"Sentinel-%s\" % comp_1[1:]",
"def format(self, record: logging.LogRecord = None) -> str:\n # s = super().format(record)\n s = None\n e = {}\n e['id'] = uuid.uuid4().hex\n e['message'] = record.getMessage()\n # log.warning('record.message: %r', record.getMessage())\n # log.warning('record.args: %r', record.args)\n e['created'] = record.created\n e['priority'] = record.levelname\n e['args'] = record.args\n e['source_code'] = {}\n e['source_code']['pathname'] = record.pathname\n e['source_code']['funcName'] = record.funcName\n e['source_code']['lineno'] = record.lineno\n ctx = record.args.get(PIPELINE_CONTEXT_KEY, None)\n if ctx:\n e[PIPELINE_CONTEXT_KEY] = ctx.toDict()\n # use array enclosure a[] to mainain the log file\n # yaml compliant as new events are appended\n # - event1:\n # - event2:\n # - ...\n a = [e]\n s = yaml.dump(a)\n return s"
]
| [
"0.58203495",
"0.55198073",
"0.54499614",
"0.5366643",
"0.536272",
"0.5305847",
"0.5242875",
"0.51955664",
"0.515198",
"0.51442206",
"0.5136993",
"0.51346636",
"0.5060666",
"0.5046243",
"0.5008593",
"0.5006592",
"0.49796999",
"0.49733102",
"0.49716076",
"0.49515978",
"0.4940416",
"0.4935828",
"0.4931003",
"0.4913603",
"0.4906886",
"0.49023217",
"0.48972318",
"0.48626754",
"0.48622388",
"0.48354182"
]
| 0.5648422 | 1 |
Compute the cocitation graph using a citation network | def cocite(G, min_citations=2):
if not G.is_directed():
msg = "The cocitation algorithm requires a directed citation graph as an input."
raise nx.NetworkXError(msg)
#assert type(G) == nx.classes.digraph.DiGraph
edges = {}
# for each node
for n in G.nodes():
# for each outward edge (citing)
out = G.out_edges(n)
for i in out:
for j in out:
if i==j: break
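# count how many times each unordered pair of cited papers appears together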
pair = tuple(set([i[1],j[1]]))
edges[pair] = edges.get(pair, 0) + 1
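# build an edgeless undirected copy of the citation graph, then add weighted co-citation edges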
CC = G.to_undirected() # this returns a deepcopy
CC = nx.create_empty_copy(CC, with_data=True)
edgelist = [(pair[0], pair[1], count) for pair, count in edges.items() if count > min_citations]
CC.add_weighted_edges_from(edgelist)
return CC | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def neato_graph_from_corpus( corpus, max_nodes ) :\n\n O, row_dois, column_dois = cites_matrix( corpus )\n neato_cooccurrence_graph( O, column_dois )\n return None\n\n \n v = total_occurrences( O ) \n nv = v.astype( float32 ) / v.max()\n C = cooccurrence_matrix ( O )\n nC = normalized_cooccurrence_matrix( O )\n\n # now find our cutoff!\n # find the max number of cocites and start there\n cocite_cutoff = C.max()\n num_nodes = nodes_from_c( C[C >= cocite_cutoff] )\n # then reduce the number until we exceed max_nodes\n while num_nodes < max_nodes :\n cocite_cutoff = cocite_cutoff - 1\n num_nodes = nodes_from_c( C[C >= cocite_cutoff] )\n\n if num_nodes > max_nodes :\n cocite_cutoff = cocite_cutoff + 1\n \n C = C.copy()\n C[ C < cocite_cutoff ]= 0\n\n graph = pydot.Dot( graph_type = 'graph' )\n graph.set_overlap(\"false\")\n coords = zip(*(C >= cocite_cutoff).nonzero())\n\n # make a dict of all nodes which are mentioned in the coords\n nodes = {}\n index = 1\n for coord in set(chain.from_iterable(coords)) :\n if not nodes.has_key( coord ) :\n node = pydot.Node( str(coord) )\n if v != None :\n doi = column_dois[coord]\n node.set_label( str(index) )\n node.set_penwidth( nv[ coord ] )\n node.set_fixedsize(\"true\")\n node.set_width( 1.0 *nv[ coord ] )\n #node.set_shape(\"circle\")\n nodes[ coord ] = node\n graph.add_node( node )\n index = index + 1\n\n for coord in coords :\n \n edge = pydot.Edge( nodes[coord[0]], nodes[coord[1]] )\n edge.set_weight( nC[coord] )\n edge.set_penwidth( nC[coord]*5 )\n #edge.set_label( str(int(m[coord]) ))\n graph.add_edge(edge)\n\n \n legend = pydot.Node( \"legend\" )\n nodelist = nodes.items()\n nodelist.sort( lambda a,b : cmp(node_index(a[1].get_label()),node_index(b[1].get_label())) )\n legend.set_label( \"\\l\".join([x[1].get_label()+\":\"+column_dois[x[0]] for x in nodelist])+\"\\l\" )\n legend.set_shape(\"box\")\n graph.add_node(legend)\n\n print graph.to_string()\n #graph.write_dot('test.dot', prog='neato' )\n #graph.write_png('test.png', prog='neato' )\n #graph.write_pdf('test.pdf', prog='neato' )",
"def citation_network(db, filename, max_edges=1e6):\n\tnodes = dict()\n\tedges = set()\n\tnum_edges = 0\n\n\thashes = get_to_read_hashes()\n\n\twith open(filename, 'w', errors='backslashreplace') as f:\n\n\t\tf.write(\"graph [\\n\\tdirected 1\\n\")\n\n\t\tfor parent_hash, ref_list in db.parents.items():\n\n\t\t\tif len(ref_list) > 0:\n\n\t\t\t\tadd_node(db, db.all_papers[parent_hash], parent_hash, f, nodes, hashes)\n\t\t\t\t\n\t\t\t\tfor child in ref_list:\n\n\t\t\t\t\tadd_node(db, child, child.hash, f, nodes, hashes)\n\n\t\t\t\t\tsource_id = nodes[parent_hash]\n\t\t\t\t\ttarget_id = nodes[child.hash]\n\t\t\t\t\tdb.add_edge(source_id, target_id, f, nodes, edges)\n\t\t\t\t\tnum_edges += 1\n\n\t\t\t\t\tif num_edges > max_edges:\n\t\t\t\t\t\tprint(\"MAX EDGES REACHED\", num_edges, len(nodes))\n\t\t\t\t\t\tf.write(\"]\")\n\t\t\t\t\t\treturn\n\n\t\tprint(num_edges, \"edges written with max_edges =\", max_edges)\n\t\tf.write(\"]\")",
"def parseArnetminerDataset():\n\n inputFile = open(os.path.join(projectRoot, 'data','DBLP-citation-Feb21.txt'))\n graph = MultiDiGraph()\n\n # Sets for authors, papers, conferences, and terms found so far\n indexToPaperIdMap = {}\n citationCountMap = {}\n indexSet = set()\n\n beginning = inputFile.tell()\n\n print \"Parsing nodes for graph...\"\n\n # Counts for statistics\n VALID_PAPERS = 1566322 # 99.62% of total papers in DBLP dataset\n papersProcessed = 0\n skippedPaperIndices = set()\n invalidPaperIndices = set()\n\n # Add each paper to graph (adding missing associated terms, authors, and conferences)\n for title, authors, conference, terms, citationCount, index in __papersFromFile(inputFile, skippedPaperIndices, invalidPaperIndices):\n\n # Check that index is unique, and record it\n assert index not in indexSet\n indexSet.add(index)\n\n # Create unique identifier with paper index & title\n paperId = '%d----%s' % (index, title)\n citationCountMap[paperId] = citationCount\n indexToPaperIdMap[index] = paperId\n\n # Add symmetric edges & nodes (if they don't already exist in the network)\n for author in authors:\n graph.add_edges_from([(author, paperId), (paperId, author)])\n graph.add_edges_from([(conference, paperId), (paperId, conference)])\n for term in terms:\n graph.add_edges_from([(term, paperId), (paperId, term)])\n\n # Output progress\n papersProcessed += 1\n sys.stdout.write(\"\\r Processed %d / %d papers...\" % (papersProcessed, VALID_PAPERS))\n\n # Rewind file\n inputFile.seek(beginning)\n\n print \"Parsing citations for graph...\"\n\n # Counts for statistics\n papersProcessed = 0\n successfulCitations = 0\n omittedPaperCitations = 0\n invalidPaperCitations = 0\n invalidCitations = 0\n\n # Add citations to the graph\n for title, index, citations in __citationsFromFile(inputFile):\n citingId = '%d----%s' % (index, title)\n for citationIndex in citations:\n\n # Add citation edge if it was found\n if citationIndex in indexToPaperIdMap:\n successfulCitations += 1\n graph.add_edge(citingId, indexToPaperIdMap[citationIndex])\n\n # Tally missing citation appropriately\n elif citationIndex in skippedPaperIndices:\n omittedPaperCitations += 1\n elif citationIndex in invalidPaperIndices:\n invalidPaperCitations += 1\n else:\n print \"\\nCitation '%d' not found for '%s'\" % (citationIndex, title)\n invalidCitations += 1\n\n # Output progress\n papersProcessed += 1\n sys.stdout.write(\"\\r Processed Citations for %d / %d papers...\" % (papersProcessed, VALID_PAPERS))\n\n # Basic statistics about cleanliness of citations\n totalCitations = invalidCitations + successfulCitations\n successfulCitationsPercent = 100 * float(successfulCitations) / totalCitations\n omittedPaperCitationsPercent = 100 * float(omittedPaperCitations) / totalCitations\n invalidPaperCitationsPercent = 100 * float(invalidPaperCitations) / totalCitations\n invalidCitationsPercent = 100 * float(invalidCitations) / totalCitations\n print \"\\n\\nTotal Citations: %d\" % totalCitations\n print \" Citations Added (Successful): %d (%2.2f%%)\" % (successfulCitations, successfulCitationsPercent)\n print \" Citations Skipped (Skipped Paper): %d (%2.2f%%)\" % (omittedPaperCitations, omittedPaperCitationsPercent)\n print \" Citations Skipped (Invalid Paper): %d (%2.2f%%)\" % (invalidPaperCitations, invalidPaperCitationsPercent)\n print \" Citations Invalid (Unknown): %d (%2.2f%%)\" % (invalidCitations, invalidCitationsPercent)\n\n return graph",
"def create_graph_network(start_node, connections):\n graph = nx.Graph()\n graph.add_node(start_node)\n print(connections.index)\n graph.add_nodes_from(connections.index)\n edge_list = list(zip(itertools.repeat(start_node), connections.index))\n print(\"edge list is \", edge_list)\n graph.add_edges_from(edge_list)\n for i in graph.edges():\n graph[i[0]][i[1]]['weight'] = connections.loc[i[1]]['count']\n # graph[i[0]][i[1]]['proposal_number'] = connections.loc[i[1]]['proposal_number']\n # graph[i[0]][i[1]]['institution'] = connections.loc[i[1]]['institution']\n # graph[i[0]][i[1]]['proposal_title'] = connections.loc[i[1]]['proposal_title']\n # graph[i[0]][i[1]]['project_status'] = connections.loc[i[1]]['project_status']\n\n # Adding random position data to the graph.\n # pos = nx.spring_layout(graph, k=1)\n pos = nx.circular_layout(graph)\n nx.set_node_attributes(graph, 'pos', pos)\n return graph",
"def generateGraph(mids, chaptersField, labelsField):\n output = \"digraph G { \\n\"\n # On ne traite que les chapitres qui ont actives le graphe\n chapts = chapters.graphChapters()\n # le dico nodes contient une liste pour chaque chapitre. Chaque liste\n # contient tous les neuds (un par note) presents dans ce chapitre, et\n # representes par des tuples (noteId, label)\n nodes = {}\n for mid in mids:\n chapterField = chaptersField[mid]\n labelField = labelsField[mid]\n for id, flds in mw.col.db.execute(\"\"\"\n SELECT id, flds FROM notes WHERE mid=%d\n \"\"\" % mid):\n fields = splitFields(flds)\n chapter = fields[chapterField]\n if not chapter in chapts:\n continue\n label = fields[labelField]\n if(not chapter in nodes):\n nodes[chapter] = []\n nodes[chapter].append((id, label))\n # On genere les noeuds, dans des clusters (un par chapitre)\n notes = []\n for chap in nodes:\n output += \"\"\"subgraph cluster_%d {\n node [style=filled];\n label = \"%s\";\n color=blue;\n \"\"\" % (chapts[chap], chap)\n for n in nodes[chap]:\n output += \"\"\"n%d [label=\"%s\", URL=\"%d\"];\\n\"\"\" % (n[0], n[1], n[0])\n notes.append(n)\n output += \"\"\"\n }\\n\"\"\"\n # Puis on ajoute tous les liens ..\n for n in notes:\n for nid in mw.col.db.execute(\"\"\"SELECT N.noteId FROM `PATH.links` AS L\n JOIN `PATH.match` AS M ON M.id = L.matchId\n JOIN `PATH.nodes` AS N ON M.nodeId = N.id\n WHERE L.noteId = %d\"\"\" % (n[0])):\n output += \"\"\"n%d -> n%d;\\n\"\"\" % (nid[0], n[0])\n output += \"}\"\n generateGraphImage(output)",
"def process_citation(style, reference_list, citation):\n processed_citation = Element(\"span\", attrib={\"class\":\"citation\"})\n\n for reference in reference_list:\n citeref = SubElement(processed_citation, \"span\")\n for style_node in style.citation.layout:\n process_node(citeref, style_node, style.macros, reference) \n\n return(processed_citation)",
"def complex_network_mapping(graph):\n vect = []\n\n n = nx.number_of_nodes(graph)\n e = nx.number_of_edges(graph)\n print n, e\n\n# adj = nx.adjacency_matrix(graph).toarray()\n# adj_bin = np.where(adj > 0, 1., 0.)\n# adj_conn = 1 - adj\n adj_bin = nx.adjacency_matrix(graph).toarray()\n adj_bin = np.array(adj_bin, dtype=np.float)\n\n # Node Betweenness binary\n bt_bin = nx.betweenness_centrality(graph).values()\n avg_btb = np.mean(bt_bin)\n vect.append(avg_btb)\n\n # Edge betweenness\n ebt = np.array(nx.edge_betweenness_centrality(graph).values())\n vect.append(np.mean(ebt))\n\n # Eigen vector centrality binary\n evc_bin = eigenvector_centrality_und(adj_bin)\n avg_evcb = np.mean(evc_bin)\n vect.append(avg_evcb)\n\n # Flow coefficient\n _, flow_bin, _ = flow_coef_bd(adj_bin)\n avg_flow = np.mean(flow_bin)\n vect.append(avg_flow)\n\n # Kcoreness centrality\n kcor_bin, _ = kcoreness_centrality_bu(adj_bin)\n avg_kcor = np.mean(kcor_bin)\n vect.append(avg_kcor)\n\n # Degree assortivity\n dac = nx.degree_assortativity_coefficient(graph)\n vect.append(dac)\n\n # Page rank centrality\n# pgr_wei = pagerank_centrality(adj_bin, d=0.85)\n# avg_pgr = np.mean(pgr_wei)\n# vect.append(avg_pgr)\n\n # Rich club coefficient\n# rcc = nx.rich_club_coefficient(graph).values()\n# avg_rcc = np.mean(rcc)\n# vect.append(avg_rcc)\n\n # Transitivity\n tr = nx.transitivity(graph)\n vect.append(tr)\n\n # average clustering\n avg_clst = nx.average_clustering(graph)\n vect.append(avg_clst)\n\n glb_ef = efficiency_bin(adj_bin)\n vect.append(glb_ef)\n\n return vect",
"def parse_pubchem():\n g = Graph()\n print \"Starting to parse...\"\n g.parse(\"./pubchem_rdf/compound/general/pc_comp_00000001_00100000.ttl\", format=\"turtle\")\n print \"next doing comp...\"\n g.parse(\"./pubchem_rdf/descriptor/compound/pc_comp_descr_00000001_00100000.ttl\", format=\"turtle\")\n print \"next doing inchikey...\"\n g.parse(\"./pubchem_rdf/inchikey/compound/pc_inchikey_comp_00000001_00100000.ttl\", format=\"turtle\")\n return g",
"def get_graph_blogcatalog():\n from scipy.io import loadmat\n\n def sparse2graph(x):\n from collections import defaultdict\n from six import iteritems\n\n G = defaultdict(lambda: set())\n cx = x.tocoo()\n for i, j, v in zip(cx.row, cx.col, cx.data):\n G[i].add(j)\n return {str(k): [str(x) for x in v] for k, v in iteritems(G)}\n\n mat = loadmat('./samples/blogcatalog.mat')\n A = mat['network']\n data = sparse2graph(A)\n\n G = eg.Graph()\n for u in data:\n for v in data[u]:\n G.add_edge(u, v)\n\n return G",
"def _create_reference_connectivity_graph(self):\n #take the self._residue_graph and create a replicate (without the misc attributes) with the atoms_with_positions\n _reference_connectivity_graph = nx.Graph()\n atoms_with_positions = set(self._atoms_with_positions)\n\n #iterate over all the bonds\n for bond in self._residue_graph.edges():\n if set(bond).issubset(atoms_with_positions):\n #if both of the atoms in the bond are in atoms_with_positions, we can add the atoms/bonds to the reference\n _reference_connectivity_graph.add_edge(*bond)\n\n return _reference_connectivity_graph",
"def CirculantGraph(n, adjacency):\n from sage.graphs.graph_plot import _circle_embedding\n\n if not isinstance(adjacency,list):\n adjacency=[adjacency]\n\n G = Graph(n, name=\"Circulant graph (\"+str(adjacency)+\")\")\n _circle_embedding(G, list(range(n)))\n\n for v in G:\n G.add_edges([(v,(v+j)%n) for j in adjacency])\n\n return G",
"def load_network(self,network):\n\n # Node coordination, periodicity and node crds\n self.pbc = network.pbc\n self.mic = network.mic\n self.node_crds = network.get_node_crds()\n self.mean_ring_size = 6 # self.node_cnxs = []\n\n # Make connections accounting for periodicity\n self.node_cnxs = network.get_edges(unique=False)\n self.node_cnx_crds = np.zeros((self.node_cnxs.shape[0],4))\n crd_i = np.zeros(2)\n crd_j = np.zeros(2)\n for i,p in enumerate(self.node_cnxs):\n crd_i[:] = self.node_crds[p[0]][:]\n crd_j[:] = self.node_crds[p[1]][:]\n x = crd_j[0]-crd_i[0]\n y = crd_j[1]-crd_i[1]\n if x>self.mic[0]: x-=self.pbc[0]\n elif x<-self.mic[0]: x+=self.pbc[0]\n if y>self.mic[1]: y-=self.pbc[1]\n elif y<-self.mic[1]: y+=self.pbc[1]\n self.node_cnx_crds[i,0] = crd_i[0]\n self.node_cnx_crds[i,1] = crd_i[0]+x/2\n self.node_cnx_crds[i,2] = crd_i[1]\n self.node_cnx_crds[i,3] = crd_i[1]+y/2\n\n # Make rings accounting for periodicity\n self.node_rings = network.get_rings()\n self.ring_crds = []\n self.max_ring_size = 0\n for ring in self.node_rings:\n crds = np.zeros((ring.size,2))\n for i,j in enumerate(ring):\n crds[i,:] = self.node_crds[j,:]\n for i in range(1,ring.size):\n x = crds[i,0] - crds[i-1,0]\n y = crds[i,1] - crds[i-1,1]\n if x>self.mic[0]: x -= self.pbc[0]\n elif x<-self.mic[0]: x += self.pbc[0]\n if y>self.mic[1]: y -= self.pbc[1]\n elif y<-self.mic[1]: y += self.pbc[1]\n crds[i,0] = crds[i-1,0] + x\n crds[i,1] = crds[i-1,1] + y\n self.ring_crds.append(crds)\n if ring.size > self.max_ring_size:\n self.max_ring_size = ring.size\n self.perimeter_ring = network.get_rings(True)\n self.perimeter_ring_crds = []\n for i in self.perimeter_ring:\n self.perimeter_ring_crds.append(self.node_crds[i,:])\n self.perimeter_ring_crds = np.array(self.perimeter_ring_crds)\n self.init_ring_colours(self.mean_ring_size,self.max_ring_size)",
"def getGraph():\n graph = ConjunctiveGraph()\n graph.parse(\"trains.n3\", format=\"n3\", publicID=TT['disk#context'])\n return graph",
"def build_graph(self):\n for node in self.graph.nodes():\n self.c2py[node] = PyNode(node)\n for _input in node.inputs():\n if _input not in self.c2py:\n self.c2py[_input] = PyNode(_input, True)\n if _input in self.forward_edge:\n self.forward_edge[_input].append(node)\n else:\n self.forward_edge[_input] = [node]\n for output in node.outputs():\n if output not in self.c2py:\n self.c2py[output] = PyNode(output, True)\n if node in self.forward_edge:\n self.forward_edge[node].append(output)\n else:\n self.forward_edge[node] = [output]",
"def gen_graph(self):",
"def test_unweighted_directed_networkx_to_cugraph():\n dpr = mg.resolver\n networkx_graph_data = [\n (0, 1),\n (0, 2),\n (2, 0),\n (1, 2),\n (3, 2),\n ]\n networkx_graph_unwrapped = nx.DiGraph()\n networkx_graph_unwrapped.add_edges_from(networkx_graph_data)\n x = dpr.wrappers.Graph.NetworkXGraph(networkx_graph_unwrapped)\n\n sources = [0, 0, 1, 2, 3]\n destinations = [1, 2, 2, 0, 2]\n cdf = cudf.DataFrame({\"source\": sources, \"destination\": destinations})\n g = cugraph.DiGraph()\n g.from_cudf_edgelist(cdf, source=\"source\", destination=\"destination\")\n intermediate = dpr.wrappers.Graph.CuGraph(g, None)\n y = dpr.translate(x, CuGraph)\n dpr.assert_equal(y, intermediate)\n assert len(dpr.plan.translate(x, CuGraph)) == 1",
"def chain_graph(self) -> nx.DiGraph:\n edg_lst = [\n (f\"p{idx}\", f\"p{idx+1}\", self.d[f\"p{idx+1}\"]) for idx in range(self.n)\n ]\n chain_graph = nx.DiGraph()\n chain_graph.add_weighted_edges_from(edg_lst)\n return chain_graph",
"def connectivity_graph(rdm):\n rdm = _rd_chem.AddHs(rdm)\n atms = rdm.GetAtoms()\n bnds = rdm.GetBonds()\n asbs = dict(enumerate((rda.GetSymbol(), 0, None) for rda in atms))\n cnns = {frozenset([rdb.GetBeginAtomIdx(), rdb.GetEndAtomIdx()]): (1, None)\n for rdb in bnds}\n return (asbs, cnns)",
"def neato_cooccurrence_graph( nC, v, labels, max_nodes = 10, fnam_stem = \"test\", label_nodes_directly = False, scale=1.0, min_node_size = 0.1 ):\n \n nv = v.astype( float32 ) / v.max()\n\n cutoff = cooccur_cutoff( nC, max_nodes );\n\n graph = pydot.Dot( graph_type = 'graph' )\n graph.set_overlap(\"false\")\n coords = zip(*(nC >= cutoff).nonzero())\n\n # make a dict of all nodes which are mentioned in the coords\n nodes = {}\n index = 1\n for coord in set(chain.from_iterable(coords)) :\n if not nodes.has_key( coord ) :\n node = pydot.Node( str(coord) )\n if v != None :\n #print coord\n label = labels[coord]\n if label_nodes_directly :\n node.set_label( label )\n else :\n node.set_label( str(index) )\n #node.set_penwidth( nv[ coord ] )\n node.set_fixedsize(\"true\")\n node.set_width( max(min_node_size,scale *nv[ coord ]) )\n node.set_shape(\"circle\")\n nodes[ coord ] = node\n graph.add_node( node )\n index = index + 1\n\n for coord in coords :\n \n edge = pydot.Edge( nodes[coord[0]], nodes[coord[1]] )\n edge.set_weight( nC[coord] )\n edge.set_penwidth( nC[coord]*5 )\n #edge.set_label( str(int(m[coord]) ))\n graph.add_edge(edge)\n\n if not label_nodes_directly : \n legend = pydot.Node( \"legend\" )\n nodelist = nodes.items()\n nodelist.sort( lambda a,b : cmp(node_index(a[1].get_label()),node_index(b[1].get_label())) )\n legend.set_label( \"\\l\".join([x[1].get_label()+\":\"+labels[x[0]] for x in nodelist])+\"\\l\" )\n legend.set_shape(\"box\")\n graph.add_node(legend)\n\n #print graph.to_string()\n graph.write_dot(fnam_stem+'.dot', prog='neato' )\n graph.write_png(fnam_stem+'.png', prog='neato' )\n #graph.write_pdf(fnam_stem+'.pdf', prog='neato' )",
"def chain_graph(self) -> nx.DiGraph:\n edg_lst = [\n (f\"p{idx}\", f\"p{idx+1}\", self.a[f\"p{idx+1}\"]) for idx in range(self.n)\n ]\n chain_graph = nx.DiGraph()\n chain_graph.add_weighted_edges_from(edg_lst)\n return chain_graph",
"def get_conn_as_graph(self, d1, d2):\n import scipy.sparse as sps\n\n self.setup_connectivity(d1, d2)\n conn = self.get_conn(d1, d2)\n\n graph = sps.csr_matrix((np.ones(conn.indices.shape[0], dtype=bool),\n np.array(conn.indices, copy=True,\n dtype=np.int32),\n np.array(conn.offsets, copy=True,\n dtype=np.int32)))\n\n return graph",
"def _construct_graph(self):\n raise NotImplementedError",
"def calc_cc(graph):\n\tclustering_coeffs = {}\n\tfor node in graph.nodes():\n\t\tclustering_coeffs[node] = { \"cc\" : nx.clustering(graph, node)}\n\tnx.set_node_attributes(graph, clustering_coeffs)",
"def compute_cpts_network(df, network):\n P = dict()\n for idx, pair in enumerate(network):\n if pair.parents is None:\n cpt = CPT.from_factor(Factor.from_data(df, cols=[pair.node])).normalize()\n # cpt = CPT(marginal_distribution, conditioned=[pair.node]).normalize()\n else:\n # todo: there should be a from_data at CPT\n cpt = CPT.from_factor(Factor.from_data(df, cols=[*pair.parents, pair.node])).normalize()\n # cpt = CPT(joint_distribution, conditioned=[pair.node]).normalize()\n\n # add conditional distribution to collection\n P[pair.node] = cpt\n return P",
"def wsngraph():\n G = nx.Graph()\n G.add_node(1)\n G.add_node(2)\n G.add_node(3)\n G.add_node(4)\n G.add_node(5)\n G.add_node(6)\n G.add_node(7)\n G.add_node(8)\n G.add_node(9)\n G.add_node(10)\n G.add_node(11)\n G.add_node(12)\n G.add_edge(1,3,weight=1)\n G.add_edge(1,2,weight=6)\n G.add_edge(1,12,weight=16)\n G.add_edge(2,11,weight=12)\n G.add_edge(2,6,weight=10)\n G.add_edge(2,5,weight=11)\n G.add_edge(3,4,weight=10)\n G.add_edge(3,7,weight=11)\n G.add_edge(3,8,weight=14)\n G.add_edge(3,9,weight=11)\n G.add_edge(4,7,weight=9)\n G.add_edge(5,6,weight=7)\n G.add_edge(5,9,weight=12)\n G.add_edge(6,9,weight=9)\n G.add_edge(7,10,weight=10)\n G.add_edge(8,10,weight=2)\n G.add_edge(8,11,weight=11)\n G.add_edge(8,9,weight=12)\n G.add_edge(9,11,weight=8)\n G.add_edge(10,12,weight=3)\n G.pos={}\n G.pos[1]=(6,4)\n G.pos[2]=(-1,3.7)\n G.pos[3]=(4.7,3.5)\n G.pos[4]=(5.3,3.2)\n G.pos[5]=(0,3)\n G.pos[6]=(1.4,3.4)\n G.pos[7]=(5,2.6)\n G.pos[8]=(4.7,0)\n G.pos[9]=(1.4,2.4)\n G.pos[10]=(5.2,0.5)\n G.pos[11]=(1.3,0)\n G.pos[12]=(6,2.4)\n elarge=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] > 8]\n esmall=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] <= 8]\n nx.draw_networkx_nodes(G,G.pos,node_color='w')\n nx.draw_networkx_edges(G,G.pos,elarge,width=3,edge_color='r',alpha=0.3)\n nx.draw_networkx_edges(G,G.pos,esmall,width=1,edge_color='b',alpha=0.3)\n nx.draw_networkx_labels(G,G.pos)\n ax=plt.gca()\n ax.axison = False\n label = {} \n for (u,v) in G.edges():\n d = G.get_edge_data(u,v)\n label[(u,v)]=d['weight']\n edge_label=nx.draw_networkx_edge_labels(G,G.pos,edge_labels=label)\n\n return(G)",
"def construct_network_from_neighbours_list(related_characters: list):\n graph = nx.Graph()\n for edge in related_characters:\n sentiment = edge[1]\n color = ''\n if sentiment == 'Positive':\n color = 'g'\n elif sentiment == 'Negative':\n color = 'r'\n elif sentiment == 'Neutral':\n color = 'k'\n # graph.add_node(edge[0][0], popularity=\n graph.add_edge(edge[0][0], edge[0][1], color=color, weight=edge[2])\n\n return graph",
"def graph(self):\n ...",
"def create_computational_graph(node):\n graph = CompGraph()\n graph.build_graph(node)\n return graph",
"def __init__(self, graphs: List[Graph], graph_ids: Set[str]) -> None:\n self.graph_ids = graph_ids\n\n # count of link given source & object\n self.c_l_given_so: Dict[Tuple[bytes, bytes], Dict[bytes, int]] = {}\n # count of nodes\n self.c_n: Dict[bytes, int] = {}\n # count of link given source\n self.c_l_given_s: Dict[bytes, Dict[bytes, int]] = {}\n\n # COMPUTE counting\n for g in graphs:\n for link in g.iter_links():\n s = link.get_source_node().label\n o = link.get_target_node().label\n\n # COMPUTE c_l_given_s\n if s not in self.c_l_given_s:\n self.c_l_given_s[s] = {}\n if link.label not in self.c_l_given_s[s]:\n self.c_l_given_s[s][link.label] = 0\n self.c_l_given_s[s][link.label] += 1\n\n # COMPUTE c_l_given_so\n if link.get_target_node().is_data_node():\n # no need to estimate this prob, since it will be result from semantic labeling\n pass\n else:\n if (s, o) not in self.c_l_given_so:\n self.c_l_given_so[(s, o)] = {}\n if link.label not in self.c_l_given_so[(s, o)]:\n self.c_l_given_so[(s, o)][link.label] = 0\n self.c_l_given_so[(s, o)][link.label] += 1\n\n # COMPUTE c_n\n for n in g.iter_nodes():\n if n.label not in self.c_n:\n self.c_n[n.label] = 0\n self.c_n[n.label] += 1\n\n # cached\n self.p_critical_l_given_s = {}\n for s, counts in self.c_l_given_s.items():\n l, c_l = max(counts.items(), key=lambda x: x[1])\n self.p_critical_l_given_s[s] = (l, c_l / self.c_n[s])",
"def _build_graph(self):\n pass"
]
| [
"0.63316435",
"0.6326704",
"0.62020963",
"0.60203207",
"0.5940459",
"0.5838418",
"0.5737226",
"0.5707589",
"0.56594205",
"0.55725867",
"0.5538882",
"0.55271745",
"0.54878485",
"0.546096",
"0.54601884",
"0.5455653",
"0.5437921",
"0.5426384",
"0.5410883",
"0.5399209",
"0.5382729",
"0.538231",
"0.5370581",
"0.53683084",
"0.53519535",
"0.534407",
"0.5343535",
"0.53246826",
"0.5321789",
"0.5289548"
]
| 0.73921865 | 0 |
Find arguments suitable for forest.parse_args.parse_args, e.g. bokeh serve --args [ARGS] or forest [ARGS] | def parse_forest_args(argv=None):
if argv is None:
argv = sys.argv
if "bokeh" in os.path.basename(argv[0]):
i = argv.index("--args")
return argv[i + 1 :]
else:
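# launched via the forest command line: delegate to forest's own parser and keep the remaining arguments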
_, argv = forest.cli.main.parse_args(argv)
return argv[1:] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse_arguments(args):",
"def _parse_args():\n args = sys.argv[1:]\n cmd_parser = argparse.ArgumentParser()\n cmd_parser.add_argument(\n '--produce-sub',\n dest='produce_sub',\n help='Produce submision file',\n default=False,\n action='store_true',\n )\n cmd_parser.add_argument(\n '--search-cv',\n dest='search_cv',\n help='Perform Search of parameters',\n default=False,\n action='store_true',\n )\n cmd_opts = cmd_parser.parse_args(args=args)\n return cmd_opts",
"def parseArgs():\n parser = argparse.ArgumentParser()\n parser.add_argument('--dataset', default='fsod', help='training dataset') # use fsod dataset for default\n parser.add_argument('--cfg', dest='cfg_file', required=True, help='optional config file')\n parser.add_argument('--load_ckpt', help='path to load checkpoint')\n parser.add_argument('--load_detectron', help='path to load detectron weight pickle file')\n parser.add_argument('--output_dir', help='output directory to save the testing results.')\n parser.add_argument('--range', help='[start, end)', type=int, nargs=2)\n parser.add_argument('--visualize', dest='visualize', help='output images of detection', action='store_true')\n return parser.parse_args()",
"def get_args():\n parser = argparse.ArgumentParser('Find a featureclass, database, mxd, or service in ArcGIS Server',\n epilog='For search strings inlcuding spaces, enclose the query in double-quotes')\n parser.add_argument('name', help='string for which to search (blank returns info on all services)',\n nargs='?', default='')\n parser.add_argument('-q', '--quiet', help='only display service names and URLs', action='store_true')\n parser.add_argument('-qq', '--veryquiet', help='only display service URLs, comma delimited', action='store_true')\n parser.add_argument('-cs', '--configstore', help='explicitly provide full path to config store', action='store')\n parser.add_argument('-csv', '--tocsv', help='create csv output', action='store_true')\n parser.add_argument('-md', '--markdown', help='create Markdown output', action='store_true')\n return parser.parse_args()",
"def parse_args():\n parser = argparse.ArgumentParser(\n description=\"Reads datapacket pcds, interpolates quaternions and generates scans from dataset in config file\")\n parser.add_argument(\"--visualization\", \"-v\", action=\"store_true\", help=\"if generated clouds should be visualized\")\n parser.add_argument(\"--directory\", \"-d\",\n help=\"if only specified directory should be interpolated, e.g. 'fragments/fragment0'\")\n args = parser.parse_args()\n return args.visualization, args.directory",
"def parse_args():\n\n import cdr_cleaner.args_parser as parser\n\n additional_arguments = [{\n parser.SHORT_ARGUMENT: '-e',\n parser.LONG_ARGUMENT: '--ehr_dataset_id',\n parser.ACTION: 'store',\n parser.DEST: 'ehr_dataset_id',\n parser.HELP: 'ehr_dataset_id',\n parser.REQUIRED: True\n }, {\n parser.SHORT_ARGUMENT: '-v',\n parser.LONG_ARGUMENT: '--validation_dataset_id',\n parser.ACTION: 'store',\n parser.DEST: 'validation_dataset_id',\n parser.HELP: 'validation_dataset_id',\n parser.REQUIRED: True\n }]\n args = parser.default_parse_args(additional_arguments)\n return args",
"def parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"auth\",\n help=\"authentication string for Infermedica API: \"\n \"APP_ID:APP_KEY or path to file containing it.\")\n parser.add_argument(\"--model\",\n help=\"use non-standard Infermedica model/language, \"\n \"e.g. infermedica-es\")\n args = parser.parse_args()\n return args",
"def parse_args():\n parser = argparse.ArgumentParser(description=\"Bandits algorithms on a click-through \"\n \"rate dataset.\")\n parser.add_argument('--plot', action='store_true')\n return parser.parse_args()",
"def parse_args():\n parser = argparse.ArgumentParser(\"Plot time series figures.\")\n parser.add_argument('--log-file', type=str, nargs=\"+\", required=True,\n help=\"path to a testing log file.\")\n parser.add_argument('--trace-file', type=str, default=None,\n help=\"path to a trace file.\")\n parser.add_argument('--save-dir', type=str, default=None,\n help=\"path to save.\")\n parser.add_argument('--noise', type=float, default=0)\n\n args, unknown = parser.parse_known_args()\n return args",
"def getPositionalArgs():",
"def parse_cmdline_args():\n parser = argparse.ArgumentParser(description=\"Guesses the functional element for host.\")\n ##\n ## Internal options\n ##\n parser.add_argument(\"--json\", dest=\"json\", action='store_true', help=\"output in JSON\")\n\n ##\n ## PuppetDB options\n ##\n pdbconf = PdbConfig()\n pdbconf.add_standard_args(parser)\n\n parser.add_argument(\"host\", metavar=\"HOST\",\n help=\"hostnames to query for FE\")\n\n return parser.parse_args()",
"def parseArgs():\n parser = argparse.ArgumentParser(description='Runs RHEAS simulation.')\n parser.add_argument('config', help='configuration file')\n parser.add_argument('-d', metavar='DB', help='name of database to connect')\n parser.add_argument('-u', help='update database', action='store_true')\n args = parser.parse_args()\n return args.config, args.d, args.u",
"def parse_arguments(cls):\r\n parser = argparse.ArgumentParser(description='Easy Infer for model benchmark')\r\n cls.base_arg_parse(parser)\r\n cls.model_arg_parse(parser)\r\n cls.task_arg_parse(parser)\r\n args = parser.parse_args()\r\n return args",
"def parse_args(args=None):\n return AP.parse_args(args=args)",
"def parse_args():\n parser = default_argument_parser()\n parser.add_argument(\"--label-map\",\n dest=\"label_map\",\n type=pathlib.Path,\n help=\"Label map in YAML format which maps from category \"\n \"ID to name.\")\n parser.add_argument(\"--train-csv\",\n dest=\"train_csv\",\n required=True,\n type=pathlib.Path,\n help=\"Path to training data CSV file.\")\n parser.add_argument(\"--valid-csv\",\n dest=\"valid_csv\",\n required=False,\n type=pathlib.Path,\n help=\"Optional path to validation data CSV file.\")\n parser.add_argument(\n \"--image-width\",\n type=int,\n help=\"Image width (optional, used to speed up dataset processing).\")\n parser.add_argument(\n \"--image-height\",\n type=int,\n help=\"Image height (optional, used to speed up dataset processing).\")\n return parser.parse_args()",
"def parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"group\", metavar='<group>')\n parser.add_argument(\"artifact\", metavar='<artifact>')\n parser.add_argument('version', metavar='<version>')\n\n args = parser.parse_args()\n if None in [args.group, args.artifact, args.version]:\n parser.print_help()\n sys.exit(1)\n\n return DepthFinderArgs(args.group, args.artifact, args.version)",
"def process_args():\n\n parser = argparse.ArgumentParser(\n description='Runs MNIST Kubeflow Pipeline Sample E2E.')\n parser.add_argument(\n '--mode',\n default='all',\n help='execution mode, choose between eval, train, or all.'\n 'Default is all')\n parser.add_argument(\n '--output_path',\n default=None,\n help='output path for saving the output model file from'\n 'training step. same path is used to load the model from'\n 'for evaluation step' )\n parser.add_argument(\n '--epochs',\n default=5,\n help='number of epochs to run the training, default is 5.')\n args, _ = parser.parse_known_args()\n return args",
"def Args(parser):",
"def get_args():\n parser = argparse.ArgumentParser(\n description='Utterance level classification Leave one '\n 'speaker out schema pipeline' )\n parser.add_argument('-i', '--input_features_paths', nargs='+',\n help='File paths of the features you want to '\n 'concatenate and the classify')\n args = parser.parse_args()\n return args",
"def parse_args():\n parser = argparse.ArgumentParser(\n description='Tool to run defense in docker.')\n parser.add_argument('--defense_dir', required=True,\n help='Location of defense.')\n parser.add_argument('--input_dir', required=True,\n help='Location of input.')\n parser.add_argument('--output_dir', required=True,\n help='Location of output')\n parser.add_argument('--gpu', dest='use_gpu', action='store_true')\n parser.add_argument('--nogpu', dest='use_gpu', action='store_false')\n parser.set_defaults(use_gpu=True)\n return parser.parse_args()",
"def _parse_arguments():\n parser = get_base_arguments(get_parser())\n parser = get_tc_arguments(parser)\n args, unknown = parser.parse_known_args()\n return args, unknown",
"def __parse_args(self):\n for argument in self.args:\n source_arg = re.match(\"^(--source=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n input_arg = re.match(\"^(--input=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n stats_arg = re.match(\"^(--stats=(([A-Z]|[a-z]|/|_|[0-9]|.)+))$\", argument)\n help_arg = re.match(\"^--help$\", argument)\n vars_arg = re.match(\"^--vars$\", argument)\n insts_arg = re.match(\"^--insts$\", argument)\n if source_arg:\n self.sourceFile = source_arg.group(2)\n self.passedArgs.append(\"source\")\n elif input_arg:\n self.inputFile = input_arg.group(2)\n self.passedArgs.append(\"input\")\n elif help_arg:\n print(\"napoveda\")\n sys.exit(0)\n elif stats_arg:\n self.statsFile = stats_arg.group(2)\n self.passedArgs.append(\"stats\")\n elif vars_arg:\n self.passedArgs.append(\"vars\")\n if self.first_stat_arg is None:\n self.first_stat_arg = \"vars\"\n elif insts_arg:\n self.passedArgs.append(\"insts\")\n if self.first_stat_arg is None:\n self.first_stat_arg = \"insts\"\n else:\n raise ArgError(\"Unknown argument or format of the argument! (\" + argument + \")\")",
"def parse_arguments(self,parser):\r\n return parser.parse_args()",
"def _parse_command_line_arguments():\n parser = ArgumentParser(\n description=(\n 'Command-line tool to generate a list of unique from a TS file from FermiFAST'\n ),\n )\n parser.add_argument(\n 'ts-file',\n type=str,\n help=(\n 'A file containing the TS sky map'\n ),\n )\n parser.add_argument('--skiprows',\n type=int,\n help='number of rows to skip at the top (default 0)',\n required=False)\n parser.set_defaults(skiprows=0)\n arguments = vars(parser.parse_args())\n return arguments",
"def handle_args():\n parser = argparse.ArgumentParser(description=\"Faster-RCNN Implementation\")\n parser.add_argument(\"-handle-gpu\", action=\"store_true\", help=\"Tensorflow 2 GPU compatibility flag\")\n args = parser.parse_args()\n return args",
"def parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"auth\",\n help=\"authentication string for Infermedica API: \"\n \"APP_ID:APP_KEY or path to file containing it.\")\n parser.add_argument(\"--model\",\n help=\"use non-standard Infermedica model/language, \"\n \"e.g. infermedica-es\")\n # TODO: Check if `verbose` actually does anything.\n parser.add_argument(\"-v\", \"--verbose\",\n dest=\"verbose\", action=\"store_true\", default=False,\n help=\"dump internal state\")\n args = parser.parse_args()\n return args",
"def get_arguments():\n parser = argparse.ArgumentParser(description=\"DeepLab-ResNet Network\")\n parser.add_argument(\"--mode\", choices={\"SUM\", \"VAL\"}, default=\"VAL\", help=\"\")\n parser.add_argument(\"--sdf-path\", type=str, default=SDF_PATH, help=\"\")\n parser.add_argument(\"--summary-file\", type=str, default=SUMMARY_FILE, help=\"\")\n\n return parser.parse_args()",
"def get_args():\n parser = argparse.ArgumentParser(description=\"Arguments for data exploration\")\n parser.add_argument(\"--tokenize\",\n dest=\"tokenize\",\n action=\"store_true\",\n help=\"Tokenize by words and sentences, counting averages/sd for each.\")\n return parser",
"def parse_args():\n parser = argparse.ArgumentParser(description=\"SLOWFAST for AVA Dataset\")\n parser.add_argument(\"--pipeline\", type=str,\n default=\"../data/config/slowfast.pipeline\", help=\"SDK infer pipeline\")\n parser.add_argument(\"--data_dir\", type=str, default=\"../data/input\",\n help=\"Dataset contain frames and ava_annotations\")\n args_opt = parser.parse_args()\n return args_opt",
"def parseArgs(arguments=None):\n\tparser = generateParser(None)\n\treturn parser.parse_known_args(arguments)"
]
| [
"0.7618636",
"0.70508236",
"0.7009241",
"0.70071197",
"0.69731104",
"0.6911572",
"0.6889888",
"0.6851242",
"0.6837971",
"0.6809132",
"0.6802966",
"0.6788127",
"0.6716655",
"0.670859",
"0.6699846",
"0.6699645",
"0.6676694",
"0.6676029",
"0.6669289",
"0.6668221",
"0.6665668",
"0.66449535",
"0.6639533",
"0.66358215",
"0.6627037",
"0.66240287",
"0.6614024",
"0.6610966",
"0.66046196",
"0.6604176"
]
| 0.75391984 | 1 |
Enables global stackwise-virtual on target device | def configure_global_stackwise_virtual(device, domain=None):
# build a list of commands to send
# Add stackwise-virtual as first element in the list
# Add domain only if domain argument has been provided
command_list = ['stackwise-virtual']
if domain:
command_list.append(f'domain {domain}')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to configure global stackwise-virtual')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unconfigure_global_stackwise_virtual(device):\n # Single command 'no stackwise-virtual' will remove configuration\n command = 'no stackwise-virtual'\n try:\n output = device.configure(command)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to remove global stackwise-virtual')\n return output",
"def configure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Disables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')\n return output",
"def configure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active detection pagp')\n if port_channel:\n command_list.append(f'dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def unconfigure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Enables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'no dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Disable global stackwise-virtual dual-active recovery-reload')\n return output",
"def configure_stackwise_virtual_interfaces(device, svl_links):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Add domain only if domain argument has been provided\n command_list = []\n for interface, link_id in svl_links.items():\n command_list.append(f'interface {interface}')\n command_list.append(f'stackwise-virtual link {link_id}')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure stackwise-virtual interfaces')\n return output",
"def unconfigure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n if port_channel:\n command_list.append(f'no dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def test_is_virtual0001(self, monkeypatch):\n\n def fake_collect_dmesg_lines(_):\n return [\n 'real mem = 17074860032 (16283MB)',\n 'avail mem = 16550350848 (15783MB)',\n 'virtio3 at pci0 dev 4 function 0 \"OpenBSD VMM Control\" rev 0x00',\n ]\n monkeypatch.setattr(OpenBSDPlatform, \"_collect_dmesg_lines\", fake_collect_dmesg_lines)\n platform = OpenBSDPlatform(None, None)\n assert platform.is_virtual()",
"def setup_vm_env(self, driver='default'):\n if self.env_done:\n return\n\n # bind to default driver\n self.bind_nic_driver(self.dut_ports[:2], driver=\"\")\n\n self.used_dut_port_0 = self.dut_ports[0]\n self.host_intf0 = self.dut.ports_info[self.used_dut_port_0]['intf']\n tester_port = self.tester.get_local_port(self.used_dut_port_0)\n self.tester_intf0 = self.tester.get_interface(tester_port)\n\n self.dut.generate_sriov_vfs_by_port(\n self.used_dut_port_0, 1, driver=driver)\n self.sriov_vfs_port_0 = self.dut.ports_info[\n self.used_dut_port_0]['vfs_port']\n self.vf0_mac = \"00:10:00:00:00:00\"\n self.dut.send_expect(\"ip link set %s vf 0 mac %s\" %\n (self.host_intf0, self.vf0_mac), \"# \")\n\n self.used_dut_port_1 = self.dut_ports[1]\n self.host_intf1 = self.dut.ports_info[self.used_dut_port_1]['intf']\n self.dut.generate_sriov_vfs_by_port(\n self.used_dut_port_1, 1, driver=driver)\n self.sriov_vfs_port_1 = self.dut.ports_info[\n self.used_dut_port_1]['vfs_port']\n tester_port = self.tester.get_local_port(self.used_dut_port_1)\n self.tester_intf1 = self.tester.get_interface(tester_port)\n\n self.vf1_mac = \"00:20:00:00:00:00\"\n self.dut.send_expect(\"ip link set %s vf 0 mac %s\" %\n (self.host_intf1, self.vf1_mac), \"# \")\n\n try:\n\n for port in self.sriov_vfs_port_0:\n port.bind_driver('pci-stub')\n\n for port in self.sriov_vfs_port_1:\n port.bind_driver('pci-stub')\n\n time.sleep(1)\n vf0_prop = {'opt_host': self.sriov_vfs_port_0[0].pci}\n vf1_prop = {'opt_host': self.sriov_vfs_port_1[0].pci}\n\n # set up VM0 ENV\n self.vm0 = QEMUKvm(self.dut, 'vm0', 'vf_vlan')\n self.vm0.set_vm_device(driver='pci-assign', **vf0_prop)\n self.vm0.set_vm_device(driver='pci-assign', **vf1_prop)\n self.vm_dut_0 = self.vm0.start()\n if self.vm_dut_0 is None:\n raise Exception(\"Set up VM0 ENV failed!\")\n\n except Exception as e:\n self.destroy_vm_env()\n raise Exception(e)\n\n self.env_done = True",
"def enable(self):\n # Netmiko reports enable and config mode as being enabled\n if not self.native.check_enable_mode():\n self.native.enable()\n # Ensure device is not in config mode\n if self.native.check_config_mode():\n self.native.exit_config_mode()\n\n log.debug(\"Host %s: Device enabled.\", self.host)",
"def EnableCPU():\n global option\n option['device'] = 'CPU'",
"def configure_stackwise_virtual_dual_active_interfaces(device, dad_links):\n # build a list of commands to send\n command_list = []\n output = ''\n for interface in dad_links:\n command_list.append(f'interface {interface}')\n command_list.append(f'stackwise-virtual dual-active-detection')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure stackwise-virtual dual-active-detection interfaces')\n return output",
"def set_virtual_gpus_to_at_least(num_virtual_gpus):\n if num_virtual_gpus < 1:\n raise ValueError('`num_virtual_gpus` must be at least 1 not %r' %\n (num_virtual_gpus,))\n config = tf.config\n physical_devices = config.list_physical_devices('GPU')\n if not physical_devices:\n raise RuntimeError('No GPUs found')\n configs = config.get_logical_device_configuration(physical_devices[0])\n if configs is None:\n logical_devices = [\n tf.python.eager.context.LogicalDeviceConfiguration(memory_limit=4000)\n for _ in range(num_virtual_gpus)\n ]\n config.set_logical_device_configuration(physical_devices[0],\n logical_devices)\n else:\n if len(configs) < num_virtual_gpus:\n raise RuntimeError('Already configured with %d < %d virtual GPUs' %\n (len(configs), num_virtual_gpus))",
"def enable_hyperv() -> None:\n try:\n subprocess.check_call(\n [\n 'DISM',\n '/Online',\n '/Enable-Feature',\n '/All',\n '/NoRestart',\n '/FeatureName:Microsoft-Hyper-V',\n ]\n )\n except subprocess.CalledProcessError as e:\n if e.returncode == 3010:\n pass # This is fine, because Windows.\n else:\n raise",
"def enable_custom_kernel():\n global _TF_ADDONS_PY_OPS\n _TF_ADDONS_PY_OPS = False",
"def sm_set_Vsource(self,level,compliance):\n self.sm.set_Vsource()\n self.sm.set_source_level(level)\n self.sm.set_compliance_level(compliance)\n self.sm_output_on()",
"def set_console_xen(self):\n print \"\"\n self.exec_cmd(\"echo \\\"xvc0\\\" >> %s/etc/securetty\" % self.rep_vhosts_vm) \n if os.path.isfile(\"%s/etc/inittab\" % self.rep_vhosts_vm):\n self.exec_cmd(\"echo \\\"7:2345:respawn:/sbin/getty 38400 xvc0\\\" >> %s/etc/inittab\" % self.rep_vhosts_vm) \n\n if os.path.isfile(\"%s/etc/event.d/tty1\" % self.rep_vhosts_vm):\n self.exec_cmd(\"cp %s/etc/event.d/tty1 %s/etc/event.d/xvc0\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"sed -i \\\"s@tty1@xvc0@\\\" %s/etc/event.d/xvc0\" % self.rep_vhosts_vm)\n \n if os.path.isfile(\"%s/etc/init/tty1.conf\" % self.rep_vhosts_vm):\n self.exec_cmd(\"cp %s/etc/init/tty1.conf %s/etc/init/xvc0.conf\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"sed -i \\\"s@tty1@xvc0@\\\" %s/etc/init/xvc0.conf\" % self.rep_vhosts_vm)",
"def set_system_flags(self, sNewVmSystemFlags):\n\t\tcall_sdk_function('PrlVmCfg_SetSystemFlags', self.handle, sNewVmSystemFlags)",
"def setup_vm_env(self, driver='igb_uio'):\n if self.env_done is False:\n self.bind_nic_driver(self.dut_ports[:1], driver=\"igb_uio\")\n self.used_dut_port = self.dut_ports[0]\n tester_port = self.tester.get_local_port(self.used_dut_port)\n self.tester_intf = self.tester.get_interface(tester_port)\n self.dut.generate_sriov_vfs_by_port(\n self.used_dut_port, 1, driver=driver)\n self.sriov_vfs_port = self.dut.ports_info[\n self.used_dut_port]['vfs_port']\n for port in self.sriov_vfs_port:\n port.bind_driver(self.vf_driver)\n time.sleep(1)\n self.dut_testpmd = PmdOutput(self.dut)\n time.sleep(1)\n vf0_prop = {'opt_host': self.sriov_vfs_port[0].pci}\n # set up VM0 ENV\n self.vm0 = QEMUKvm(self.dut, 'vm0', 'ddp_gtp')\n self.vm0.set_vm_device(driver=self.vf_assign_method, **vf0_prop)\n try:\n self.vm0_dut = self.vm0.start()\n if self.vm0_dut is None:\n raise Exception(\"Set up VM0 ENV failed!\")\n except Exception as e:\n self.destroy_vm_env()\n raise Exception(e)\n self.vm0_dut_ports = self.vm0_dut.get_ports('any')\n self.vm0_testpmd = PmdOutput(self.vm0_dut)\n self.env_done = True",
"def setUp(self):\n super().setUp()\n self.vapi.ip_reassembly_enable_disable(\n sw_if_index=self.src_if.sw_if_index,\n enable_ip4=True,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n )\n self.vapi.ip_reassembly_set(\n timeout_ms=0,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n expire_walk_interval_ms=10,\n )\n self.virtual_sleep(0.25)\n self.vapi.ip_reassembly_set(\n timeout_ms=1000000,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n expire_walk_interval_ms=10000,\n )",
"def set_enabled(self, bEnabled):\n\t\tcall_sdk_function('PrlVmDev_SetEnabled', self.handle, bEnabled)",
"def tempest_cinder_glance_swift_tun(self):\n self.helper_cinder_glance_swift('tun')",
"def set_smart_guard_enabled(self, bEnabled):\n\t\tcall_sdk_function('PrlVmCfg_SetSmartGuardEnabled', self.handle, bEnabled)",
"def setUp(self):\n super().setUp()\n self.vapi.ip_reassembly_enable_disable(\n sw_if_index=self.src_if.sw_if_index,\n enable_ip6=True,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n )\n self.vapi.ip_reassembly_set(\n timeout_ms=0,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n expire_walk_interval_ms=10,\n is_ip6=1,\n )\n self.virtual_sleep(0.25)\n self.vapi.ip_reassembly_set(\n timeout_ms=1000000,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n expire_walk_interval_ms=10000,\n is_ip6=1,\n )",
"def _set_evdev_state(self):\n if NIX:\n self._evdev = True",
"def load_kernel_module(params) -> None:\n print(\"Loading kernel module...\")\n os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\")\n cmd = \"modprobe v4l2loopback devices=1 video_nr=\" + params['loopback_nr'] + \\\n \" card_label=\" + params['loopback_name'] + \\\n \" exclusive_caps=\" + params['loopback_exclusive'] + \" >/dev/null 2>&1\"\n if os.system(cmd) == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"def set_virtual_stage(self, virtual_stage: int) -> None:\n self.virtual_stage = virtual_stage",
"def turnOnSdkMode(self):\n \n command = b\"\\x90\\x01\\x01\"\n #print(\"turnOnSdkMode run, command: \")\n #print(command)\n \n self.sendCommand(command)",
"def set_device(sys_device_id):\n device_id = -1\n cuda = (sys_device_id != -1)\n if cuda:\n # CUDA_VISIBLE_DEVICE is a list, and device_id is the index of its members.\n import os\n os.environ['CUDA_VISIBLE_DEVICES'] = str(sys_device_id)\n device_id = 0\n TVT = TransferVarTensor(device_id)\n TMO = TransferModulesOptims(device_id)\n return TVT, TMO",
"def test_deploy_with_vdev(self):\n # back up user_root_vdev value in config file\n def _restore_conf(root_vdev_back):\n CONF.zvm.user_root_vdev = root_vdev_back\n root_vdev_back = CONF.zvm.user_root_vdev\n self.addCleanup(_restore_conf, root_vdev_back)\n\n new_root = '123'\n CONF.zvm.user_root_vdev = new_root\n disks = [\n {'size': '3G',\n 'format': 'xfs',\n 'is_boot_disk': True,\n 'disk_pool': CONF.zvm.disk_pool},\n {'size': '200M',\n 'format': 'ext3',\n 'is_boot_disk': False,\n 'disk_pool': 'ECKD:xcateckd'}]\n\n self.sdkapi.guest_create(self.userid, 1, 1024, disk_list=disks)\n self.sdkapi.guest_deploy(self.userid,\n self.image_name,\n vdev=new_root)\n self.sdkapi.guest_start(self.userid)\n powered_on = self.test_util.wait_until_guest_in_power_state(\n self.userid, 'on')\n self.assertTrue(powered_on)",
"def test_patch_pci_switch(self):\n pass"
]
| [
"0.68342483",
"0.6480229",
"0.60656047",
"0.597289",
"0.58074814",
"0.5528802",
"0.55048174",
"0.5476486",
"0.5420422",
"0.5391336",
"0.53740263",
"0.5351046",
"0.5321348",
"0.5261659",
"0.5255115",
"0.5211061",
"0.5154982",
"0.5148179",
"0.51353693",
"0.5134788",
"0.50914985",
"0.5083398",
"0.5079256",
"0.5072",
"0.5039995",
"0.5030483",
"0.50248134",
"0.50171393",
"0.49843362",
"0.49807817"
]
| 0.7372025 | 0 |
Disables global stackwise-virtual on target device | def unconfigure_global_stackwise_virtual(device):
# Single command 'no stackwise-virtual' will remove configuration
command = 'no stackwise-virtual'
try:
output = device.configure(command)
except SubCommandFailure:
raise SubCommandFailure('Failed to remove global stackwise-virtual')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unconfigure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Enables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'no dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Disable global stackwise-virtual dual-active recovery-reload')\n return output",
"def configure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Disables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')\n return output",
"def configure_global_stackwise_virtual(device, domain=None):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Add domain only if domain argument has been provided\n command_list = ['stackwise-virtual']\n if domain:\n command_list.append(f'domain {domain}')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure global stackwise-virtual')\n return output",
"def unconfigure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n if port_channel:\n command_list.append(f'no dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def disable_custom_kernel():\n global _TF_ADDONS_PY_OPS\n _TF_ADDONS_PY_OPS = True",
"def systemOff():\n # Updated 11/19/16\n I2C.write_byte_data(Valve_bus, pinOut_O, 0x00 )\n I2C.write_byte_data(Pump_Mag_bus, pinOut_O, 0x00)",
"def unconfigure_stackwise_virtual_dual_active_interfaces(device, dad_links):\n # build a list of commands to send\n command_list = []\n output = ''\n for interface in dad_links:\n command_list.append(f'interface {interface}')\n command_list.append(f'no stackwise-virtual dual-active-detection')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure stackwise-virtual dual-active-detection interfaces')\n return output",
"def unconfigure_stackwise_virtual_interfaces(device, svl_links, timeout=60):\n # Single command 'no stackwise-virtual' will remove configuration'\n dialog = Dialog([\n Statement(\n pattern=r\"WARNING\\: Unconfiguring last active port\\, this may result in stack-split\\. Are you sure\\? \\[yes\\/no\\]\\:\",\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False)\n ])\n command_list = []\n for interface, link_id in svl_links.items():\n command_list.append(f'interface {interface}')\n command_list.append(f'no stackwise-virtual link {link_id}')\n try:\n output = device.configure(\n command_list,\n reply=dialog,\n timeout=timeout,\n append_error_pattern=['.*Command cannot be executed.*'])\n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure stackwise-virtual interfaces')",
"def test_configure_lvm_storage_unforced_remove_default(self, reduce_lvm):\n devices = ['/dev/fakevbd']\n cinder_utils.configure_lvm_storage(devices, 'test', False, True)\n reduce_lvm.assert_called_with('test')",
"def _doDisableRegulation(self):\n self._cmdRegulOff()",
"def allOff():\n # Get/set special slice IDs\n root_xid = bwlimit.get_xid(\"root\")\n default_xid = bwlimit.get_xid(\"default\")\n kernelhtbs = gethtbs(root_xid, default_xid)\n if len(kernelhtbs):\n logger.log(\"bwmon: Disabling all running HTBs.\")\n for htb in kernelhtbs.keys(): bwlimit.off(htb, dev = dev_default)",
"def turnLightingSystemOff():\n dislin.light('OFF')",
"def test_no_overprovision(self):\n command_line = (\n self._MENU + [self._POOLNAME] + self._DEVICES + [\"--no-overprovision\"]\n )\n TEST_RUNNER(command_line)",
"def configure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active detection pagp')\n if port_channel:\n command_list.append(f'dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def disable_vae_slicing(self):\n self.vae.disable_slicing()",
"def test_is_virtual0001(self, monkeypatch):\n\n def fake_collect_dmesg_lines(_):\n return [\n 'real mem = 17074860032 (16283MB)',\n 'avail mem = 16550350848 (15783MB)',\n 'virtio3 at pci0 dev 4 function 0 \"OpenBSD VMM Control\" rev 0x00',\n ]\n monkeypatch.setattr(OpenBSDPlatform, \"_collect_dmesg_lines\", fake_collect_dmesg_lines)\n platform = OpenBSDPlatform(None, None)\n assert platform.is_virtual()",
"def unload_kernel_module(params) -> None:\n print(\"Unloading kernel module...\")\n if os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\") == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"async def disable_paging(self):\n\n # Display info message\n log.info(\"disable_paging\")\n\n # Send command to the device to disable paging\n await self.send_command(self.cmd_disable_paging)",
"def _force_off(self):\n self._interface.set('fw_wp_vref', self._fw_wp_vref)\n self._interface.set('fw_wp_en', 'on')\n self._interface.set('fw_wp', 'off')",
"def _kill_kernel(self):",
"def safe_boot_disabled(self, safe_boot_disabled):\n\n self._safe_boot_disabled = safe_boot_disabled",
"def disableDebugLoadOutput(self):\n key = \"NatlinkmainDebugLoad\"\n self.userregnl.delete(key)",
"def is_custom_kernel_disabled():\n return _TF_ADDONS_PY_OPS",
"def resetDeviceStates(self):",
"def detach(target, sysip):\n click.secho(\"Attempting to detach template.\")\n\n payload = {\n \"deviceType\":\"vedge\",\n \"devices\":[ \n {\n \"deviceId\":str(target),\n \"deviceIP\":str(sysip)\n }\n ]\n }\n\n url = base_url + \"/template/config/device/mode/cli\"\n\n response = requests.post(url=url, data=json.dumps(payload), headers=header, verify=False)\n if response.status_code == 200:\n id = response.json()[\"id\"]\n url = base_url + \"/device/action/status/\" + str(id)\n while(1):\n status_res = requests.get(url,headers=header,verify=False)\n if status_res.status_code == 200:\n push_status = status_res.json()\n if push_status['summary']['status'] == \"done\":\n if 'Success' in push_status['summary']['count']:\n print(\"Changed configuration mode to CLI\")\n elif 'Failure' in push_status['summary']['count']:\n print(\"Failed to change configuration mode to CLI\")\n exit()\n break\n else:\n print(\"Failed to detach template with error \" + response.text)\n exit()",
"def DisableByRunIf(self):\n self.run_if = 'False'",
"def disable(ctx):\n config_db = ConfigDBConnector()\n config_db.connect()\n config_db.mod_entry(\"NAT_GLOBAL\", \"Values\", {\"admin_mode\": \"disabled\"})",
"def test_configure_lvm_storage_force_removemissing(self, reduce_lvm):\n devices = ['/dev/fakevbd']\n cinder_utils.configure_lvm_storage(\n devices, 'test', False, True, remove_missing_force=True)\n reduce_lvm.assert_called_with('test', extra_args=['--force'])",
"def disable_detector():\n global enable_detector, enable_detection, detector\n\n detector = None\n\n if detector is None:\n print(\"Detector stopped...\")\n enable_detection = False\n enable_detector = ''\n\n return render_settings_view()",
"def remove_environment_texture(self):\n self.UseImageBasedLightingOff()\n self.SetEnvironmentTexture(None)\n self.Modified()"
]
| [
"0.706992",
"0.68339354",
"0.64037496",
"0.6261719",
"0.5786086",
"0.5731109",
"0.57200205",
"0.5654615",
"0.5543812",
"0.5446901",
"0.5438316",
"0.5315058",
"0.52804273",
"0.52761424",
"0.52732956",
"0.52600944",
"0.5244586",
"0.52351683",
"0.5188351",
"0.518542",
"0.5179736",
"0.514233",
"0.5133458",
"0.5126455",
"0.5118603",
"0.51156354",
"0.5110472",
"0.50972974",
"0.5089345",
"0.5070939"
]
| 0.79329497 | 0 |
Enables interface as dual-active-detection interface on target device | def configure_stackwise_virtual_dual_active_interfaces(device, dad_links):
# build a list of commands to send
command_list = []
output = ''
for interface in dad_links:
command_list.append(f'interface {interface}')
command_list.append(f'stackwise-virtual dual-active-detection')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to configure stackwise-virtual dual-active-detection interfaces')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def enable(self):\n # Netmiko reports enable and config mode as being enabled\n if not self.native.check_enable_mode():\n self.native.enable()\n # Ensure device is not in config mode\n if self.native.check_config_mode():\n self.native.exit_config_mode()\n\n log.debug(\"Host %s: Device enabled.\", self.host)",
"def target_interfaces(self):",
"def target_interfaces(self):",
"def unconfigure_stackwise_virtual_dual_active_interfaces(device, dad_links):\n # build a list of commands to send\n command_list = []\n output = ''\n for interface in dad_links:\n command_list.append(f'interface {interface}')\n command_list.append(f'no stackwise-virtual dual-active-detection')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure stackwise-virtual dual-active-detection interfaces')\n return output",
"def SetAutoDetectUi(Ui):\n SPI.DeviceList[\"spi_rpi_ui\"]= Ui \n I2C.DeviceList[\"i2c_rpi_ui\"]= Ui",
"def enable_radio(self):\n self.acquire_response(b'AT*R1')",
"def _is_vlan_router_interface_supported(self):",
"def on(config: dict):\n switch_device(config, config[\"inching\"], \"on\")",
"def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901\n\n hass.data[DOMAIN] = {}\n\n # Parse configuration into a dict of device name to physical address\n # represented as a list of four elements.\n device_aliases = {}\n devices = base_config[DOMAIN].get(CONF_DEVICES, {})\n _LOGGER.debug(\"Parsing config %s\", devices)\n device_aliases.update(parse_mapping(devices))\n _LOGGER.debug(\"Parsed devices: %s\", device_aliases)\n\n platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)\n\n loop = (\n # Create own thread if more than 1 CPU\n hass.loop\n if multiprocessing.cpu_count() < 2\n else None\n )\n host = base_config[DOMAIN].get(CONF_HOST)\n display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)\n if host:\n adapter = TcpAdapter(host, name=display_name, activate_source=False)\n else:\n adapter = CecAdapter(name=display_name[:12], activate_source=False)\n hdmi_network = HDMINetwork(adapter, loop=loop)\n\n def _adapter_watchdog(now=None):\n _LOGGER.debug(\"Reached _adapter_watchdog\")\n event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n if not adapter.initialized:\n _LOGGER.info(\"Adapter not initialized; Trying to restart\")\n hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)\n adapter.init()\n\n _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)\n\n @callback\n def _async_initialized_callback(*_: Any):\n \"\"\"Add watchdog on initialization.\"\"\"\n return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n\n hdmi_network.set_initialized_callback(_async_initialized_callback)\n\n def _volume(call: ServiceCall) -> None:\n \"\"\"Increase/decrease volume and mute/unmute system.\"\"\"\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown command %s\", cmd)\n\n def _process_volume(cmd, att):\n if isinstance(att, (str,)):\n att = att.strip()\n if att == CMD_PRESS:\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n elif att == CMD_RELEASE:\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n else:\n att = 1 if att == \"\" else int(att)\n for _ in range(0, att):\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n\n def _tx(call: ServiceCall) -> None:\n \"\"\"Send CEC command.\"\"\"\n data = call.data\n if ATTR_RAW in data:\n command = CecCommand(data[ATTR_RAW])\n else:\n src = data.get(ATTR_SRC, ADDR_UNREGISTERED)\n dst = data.get(ATTR_DST, ADDR_BROADCAST)\n if ATTR_CMD in data:\n cmd = data[ATTR_CMD]\n else:\n _LOGGER.error(\"Attribute 'cmd' is missing\")\n return\n if ATTR_ATT in data:\n if isinstance(data[ATTR_ATT], (list,)):\n att = data[ATTR_ATT]\n else:\n att = reduce(lambda x, y: f\"{x}:{y:x}\", data[ATTR_ATT])\n else:\n att = \"\"\n command = CecCommand(cmd, dst, src, att)\n hdmi_network.send_command(command)\n\n def _standby(call: ServiceCall) -> None:\n hdmi_network.standby()\n\n def _power_on(call: ServiceCall) -> None:\n hdmi_network.power_on()\n\n def _select_device(call: ServiceCall) -> 
None:\n \"\"\"Select the active device.\"\"\"\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)\n\n def _update(call: ServiceCall) -> None:\n \"\"\"Update if device update is needed.\n\n Called by service, requests CEC network to update data.\n \"\"\"\n hdmi_network.scan()\n\n def _new_device(device):\n \"\"\"Handle new devices which are detected by HDMI network.\"\"\"\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )\n\n def _shutdown(call):\n hdmi_network.stop()\n\n def _start_cec(callback_event):\n \"\"\"Register services and start HDMI network to watch for devices.\"\"\"\n hass.services.register(\n DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA\n )\n hass.services.register(\n DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA\n )\n hass.services.register(\n DOMAIN,\n SERVICE_UPDATE_DEVICES,\n _update,\n schema=SERVICE_UPDATE_DEVICES_SCHEMA,\n )\n hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)\n hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)\n hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)\n\n hdmi_network.set_new_device_callback(_new_device)\n hdmi_network.start()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)\n return True",
"def recognize_device(self, device):\n return False",
"def enable(self, coil):\n self.log.info(\"RASPDriver.Enable(%s %s)\" % (coil.config['label'], coil.hw_driver.number))\n self.platform.communicator.driver_enable(coil.hw_driver.number)\n pass",
"def force_switch_on(self):\n self.turn_on_modem()",
"async def async_turn_on(self):\n path = \"/interface\"\n param = \"default-name\"\n if \"-\" in self._data[\"port-mac-address\"]:\n param = \"name\"\n value = self._data[param]\n mod_param = \"disabled\"\n mod_value = False\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n\n if self._data[\"poe-out\"] == \"off\":\n path = \"/interface/ethernet\"\n self._ctrl.set_value(path, param, value, \"poe-out\", \"auto-on\")\n\n await self._ctrl.force_update()",
"def enable_robot(self):\n self._franka_robot_enable_interface.enable()",
"def use_spi():\n _LIB.oled_click_use_spi()",
"def configure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active detection pagp')\n if port_channel:\n command_list.append(f'dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def _support_vlan_router_interfaces(self):\n pass",
"def turnOnSdkMode(self):\n \n command = b\"\\x90\\x01\\x01\"\n #print(\"turnOnSdkMode run, command: \")\n #print(command)\n \n self.sendCommand(command)",
"def configure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Disables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')\n return output",
"def enable_dual_TTS(self):\n\t\t\n\t\tself.isActiveDualTTS = True\n\t\tself.change_TTS_engine()",
"def SetAutodetectLCD(lcd):\n SPI.DeviceList[\"spi_lcd\"]= lcd \n I2C.DeviceList[\"i2c_lcd\"]= lcd",
"def setLoopback(self, enable): \n if enable == True:\n DPxEnableDoutDinLoopback()\n else:\n DPxDisableDoutDinLoopback()",
"def set_toggle_devices_enabled(self, track, xclip, ident, value = None):\n for device in track.devices:\n if(hasattr(device, 'parameters')):\n self._parent._device_actions.set_device_on_off(device, track, xclip, ident);",
"def EnableI2c(self):\n\n try:\n\n if os.path.exists('/sys/bus/i2c/devices/i2c-0/0-0060'):\n result = \" - I2C device already enabled!\"\n\n else:\n\n with open('/sys/bus/i2c/devices/i2c-0/new_device', 'a') as f:\n # 'echo '+i2c_device.driver+' '+i2c_device.addr+ '\n f.write('mpl3115 0x60')\n result = \" - I2C device enabled!\"\n\n LOG.info(result)\n\n except Exception as err:\n LOG.error(\"Error enabling I2C (device1): \" + str(err))",
"def turn_on(self, **kwargs):\n self._state = True\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device,'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":1 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":1 }', 5)",
"def use_ethernet_connection(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def enable(self):\n if not self.labExperiment:\n super().enable()\n else:\n self.connection.command('open_dm', self.DMserial)\n status = self.connection.query('get_status')\n assert status == 0, 'Error connecting to DM. Error: ' + str(status)\n numActProfile = self.connection.query('num_actuators')\n assert numActProfile == self.numActProfile, 'Wrong number of profile actuators entered'\n print(\"'BM1k' is now enabled\")",
"async def test_device_mode_roller(hass, coap_wrapper, monkeypatch):\n assert coap_wrapper\n\n monkeypatch.setitem(coap_wrapper.device.settings, \"mode\", \"roller\")\n hass.async_create_task(\n hass.config_entries.async_forward_entry_setup(coap_wrapper.entry, SWITCH_DOMAIN)\n )\n await hass.async_block_till_done()\n assert hass.states.get(\"switch.test_name_channel_1\") is None",
"def device_connect(self):\n pass",
"def test_multi_ap_disabled_on_ap(dev, apdev):\n run_multi_ap_association(dev, apdev, 0, wait_connect=False)\n ev = dev[0].wait_event([\"CTRL-EVENT-DISCONNECTED\",\n \"CTRL-EVENT-CONNECTED\"],\n timeout=5)\n dev[0].request(\"DISCONNECT\")\n if ev is None:\n raise Exception(\"Connection result not reported\")\n if \"CTRL-EVENT-DISCONNECTED\" not in ev:\n raise Exception(\"Unexpected connection result\")"
]
| [
"0.59566253",
"0.58459336",
"0.58459336",
"0.58156496",
"0.5751097",
"0.5745842",
"0.55513084",
"0.55439985",
"0.55354583",
"0.55209297",
"0.54976976",
"0.54834133",
"0.5471087",
"0.5468166",
"0.5451829",
"0.5439863",
"0.5418576",
"0.54149604",
"0.5405614",
"0.5400771",
"0.53993297",
"0.5391024",
"0.53650147",
"0.5353434",
"0.5348492",
"0.5347391",
"0.5337085",
"0.5330112",
"0.5309748",
"0.5303928"
]
| 0.6329942 | 0 |
Disables interface as dual-active-detection interface on target device | def unconfigure_stackwise_virtual_dual_active_interfaces(device, dad_links):
# build a list of commands to send
command_list = []
output = ''
for interface in dad_links:
command_list.append(f'interface {interface}')
command_list.append(f'no stackwise-virtual dual-active-detection')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to unconfigure stackwise-virtual dual-active-detection interfaces')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def disable_radio(self):\n self.acquire_response(b'AT*R0')",
"def test_multi_ap_disabled_on_ap(dev, apdev):\n run_multi_ap_association(dev, apdev, 0, wait_connect=False)\n ev = dev[0].wait_event([\"CTRL-EVENT-DISCONNECTED\",\n \"CTRL-EVENT-CONNECTED\"],\n timeout=5)\n dev[0].request(\"DISCONNECT\")\n if ev is None:\n raise Exception(\"Connection result not reported\")\n if \"CTRL-EVENT-DISCONNECTED\" not in ev:\n raise Exception(\"Unexpected connection result\")",
"def switch_off(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)",
"def disable_discovery(self):",
"def unconfigure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Enables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'no dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Disable global stackwise-virtual dual-active recovery-reload')\n return output",
"def test_inactive_one_selected(self):\n with InterfacesSettings() as s:\n PageObject.click_element(s, 'interfaces', 'interface_checkbox', 0)\n self.assertFalse(s.bond_interfaces.is_enabled())\n self.assertFalse(s.unbond_interfaces.is_enabled())",
"async def async_turn_off(self):\n path = \"/interface\"\n param = \"default-name\"\n if \"-\" in self._data[\"port-mac-address\"]:\n param = \"name\"\n value = self._data[param]\n mod_param = \"disabled\"\n mod_value = True\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n\n if self._data[\"poe-out\"] == \"auto-on\":\n path = \"/interface/ethernet\"\n self._ctrl.set_value(path, param, value, \"poe-out\", \"off\")\n\n await self._ctrl.async_update()",
"def disable_switch_port(self, mgr, interface):\n confstr = snipp.CMD_NO_SWITCHPORT % (interface)\n confstr = self.create_xml_snippet(confstr)\n LOG.debug(\"NexusDriver: %s\" % confstr)\n mgr.edit_config(target='running', config=confstr)",
"def disable_detector(self):\n detector_id = self.list_detector()\n if detector_id:\n try:\n response = self.client.update_detector(DetectorId=detector_id, Enable=False)\n print(detector_id, 'has been disabled')\n return True\n except ClientError as e:\n print(e.response['Error']['Code'])\n return False\n else:\n print('no detector has been found.')\n return False",
"def disable():\n ret = _LIB.oled_click_disable()\n if ret < 0:\n raise Exception(\"oled click disable failed\")",
"def is_disable_apic(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsDisableAPIC', self.handle))",
"def disable_detector():\n global enable_detector, enable_detection, detector\n\n detector = None\n\n if detector is None:\n print(\"Detector stopped...\")\n enable_detection = False\n enable_detector = ''\n\n return render_settings_view()",
"def unconfigure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n if port_channel:\n command_list.append(f'no dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def on_disable(self) -> None:\n self._cancel_automation()",
"def usb_connectivity_disabled(self):\n return self._usb_connectivity_disabled",
"def disable_irq() -> int:",
"def disable(self):\n logging.debug(\"Disabling switch %s\" % self.name)\n self.disabled = True",
"def disable():\n if _status_apf():\n return __apf_cmd(\"-f\")",
"def _disable(self):\n self.enabled = False",
"def test_bond_buttons_inactive(self):\n self.assertFalse(PageObject.find_element\n (InterfacesSettings(), 'bond_interfaces').\n is_enabled())\n self.assertFalse(PageObject.find_element\n (InterfacesSettings(), 'unbond_interfaces').\n is_enabled())",
"def disable(self) -> None:",
"def _doDisableRegulation(self):\n self._cmdRegulOff()",
"def turn_off(self, **kwargs):\n self.smartplug.turn_off()",
"async def async_turn_off(self):\n path = \"/ip/firewall/nat\"\n param = \".id\"\n value = None\n for uid in self._ctrl.data[\"nat\"]:\n if (\n self._ctrl.data[\"nat\"][uid][\"name\"]\n == f\"{self._data['protocol']}:{self._data['dst-port']}\"\n ):\n value = self._ctrl.data[\"nat\"][uid][\".id\"]\n\n mod_param = \"disabled\"\n mod_value = True\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n await self._ctrl.async_update()",
"def DisableByRunIf(self):\n self.run_if = 'False'",
"def _nixie_disable():\n # type: () -> None\n GPIO.output(NIXIE_nOE, GPIO.HIGH)",
"def _disable_wifi_ap(self):\n call(['systemctl', 'disable', 'hostapd', ])\n call(['systemctl', 'disable', 'dnsmasq', ])\n\n context = self._get_ap_context()\n self._write_system_template('/etc/network/interfaces', 'interfaces.conf', context)\n self._write_system_template('/etc/dhcpcd.conf', 'dhcpcd.conf', context)",
"def incompatible_device(self) -> bool:\n return pulumi.get(self, \"incompatible_device\")",
"def the_user_should_not_be_able_to_connect_to_another_device():\n print(\"Trying to connect 2 devices at once\")\n bln_result = web_app.connect_to_device2()\n assert(bln_result, False)"
]
| [
"0.66088223",
"0.6341192",
"0.63072246",
"0.6244302",
"0.62327534",
"0.61326903",
"0.6128818",
"0.60196",
"0.59937257",
"0.59796137",
"0.5866469",
"0.5817195",
"0.5772791",
"0.5772595",
"0.57619154",
"0.5754597",
"0.5747553",
"0.5739801",
"0.57308346",
"0.57193846",
"0.5710361",
"0.5681966",
"0.5680179",
"0.56778544",
"0.56719124",
"0.56636983",
"0.56543696",
"0.5644131",
"0.5625907",
"0.56235003"
]
| 0.65191627 | 1 |
Enables global stackwise-virtual dual-active recovery reload on target device | def configure_global_dual_active_recovery_reload_disable(device):
# build a list of commands to send
# Add stackwise-virtual as first element in the list
# Disables dual-active recovery-reload
command_list = ['stackwise-virtual']
command_list.append(f'dual-active recovery-reload-disable')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unconfigure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Enables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'no dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Disable global stackwise-virtual dual-active recovery-reload')\n return output",
"def _doEnableRegulation(self):\n self._cmdRegulOn()",
"def set_recovery_tool(enabled):\n if k8s.exists('configmap', 'kube-system', 'ibm-worker-recovery-checks'):\n config_map = k8s.get('configmap', 'kube-system', 'ibm-worker-recovery-checks')\n for check in config_map['data']:\n check_config = json.loads(config_map['data'][check])\n if check_config['Check'] == 'KUBEAPI':\n check_config['Enabled'] = enabled\n config_map['data'][check] = json.dumps(check_config, ensure_ascii=False) # TODO: Remove ensure_ascii when migration to py3 is complete\n k8s.apply(config_map)\n else:\n logger.info('IBM Auto-Recovery tool is not present')",
"def schedule_system_restart():\n global _force_system_restart\n _force_system_restart = True",
"def _reloadFabric(self, fabric):\n\n # Execute command to poweroff/on\n self.device.configure(\n 'poweroff xbar {}\\nno poweroff xbar {}'.format(fabric, fabric))",
"def enableDebugLoadOutput(self):\n key = \"NatlinkmainDebugLoad\"\n self.userregnl.set(key, 1)",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def reload_config():\n subprocess.run([SUPERVISOR_CMD, \"reload\"])",
"def unconfigure_global_stackwise_virtual(device):\n # Single command 'no stackwise-virtual' will remove configuration\n command = 'no stackwise-virtual'\n try:\n output = device.configure(command)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to remove global stackwise-virtual')\n return output",
"def reconfiguration_scalability(self):\n\n self.check_run('reconfiguration_scalability')\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"reconfigure_nova_ephemeral_disk\")\n\n self.show_step(2)\n cluster_id = self.fuel_web.get_last_created_cluster()\n config = utils.get_config_template('nova_disk')\n structured_config_nova = get_structured_config_dict(config)\n config = utils.get_config_template('keystone')\n structured_config_keystone = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(config,\n cluster_id,\n role='controller')\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n\n self.show_step(3)\n task = self.fuel_web.client.apply_configuration(cluster_id,\n role='controller')\n self.fuel_web.assert_task_success(task, timeout=900, interval=5)\n\n self.show_step(4)\n self.check_config_on_remote(controllers, structured_config_keystone)\n\n self.show_step(5)\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n time_expiration = config[\n 'keystone_config']['token/expiration']['value']\n self.check_token_expiration(os_conn, time_expiration)\n\n self.show_step(6)\n bs_nodes = [x for x in self.env.d_env.get_nodes()\n if x.name == 'slave-05' or x.name == 'slave-06']\n self.env.bootstrap_nodes(bs_nodes)\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-05': ['compute', 'cinder']})\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-06': ['controller']})\n\n self.show_step(7)\n self.fuel_web.verify_network(cluster_id)\n\n self.show_step(8)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.show_step(9)\n self.fuel_web.run_ostf(cluster_id=cluster_id)\n\n self.show_step(10)\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n computes = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['compute'])\n target_controller = [x for x in controllers\n if 'slave-06' in x['name']]\n target_compute = [x for x in computes\n if 'slave-05' in x['name']]\n self.check_config_on_remote(target_controller,\n structured_config_keystone)\n\n self.show_step(11)\n self.check_config_on_remote(target_compute, structured_config_nova)\n\n self.show_step(12)\n self.show_step(13)\n self.show_step(14)\n self.show_step(15)\n self.show_step(16)\n\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n hypervisor_name = target_compute[0]['fqdn']\n self.check_nova_ephemeral_disk(os_conn, cluster_id,\n hypervisor_name=hypervisor_name)\n\n self.show_step(17)\n self.check_token_expiration(os_conn, time_expiration)\n\n self.env.make_snapshot(\"reconfiguration_scalability\", is_make=True)",
"def reset_10gbe():\n snap.write_int('valid_en',0)\n snap.write_int('rst',1)\n time.sleep(1)\n snap.write_int('rst',0)\n snap.write_int('valid_en',3)",
"def restart(self):\n self.iic.set_flag(REG.CTRL_REG2.RST)\n time.sleep(0.01)\n self.conf = Configuration()",
"def configure_global_stackwise_virtual(device, domain=None):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Add domain only if domain argument has been provided\n command_list = ['stackwise-virtual']\n if domain:\n command_list.append(f'domain {domain}')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure global stackwise-virtual')\n return output",
"def _doDisableRegulation(self):\n self._cmdRegulOff()",
"def reconfig_runtime(remote):\n env = get_env()\n freconfig = remote.get_function(\"tvm.contrib.vta.reconfig_runtime\")\n freconfig(env.pkg.cfg_json)",
"def supports_reboot(self):\n self.__not_implemented()",
"def prelaunch_wind(conf,logger):\n threshold=conf.getint('config','min_wind_for_init',0)\n wind=conf.syndat.wmax\n if wind<threshold:\n jlogger.info('Wind %d < %d so disabling GSI and relocation.'%(\n wind,threshold))\n logger.info('Wind %d<%d - run_gsi, run_vortexinit and run_ens_vortexinit overridden to \"no\"'%(\n wind,threshold))\n conf.set('config','run_gsi','no')\n conf.set('config','run_vortexinit','no')\n conf.set('config','run_ens_vortexinit','no')",
"def reboot(self, node):",
"async def reload_platform(self) -> None:",
"def system_upgrade(cfg, target):\n mycfg = {'system_upgrade': {'enabled': False}}\n config.merge_config(mycfg, cfg)\n mycfg = mycfg.get('system_upgrade')\n if not isinstance(mycfg, dict):\n LOG.debug(\"system_upgrade disabled by config. entry not a dict.\")\n return\n\n if not config.value_as_boolean(mycfg.get('enabled', True)):\n LOG.debug(\"system_upgrade disabled by config.\")\n return\n\n util.system_upgrade(target=target)",
"def reboot(self):\n raise NotImplementedError",
"def onToggleRt(self, enabled):\n if not self.terrain:\n self._props = self._parent.exportSettings\n self.terrain = TerrainSync(self._parent, self._props.terrainLOD)\n self.workpool = self._parent.workpool\n if enabled:\n self.simrt = self._parent.simrt\n else:\n self.simrt = None",
"def keep_vifs_bound():\n global EVT_TIMEOUT, NETSRS\n\n while bEndless:\n with xenapi_session() as x:\n try:\n vms = x.VM.get_all_records() # Get a list of VMs for multiple uses\n\n # If the host is in maintenance mode end it and auto start VMs\n host_ref = x.host.get_by_uuid(get_host_ref())\n if not x.host.get_enabled(host_ref):\n x.host.enable(host_ref) # End maintenance mode\n\n # Get a list of suitable VMs to start, ordered by ha restart priority\n autostart_list = [(vms[k]['order'], k, float(vms[k]['start_delay'])) for k in vms.keys() if (\n (not vms[k]['is_a_snapshot']) and\n (not vms[k]['is_a_template']) and\n (not vms[k]['is_control_domain']) and\n (('auto_poweron' in vms[k]['other_config']) and (vms[k]['other_config']['auto_poweron'])) and\n ('Halted' == vms[k]['power_state'])\n )]\n # We avoid .sort with a lambda to be able to delete the vms list\n from operator import itemgetter\n autostart_list.sort(key=itemgetter(0))\n\n # Attempt to start the VMs, while respecting the delays\n for i in range(len(autostart_list)):\n vm_ref = autostart_list[i][1]\n try:\n x.VM.start(vm_ref, False, False)\n except:\n pass\n finally:\n from time import sleep\n if i < (len(autostart_list) - 1):\n sleep(autostart_list[i][2])\n\n del autostart_list # Clean up\n\n # Find the ObscureRef of the target VM\n try:\n vmref = [k for k in vms.keys() if vms[k]['name_label'] == VMNAME][0]\n except:\n print('Unable to find a VM named \"{}\"'.format(VMNAME))\n exit(4)\n vifs = x.VM.get_VIFs(vmref)\n bNetworkFound = False\n for vif in vifs:\n if SSPNNAME == x.network.get_record(x.VIF.get_network(vif))['name_label']:\n bNetworkFound = True\n break\n if not bNetworkFound:\n print('Unable to find a network named \"{}\" attached to the \"{}\" VM'.format(SSPNNAME, VMNAME))\n exit(5)\n\n # Clean up\n del vifs\n del vms\n\n # Non-blocking listen for VM events\n token = '' # Initial token\n while bEndless:\n output = x.event_from(['VM'], token, EVT_TIMEOUT)\n token = output['token'] # Current token\n\n for event in output['events']:\n # Check the IP assigned to the VIFs of the target VM, if it's running\n if (('add' == event['operation']) or\n ('mod' == event['operation'])) and \\\n (vmref == event['ref']) and \\\n ('Running' == x.VM.get_power_state(vmref)):\n if 'snapshot' not in event:\n continue\n vifs = event['snapshot']['VIFs'] # Virtual interfaces list\n for vif in vifs:\n net = x.VIF.get_network(vif) # Network ref\n netrec = x.network.get_record(net) # Network record\n if SSPNNAME != netrec['name_label']:\n continue\n if rebind_ip_address(netrec['bridge']) and NETSRS:\n for sr in NETSRS:\n # Check if the destination is live for a maximum of 2 minutes and a half,\n # and if it goes live replug the target SRs\n (sr_uuid, pingTarget), = sr.items()\n p = Ping(pingTarget, 5000)\n if p.live(30):\n plug_pbds(x, sr_uuid)\n\n except (socket.error, XmlRPCFault):\n # Toolstack restarted/unavailable or SIGTERM'd\n pass",
"def event_m20_11_5000():\n \"\"\"State 0,2: [Preset] Living Altar_SubState\"\"\"\n assert event_m20_11_x82()\n \"\"\"State 1: Rerun\"\"\"\n RestartMachine()",
"def reloadMode(self): \n\t\tpass",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def preservation_config_after_reset_and_preconfigured_deploy(self):\n\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"reconfigure_ml2_vlan_range\")\n\n self.show_step(2)\n cluster_id = self.fuel_web.get_last_created_cluster()\n self.fuel_web.stop_reset_env_wait(cluster_id)\n\n self.show_step(3)\n config = utils.get_config_template('nova_cpu')\n structured_config_nova = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(config,\n cluster_id,\n role='controller')\n config = utils.get_config_template('neutron')\n structured_config_neutron = get_structured_config_dict(config)\n\n self.show_step(4)\n self.fuel_web.wait_nodes_get_online_state(\n self.env.d_env.nodes().slaves[:4], timeout=10 * 60)\n\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.show_step(5)\n self.fuel_web.run_ostf(\n cluster_id=cluster_id)\n\n self.show_step(6)\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n structured_config = {}\n structured_config.update(structured_config_neutron)\n structured_config.update(structured_config_nova)\n self.check_config_on_remote(controllers, structured_config)\n\n self.show_step(7)\n self.show_step(8)\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n self.check_ml2_vlan_range(os_conn)\n\n self.show_step(9)\n self.show_step(10)\n self.check_overcommit_ratio(os_conn, cluster_id)\n\n snapshot = \"preservation_config_after_reset_and_preconfigured_deploy\"\n self.env.make_snapshot(snapshot, is_make=True)",
"def test_enable_dpdk(self):\n logging.info('Pre-flight check')\n self._dpdk_pre_post_flight_check()\n self._ovs_br_ex_port_is_system_interface()\n\n self.enable_hugepages_vfio_on_hvs_in_vms(4)\n with self.config_change(\n {\n 'enable-dpdk': False,\n 'dpdk-driver': '',\n },\n {\n 'enable-dpdk': True,\n 'dpdk-driver': 'vfio-pci',\n },\n application_name='ovn-chassis'):\n logging.info('Checking openvswitch-switch-dpdk is installed')\n self._openvswitch_switch_dpdk_installed()\n logging.info('Checking DPDK is configured in OVS')\n self._ovs_dpdk_init_configured()\n logging.info('Checking DPDK is successfully initialized in OVS')\n self._ovs_dpdk_initialized()\n logging.info('Checking that br-ex configed with DPDK interface...')\n self._ovs_br_ex_port_is_dpdk_interface()\n logging.info('and is not in error.')\n self._ovs_br_ex_interface_not_in_error()\n\n logging.info('Post-flight check')\n self._dpdk_pre_post_flight_check()\n\n self.disable_hugepages_vfio_on_hvs_in_vms()\n self._ovs_br_ex_port_is_system_interface()",
"def test_patch_pci_switch(self):\n pass",
"def pre_global_vrouter_config_update(self, resource_id, resource_dict):\n pass"
]
| [
"0.7184057",
"0.58807933",
"0.57247466",
"0.56831115",
"0.5650668",
"0.5400074",
"0.53629565",
"0.5327328",
"0.5324608",
"0.5303408",
"0.5283625",
"0.52727073",
"0.5241073",
"0.5214699",
"0.5210306",
"0.5202273",
"0.52008003",
"0.51674765",
"0.51405495",
"0.51349705",
"0.5121158",
"0.5077522",
"0.5061854",
"0.5056652",
"0.5054048",
"0.5038127",
"0.5032996",
"0.49971187",
"0.49880168",
"0.49816877"
]
| 0.7698362 | 0 |
Enables global stackwisevirtual dualactive recovery reload on target device | def unconfigure_global_dual_active_recovery_reload_disable(device):
# build a list of commands to send
# Add stackwise-virtual as first element in the list
# Enables dual-active recovery-reload
command_list = ['stackwise-virtual']
command_list.append(f'no dual-active recovery-reload-disable')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to Disable global stackwise-virtual dual-active recovery-reload')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def configure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Disables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')\n return output",
"def _doEnableRegulation(self):\n self._cmdRegulOn()",
"def set_recovery_tool(enabled):\n if k8s.exists('configmap', 'kube-system', 'ibm-worker-recovery-checks'):\n config_map = k8s.get('configmap', 'kube-system', 'ibm-worker-recovery-checks')\n for check in config_map['data']:\n check_config = json.loads(config_map['data'][check])\n if check_config['Check'] == 'KUBEAPI':\n check_config['Enabled'] = enabled\n config_map['data'][check] = json.dumps(check_config, ensure_ascii=False) # TODO: Remove ensure_ascii when migration to py3 is complete\n k8s.apply(config_map)\n else:\n logger.info('IBM Auto-Recovery tool is not present')",
"def schedule_system_restart():\n global _force_system_restart\n _force_system_restart = True",
"def _reloadFabric(self, fabric):\n\n # Execute command to poweroff/on\n self.device.configure(\n 'poweroff xbar {}\\nno poweroff xbar {}'.format(fabric, fabric))",
"def enableDebugLoadOutput(self):\n key = \"NatlinkmainDebugLoad\"\n self.userregnl.set(key, 1)",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def reload_config():\n subprocess.run([SUPERVISOR_CMD, \"reload\"])",
"def unconfigure_global_stackwise_virtual(device):\n # Single command 'no stackwise-virtual' will remove configuration\n command = 'no stackwise-virtual'\n try:\n output = device.configure(command)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to remove global stackwise-virtual')\n return output",
"def reconfiguration_scalability(self):\n\n self.check_run('reconfiguration_scalability')\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"reconfigure_nova_ephemeral_disk\")\n\n self.show_step(2)\n cluster_id = self.fuel_web.get_last_created_cluster()\n config = utils.get_config_template('nova_disk')\n structured_config_nova = get_structured_config_dict(config)\n config = utils.get_config_template('keystone')\n structured_config_keystone = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(config,\n cluster_id,\n role='controller')\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n\n self.show_step(3)\n task = self.fuel_web.client.apply_configuration(cluster_id,\n role='controller')\n self.fuel_web.assert_task_success(task, timeout=900, interval=5)\n\n self.show_step(4)\n self.check_config_on_remote(controllers, structured_config_keystone)\n\n self.show_step(5)\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n time_expiration = config[\n 'keystone_config']['token/expiration']['value']\n self.check_token_expiration(os_conn, time_expiration)\n\n self.show_step(6)\n bs_nodes = [x for x in self.env.d_env.get_nodes()\n if x.name == 'slave-05' or x.name == 'slave-06']\n self.env.bootstrap_nodes(bs_nodes)\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-05': ['compute', 'cinder']})\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-06': ['controller']})\n\n self.show_step(7)\n self.fuel_web.verify_network(cluster_id)\n\n self.show_step(8)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.show_step(9)\n self.fuel_web.run_ostf(cluster_id=cluster_id)\n\n self.show_step(10)\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n computes = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['compute'])\n target_controller = [x for x in controllers\n if 'slave-06' in x['name']]\n target_compute = [x for x in computes\n if 'slave-05' in x['name']]\n self.check_config_on_remote(target_controller,\n structured_config_keystone)\n\n self.show_step(11)\n self.check_config_on_remote(target_compute, structured_config_nova)\n\n self.show_step(12)\n self.show_step(13)\n self.show_step(14)\n self.show_step(15)\n self.show_step(16)\n\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n hypervisor_name = target_compute[0]['fqdn']\n self.check_nova_ephemeral_disk(os_conn, cluster_id,\n hypervisor_name=hypervisor_name)\n\n self.show_step(17)\n self.check_token_expiration(os_conn, time_expiration)\n\n self.env.make_snapshot(\"reconfiguration_scalability\", is_make=True)",
"def reset_10gbe():\n snap.write_int('valid_en',0)\n snap.write_int('rst',1)\n time.sleep(1)\n snap.write_int('rst',0)\n snap.write_int('valid_en',3)",
"def restart(self):\n self.iic.set_flag(REG.CTRL_REG2.RST)\n time.sleep(0.01)\n self.conf = Configuration()",
"def configure_global_stackwise_virtual(device, domain=None):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Add domain only if domain argument has been provided\n command_list = ['stackwise-virtual']\n if domain:\n command_list.append(f'domain {domain}')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure global stackwise-virtual')\n return output",
"def _doDisableRegulation(self):\n self._cmdRegulOff()",
"def reconfig_runtime(remote):\n env = get_env()\n freconfig = remote.get_function(\"tvm.contrib.vta.reconfig_runtime\")\n freconfig(env.pkg.cfg_json)",
"def supports_reboot(self):\n self.__not_implemented()",
"def prelaunch_wind(conf,logger):\n threshold=conf.getint('config','min_wind_for_init',0)\n wind=conf.syndat.wmax\n if wind<threshold:\n jlogger.info('Wind %d < %d so disabling GSI and relocation.'%(\n wind,threshold))\n logger.info('Wind %d<%d - run_gsi, run_vortexinit and run_ens_vortexinit overridden to \"no\"'%(\n wind,threshold))\n conf.set('config','run_gsi','no')\n conf.set('config','run_vortexinit','no')\n conf.set('config','run_ens_vortexinit','no')",
"def reboot(self, node):",
"async def reload_platform(self) -> None:",
"def system_upgrade(cfg, target):\n mycfg = {'system_upgrade': {'enabled': False}}\n config.merge_config(mycfg, cfg)\n mycfg = mycfg.get('system_upgrade')\n if not isinstance(mycfg, dict):\n LOG.debug(\"system_upgrade disabled by config. entry not a dict.\")\n return\n\n if not config.value_as_boolean(mycfg.get('enabled', True)):\n LOG.debug(\"system_upgrade disabled by config.\")\n return\n\n util.system_upgrade(target=target)",
"def reboot(self):\n raise NotImplementedError",
"def onToggleRt(self, enabled):\n if not self.terrain:\n self._props = self._parent.exportSettings\n self.terrain = TerrainSync(self._parent, self._props.terrainLOD)\n self.workpool = self._parent.workpool\n if enabled:\n self.simrt = self._parent.simrt\n else:\n self.simrt = None",
"def keep_vifs_bound():\n global EVT_TIMEOUT, NETSRS\n\n while bEndless:\n with xenapi_session() as x:\n try:\n vms = x.VM.get_all_records() # Get a list of VMs for multiple uses\n\n # If the host is in maintenance mode end it and auto start VMs\n host_ref = x.host.get_by_uuid(get_host_ref())\n if not x.host.get_enabled(host_ref):\n x.host.enable(host_ref) # End maintenance mode\n\n # Get a list of suitable VMs to start, ordered by ha restart priority\n autostart_list = [(vms[k]['order'], k, float(vms[k]['start_delay'])) for k in vms.keys() if (\n (not vms[k]['is_a_snapshot']) and\n (not vms[k]['is_a_template']) and\n (not vms[k]['is_control_domain']) and\n (('auto_poweron' in vms[k]['other_config']) and (vms[k]['other_config']['auto_poweron'])) and\n ('Halted' == vms[k]['power_state'])\n )]\n # We avoid .sort with a lambda to be able to delete the vms list\n from operator import itemgetter\n autostart_list.sort(key=itemgetter(0))\n\n # Attempt to start the VMs, while respecting the delays\n for i in range(len(autostart_list)):\n vm_ref = autostart_list[i][1]\n try:\n x.VM.start(vm_ref, False, False)\n except:\n pass\n finally:\n from time import sleep\n if i < (len(autostart_list) - 1):\n sleep(autostart_list[i][2])\n\n del autostart_list # Clean up\n\n # Find the ObscureRef of the target VM\n try:\n vmref = [k for k in vms.keys() if vms[k]['name_label'] == VMNAME][0]\n except:\n print('Unable to find a VM named \"{}\"'.format(VMNAME))\n exit(4)\n vifs = x.VM.get_VIFs(vmref)\n bNetworkFound = False\n for vif in vifs:\n if SSPNNAME == x.network.get_record(x.VIF.get_network(vif))['name_label']:\n bNetworkFound = True\n break\n if not bNetworkFound:\n print('Unable to find a network named \"{}\" attached to the \"{}\" VM'.format(SSPNNAME, VMNAME))\n exit(5)\n\n # Clean up\n del vifs\n del vms\n\n # Non-blocking listen for VM events\n token = '' # Initial token\n while bEndless:\n output = x.event_from(['VM'], token, EVT_TIMEOUT)\n token = output['token'] # Current token\n\n for event in output['events']:\n # Check the IP assigned to the VIFs of the target VM, if it's running\n if (('add' == event['operation']) or\n ('mod' == event['operation'])) and \\\n (vmref == event['ref']) and \\\n ('Running' == x.VM.get_power_state(vmref)):\n if 'snapshot' not in event:\n continue\n vifs = event['snapshot']['VIFs'] # Virtual interfaces list\n for vif in vifs:\n net = x.VIF.get_network(vif) # Network ref\n netrec = x.network.get_record(net) # Network record\n if SSPNNAME != netrec['name_label']:\n continue\n if rebind_ip_address(netrec['bridge']) and NETSRS:\n for sr in NETSRS:\n # Check if the destination is live for a maximum of 2 minutes and a half,\n # and if it goes live replug the target SRs\n (sr_uuid, pingTarget), = sr.items()\n p = Ping(pingTarget, 5000)\n if p.live(30):\n plug_pbds(x, sr_uuid)\n\n except (socket.error, XmlRPCFault):\n # Toolstack restarted/unavailable or SIGTERM'd\n pass",
"def event_m20_11_5000():\n \"\"\"State 0,2: [Preset] Living Altar_SubState\"\"\"\n assert event_m20_11_x82()\n \"\"\"State 1: Rerun\"\"\"\n RestartMachine()",
"def reloadMode(self): \n\t\tpass",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def preservation_config_after_reset_and_preconfigured_deploy(self):\n\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"reconfigure_ml2_vlan_range\")\n\n self.show_step(2)\n cluster_id = self.fuel_web.get_last_created_cluster()\n self.fuel_web.stop_reset_env_wait(cluster_id)\n\n self.show_step(3)\n config = utils.get_config_template('nova_cpu')\n structured_config_nova = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(config,\n cluster_id,\n role='controller')\n config = utils.get_config_template('neutron')\n structured_config_neutron = get_structured_config_dict(config)\n\n self.show_step(4)\n self.fuel_web.wait_nodes_get_online_state(\n self.env.d_env.nodes().slaves[:4], timeout=10 * 60)\n\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.show_step(5)\n self.fuel_web.run_ostf(\n cluster_id=cluster_id)\n\n self.show_step(6)\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n structured_config = {}\n structured_config.update(structured_config_neutron)\n structured_config.update(structured_config_nova)\n self.check_config_on_remote(controllers, structured_config)\n\n self.show_step(7)\n self.show_step(8)\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n self.check_ml2_vlan_range(os_conn)\n\n self.show_step(9)\n self.show_step(10)\n self.check_overcommit_ratio(os_conn, cluster_id)\n\n snapshot = \"preservation_config_after_reset_and_preconfigured_deploy\"\n self.env.make_snapshot(snapshot, is_make=True)",
"def test_enable_dpdk(self):\n logging.info('Pre-flight check')\n self._dpdk_pre_post_flight_check()\n self._ovs_br_ex_port_is_system_interface()\n\n self.enable_hugepages_vfio_on_hvs_in_vms(4)\n with self.config_change(\n {\n 'enable-dpdk': False,\n 'dpdk-driver': '',\n },\n {\n 'enable-dpdk': True,\n 'dpdk-driver': 'vfio-pci',\n },\n application_name='ovn-chassis'):\n logging.info('Checking openvswitch-switch-dpdk is installed')\n self._openvswitch_switch_dpdk_installed()\n logging.info('Checking DPDK is configured in OVS')\n self._ovs_dpdk_init_configured()\n logging.info('Checking DPDK is successfully initialized in OVS')\n self._ovs_dpdk_initialized()\n logging.info('Checking that br-ex configed with DPDK interface...')\n self._ovs_br_ex_port_is_dpdk_interface()\n logging.info('and is not in error.')\n self._ovs_br_ex_interface_not_in_error()\n\n logging.info('Post-flight check')\n self._dpdk_pre_post_flight_check()\n\n self.disable_hugepages_vfio_on_hvs_in_vms()\n self._ovs_br_ex_port_is_system_interface()",
"def test_patch_pci_switch(self):\n pass",
"def pre_global_vrouter_config_update(self, resource_id, resource_dict):\n pass"
]
| [
"0.769706",
"0.588012",
"0.5724654",
"0.56823856",
"0.56489444",
"0.53998864",
"0.5360985",
"0.5327254",
"0.5325075",
"0.53030884",
"0.5281263",
"0.52710766",
"0.5242105",
"0.52126044",
"0.5210789",
"0.5200721",
"0.52006316",
"0.516452",
"0.5139679",
"0.5134392",
"0.5118748",
"0.50773287",
"0.5061656",
"0.5055505",
"0.50528294",
"0.50374717",
"0.5033121",
"0.49969575",
"0.4986531",
"0.49821055"
]
| 0.7182636 | 1 |
Enables portchannel interface as pagp dualactivedetection interface on target device | def configure_stackwise_virtual_dual_active_pagp(device, port_channel):
# build a list of commands to send
command_list = ['stackwise-virtual']
command_list.append(f'dual-active detection pagp')
if port_channel:
command_list.append(f'dual-active detection pagp trust channel-group {port_channel}')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to configure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unconfigure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n if port_channel:\n command_list.append(f'no dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def setup_logical_port_connectivity(self, context, port_db):\n pass",
"def enable_cable_ports(cid):\n\n SQL.execute('''\n SELECT \n guid,\n port\n FROM \n cable_ports \n WHERE\n cid = ?\n ''',(\n cid,\n ))\n\n for row in SQL.fetchall(): \n if not DISABLE_PORT_STATE_CHANGE: \n ib_mgt.enable_port(int(row['guid']), int(row['port']))",
"def port_nic():",
"def enable_cable_ports(cid):\n\n SQL.execute('''\n SELECT \n cpid,\n guid,\n port,\n hca\n FROM \n cable_ports \n WHERE\n cid = ?\n ''',(\n cid,\n ))\n\n for row in SQL.fetchall(): \n if row['hca']:\n vlog(3, 'skip enabling hca for p%s' % ( row['cpid'] ))\n elif not DISABLE_PORT_STATE_CHANGE:\n ib_mgt.enable_port(int(row['guid']), int(row['port']))",
"def kk_assign_ports():\n print('******* Assigning ports')\n for device in ci_addrs.switches_kk_all:\n print ('******* Connecting to ', device.get('ip'))\n net_connect = ConnectHandler(**device)\n output_vlan_cp = net_connect.send_config_set(cp_kk_config_commands)\n print (output_vlan_cp)\n output_vlan_cloud = net_connect.send_config_set(cloud_kk_config_commands)\n print (output_vlan_cloud)\n output_vlan_f5 = net_connect.send_config_set(f5_kk_config_commands)\n print (output_vlan_f5)\n net_connect.send_config_set('wr')",
"def set_PIenable(self,highlow): \n GPIO.output(self.chanlist[4], highlow)",
"def test_p2p_device_concurrent_scan(dev, apdev):\n with HWSimRadio(use_p2p_device=True) as (radio, iface):\n wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')\n wpas.interface_add(iface)\n wpas.p2p_find()\n time.sleep(0.1)\n wpas.request(\"SCAN\")\n ev = wpas.wait_event([\"CTRL-EVENT-SCAN-STARTED\"], timeout=15)\n if ev is None:\n raise Exception(\"Station mode scan did not start\")",
"def enable_port_trunk(self, mgr, interface):\n confstr = snipp.CMD_PORT_TRUNK % (interface)\n confstr = self.create_xml_snippet(confstr)\n LOG.debug(\"NexusDriver: %s\" % confstr)\n mgr.edit_config(target='running', config=confstr)",
"def _start_device(self):\r\n enabled = [1,1,1,0]\r\n self._data = [np.empty(self._samples,dtype=np.int16) for i in range(3)]\r\n self._data_buffer = [x.ctypes for x in self._data]\r\n self._timebase = self.get_timebase(self._sampling_time)\r\n self.v_rangeAPI = [7,7,7,0] # 5V range\r\n self.v_range = [CHANNEL_RANGE[i][\"rangeV\"] for i in self.v_rangeAPI]\r\n with self._driver_lock:\r\n for i,v,en in zip(range(4),self.v_rangeAPI,enabled): # three active channels\r\n m = self._lib.ps2000aSetChannel(self._handle,\r\n c_int32(i), # channel\r\n c_int16(en), # enabled\r\n c_int32(1), # DC coupling\r\n c_int32(v), # voltage range (API value)\r\n c_float(0)) # 0V offset\r\n check_result(m)\r\n\r\n if en:\r\n m = self._lib.ps2000aSetDataBuffer(self._handle,\r\n c_int32(i), # channel\r\n self._data_buffer[i],\r\n c_int32(self._samples),\r\n c_uint32(0), # segment index\r\n c_int32(0)) # ratio mode\r\n check_result(m)\r\n\r\n threshold_v = 3\r\n threshold_adc = int(threshold_v * MAX_EXT / self.v_range[2])\r\n m = self._lib.ps2000aSetSimpleTrigger(self._handle,\r\n c_int16(1), # enabled\r\n c_int32(2), # Trigger off Channel C\r\n c_int16(threshold_adc),\r\n c_int32(2), # direction = rising\r\n c_uint32(0), # no delay\r\n c_int16(2000)) # autotrigger after 2 seconds if no trigger occurs\r\n check_result(m)\r\n\r\n # Send AWG Info to Picoscope\r\n delta_phase = c_uint32()\r\n output_freq = 1/self._sampling_duration\r\n # output_freq = 1E6\r\n m = self._lib.ps2000aSigGenFrequencyToPhase(self._handle,\r\n c_double(output_freq),\r\n c_int32(0),\r\n c_uint32(len(self._waveform)),\r\n byref(delta_phase))\r\n check_result(m)\r\n delta_phase = int(delta_phase.value)\r\n offset_voltage = 1\r\n pk2pk = 2\r\n # output_freq = 1E6\r\n # wave_type = {'sine':0,'square':1,'triangle':2,'DC':3,\r\n # 'rising sawtooth':4,'falling sawtooth':5,'sin(x)/x':6,\r\n # 'Gaussian':7,'half-sine':8}\r\n waveformPtr = self._waveform.ctypes\r\n trigger_type = 2 # siggen gate high\r\n trigger_source = 4 # software trigger\r\n m = self._lib.ps2000aSetSigGenArbitrary(self._handle,\r\n c_int32(int(offset_voltage*1E6)), \r\n c_uint32(int(pk2pk*1E6)),\r\n c_uint32(delta_phase), # start delta phase\r\n c_uint32(delta_phase), # stop delta phase\r\n c_uint32(0), # delta phase increment\r\n c_uint32(0), # dwell count\r\n waveformPtr, # arbitrary waveform\r\n c_int32(self._samples), # arbitrary waveform size\r\n c_int32(0), # sweep type for delta phase\r\n c_int32(0), # extra operations\r\n c_int32(0), # index mode\r\n c_uint32(1), # shots\r\n c_uint32(0), # sweeps\r\n c_int32(trigger_type),\r\n c_int32(trigger_source),\r\n c_int16(0)) # extIn threshold\r\n check_result(m)\r\n # m = self._lib.ps2000aSetSigGenBuiltIn(self._handle,\r\n # c_int32(int(offset_voltage*1E6)), # offset voltage\r\n # c_uint32(int(pk2pk*1E6)),# peak to peak voltage\r\n # c_int32(wave_type['square']), # wave type\r\n # c_float(output_freq), # start frequency\r\n # c_float(output_freq), # stop frequency\r\n # c_float(0), # increment\r\n # c_float(0), # dwell count\r\n # c_int32(0), # sweep type\r\n # c_int32(0), # operation\r\n # c_uint32(4), # shots\r\n # c_uint32(0), # sweeps\r\n # c_int32(trigger_type), \r\n # c_int32(trigger_source),\r\n # c_int16(0)) # extIn threshold\r\n # check_result(m)\r\n\r\n # for i in enabled:\r\n # if i:\r\n # m = self._lib.ps2000aSetDataBuffer(self._handle,\r\n # c_int32(i), # channel\r\n # self._data_buffer[i],\r\n # c_int32(self._samples),\r\n # c_uint32(0), # segment index\r\n # c_int32(0)) # ratio mode\r\n # check_result(m)\r\n\r\n 
self._save_thread = Thread(target=self.save,args=(self._save_queue,))\r\n self._save_thread.daemon = True\r\n self._save_thread.start()\r\n\r\n self._process_thread = Thread(target=self.process,args=(self._process_queue,self._save_queue))\r\n self._process_thread.daemon = True\r\n self._process_thread.start()\r\n\r\n self._collect_thread = Thread(target=self.run_loop,args=(self._process_queue,))\r\n self._collect_thread.daemon = True\r\n self._collect_thread.start()\r\n\r\n return True",
"def pibooth_configure(cfg):",
"def connectAdapter(self):\n self.canusb = pycanusb.CanUSB(bitrate='500')\n print('CanUSB: ',self.canusb)\n Msg = Switch_to_Operational_State_Msg()\n QTimer.singleShot(50,lambda msg = Msg : self.initialization(Msg))",
"def connect_port(self, iface):\n self.iface_config(iface, adminMode='Up')",
"def setLoopback(self, enable): \n if enable == True:\n DPxEnableDoutDinLoopback()\n else:\n DPxDisableDoutDinLoopback()",
"def activate(self, ext_ip, ext_port):\n self.sql_manager.port_update(self.id, external_ip=ext_ip, external_port=ext_port)\n self.external_port = ext_port\n self.external_ip = ext_ip",
"def process_port_learn(self, dp_name, port, mac, vlan):\n with self._lock:\n device = self._port_device_mapping.setdefault((dp_name, port), DeviceEntry())\n device.mac = mac\n device.vlan = vlan\n device.port_up = True\n device.assigned = self._mac_assignments.get(mac)\n self._send_device_port_event(device)",
"def rr2_assign_ports():\n print('******* Assigning ports')\n for device in ci_addrs.switches_rr1_12:\n print ('******* Connecting to ', device.get('ip'))\n net_connect = ConnectHandler(**device)\n output_vlan_cp = net_connect.send_config_set(cp_rr2_config_commands)\n print (output_vlan_cp)\n output_vlan_cloud = net_connect.send_config_set(cloud_rr1_config_commands)\n print (output_vlan_cloud)\n output_vlan_f5 = net_connect.send_config_set(f5_config_commands)\n print (output_vlan_f5)\n net_connect.send_config_set('wr')\n for device in ci_addrs.switches_rr2_12:\n print ('******* Connecting to ', device.get('ip'))\n net_connect = ConnectHandler(**device)\n output_vlan_cp = net_connect.send_config_set(cp_rr2_config_commands)\n print (output_vlan_cp)\n output_vlan_cloud = net_connect.send_config_set(cloud_rr2_config_commands)\n print (output_vlan_cloud)\n output_vlan_f5 = net_connect.send_config_set(f5_config_commands)\n print (output_vlan_f5)\n net_connect.send_config_set('wr')\n assign_ports_n5k34()",
"def enable(self, coil):\n self.log.info(\"RASPDriver.Enable(%s %s)\" % (coil.config['label'], coil.hw_driver.number))\n self.platform.communicator.driver_enable(coil.hw_driver.number)\n pass",
"def configure_dcbx_pfc(self, ports, **kwargs):\n pass",
"def enable(self):\n # Netmiko reports enable and config mode as being enabled\n if not self.native.check_enable_mode():\n self.native.enable()\n # Ensure device is not in config mode\n if self.native.check_config_mode():\n self.native.exit_config_mode()\n\n log.debug(\"Host %s: Device enabled.\", self.host)",
"def _support_vlan_router_interfaces(self):\n pass",
"def disable_lldp_on_device_ports(self, ports=None):\n pass",
"def enable_radio(self):\n self.acquire_response(b'AT*R1')",
"def _is_vlan_router_interface_supported(self):",
"def _assign_port_to_device(self):\n for i in range(0, len(self.stlink_devices)):\n self.stlink_devices[i]['usb_port'] = self.get_port_from_serial(self.stlink_devices[i]['serial'])",
"def power_on(self, port, data_sync=True):\n port = int(port)\n self._validate_port(\"power_on\", port)\n if data_sync:\n self.set_mode(SYNC, port)\n else:\n self.set_mode(CHARGE, port)",
"def dummy_set_comm_port(port):\n pass",
"def set_externals_state(self, dp_name, externals_up):\n dp_conf = self._get_faucet_conf()['dps'][dp_name]\n for port_num, port_conf in dp_conf['interfaces'].items():\n if port_conf.get('loop_protect_external'):\n if externals_up:\n self.set_port_up(port_num, dp_conf.get('dp_id'))\n else:\n self.set_port_down(port_num, dp_conf.get('dp_id'))",
"def enable_reporting(self):\n self.reporting = True\n msg = chr(REPORT_DIGITAL + self.port_number)\n msg += chr(1)\n self.board.sp.write(msg)\n for pin in self.pins:\n if pin.mode == INPUT:\n pin.reporting = True # TODO Shouldn't this happen at the pin?",
"def enable(self, port):\n assert port in self.ports, 'bad port name'\n port = ord(port[4:]) - ord('A')\n (_, reg, ofs) = gpio_info[self.device.soc_name]\n hw = self.device.RCC.registers[reg]\n port += ofs\n val = hw.rd()\n val &= ~(1 << port)\n val |= 1 << port\n hw.wr(val)"
]
| [
"0.65177065",
"0.61992884",
"0.60917014",
"0.6039055",
"0.5977526",
"0.5824682",
"0.56919676",
"0.56499",
"0.5644209",
"0.5620075",
"0.5562014",
"0.5530952",
"0.5519772",
"0.5477032",
"0.54757065",
"0.54713",
"0.54712033",
"0.54548246",
"0.54346675",
"0.5419358",
"0.5413673",
"0.53912306",
"0.5388182",
"0.5381071",
"0.53809357",
"0.5366536",
"0.5365036",
"0.5364276",
"0.5357456",
"0.53448737"
]
| 0.72997487 | 0 |
Disables portchannel interface as pagp dualactivedetection interface on target device | def unconfigure_stackwise_virtual_dual_active_pagp(device, port_channel):
# build a list of commands to send
command_list = ['stackwise-virtual']
if port_channel:
command_list.append(f'no dual-active detection pagp trust channel-group {port_channel}')
try:
output = device.configure(command_list)
except SubCommandFailure:
raise SubCommandFailure('Failed to unconfigure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def disable_cable_ports(cid):\n\n SQL.execute('''\n SELECT \n cpid,\n guid,\n port,\n hca\n FROM \n cable_ports \n WHERE\n cid = ?\n ''',(\n cid,\n ))\n\n for row in SQL.fetchall(): \n if row['hca']:\n vlog(1, 'ignoring request to disable HCA p%s.' % (row['cpid']))\n continue\n\n if not DISABLE_PORT_STATE_CHANGE: \n ib_mgt.disable_port(int(row['guid']), int(row['port']))\n\n SQL.execute('''\n UPDATE\n cables \n SET\n online = 0\n WHERE\n cid = ?\n ;''', (\n cid,\n ));",
"def disable_switch_port(self, mgr, interface):\n confstr = snipp.CMD_NO_SWITCHPORT % (interface)\n confstr = self.create_xml_snippet(confstr)\n LOG.debug(\"NexusDriver: %s\" % confstr)\n mgr.edit_config(target='running', config=confstr)",
"def disable_lldp_on_device_ports(self, ports=None):\n pass",
"def port_disable(self, port_num: int) -> None:\n raise NotImplementedError",
"def port_nic_remove(switch, port):\n client.port.detach_nic(switch, port)",
"def disconnect_port(self, iface):\n self.iface_config(iface, adminMode='Down')",
"def port_nic():",
"def firewallOff():\n pass",
"async def async_turn_off(self):\n path = \"/ip/firewall/nat\"\n param = \".id\"\n value = None\n for uid in self._ctrl.data[\"nat\"]:\n if (\n self._ctrl.data[\"nat\"][uid][\"name\"]\n == f\"{self._data['protocol']}:{self._data['dst-port']}\"\n ):\n value = self._ctrl.data[\"nat\"][uid][\".id\"]\n\n mod_param = \"disabled\"\n mod_value = True\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n await self._ctrl.async_update()",
"def disable_radio(self):\n self.acquire_response(b'AT*R0')",
"def disable_discovery(self):",
"def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)",
"def port_disable(self, port_num: int) -> None:\n return self._current_dev_manager.port_disable(port_num=port_num)",
"async def async_turn_off(self):\n path = \"/interface\"\n param = \"default-name\"\n if \"-\" in self._data[\"port-mac-address\"]:\n param = \"name\"\n value = self._data[param]\n mod_param = \"disabled\"\n mod_value = True\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n\n if self._data[\"poe-out\"] == \"auto-on\":\n path = \"/interface/ethernet\"\n self._ctrl.set_value(path, param, value, \"poe-out\", \"off\")\n\n await self._ctrl.async_update()",
"def test_multi_ap_disabled_on_ap(dev, apdev):\n run_multi_ap_association(dev, apdev, 0, wait_connect=False)\n ev = dev[0].wait_event([\"CTRL-EVENT-DISCONNECTED\",\n \"CTRL-EVENT-CONNECTED\"],\n timeout=5)\n dev[0].request(\"DISCONNECT\")\n if ev is None:\n raise Exception(\"Connection result not reported\")\n if \"CTRL-EVENT-DISCONNECTED\" not in ev:\n raise Exception(\"Unexpected connection result\")",
"def teardown_logical_port_connectivity(self, context, port_db):\n pass",
"def configure_stackwise_virtual_dual_active_pagp(device, port_channel):\n # build a list of commands to send\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active detection pagp')\n if port_channel:\n command_list.append(f'dual-active detection pagp trust channel-group {port_channel}')\n try:\n output = device.configure(command_list) \n except SubCommandFailure:\n raise SubCommandFailure('Failed to configure a port-channel interface as stackwise-virtual dual-active-detection pagp interfaces')\n return output",
"def unconfigure_stackwise_virtual_dual_active_interfaces(device, dad_links):\n # build a list of commands to send\n command_list = []\n output = ''\n for interface in dad_links:\n command_list.append(f'interface {interface}')\n command_list.append(f'no stackwise-virtual dual-active-detection')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure stackwise-virtual dual-active-detection interfaces')\n return output",
"def swo_disable(self, port_mask):\n res = self._dll.JLINKARM_SWO_DisableTarget(port_mask)\n if res != 0:\n raise errors.JLinkException(res)\n return None",
"def disable_irq() -> int:",
"def disable_reporting(self):\n self.reporting = False\n msg = chr(REPORT_DIGITAL + self.port_number)\n msg += chr(0)\n self.board.sp.write(msg)",
"def set_all_ports_admin_disabled(self):\n pass",
"def usb_connectivity_disabled(self, usb_connectivity_disabled):\n\n self._usb_connectivity_disabled = usb_connectivity_disabled",
"def pswitchoff(chan) :\n s.phaseSwitching(False, chan)",
"def _ovs_br_ex_port_is_dpdk_interface(self):\n cmd = (\n 'dpdk-devbind.py --status-dev net '\n '| grep ^$(ovs-vsctl --bare --columns options '\n 'find interface external_ids:charm-ovn-chassis=br-ex '\n '|cut -f2 -d=)'\n '|grep \"drv=vfio-pci unused=$\"')\n for unit in zaza.model.get_units(self.application_name):\n zaza.utilities.juju.remote_run(\n unit.name, cmd, model_name=self.model_name, fatal=True)",
"def clean_rebind_test(**kwargs):\n if 'verify_traffic' not in kwargs:\n kwargs['verify_traffic'] = False\n prepare_subscriber_traffic(**kwargs)\n device_id = kwargs.get('device_id', bbe.get_devices(device_tags='dut', id_only=True)[0])\n switch_id = kwargs.get('switch_id', 'r1')\n switch_handle = t.get_handle(switch_id)\n switch_access_intf = bbe.get_interfaces(switch_id, interfaces='access')\n status = True\n for iteration in range(1, int(kwargs.get('iteration', 1)) + 1):\n t.log(\"disable access ports in switch in iteration #{}\".format(iteration))\n port_command_list = []\n status = True\n for access in switch_access_intf:\n port_command_list.append(\"set interfaces {} disable\".format(access.interface_pic))\n switch_handle.config(command_list=port_command_list)\n switch_handle.commit()\n t.log(\"verify access ports in down state\")\n for access in switch_access_intf:\n resp = switch_handle.pyez('get_interface_information', level_extra='terse',\n interface_name=access.interface_pic).resp\n if resp.findtext('physical-interface/admin-status') == 'down' and resp.findtext(\n 'physical-interface/oper-status') == 'down':\n t.log(\"interface {} is in down state\".format(access.interface_pic))\n else:\n t.log('WARN', \"interface {} is in state {}\".format(access.interface_pic, resp))\n status = False\n\n if not status:\n for access in switch_access_intf:\n port_command_list.append(\"delete interfaces {} disable\".format(access.interface_pic))\n switch_handle.config(command_list=port_command_list)\n switch_handle.commit()\n raise Exception(\"some interfaces failed to be in down state after disable\")\n base_time = time.time()\n while time.time() - base_time < 1800:\n router_count = get_router_sub_summary(device_id)['client']\n tester_count = get_rt_subs_info()['rt_sessions_up']\n if router_count == 0 and tester_count == 0:\n duration = time.time() - base_time\n t.log(\"all subscribers cleared from tester and router after {}s in iteration #{}\".format(duration,\n iteration))\n break\n t.log(\"sleep 30s , waiting for clients cleared\")\n time.sleep(30)\n\n result = get_router_sub_summary(device_id)\n\n if result['client'] != 0 or 'terminated' in result or 'terminating' in result or 'init' in result:\n status = False\n t.log('WARN', 'some subscribers stuck in unexpected state in iteration #{}'.format(iteration))\n\n for access in switch_access_intf:\n port_command_list.append(\"delete interfaces {} disable\".format(access.interface_pic))\n switch_handle.config(command_list=port_command_list)\n switch_handle.commit()\n time.sleep(10)\n t.log(\"verify access ports in up state in iteration {}\".format(iteration))\n for access in switch_access_intf:\n resp = switch_handle.pyez('get_interface_information', level_extra='terse',\n interface_name=access.interface_pic).resp\n if resp.findtext('physical-interface/admin-status') == 'up' and resp.findtext(\n 'physical-interface/oper-status') == 'up':\n t.log(\"interface {} is in up state\".format(access.interface_pic))\n else:\n t.log('WARN', \"interface {} is in state {}\".format(access.interface_pic, resp))\n status = False\n\n if not status:\n raise Exception(\"clean test failed\")\n ##set the rt subscriber state to stopped, since it is not teared down by actions\n t.log(\"login subscriber and verify traffic after restore the connection in iteration #{}\".format(iteration))\n prepare_subscriber_traffic(**kwargs)",
"def usb_connectivity_disabled(self):\n return self._usb_connectivity_disabled",
"def off(self, include_ethernet=False):\n if not self.healthy:\n self.health_check()\n if self._pre_off_func:\n self._pre_off_func()\n switchboard = self._get_switchboard_if_initialized()\n if self._power_and_data_share_cable:\n if switchboard:\n switchboard.add_log_note(\n f\"comm_power.off() called on {self._device_name} set communication \"\n f\"port {self.port_number} to charge as device has a single USB \"\n \"cable for data and power.\")\n switchboard.close_all_transports()\n self._hub.switch_power.power_on(self.port_number, data_sync=False)\n if self.secondary_port_number is not None:\n self._hub.switch_power.power_on(\n self.secondary_port_number, data_sync=False)\n else:\n if switchboard:\n switchboard.close_all_transports()\n self._hub.switch_power.power_off(self.port_number)\n if self.secondary_port_number is not None:\n self._hub.switch_power.power_off(self.secondary_port_number)\n if include_ethernet:\n self.ethernet_off()",
"def killCAN(mIface, bbid):\n for i in xrange(100):\n mIface.can_pass(bbid, ModuleIface.CAN_NONE)",
"def enable_cable_ports(cid):\n\n SQL.execute('''\n SELECT \n guid,\n port\n FROM \n cable_ports \n WHERE\n cid = ?\n ''',(\n cid,\n ))\n\n for row in SQL.fetchall(): \n if not DISABLE_PORT_STATE_CHANGE: \n ib_mgt.enable_port(int(row['guid']), int(row['port']))"
]
| [
"0.6764529",
"0.6580187",
"0.654981",
"0.6532149",
"0.6376193",
"0.60966444",
"0.6051892",
"0.60355073",
"0.60077417",
"0.59775054",
"0.59527487",
"0.59240466",
"0.59167486",
"0.59125966",
"0.5908374",
"0.5862209",
"0.58325285",
"0.5819661",
"0.58103955",
"0.5798171",
"0.57847714",
"0.5742924",
"0.57368094",
"0.56810457",
"0.5661461",
"0.56367373",
"0.5636679",
"0.56345415",
"0.56323004",
"0.56021917"
]
| 0.72111315 | 0 |
Create the base project folder named `.wcscanner` This folder will contain projects folders | def create_base_projects_folder():
if '.wcscanner' not in os.listdir(context.__BASE_PATH__):
os.mkdir(context.__PROJECTS_PATH__, mode=0o777)
log.info("Base folder '.wcscanner' created in %s", context.__BASE_PATH__)
else:
log.info("Base folder '.wcscanner' already in %s", context.__BASE_PATH__) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_project_folder(self):\n\t\tif not os.path.exists(self.segment_path):\n\t\t\tfileutil.makedirs(self.segment_path)",
"def create_project_dir():\r\n with settings(warn_only=True):\r\n run('mkdir -p %s/packages' % (env.path,))\r\n run('mkdir %s/log' % (env.path,))\r\n run('mkdir -p %s/media/uploads' % (env.path,))\r\n run('mkdir -p %s/collected_static' % (env.path,))\r\n # change permissions for writable folder\r\n cmd = env.host_settings.get('make_folder_world_writeable','chown -R www-data:www-data')\r\n if cmd:\r\n run('%s %s/media' % (cmd, env.path))\r\n run('%s %s/collected_static' % (cmd, env.path))",
"def create_files(project_name, root_dir):\r\n root_dir = projectfolders.create_path(root_dir, project_name) #Modify the root\r\n \r\n write_setup(project_name, root_dir)\r\n write_inits(project_name, root_dir)\r\n write_tests(project_name, root_dir)",
"def make_source_dir():\n\n os.makedirs(files['source_dir'].rel)",
"def makeProjects(self, *versions):\n baseDirectory = FilePath(self.mktemp())\n baseDirectory.createDirectory()\n for version in versions:\n self.makeProject(version, baseDirectory)\n return baseDirectory",
"def makeProject(self, version, baseDirectory=None):\n if baseDirectory is None:\n baseDirectory = FilePath(self.mktemp())\n segments = version[0].split(\".\")\n directory = baseDirectory\n for segment in segments:\n directory = directory.child(segment)\n if not directory.exists():\n directory.createDirectory()\n directory.child(\"__init__.py\").setContent(b\"\")\n directory.child(\"newsfragments\").createDirectory()\n directory.child(\"_version.py\").setContent(genVersion(*version).encode())\n return Project(directory)",
"def makeProject(self, version, baseDirectory=None):\n if baseDirectory is None:\n baseDirectory = FilePath(self.mktemp())\n baseDirectory.createDirectory()\n segments = version.package.split('.')\n directory = baseDirectory\n for segment in segments:\n directory = directory.child(segment)\n if not directory.exists():\n directory.createDirectory()\n directory.child('__init__.py').setContent('')\n directory.child('topfiles').createDirectory()\n directory.child('topfiles').child('README').setContent(version.base())\n replaceProjectVersion(\n directory.child('_version.py').path, version)\n return Project(directory)",
"def mkweb(project_name, mode):\n\n MAIN_FOLDER = data.get_base_path(data.WEB)\n\n if mode != 'MAIN':\n MAIN_FOLDER += f'{mode}/'\n \n webproject = folders.WebProject(project_name, MAIN_FOLDER)\n\n webproject.create_project()\n click.echo(f'Project created succesfull in {webproject.project_path}')\n cli_commands.start_git(webproject.project_path)\n cli_commands.show_dir_path(webproject.project_path)\n # cli_commands.start_vscode(webproject.project_path)\n\n click.echo('Project Path copied to clipboard...')",
"def createBaseFolder(self):\n if not os.path.isdir(self.gdocs_folder):\n os.mkdir(self.gdocs_folder, 0755)",
"def init(cx):\n\n\n # create the folder structure\n for d in PROJECT_DIRS:\n cx.run(\"mkdir -p {}\".format(d))\n cx.run(\"touch {}/.keep\".format(d))",
"def create_new_project():\n readline.parse_and_bind('tab: complete')\n\n print \\\n\"\"\"\n xbmcswift2 - A micro-framework for creating XBMC plugins.\n [email protected]\n --\n\"\"\"\n print 'I\\'m going to ask you a few questions to get this project' \\\n ' started.'\n\n # noinspection PyDictCreation\n opts = {}\n\n # Plugin Name\n opts['plugin_name'] = get_valid_value(\n 'What is your plugin name?',\n validate_nonblank\n )\n\n # Plugin ID\n opts['plugin_id'] = get_valid_value(\n 'Enter your plugin id.',\n validate_pluginid,\n 'plugin.video.%s' % (opts['plugin_name'].lower().replace(' ', ''))\n )\n\n # Parent Directory\n opts['parent_dir'] = get_valid_value(\n 'Enter parent folder (where to create project)',\n validate_isfolder,\n getcwd()\n )\n opts['plugin_dir'] = os.path.join(opts['parent_dir'], opts['plugin_id'])\n assert not os.path.isdir(opts['plugin_dir']), \\\n 'A folder named %s already exists in %s.' % (opts['plugin_id'],\n opts['parent_dir'])\n\n # Provider\n opts['provider_name'] = get_valid_value(\n 'Enter provider name',\n validate_nonblank,\n )\n\n # Create the project folder by copying over skel\n copytree(SKEL, opts['plugin_dir'], ignore=ignore_patterns('*.pyc'))\n\n # Walk through all the new files and fill in with out options\n for root, dirs, files in os.walk(opts['plugin_dir']):\n for filename in files:\n update_file(os.path.join(root, filename), opts)\n\n print 'Projects successfully created in %s.' % opts['plugin_dir']\n print 'Done.'",
"def startproject(self):\n\n path = os.path.join(self.path, self.project_name)\n if os.path.exists(path):\n raise exceptions.ProjectDirectoryAlreadyExistsError(self.project_name)\n else:\n os.makedirs(path)\n\n context = {\n 'project_name': self.project_name,\n 'default_region': self.region,\n 'random': hashlib.sha1(six.text_type(random.random()).encode('utf-8')).hexdigest()[:8]\n }\n\n self._clone_defaults(\n os.path.join(self.root, 'defaults', 'project'),\n path,\n context\n )",
"def create_folder_structure(self):\n # create the parent folder holding the project\n self.proj_folder.mkdir(exist_ok=False)\n # once we have setup the parent folder we can create the subfolder\n # structure\n create_subfolder = [self.aiida_subfolder, self.env_subfolder]\n if self.has_source():\n create_subfolder += [self.src_subfolder]\n for subfolder in create_subfolder:\n project_subfolder = self.proj_folder / subfolder\n project_subfolder.mkdir(exist_ok=False)",
"def setUp_base(self):\n self._create_main_project_and_root()",
"def makeProjects(self, *versions):\n baseDirectory = FilePath(self.mktemp())\n for version in versions:\n self.makeProject(version, baseDirectory)\n return baseDirectory",
"def initilize(self):\n if not self.project_path.exists():\n self.project_path.mkdir()",
"def createproject(destinationdir):\n print(f\"Writing json data files to {destinationdir}\")\n return",
"def create_project_dir(path):\n if not os.path.exists(path):\n print('Creating directory: ' + path)\n os.makedirs(path)",
"def create_project_structure(self, version):\n import os\n\n project_path = version.absolute_path\n for path in self.project_structure:\n # TODO: use exist_ok=True for Python 3.x\n try:\n os.makedirs(os.path.normpath(os.path.join(project_path, path)))\n except OSError:\n # exists_ok\n pass",
"def new(root: str = \".\", name: str = \"piccolo_project\"):\n tree = os.walk(TEMPLATE_DIR)\n\n router = get_routing_framework()\n\n template_context = {\n \"router\": router,\n \"router_dependencies\": ROUTER_DEPENDENCIES.get(router) or [router],\n \"server\": get_server(),\n \"project_identifier\": name.replace(\" \", \"_\").lower(),\n }\n\n for directory in tree:\n dir_path, sub_dir_names, file_names = directory # type: ignore\n\n output_dir_path = os.path.join(root, dir_path.split(TEMPLATE_DIR)[-1])\n\n if not os.path.exists(output_dir_path):\n folder_name = output_dir_path.split(\"/\")[-1]\n if folder_name.startswith((\"_\", \".\")):\n continue\n os.mkdir(dir_path)\n\n for sub_dir_name in sub_dir_names:\n if sub_dir_name.startswith(\"_\"):\n continue\n\n sub_dir_path = os.path.join(output_dir_path, sub_dir_name)\n if not os.path.exists(sub_dir_path):\n os.mkdir(sub_dir_path)\n\n for file_name in file_names:\n if file_name.startswith(\"_\") and file_name != \"__init__.py.jinja\":\n continue\n\n extension = file_name.rsplit(\".\")[0]\n if extension in (\"pyc\",):\n continue\n\n if file_name.endswith(\".jinja\"):\n output_file_name = file_name.replace(\".jinja\", \"\")\n template = Environment(\n loader=FileSystemLoader(searchpath=dir_path)\n ).get_template(file_name)\n\n output_contents = template.render(**template_context)\n\n if output_file_name.endswith(\".py\"):\n try:\n output_contents = black.format_str(\n output_contents,\n mode=black.FileMode(line_length=80),\n )\n except Exception as exception:\n print(f\"Problem processing {output_file_name}\")\n raise exception from exception\n\n with open(\n os.path.join(output_dir_path, output_file_name), \"w\"\n ) as f:\n f.write(output_contents)\n else:\n if file_name.endswith(\".jinja_raw\"):\n output_file_name = file_name.replace(\n \".jinja_raw\", \".jinja\"\n )\n else:\n output_file_name = file_name\n\n shutil.copy(\n os.path.join(dir_path, file_name),\n os.path.join(output_dir_path, output_file_name),\n )\n\n print(\n \"Run `pip install -r requirements.txt` and `python main.py` to get \"\n \"started.\"\n )",
"def assemble_resource_directories(project, base_dir):\n resource_path = os.path.join(base_dir, project.resources_path)\n os.makedirs(os.path.join(resource_path, 'images'))\n os.makedirs(os.path.join(resource_path, 'fonts'))\n os.makedirs(os.path.join(resource_path, 'data'))",
"def setUp(self):\n self.workspace_dir = tempfile.mkdtemp()\n if not os.path.exists(self.workspace_dir):\n os.makedirs(self.workspace_dir)",
"def build_project_dir(self, instance, private_data_dir):\n project_dir = os.path.join(private_data_dir, 'project')\n if not os.path.exists(project_dir):\n os.mkdir(project_dir)",
"def create_main_dir(self):\n\n set_of_dirs= [ self.root_laas_ws,\n self.root_laas_ws+'/src',\n self.root_laas_ws+'/install']\n \n # Creates set_of_dirs if they do not exist\n for a_dir in list_of_dirs:\n if not os.path(a_dir).is_dir():\n os.makedirs(a_dir,0o777,True)",
"def project_mkdirs():\n with cd(env.code_dir):\n run('mkdir -pv conf')\n run('mkdir -pv %s' % env.log_dir)\n\n dirs = 'uploads'\n run('mkdir -pv {}'.format(dirs))\n run('sudo chown -v {} {}'.format(env.webuser, dirs))",
"def create_folder(self):\n Path(self.root_name).mkdir(parents=True, exist_ok=True)\n Path(self.root_name + \"/VOC2021/\").mkdir(parents=True, exist_ok=True)\n Path(self.image_folder_path).mkdir(parents=True, exist_ok=True)\n Path(self.annot_path).mkdir(parents=True, exist_ok=True)\n Path(self.root_name + \"/VOC2021/ImageSets/\").mkdir(parents=True, exist_ok=True)\n Path(self.txt_path).mkdir(parents=True, exist_ok=True)",
"def create_app_folders(self):\n\t\tif not os.path.exists(self.TEMP_FOLDER):\n\t\t\tos.makedirs(self.TEMP_FOLDER)\n\t\tif not os.path.exists(self.SAVE_FOLDER):\n\t\t\tos.makedirs(self.SAVE_FOLDER)",
"def create_project(self,*pages,config_folder = \"config\",FunctionBased = False):\n\n self._make_initial_directories()\n self._make_initial_files(*pages,FunctionBased = FunctionBased)",
"def create(self, basedir, outdir, name, prefix=None):",
"def CreateProject(projectName='project'):\r\n projectName = input('''The project's name: ''')\r\n if not os.path.exists(projectName):\r\n os.mkdir(projectName)\r\n else:\r\n print('There is a file with the same name.')\r\n\r\n for dir in ['OPT', 'SCF', 'PHO']:\r\n if not os.path.exists(projectName + os.sep + dir):\r\n os.mkdir(projectName + os.sep + dir)"
]
| [
"0.6659889",
"0.6641856",
"0.64917487",
"0.6437589",
"0.6368039",
"0.6359022",
"0.63302505",
"0.6301878",
"0.62901366",
"0.6244791",
"0.62232673",
"0.6204468",
"0.6195332",
"0.6174261",
"0.61507595",
"0.61412406",
"0.6134815",
"0.6131861",
"0.6122213",
"0.61108935",
"0.6099372",
"0.60796547",
"0.60736555",
"0.6073142",
"0.6068983",
"0.6035838",
"0.6019343",
"0.6015985",
"0.60050756",
"0.6000003"
]
| 0.8622508 | 0 |
Function to remove a project entirely | def remove_single_project(project_name):
p = subprocess.Popen('rm -rf {}/{}'.format(context.__PROJECTS_PATH__, project_name), shell=True)
p.wait() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_remove_project(self):\n pass",
"def delete_project(arn=None):\n pass",
"def destroy(config, args):\n log = logging.getLogger('kraftwerk.destroy')\n if confirm(\"Remove project %s from node %s along with all services and data?\" % \n (args.project.name, args.node.hostname)):\n args.node.ssh(config.template(\"scripts/project_destroy.sh\", project=args.project))\n print \"Project %s removed from node %s\" % \\\n (args.project.name, args.node.hostname )\n for service in args.project.services(args.node):\n args.node.ssh(service.destroy_script)",
"def __remove_all_projects__():\n p = subprocess.Popen('rm -rf {}/.wcscanner/*'.format(context.__BASE_PATH__), shell=True)\n p.wait()",
"def do_project_delete(cs, args):\n key = args.project\n if cs.projects.is_id(key):\n id = key\n else:\n id = cs.projects.get_id_by_name(key)\n cs.projects.delete(id)\n print(\"Delete Project '%s' successfully.\" % key)",
"def _on_del_project(self):\n project = self.ddnCurProject.get()\n# if len(project) > 0:\n if project:\n if '.prj'!= project[-4:]:\n project += '.prj'\n if os.path.exists(self.BibTerm + '/'+ project):\n os.remove(self.BibTerm + '/'+ project)\n self.list_projects = [f.rstrip('.prj') \\\n for f in os.listdir(self.BibTerm) \\\n if f.endswith('.prj')]\n self.ddnCurProject['values'] = self.list_projects\n# if len(self.list_projects) > 0:\n if self.list_projects:\n self.ddnCurProject.set(self.list_projects[0])\n else:\n self.ddnCurProject.set('')\n pass",
"def delete_project(\n name\n):\n\n cmd = dict()\n cmd[\"type_\"] = \"delete_project\"\n cmd[\"name_\"] = name\n\n comm.send(cmd)",
"def cleanUp(self):\n import evoware.fileutil as F\n F.tryRemove(self.f_project, verbose=(self.VERBOSITY>1), tree=1)",
"def delete_project(project):\n with BMI(_username, _password, constants.BMI_ADMIN_PROJECT) as bmi:\n ret = bmi.delete_project(project)\n if ret[constants.STATUS_CODE_KEY] == 200:\n click.echo(\"Success\")\n else:\n click.echo(ret[constants.MESSAGE_KEY])",
"def delete_project(self, project_name):\n # type(project_name) == unicode\n project = self.db.get_project_by_name(project_name)\n if not project:\n print(u\"*** Error: The project '{}' was not found.\"\n \"\".format(project_name))\n return\n print('Caution! The related tracking will be deleted as well.{eol}'\n 'Do you really want to delete the project? [y/N] '\n ''.format(eol=os.linesep), end='')\n if not helpers.get_yes_no(default='n'):\n return\n self.db.delete_project_by_name(project_name)\n print(u\"The project '%s' has been deleted.\" % project_name)\n self.set_prompt()",
"def delete_project(proj_id):\n project_obj = Project.objects.get(id=proj_id)\n print('Deleting project the fastq files within the project: ', project_obj.description)\n\n description = project_obj.description.replace(' ', '') # remove any space in the project name\n project_dir = 'documents/%s/%s' % (str(project_obj.date.date()), description)\n shutil.rmtree(project_dir, ignore_errors=True)\n print(\"Files deleted.\")",
"def test_delete_project(self):\n pass",
"def test_delete_project(self):\n pass",
"def remove():\n run('pew rm {0}'.format(package_name()))",
"def delete(self):\n _url = f\"{self.connector.base_url}/projects/{self.project_id}\"\n\n self.connector.http_call(\"delete\", _url)\n\n self.project_id = None\n self.name = None",
"def DelProject(projname):\n\tif projname == \"\" or projname == None:\n\t\tpjnm = raw_input(\"\\nNombre del proyecto: \").lower()\n\t\tif pjnm == \"\" or pjnm == None:\n\t\t\tcancel()\n\telse:\n\t\t# Proceso para borrar todo el proyecto\n\t\tpass\n\n\tpa = open(\"author_name.txt\", \"r\")\t#Abre el archivo con el nombre del autor\n\tpa.read()\n\tpc = open(\"project_code.txt\", \"r\")\t#Abre el archivo con el codigo de proyecto\n\tpc.read()\n\n\tuserpa = raw_input(\"Ingrese el nombre del autor: \").lower()\n\tuserpc = raw_input(\"Ingrese el codigo del proyecto: \").lower()\n\n\tif userpa == pa and userpc == pc:\t#Se verifica que userpa(nombre del autor por el usuario) sea igual a pa(nombre original del autor) y lo mismo con el codigo del proyecto\n\t\tprint \"Iniciando el Borrado del Proyecto...\"\n\t\tpcommands.del_project()\n\t\tprint \"El proyecto se ha borrado con exito!\"\n\telse:\n\t\tprint \"El codigo del proyecto o el nombre del autor no es correcto.\"\n\t\tcancel()",
"def delete_project(projectname):\n response = jsonify(admin.delete_project(current_app.scoped_session(), projectname))\n return response",
"def cleanup(self):\r\n session = self.get_session()\r\n project = session.create(self._config.name)\r\n \r\n session.home = self._config['dir']\r\n \r\n result = self.__find_project(project)\r\n \r\n path = os.path.join(session.home, project.name)\r\n project.work_area(False, True, True, path=path)\r\n \r\n if (result != None):\r\n _logger.info(\"Project found: '%s'\" % result)\r\n role = session.role\r\n co_role = ccm.get_role_for_purpose(session, str(self._config['purpose']))\r\n session.role = co_role\r\n try:\r\n delResult = result.delete(scope='project_and_subproject_hierarchy')\r\n finally:\r\n session.role = role\r\n ccm.log_result(delResult, ccm.CHECKOUT_LOG_RULES, _logger)",
"def delete(self, request, p_name):\n project = Project.objects.get(name=p_name)\n connectors = project.connector_set.all()\n connectors.delete()\n if os.path.isfile(project.project_location):\n os.remove(project.project_location)\n project.delete()\n return HttpResponse(HTTPStatus.OK)",
"def unlink(self):\n analytic_accounts_to_delete = self.env['account.analytic.account']\n for project in self:\n if project.analytic_account_id and not project.analytic_account_id.line_ids:\n analytic_accounts_to_delete |= project.analytic_account_id\n result = super(Project, self).unlink()\n analytic_accounts_to_delete.unlink()\n return result",
"def remove():",
"def pre_project_delete(self, resource_id):\n pass",
"def delete_project_file(self, project=None):\n if type(project) is not Project:\n return False\n\n path = self.data_path + self.project_dir\n\n # generate filenames\n filename = path + '/' + self.us(project.project_id()) + '.flproject'\n\n # check if the file exists and delete it\n if os.path.isfile(filename):\n os.remove(filename)\n return True\n else:\n return False",
"def remove_project(project_id):\n response_object = {'status': 'success'}\n with database.engine.begin() as connection:\n\n stmt = select([models.projects.c.path]).where(\n models.projects.c.project_id == project_id)\n project = connection.execute(stmt).first()\n\n if project:\n app = flask.current_app\n project_path = os.path.join(\n app.root_path, app.config['DATA_DIRECTORY'], project['path'])\n if 'morphocut' in project_path and app.config['DATA_DIRECTORY'] in project_path:\n print('removing project with id {}'.format(project_id))\n if os.path.exists(project_path):\n helpers.remove_directory(project_path)\n\n stmt = models.projects.delete().where( # pylint: disable=no-value-for-parameter\n models.projects.c.project_id == project_id)\n\n connection.execute(stmt)\n\n return jsonify(response_object)",
"def delete_orphan_project(apps, schema_editor):\n Project = apps.get_model('data_manager', 'Project')\n Project.objects.filter(dataset__isnull=True).delete()\n return",
"def delete_project_by_name(self, project_name):\n with self._transaction.cursor() as cur:\n # delete associations between this project and any barcodes\n cur.execute(\"DELETE FROM barcodes.project_barcode \"\n \"WHERE project_id in (\"\n \"SELECT project_id FROM barcodes.project \"\n \"WHERE project = %s)\",\n (project_name,))\n\n # now delete the project itself\n cur.execute(\"DELETE FROM barcodes.project WHERE project = %s\",\n (project_name,))\n return cur.rowcount == 1",
"def remove_project(self, project=None, settings=None):\n is_project = type(project) is Project\n pid_exists = project.project_id() in [p.project_id() for p in self.project_list]\n\n # cancel if it's no project or project_id does not exist\n if not is_project or not pid_exists:\n return False\n\n # try to remove the project\n try:\n self.project_list.pop(self.project_list.index(project))\n self.delete_project_file(project=project)\n\n # update the inactive_list\n self.update_inactive_list(settings=settings)\n\n return True\n except Exception:\n return False",
"def remove_project(self, project_id):\n project_file_path = '{}/{}'.format(self._storage_location, project_id)\n if os.path.exists(project_file_path):\n os.remove(project_file_path)\n else:\n raise ValueError('The project id {} does not exist!'.format(project_id))",
"def delete(conn, project):\n with conn:\n c = conn.cursor()\n c.execute(\"DELETE FROM projects WHERE project =?\", (project,))",
"def delete_project(request, project_id):\n\n profile = get_object_or_404(Profile, user=request.user)\n project = get_object_or_404(GameProject, pk=project_id)\n\n if not profile.is_creator:\n messages.error(request, 'Sorry, only creators can do that.')\n return redirect(reverse('home'))\n if project.owner != profile:\n messages.error(request, 'Sorry, only the project owner can do that.')\n return redirect(reverse('home'))\n\n project = get_object_or_404(GameProject, pk=project_id)\n project.delete()\n messages.success(request, 'Project deleted!')\n return redirect(reverse('all_projects'))"
]
| [
"0.78088254",
"0.7433542",
"0.71992093",
"0.7100344",
"0.6999118",
"0.69928163",
"0.69796044",
"0.69558465",
"0.6908469",
"0.68370533",
"0.679688",
"0.6792618",
"0.6792618",
"0.67563593",
"0.6748435",
"0.6722493",
"0.6663393",
"0.665515",
"0.66471046",
"0.6595111",
"0.65836304",
"0.6554876",
"0.65498596",
"0.65315676",
"0.65169734",
"0.65169626",
"0.65137535",
"0.6492884",
"0.64866614",
"0.64528394"
]
| 0.7941272 | 0 |
Dev function used to remove all projects | def __remove_all_projects__():
p = subprocess.Popen('rm -rf {}/.wcscanner/*'.format(context.__BASE_PATH__), shell=True)
p.wait() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_remove_project(self):\n pass",
"def cleanUp(self):\n import evoware.fileutil as F\n F.tryRemove(self.f_project, verbose=(self.VERBOSITY>1), tree=1)",
"def clean(self):\n\n if not self.__projects:\n return\n\n Console.info(\"Cleaning session...\")\n Console.indent()\n\n for project in self.__projects:\n project.clean()\n\n path = os.path.abspath(os.path.join(\".jasy\", \"locale\"))\n if os.path.exists(path):\n Console.info(\"Cleaning up locale project...\")\n shutil.rmtree(path)\n\n path = os.path.abspath(os.path.join(\".jasy\", \"virtual\"))\n if os.path.exists(path):\n Console.info(\"Cleaning up virtual project...\")\n shutil.rmtree(path)\n\n Console.outdent()",
"def remove_single_project(project_name):\n p = subprocess.Popen('rm -rf {}/{}'.format(context.__PROJECTS_PATH__, project_name), shell=True)\n p.wait()",
"def destroy(config, args):\n log = logging.getLogger('kraftwerk.destroy')\n if confirm(\"Remove project %s from node %s along with all services and data?\" % \n (args.project.name, args.node.hostname)):\n args.node.ssh(config.template(\"scripts/project_destroy.sh\", project=args.project))\n print \"Project %s removed from node %s\" % \\\n (args.project.name, args.node.hostname )\n for service in args.project.services(args.node):\n args.node.ssh(service.destroy_script)",
"def clear(self):\n for project in Project.objects:\n project.delete()",
"def tearDown(self):\n Project.objects.all().delete()",
"def reset():\n local('cd {{ project_name }} && \\\n rm -rf static && rm -rf gzip && rm -rf build')",
"def test_delete_project(self):\n pass",
"def test_delete_project(self):\n pass",
"def tearDownClass(cls):\n projects = ['arc_project_for_testing_delete_after_usage1', 'arc_project_for_testing_delete_after_usage2',\n 'ar c', 'ar:c', 'ar<c', 'ar%c']\n for project in projects:\n project_directory = os.path.join(arc_path, 'Projects', project)\n shutil.rmtree(project_directory)",
"def cleanup(self):\r\n session = self.get_session()\r\n project = session.create(self._config.name)\r\n \r\n session.home = self._config['dir']\r\n \r\n result = self.__find_project(project)\r\n \r\n path = os.path.join(session.home, project.name)\r\n project.work_area(False, True, True, path=path)\r\n \r\n if (result != None):\r\n _logger.info(\"Project found: '%s'\" % result)\r\n role = session.role\r\n co_role = ccm.get_role_for_purpose(session, str(self._config['purpose']))\r\n session.role = co_role\r\n try:\r\n delResult = result.delete(scope='project_and_subproject_hierarchy')\r\n finally:\r\n session.role = role\r\n ccm.log_result(delResult, ccm.CHECKOUT_LOG_RULES, _logger)",
"def clean_project(self, app_name=None, delete_all=False):\n\n if not app_name and not delete_all:\n ConuException(\"You need to specify either app_name or set delete_all=True\")\n\n if delete_all:\n args = [\"--all\"]\n logger.info('Deleting all objects in current project')\n else:\n args = \"-l app=%s\" % app_name\n logger.info('Deleting all objects with label app=%s', app_name)\n\n try:\n o = run_cmd(self._oc_command([\"delete\", \"all\", args]),\n return_output=True)\n o_lines = o.split('\\n')\n for line in o_lines:\n logger.info(line)\n except subprocess.CalledProcessError as ex:\n raise ConuException(\"Cleanup failed because of exception: %s\" % ex)",
"def delete_all_projects():\n client = RequestManager()\n client.set_method(\"GET\")\n client.set_endpoint(\"/projects\")\n response = client.execute_request()\n for project in response.json():\n try:\n ProjectHelper.delete_project(project[\"id\"])\n except TypeError:\n LOGGER.info(project)",
"def delete_project(arn=None):\n pass",
"def _on_del_project(self):\n project = self.ddnCurProject.get()\n# if len(project) > 0:\n if project:\n if '.prj'!= project[-4:]:\n project += '.prj'\n if os.path.exists(self.BibTerm + '/'+ project):\n os.remove(self.BibTerm + '/'+ project)\n self.list_projects = [f.rstrip('.prj') \\\n for f in os.listdir(self.BibTerm) \\\n if f.endswith('.prj')]\n self.ddnCurProject['values'] = self.list_projects\n# if len(self.list_projects) > 0:\n if self.list_projects:\n self.ddnCurProject.set(self.list_projects[0])\n else:\n self.ddnCurProject.set('')\n pass",
"def test_remove_trusted_project(self):\n pass",
"def delete(self, request, p_name):\n project = Project.objects.get(name=p_name)\n connectors = project.connector_set.all()\n connectors.delete()\n if os.path.isfile(project.project_location):\n os.remove(project.project_location)\n project.delete()\n return HttpResponse(HTTPStatus.OK)",
"def test_remove_trusted_project1(self):\n pass",
"def _clean_up_project_file(self):\n\n print \"Reading in project file...\"\n with open(self.project_file,'r') as f_in:\n project_json = json.load(f_in)\n\n # Go through design_files references\n configurations = project_json['Project']['Configurations']\n n = len(configurations)\n indices_to_delete = []\n for i in range(n):\n if not os.path.basename(configurations[i]) in self.design_files:\n indices_to_delete.append(i)\n\n indices_to_delete.reverse()\n for i in indices_to_delete:\n del configurations[i]\n\n # Go through design_space_files references\n design_space_models = project_json['Project']['DesignSpaceModels']\n n = len(design_space_models)\n indices_to_delete = []\n for i in range(n):\n if not os.path.basename(design_space_models[i]) in self.design_space_files:\n indices_to_delete.append(i)\n\n indices_to_delete.reverse()\n for i in indices_to_delete:\n del design_space_models[i]\n\n # Go through test_bench_files references\n #test_benches = project_json['Project']['TestBenches']\n #n = len(test_benches)\n #indices_to_delete = []\n #for i in range(n):\n # if not os.path.basename(test_benches[i]) in self.test_bench_files:\n # indices_to_delete.append(i)\n #\n #for i in indices_to_delete.reverse():\n # del test_benches[i]\n\n # Write out the new, reduced in size, project dictionary\n with open(self.project_file,'wb') as f_out:\n json.dump(project_json, f_out, indent=4)\n\n print \"Written out cleaned up project dictionary.\"",
"def cmd_apps__destroy(args):\n \n if args.name is None and in_git_repo():\n args.name = _get_current_project_name()\n\n if args.name is None:\n print \"Please provide a project name.\"\n sys.exit(1)\n\n print \"Destroying project %s...\" % args.name\n remote.destroy_project(args.name)\n print \"Project %s destroyed.\" % args.name\n if in_git_repo() and _get_current_project_name() == args.name:\n git(None, 'remote', 'rm', 'tinyserv')\n print \"Removed remote '%s'.\" % args.name",
"def delete_project(\n name\n):\n\n cmd = dict()\n cmd[\"type_\"] = \"delete_project\"\n cmd[\"name_\"] = name\n\n comm.send(cmd)",
"def do_project_delete(cs, args):\n key = args.project\n if cs.projects.is_id(key):\n id = key\n else:\n id = cs.projects.get_id_by_name(key)\n cs.projects.delete(id)\n print(\"Delete Project '%s' successfully.\" % key)",
"def remove():\n run('pew rm {0}'.format(package_name()))",
"def unlink(self):\n analytic_accounts_to_delete = self.env['account.analytic.account']\n for project in self:\n if project.analytic_account_id and not project.analytic_account_id.line_ids:\n analytic_accounts_to_delete |= project.analytic_account_id\n result = super(Project, self).unlink()\n analytic_accounts_to_delete.unlink()\n return result",
"def delete(self):\n _url = f\"{self.connector.base_url}/projects/{self.project_id}\"\n\n self.connector.http_call(\"delete\", _url)\n\n self.project_id = None\n self.name = None",
"def delete_orphan_project(apps, schema_editor):\n Project = apps.get_model('data_manager', 'Project')\n Project.objects.filter(dataset__isnull=True).delete()\n return",
"def delete():\n run('rm -r {}'.format(utils.home('apps', env.PROJECT_NAME)))",
"def project_delete(cursor, project):\n haystack = (project['_id'], )\n\n query = \"DELETE FROM projects WHERE _id=?\"\n try:\n cursor.execute(query, haystack)\n except Exception as e:\n on_error(e)\n\n query = \"DELETE FROM namespaces WHERE project_id=?\"\n try:\n cursor.execute(query, haystack)\n except Exception as e:\n on_error(e)\n else:\n cursor.connection.commit()\n raise Return((True, None))",
"def clean(self):\n \n with current_context() as ctx:\n project_outputs = ctx.get('current.project_outputs')\n if project_outputs is not None:\n if self._project in project_outputs:\n del project_outputs[self._project]\n \n path = self.path\n if os.path.isfile(path):\n args = [self.command, '-f', path, '-t', 'clean', '-g']\n try:\n check_call(args)\n except CalledProcessError as ex:\n return ex.returncode\n self.remove()\n return 0"
]
| [
"0.78051233",
"0.7530776",
"0.7457813",
"0.73433113",
"0.7220841",
"0.70927864",
"0.7014415",
"0.7013246",
"0.70059735",
"0.70059735",
"0.6994109",
"0.69718444",
"0.6850931",
"0.68286365",
"0.67603636",
"0.6698705",
"0.66784424",
"0.6565345",
"0.65251285",
"0.6489362",
"0.64347804",
"0.64284617",
"0.6422401",
"0.6414664",
"0.638747",
"0.6379775",
"0.6365258",
"0.63630307",
"0.6357719",
"0.63550043"
]
| 0.8361376 | 0 |
Dev method used to remove the base directory of the application | def __remove_base_directory__():
p = subprocess.Popen('rm -rf {}/.wcscanner'.format(context.__BASE_PATH__), shell=True)
p.wait() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_basepath(self, basepath):\n\n # remove leading '/' from basepath so it doesn't screw stuff up\n target = self.install_dir / basepath.lstrip('/')\n\n try:\n # move all contents to the mod-root\n for f in target.iterdir():\n f.rename(self.install_dir / f.name)\n except Exception as e:\n self.LOGGER.exception(e)\n raise\n\n # folder should be empty; try to remove it\n try:\n target.rmdir()\n except OSError as e:\n self.LOGGER.error(f\"Could not remove directory '{target}'\")\n self.LOGGER.exception(e)",
"def cleanup_step(self):\n self.clean_home_subdir()\n\n super(IntelBase, self).cleanup_step()",
"def remove_app(self):\n \n pass",
"def clean():\n sudo(\"rm -rf %(admin_webroot)s\" % env)",
"def delete():\n run('rm -r {}'.format(utils.home('apps', env.PROJECT_NAME)))",
"def reset(self):\r\n self._root_dir = None",
"def remove(self): \n self.doRoot(self.removeDir)\n settings.getChanged('mosh.resourceReplacer.applied').remove(self.file)",
"def clean():\n local('rm -fr %s' % os.path.abspath(env.config['destination']))",
"def module_cleanup():\n\n os.chdir(os.path.dirname(__file__))\n shutil.rmtree(TestModule.full_path())",
"def clear_base_files(self):\r\n compilelock.get_lock()\r\n try:\r\n for base_dir in ('cuda_ndarray', 'cutils_ext', 'lazylinker_ext',\r\n 'scan_perform'):\r\n to_delete = os.path.join(self.dirname, base_dir + '.delete.me')\r\n if os.path.isdir(to_delete):\r\n try:\r\n shutil.rmtree(to_delete)\r\n _logger.debug('Deleted: %s', to_delete)\r\n except Exception:\r\n _logger.warning('Could not delete %s', to_delete)\r\n continue\r\n to_rename = os.path.join(self.dirname, base_dir)\r\n if os.path.isdir(to_rename):\r\n try:\r\n shutil.move(to_rename, to_delete)\r\n except Exception:\r\n _logger.warning('Could not move %s to %s',\r\n to_rename, to_delete)\r\n finally:\r\n compilelock.release_lock()",
"def reset(self):\n def remove_auxiliary_dir():\n egg_info_dir = self.project_name_sc + \".egg-info\"\n remove_directories([\n egg_info_dir,\n \".env\",\n \".eggs\",\n \".pytest_cache\",\n \"build\",\n \"dist\",\n \".cache\",\n \".benchmark\",\n \".tox\",\n \".vagrant\",\n \".tox\"])\n remove_files([\n \".coverage\",\n \".doit.db\",\n \".doit.bak\",\n \".doit.dat\",\n \".doit.dir\",\n ])\n\n # TODO(lschneider): Remove unnecessary files without command lines.\n # This code could be run directly from this function. However\n # the pathlib library is not part of the standard python 2.\n prefix = \"python -c \\\"import pathlib; \"\n delete_pyfiles = prefix + \"import pathlib; [p.unlink() for p in pathlib.Path('.').rglob('*.py[co]')]\\\"\"\n delete_dirs = prefix + \"import pathlib; [p.rmdir() for p in pathlib.Path('.').rglob('__pycache__')]\\\"\"\n\n return {\n \"actions\": [\n delete_pyfiles,\n delete_dirs,\n remove_auxiliary_dir,\n ],\n \"verbosity\": 2\n }",
"def unlink(self):\n\t\tadiff = ApplicationDifferencer()\n\n\t\t# Determine the differences between what's in the\n\t\t# application's directory and what's currently\n\t\t# available from the root filesystem (in relation\n\t\t# to this application).\n\t\tresults = adiff.scan(\n\t\t\t\tos.path.join(\n\t\t\t\t\tAppFolders.get(self.type),\n\t\t\t\t\tself.name + \"/\" + self.version\n\t\t\t\t\t),\n\t\t\t\tTrue\n\t\t\t\t);\n\t\t\n\t\tsafe_app_dir = os.path.join(\n AppFolders.get(self.type),\n self.name # We exclude the version here because we could be\n # checking against a link that's under Current or\n # a specific version.\n )\n\t\t\n\t\t# Preemptively go through the list of directories, removing those\n\t\t# that are symlinks to the application folder. This is from the legacy\n\t\t# link system and unfortunatly if you let the block below this run\n\t\t# through a system with said symlinks, you'll end up annihilating the\n\t\t# the application files (because it'll walk through the symlink into\n\t\t# the application directory and start rm'ing stuff we don't want to)\n\t\t# The solution here is to go through and remove directory symlinks before\n\t\t# hand, with a reversed result list (in effect reversing the walk process\n\t\t# in adiff.scan) so that we elimate the top level symlinks first, preventing\n\t\t# it from annihilating symlinked directories inside the application folder.\n\t\t# Very annoying stuff.\n\t\t#\n\t\t# XXX: I almost hosed the entire Elementary system with this. Apparently it\n\t\t# that removing symlinked directories included some of the base ones\n\t\t# such as /lib and /bin (because the Python install contains those dirs\n\t\t# too :P). The only_sub variable defines that only paths that resolve\n\t\t# to a *subdirectory* of those specified can be removed if it's a symlinked\n\t\t# directory. This prevents removal of /bin, /lib, etc.. symlinks.\n\t\t#\n\t\tonly_sub = [\n\t\t\t\t\"/System/Utilities/Applications\",\n\t\t\t\t\"/System/Utilities/Libraries\",\n\t\t\t\t\"/Applications\",\n\t\t\t\t\"/Users\"\n\t\t\t]\n\t\tresults.reverse()\n\t\ttrip_safety = False\n\t\tfor i in results:\n\t\t\t# Legacy removal is a special case because directories will be detected\n\t\t\t# as file entries (because they are symlinks). Therefore, we need to use\n\t\t\t# os.path.realpath and os.path.isdir to find out whether it's really a directory\n\t\t\t# or not.\n\t\t\tis_directory = os.path.isdir(os.path.realpath(i[2]))\n\n\t\t\t# Get file information.\n\t\t\ttry:\n\t\t\t\tpstat = os.lstat(i[2])[stat.ST_MODE]\n\t\t\texcept:\n\t\t\t\t# Likely broken when we removed a directory symlink.\n\t\t\t\tcontinue\n\t\t\t\n\t\t\t# Determine whether we should proceed with this entry.\n\t\t\tif (not is_directory):\n\t\t\t\tcontinue\n\t\t\tif (not stat.S_ISLNK(pstat)):\n\t\t\t\tcontinue\n\n\t\t\t# Determine whether it's safe to remove this symlinked dir.\n\t\t\tif (not self.isApplicationOwned(i[2], safe_app_dir)):\n\t\t\t\tcontinue\n\t\t\t\n\t\t\t# Double-check before we go unlinking (in case of a logic oversight).\n\t\t\tif (is_directory and stat.S_ISLNK(pstat)):\n\t\t\t\ttrip_safety = True\n\t\t\t\ttry:\n\t\t\t\t\tself.oper_unlink(i[2])\n\t\t\t\t\tlog.showWarningW(\"Removed symlinked directory at: \" + i[2])\n\t\t\t\t\tlog.showWarningW(\"The full path was: \" + rpath)\n\t\t\t\texcept:\n\t\t\t\t\tpass\n\t\tresults.reverse()\t\t\n\n\t\tif (trip_safety):\n\t\t\tlog.showErrorW(\"Legacy system safety switch was tripped. 
This indicates you have\")\n\t\t\tlog.showErrorO(\"symlinked directories on your system (from legacy linkage systems).\")\n\t\t\tlog.showErrorO(\"The unlinking process has removed at least one of those symlinked\")\n\t\t\tlog.showErrorO(\"directories. In order to make sure application files don't get\")\n\t\t\tlog.showErrorO(\"removed, you need to run the unlink process again to ensure the system\")\n\t\t\tlog.showErrorO(\"is scanned without symlinked directories. If the process shows this\")\n\t\t\tlog.showErrorO(\"message twice, then STOP and REMOVE THE SYMLINKS MANUALLY. You risk\")\n\t\t\tlog.showErrorO(\"destroying application installations if you continue.\")\n\t\t\tsys.exit(1)\n\t\t\n\n\t\t# Now go through the results, removing directories (if they're\n\t\t# empty) and un-symlinking files (but making sure that we only\n\t\t# remove symlinks and not normal files).\n\t\tattempt_successes = list()\n\t\tattempt_failures = list()\n\t\tattempt_notexists = list()\n\t\ttotal_files = 0\n\t\tfor i in results:\n\t\t\ttotal_files += 1\n\t\t\ttry:\n\t\t\t\tpstat = os.lstat(i[2])[stat.ST_MODE]\n\t\t\texcept:\n\t\t\t\t# File doesn't exist. Likely got removed while we unlinked\n\t\t\t\t# a `symlinked' directory (from old linkage system).\n\t\t\t\tcontinue\n\n\t\t\t# Check to make sure that the file we're going to remove is located\n\t\t\t# within a safe directory.\n\t\t\tif (not self.isApplicationOwned(i[2], safe_app_dir)):\n\t\t\t\t# This check only applies to symlinks, not real directories.\n\t\t\t\tif ((i[0] == \"file\" or i[0] == \"directory\") and stat.S_ISLNK(pstat)):\n\t\t\t\t\tlog.showInfoW(\"Ignoring \" + i[2] + \" because it's not owned by the application.\")\n\t\t\t\t\tcontinue\n\n\t\t\tif (i[0] == \"directory\" and not stat.S_ISLNK(pstat)):\n\t\t\t\ttry:\n\t\t\t\t\tself.oper_rmdir(i[2])\n\t\t\t\t\tattempt_successes.append(i[2])\n\t\t\t\texcept:\n\t\t\t\t\tlog.showInfoW(\"Still in use: \" + i[2])\n\t\t\t\t\t# Failure to remove a directory should not be counted\n\t\t\t\t\t# as a failure since quite often directories will not be\n\t\t\t\t\t# removed because they are still in use by other applications.\n\t\t\t\t\t#attempt_failures.append(i[2])\n\t\t\telif ((i[0] == \"file\" or i[0] == \"directory\") and stat.S_ISLNK(pstat)):\n\t\t\t\ttry:\n\t\t\t\t\tself.oper_unlink(i[2])\n\t\t\t\t\tattempt_successes.append(i[2])\n\t\t\t\texcept:\n\t\t\t\t\tlog.showErrorW(\"Unable to symlink file \" + i[2])\n\t\t\t\t\tattempt_failures.append(i[2])\n\t\t\telif (i[0] == \"notexists\"):\n\t\t\t\tlog.showInfoW(\" N \" + i[2])\n\t\t\t\tattempt_notexists.append(i[2])\n\t\t\telif (i[0] != \"notexists\" and i[0] != \"file\" and i[0] != \"directory\"):\n\t\t\t\tlog.showWarningW(\"Unknown operation for \" + i[1])\n\n\t\treturn attempt_successes, attempt_failures, total_files",
"def _cleanPackageDir(self, *_):\r\n for _, path in self._pkgDir:\r\n os.rmdir(os.path.join(self._rootfs, path))\r\n\r\n assert len(self._containers) == 0",
"def remove_fake_project_dir(request):\n def fin_remove_fake_project_dir():\n if os.path.isdir('fake-project'):\n utils.rmtree('fake-project')\n request.addfinalizer(fin_remove_fake_project_dir)",
"def devclean():\n click.echo(\"start clean your output folder...\")\n rm(OUTPUTDIR, recursive=True)",
"def cleanUp(self):\n import evoware.fileutil as F\n F.tryRemove(self.f_project, verbose=(self.VERBOSITY>1), tree=1)",
"def clean():\n if os.path.exists('_build'):\n shutil.rmtree('_build')",
"def _remove(self):\n self._system.remove(self.get_install_path())\n self._system.remove(self._source_path)",
"def _unshare_dir(target):\n logging.debug(\"Un-sharing directory %s\" % target)\n os.rmdir(target)",
"def clean_home_subdir(self):\n\n self.log.debug(\"Cleaning up %s...\" % self.home_subdir_local)\n try:\n for tree in os.listdir(self.home_subdir_local):\n self.log.debug(\"... removing %s subtree\" % tree)\n path = os.path.join(self.home_subdir_local, tree)\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n except OSError, err:\n self.log.error(\"Cleaning up intel dir %s failed: %s\" % (self.home_subdir_local, err))",
"def clean_directory():\n print(\"INFO: Cleaning old files...\")\n if os.path.exists(os.path.join(os.path.dirname(__file__), 'Scripts')):\n try:\n shutil.rmtree(os.path.join(os.path.dirname(__file__), 'Scripts'))\n except OSError as error:\n print(\"Error: %s - %s.\" % (error.filename, error.strerror))",
"def remove(self):\n mod_file = self.layout.filename\n if os.path.exists(mod_file):\n try:\n os.remove(mod_file) # Remove the module file\n os.removedirs(\n os.path.dirname(mod_file)\n ) # Remove all the empty directories from the leaf up\n except OSError:\n # removedirs throws OSError on first non-empty directory found\n pass",
"def tearDown(self):\n os.rmdir(self.cur_source)\n super().tearDown()",
"def cleanup(self):\r\n try:\r\n LOGGER.info('Deleting deployment cwd={0}'.format(self.cwd))\r\n # Clean up working directory\r\n delete_directory_tree(self.cwd)\r\n LOGGER.info('Deleted deploy deployment cwd.')\r\n except Exception as e:\r\n err_message = \\\r\n 'Failed to clean up deployment cwd \"{0}\".' \\\r\n .format(self.cwd)\r\n LOGGER.exception(err_message)",
"def clean(self):\n if os.path.exists(self.paths['build_dir']):\n shutil.rmtree(self.paths['build_dir'])\n if os.path.exists(os.path.join(self.base_dir, 'docs')):\n shutil.rmtree(os.path.join(self.base_dir, 'docs'))",
"def clean(self, util_mod):\n super(ConanContainer, self).clean(util_mod)\n build = container.named_cache_dir(\"cmake-build\", ephemeral=True)\n util_mod.force_remove_tree(os.path.join(build, \"bin\"))\n util_mod.force_remove_tree(os.path.join(build, \"lib\"))",
"def reset():\n local('cd {{ project_name }} && \\\n rm -rf static && rm -rf gzip && rm -rf build')",
"def tearDown(self):\n # unittest.TestCase.tearDown(self)\n\n root = os.path.join(\".\", \"files\")\n endingList = os.listdir(root)\n rmList = [fn for fn in endingList if fn not in self.startingList]\n\n if self.oldRoot == root:\n for fn in rmList:\n fnFullPath = os.path.join(root, fn)\n if os.path.isdir(fnFullPath):\n os.rmdir(fnFullPath)\n else:\n os.remove(fnFullPath)\n\n os.chdir(self.oldRoot)",
"def _clean(base_dir):\n # remove the snakemake cache\n shutil.rmtree(os.path.join(base_dir, \".snakemake\"), ignore_errors=True)\n\n # remove seq2science caches\n shutil.rmtree(os.path.expanduser(os.path.join(xdg.XDG_CACHE_HOME, \"seq2science\")), ignore_errors=True)\n\n # remove historic seq2science cache location\n shutil.rmtree(os.path.expanduser(f\"~/.config/seq2science/\"), ignore_errors=True)\n\n print(\"All cleaned up!\")",
"def clear_config():\n check_config()\n fs.truncate(PYWS_DIR_BIN)"
]
| [
"0.6996567",
"0.66437256",
"0.6626104",
"0.65613693",
"0.65441066",
"0.65218043",
"0.6485134",
"0.6461955",
"0.6430868",
"0.63802505",
"0.63655853",
"0.6357699",
"0.6335903",
"0.6290628",
"0.62636834",
"0.62542003",
"0.62300324",
"0.62172204",
"0.6212399",
"0.6205832",
"0.6205412",
"0.6202617",
"0.6202229",
"0.6186563",
"0.61855626",
"0.6183513",
"0.61694443",
"0.616666",
"0.6130917",
"0.6123081"
]
| 0.8166111 | 0 |
Create a new revision for each instance of the requested model | def create_revisions_for(model):
total = model.objects.count()
for idx, obj in enumerate(model.objects.iterator()):
with create_revision():
obj.save()
if idx % 100 == 0:
logger.info('Created revision for %s: %s / %s',
model._meta.verbose_name, idx + 1, total) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def NewRevision(self, ids=[], default=None):\n newID, newIndex = [ False, 0 ]\n \n thisContext={ 'internal_writing':True, 'new_revision':True, }\n for tmpObject in self.browse(getListIDs(ids)):\n latestIDs = self.GetLatestIds( [(tmpObject.engineering_code, tmpObject.engineering_revision, False)] )\n for oldObject in self.browse(latestIDs):\n if isAnyReleased(self, oldObject.id):\n note={\n 'type': 'revision process',\n 'reason': \"Creating new revision for '{old}'.\".format(old=oldObject.name),\n }\n self._insertlog(oldObject.id, note=note)\n newIndex = int(oldObject.engineering_revision) + 1\n default = {\n 'engineering_writable': False,\n 'state': 'undermodify',\n }\n oldObject.with_context(thisContext).write(default)\n default={\n 'name': oldObject.name,\n 'engineering_revision': newIndex,\n 'engineering_writable': True,\n 'state': 'draft',\n }\n \n # Creates a new \"old revision\" object\n tmpID = oldObject.with_context(thisContext).copy(default)\n if tmpID:\n wf_message_post(self, [oldObject.id], body='Created : New Revision.')\n newID = tmpID.id\n tmpID.write({'name': oldObject.name, })\n note={\n 'type': 'revision process',\n 'reason': \"Created new revision '{index}' for product '{name}'.\".format(index=newIndex,name=oldObject.name),\n }\n self._insertlog(newID, note=note)\n oldObject.with_context(thisContext)._copy_productBom(newID, [\"normal\",\"spbom\"])\n tmpID.with_context(thisContext).write( {'name': oldObject.name, } )\n note={\n 'type': 'revision process',\n 'reason': \"Copied BoM to new revision '{index}' for product '{name}'.\".format(index=newIndex,name=oldObject.name),\n }\n self._insertlog(newID, note=note)\n break\n return (newID, newIndex)",
"def create_versions_after_migration(**kwargs):\n migrations = [migration\n for migration, rollback in kwargs.get('plan', [])\n if not rollback]\n models: Set[Any] = set()\n for migration in migrations:\n models.update(getattr(migration, 'REVISED_MODELS', []))\n\n with transaction.atomic():\n for model in reversion_models(models):\n create_revisions_for(model)",
"def test_one_revision_created(self):\n with factories.single_commit():\n source = factories.ProgramFactory()\n destination = factories.ObjectiveFactory()\n\n data = [{\n \"relationship\": {\n \"context\": None,\n \"destination\": {\n \"id\": source.id,\n \"type\": \"Program\",\n \"href\": \"/api/programs/{}\".format(source.id)\n },\n \"source\": {\n \"id\": destination.id,\n \"type\": \"Objective\",\n \"href\": \"/api/objectives/{}\".format(destination.id)\n }\n }\n }]\n response = self.api.client.post(\n \"/api/relationships\",\n data=json.dumps(data),\n headers=self.headers\n )\n self.assert200(response)\n rel_id = all_models.Relationship.query.one().id\n revs_count = all_models.Revision.query.filter_by(\n source_type=\"Objective\", destination_type=\"Program\"\n ).count()\n events_count = all_models.Event.query.filter_by(\n resource_id=rel_id, resource_type=\"Relationship\",\n ).count()\n self.assertEqual(revs_count, 1)\n self.assertEqual(events_count, 1)\n\n response = self.api.client.post(\n \"/api/relationships\",\n data=json.dumps(data),\n headers=self.headers\n )\n self.assert200(response)\n new_revs_count = all_models.Revision.query.filter_by(\n source_type=\"Objective\", destination_type=\"Program\"\n ).count()\n events_count = all_models.Event.query.filter_by(\n resource_id=rel_id, resource_type=\"Relationship\",\n ).count()\n self.assertEqual(new_revs_count, 1)\n self.assertEqual(events_count, 1)",
"def revision(self) -> \"Collection\":\n revision_collection = clone(\n self.db_object, primary_key=dict(id=self.id, visibility=CollectionVisibility.PRIVATE)\n )\n self.session.add(revision_collection)\n for link in self.links:\n self.session.add(clone(link, collection_id=self.id, collection_visibility=CollectionVisibility.PRIVATE))\n self.session.commit()\n for dataset in self.datasets:\n Dataset(dataset).create_revision()\n return Collection(revision_collection)",
"def create_models( self ):",
"def create_versions(instances, operation, versional_comment):\n return [create_version(instance, operation, versional_comment)\n for instance in instances]",
"def test_item_revision_history(testapp, registry):\n objv1 = {\n 'title': \"Testing1\",\n 'description': \"This is testing object 1\",\n }\n objv2 = {\n 'title': \"Testing2\",\n 'description': \"This is testing object 2\",\n }\n objv3 = {\n 'title': \"Testing3\",\n 'description': \"This is testing object 3\",\n }\n item_uuid = testapp.post_json('/embedding-tests', objv1, status=201).json['@graph'][0]['uuid']\n testapp.patch_json('/' + item_uuid, objv2, status=200)\n testapp.patch_json('/' + item_uuid, objv3, status=200)\n\n # now get revision history\n revisions = testapp.get('/' + item_uuid + '/@@revision-history').json['revisions']\n assert len(revisions) == 3 # we made 3 edits\n\n # lets make some more\n testapp.patch_json('/' + item_uuid, objv2, status=200)\n testapp.patch_json('/' + item_uuid, objv1, status=200)\n revisions = testapp.get('/' + item_uuid + '/@@revision-history').json['revisions']\n assert len(revisions) == 5 # now we made 5 edits\n # they should be ordered by sid, recall the patch order above\n for patched_metadata, revision in zip([objv1, objv2, objv3, objv2, objv1], revisions):\n assert revision['title'] == patched_metadata['title']",
"def save(self, *args, **kwargs):\n self.pk = None\n super(AbstractRevision, self).save(*args, **kwargs)\n self.tracked_model.current_revision = self\n self.tracked_model.save()",
"def save(self, *a, **kw):\n self.sha1 = hashlib.sha1(\n force_unicode(self.delta).encode(\"utf-8\")\n ).hexdigest()\n if self.id is None:\n try:\n self.revision = Revision.objects.get_for_object(\n self.content_object\n ).latest().revision + 1\n except self.DoesNotExist:\n self.revision = 1\n attempt = 0\n while True:\n try:\n super(Revision, self).save(*a, **kw)\n break\n except IntegrityError:\n self.revision += 1\n attempt += 1\n if attempt > 20:\n raise",
"def _populateModel(self):\n\n self.repoPath = self.argv[1]\n self.rev = self.argv[2]\n self.model.rev = self.rev\n self.model.repo = os.path.split(self.repoPath)[-1]\n self.prefix = (self.addRepoPrefix() and ('/' + self.model.repo)) or ''\n\n # First, get the user and log message\n lines = self._svnlook('info')\n self.model.user = lines[0][:-1]\n self.model.log = ''.join(lines[3:]).strip()\n\n # Now build an initial tree of file and tree changes\n for line in self._svnlook('changed'):\n action = self.actions[line[0]]\n target = '/' + line[4:-1]\n\n if target.endswith('/'):\n directory = self.model.directory(self.prefix + target)\n directory.action = action\n else:\n parts = target.split('/')\n name = parts[-1]\n directoryPath = '/' + '/'.join(parts[0:-1]) + '/'\n\n file = File(name, self.model.directory(self.prefix + directoryPath), action)\n\n # Markers to tell us when we hit a new diff\n markers = ['Modified', 'Added', 'Copied', 'Deleted', 'Property changes on']\n\n # Recontruct each diff by parsing through the output of svnlook line by line\n diffs = []\n partialDiff = None\n\n #A marker word after a \"____\" line is a change in a property and shouldn't be added as a change\n #in a file. InProperty keeps track of this. If it's 0 this is a normal line, any larger \n #and it's a property line.\n inProperty = 1\n for line in self.getDiffLines():\n inProperty = max(0, inProperty-1)\n if line == \"___________________________________________________________________\\n\":\n inProperty = 2\n\n # Look for Modified:, Added:, etc.\n if line[0:line.find(':')] in markers and not inProperty > 0:\n # Handle starting a new diff\n partialDiff = [line]\n diffs.append(partialDiff)\n elif partialDiff:\n partialDiff.append(line)\n\n if len(diffs) == 0:\n for file in self.model.files():\n file.delta = '<Unavailable>'\n file.diff = ''\n\n # And finally parse through the diffs and save them into our tree of changes\n for diff in diffs:\n # Use [:-1] to leave of the trailing \\n\n start = diff[0].find(': ') + 2\n stop = diff[0].find('(') - 1 # -1 ignores the space before the paren\n if stop == -2: stop = len(diff[0])\n\n filePath = '/' + diff[0][:-1][start:stop]\n\n # This could be a file or a directory - going ahead with the .file()\n # call for most directories is fine as it will just return null.\n #\n # Howeever, root / will exception out as an invalid file path so\n # just special case it\n if filePath == '/':\n file = None\n else:\n file = self.model.file(self.prefix + filePath)\n\n # Maybe its a directory\n if file:\n isFile = True\n else:\n file = self.model.directory(self.prefix + filePath + '/')\n isFile = False\n\n if not diff[0].startswith('Property changes on:'):\n file.delta, file.diff = self._parse_diff(diff)\n else:\n if file.diff:\n # Only files will already have a diff set\n file.diff = file.diff + '\\n\\n' + ''.join(diff)\n else:\n # If the 'Property changes on' line is here without a\n # file.diff, that file.diff will never come because it would\n # have been printed before us\n if isFile:\n sep = '===================================================================\\n\\n'\n file.diff = ''.join([sep] + diff)\n file.delta = '+0 -0'\n else:\n file.diff = ''.join(diff)",
"def _build(cls, update, others, self_id, entities, client):",
"def create_revision(self, model_definition_uid):\n WMLResource._chk_and_block_create_update_for_python36(self)\n\n if self._client.ICP_30 is None and not self._client.CLOUD_PLATFORM_SPACES and not self._client.ICP_PLATFORM_SPACES:\n raise WMLClientError(\n u'Revisions APIs are not supported in this release.')\n\n self._client._check_if_either_is_set()\n\n model_defn_id = str_type_conv(model_definition_uid)\n ModelDefinition._validate_type(model_defn_id, u'model_defn_id', STR_TYPE, True)\n\n print(\"Creating model_definition revision...\")\n\n # return self._get_required_element_from_response(\n # self._create_revision_artifact_for_assets(model_defn_id, 'Model definition'))\n\n response = self._get_required_element_from_response(\n self._create_revision_artifact_for_assets(model_defn_id, 'Model definition'))\n\n if not self._client.CLOUD_PLATFORM_SPACES and not self._client.ICP_PLATFORM_SPACES:\n return response\n else:\n entity = response[u'entity']\n\n try:\n del entity[u'wml_model_definition'][u'ml_version']\n except KeyError:\n pass\n\n final_response = {\n \"metadata\": response[u'metadata'],\n \"entity\": entity\n }\n\n return final_response",
"def revision_history(self, uuid):\n return self.write.revision_history(rid=uuid)",
"def _build_revision(item):\n errors = [Error(message=e['message'], code=e['code']) for e in (item['errors'] or [])]\n rev = Revision(\n revision=item['revision'],\n version=item['version'],\n created_at=parser.parse(item['created-at']),\n status=item['status'],\n errors=errors,\n )\n return rev",
"def list_revisions(self, model_definition_uid, limit=None):\n ##For CP4D, check if either spce or project ID is set\n if self._client.ICP_30 is None and not self._client.CLOUD_PLATFORM_SPACES and not self._client.ICP_PLATFORM_SPACES:\n raise WMLClientError(\n u'Revisions APIs are not supported in this release.')\n self._client._check_if_either_is_set()\n href = self._href_definitions.get_model_definition_assets_href() + \"/\" + model_definition_uid +\\\n u'/revisions'\n params = self._client._params()\n #params = None\n if limit is not None:\n ModelDefinition._validate_type(limit, u'limit', int, False)\n params.update( {\n \"limit\": limit\n })\n if not self._ICP:\n response = requests.get(href, params, headers=self._client._get_headers())\n else:\n response = requests.get(href, params=params, headers=self._client._get_headers(), verify=False)\n self._handle_response(200, u'model_definition revision assets', response)\n asset_details = self._handle_response(200, u'model_definition revision assets', response)[\"results\"]\n model_def_values = [\n (m[u'metadata'][u'asset_id'],\n m[u'metadata'][u'revision_id'],\n m[u'metadata'][u'name'],\n m[u'metadata'][u'asset_type'],\n m[u'metadata'][u'commit_info'][u'committed_at']) for\n m in asset_details]\n\n self._list(model_def_values, [u'GUID', u'REV_ID', u'NAME', u'ASSET_TYPE', u'REVISION_COMMIT'],\n limit,\n _DEFAULT_LIST_LENGTH)",
"def revision_list():\n for rev in orm.DataRevision.select():\n click.echo(rev.name)",
"def save(self, *args, **kwargs):\n super(AbstractAuditModel, self).save(*args, **kwargs)\n if not self.current_revision:\n revision = self.__class__.current_revision.field.related_model()\n revision.tracked_model = self\n revision.save()",
"def GetRevisionsSample():\n client = CreateClient()\n for entry in client.GetResources(limit=55).entry:\n revisions = client.GetRevisions(entry)\n for revision in revisions.entry:\n print revision.publish, revision.GetPublishLink()",
"def build(self):\n labelled_documents = self.get_labelled_documents_queryset()\n\n self.model = self.build_model(labelled_documents)\n self.save_model()",
"def set_version_db(apps, schema_editor):\n Version = apps.get_model(\"reversion\", \"Version\")\n content_types = Version.objects.values_list(\"content_type\", flat=True).distinct()\n for content_type in content_types:\n model_class = content_type.model_class()\n db = router.db_for_write(model_class)\n Version.objects.filter(content_type=content_type).update(db=db)",
"def do_create_version(**kwargs):\n version_params = {\n \"name\": kwargs['dag_run'].conf.get('model_version'),\n \"description\": 'Version 1',\n \"runtimeVersion\": kwargs['dag_run'].conf.get('tf_version'),\n \"deploymentUri\": 'gs://{}/{}'.format(COMPOSER_BUCKET_NAME, PREFIX_FINAL_MODEL)\n }\n\n ti = kwargs['ti']\n\n mle = MLEngineHook()\n\n model_name = kwargs['dag_run'].conf.get('model_name')\n model_versions = ti.xcom_pull(key='model_versions', task_ids='list_versions')\n\n version_path = 'projects/{}/models/{}/versions/{}'.format(PROJECT,\n model_name,\n version_params['name'])\n\n if version_path in [v['name'] for v in model_versions]:\n logging.info(\"Delete previously version of the model to overwrite.\")\n mle.delete_version(PROJECT, model_name, version_params['name'])\n\n mle.create_version(PROJECT, model_name, version_params)",
"def _update_revision_list_indexes(self):\n for i, rev in enumerate(self.revisions):\n rev.list_index = i\n for i in xrange(len(self.revisions)):\n if i:\n self.revisions[i].previous_revision = self.revisions[i - 1]\n if i < len(self.revisions) - 1:\n self.revisions[i].next_revision = self.revisions[i + 1]",
"def _fix_type_revisions(type_, rows):\n model = getattr(all_models, type_, None)\n revisions_table = all_models.Revision.__table__\n if not model:\n logger.warning(\"Failed to update revisions for invalid model: %s\", type_)\n return\n\n ids = [row.resource_id for row in rows]\n objects = model.eager_query().filter(model.id.in_(ids))\n\n obj_content_map = {obj.id: obj.log_json() for obj in objects}\n\n for row in rows:\n # This if statement checks that we only update content for objects that\n # exist. If an object has been deleted via import or in some other way that\n # its delete revision was not created, this if statement will prevent a\n # false error.\n # Note: there will be other migrations that deal with adding missing\n # revisions for those deleted objects.\n if row.resource_id in obj_content_map:\n db.session.execute(\n revisions_table.update()\n .where(revisions_table.c.id == row.id)\n .values(content=obj_content_map[row.resource_id])\n )\n db.session.commit()",
"def new(self, metadata, deleted_at = None):\n return Revisions.new(self.auth, metadata, deleted_at)",
"def get_revisions_queryset(self):\n pass",
"def revision(self):\n self.r3.reuse = self.r2\n self.r3.case = self.case\n self.r3.revise()\n self.r2.predictionGenre = self.case.playlist_genre\n self.r2.predictionSubGenre =self.case.playlist_subgenre\n print()",
"def revision(self, message):\n alembic.command.revision(self.alembic_config(), message=message)",
"def db_setup(self):\n revision: Table = Table(self.revision_table,\n self.sql_metadata,\n Column(self._MigrationTableColumns.revisions.value, Text, primary_key=True),\n schema=self.revision_table_schema)\n revision.create(self.psql_engine)",
"def GenerateRevisionFile(self):\n\n print 'Saving revision to %s' % self.revisions_path\n Write(\n self.revisions_path,\n ('{\"chromium_revision\":%d, \"webkit_revision\":%d, '\n '\"v8_revision\":%d}') % (self._chromium_revision,\n self._webkit_revision,\n self._v8_revision))",
"def test_create_version_itterates_versions_by_item(self):\n # arrange\n item1 = self.collection.create_item(_rs(), _rs())\n item2 = self.collection.create_item(_rs(), _rs())\n item1_version_numbers = [1, 2, 3]\n item2_version_numbers = [1, 2]\n\n # act\n versions1 = [\n item1.create_version(),\n item1.create_version(),\n item1.create_version(),\n ]\n versions2 = [\n item2.create_version(),\n item2.create_version(),\n ]\n\n # assert\n assert item1_version_numbers == [v.version for v in versions1]\n assert item2_version_numbers == [v.version for v in versions2]\n\n for v in versions1:\n assert v not in versions2\n for v in versions2:\n assert v not in versions1"
]
| [
"0.64780706",
"0.6246466",
"0.6127586",
"0.60455424",
"0.6027535",
"0.5952222",
"0.5776394",
"0.5773421",
"0.572619",
"0.5694227",
"0.5680662",
"0.55927366",
"0.55635387",
"0.55238825",
"0.54905146",
"0.53948104",
"0.53815556",
"0.5357851",
"0.53216803",
"0.5307395",
"0.52862716",
"0.52827483",
"0.52775866",
"0.52357495",
"0.5233463",
"0.52298814",
"0.5203755",
"0.5192461",
"0.51711136",
"0.51647544"
]
| 0.8095211 | 0 |
Given a sequence of (app_label, model_name) pairs, determine which are still Django models and registered with reversions | def reversion_models(model_pairs):
for app_label, model_name in model_pairs:
try:
model = apps.get_model(app_label, model_name)
if reversion.is_registered(model):
yield model
else:
logger.warn("Model not registered with reversions %s %s",
app_label, model_name)
except LookupError:
logger.warn("Couldn't find model %s %s", app_label, model_name) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def register_models(self, app_label, *models):\n for model in models:\n # Store as 'name: model' pair in a dictionary\n # in the app_models dictionary\n model_name = model._meta.model_name\n model_dict = self.app_models.setdefault(app_label, SortedDict())\n if model_name in model_dict:\n # The same model may be imported via different paths (e.g.\n # appname.models and project.appname.models). We use the source\n # filename as a means to detect identity.\n fname1 = os.path.abspath(upath(\n sys.modules[model.__module__].__file__))\n fname2 = os.path.abspath(upath(\n sys.modules[model_dict[model_name].__module__].__file__))\n # Since the filename extension could be .py the first time and\n # .pyc or .pyo the second time, ignore the extension when\n # comparing.\n if os.path.splitext(fname1)[0] == os.path.splitext(fname2)[0]:\n continue\n model_dict[model_name] = model\n self._get_models_cache.clear()",
"def _validate_models_on_save_pre_17(app_name):\n from django.db.models.loading import load_app, cache, get_models, get_app\n\n load_app(app_name)\n loaded_models = cache.app_models[app_name]\n for (key, model) in loaded_models.items():\n validate_model_on_save(model)",
"def _on_post_syncdb(app, verbosity=2, db=DEFAULT_DB_ALIAS, **kwargs):\n app_models = [m for m in get_models(app) if issubclass(m, ContentItem)]\n for model in app_models:\n update_model_prefix(model, verbosity=verbosity, db=db)",
"def _autodiscover(registry):\n import copy\n from django.conf import settings\n from django.utils.importlib import import_module\n from django.utils.module_loading import module_has_submodule\n\n for app in settings.INSTALLED_APPS:\n mod = import_module(app)\n # Attempt to import the app's.\n try:\n before_import_registry = copy.copy(registry)\n import_module('%s.models_changes_registry' % app)\n except:\n registry = before_import_registry\n\n # Decide whether to bubble up this error. If the app just\n # doesn't have an admin module, we can ignore the error\n # attempting to import it, otherwise we want it to bubble up.\n if module_has_submodule(mod, 'models_changes_registry'):\n raise",
"def get_installed_models():\n global _installed_models_cache\n if _installed_models_cache is not None:\n return _installed_models_cache\n _installed_models_cache = []\n for a in settings.INSTALLED_APPS:\n try:\n _installed_models_cache.append(__import__(a + '.models', '', '', ['']))\n except ImportError:\n pass\n return _installed_models_cache",
"def _unload(apps, schema_editor):\n for modelname in models:\n model = apps.get_model(appname, modelname)\n model.objects.all().delete()",
"def validate_models_on_save(app_name):\n\n if django_gte_17():\n _validate_models_on_save_post_17(app_name)\n else:\n _validate_models_on_save_pre_17(app_name)",
"def get_live_tracked_models(self, model_class):\n return self.update_models[model_class] + self.create_models[model_class]",
"def for_app_models(self, *args, **kwargs):\n content_types = []\n for app_model in args:\n app, model = app_model.split(\".\")\n content_types.append(ContentType.objects.get(app_label=app, \n model=model))\n return self.for_content_types(content_types, **kwargs)",
"def get_my_app_list(app_list):\n all_excluded_models = getattr(settings, 'EXCLUDE_ADMIN_APPS_MODELS', {})\n\n for app in app_list:\n models = app['models']\n match_app_models = [app_model_name.split('.')[1] for app_model_name in all_excluded_models if\n app_model_name.split('.')[0] == str(app['name'])]\n filter_models = [model for model in models if model['object_name'] not in match_app_models]\n app['models'] = filter_models\n\n return app_list",
"def handle(self, *args, **options):\n app_labels = [app.split('.')[-1] for app in settings.INSTALLED_APPS]\n if not args:\n args = app_labels\n for app in args:\n if app not in app_labels:\n print \"%s is not a valid application\" % app\n continue\n\n app_module = get_app(app_label=app, emptyOK=True)\n if app_module is None:\n continue\n\n print \"Models of %s:\" % app\n for model in get_models(app_module):\n print \" - %s has %d entries\" % (\n model.__name__,\n model.objects.count()\n )",
"def update_contenttypes(sender, verbosity=2, db=DEFAULT_DB_ALIAS, **kwargs):\n from django.contrib.contenttypes.models import ContentType\n \n if verbosity >= 2:\n print(\"Running Djangae version of update_contenttypes on {}\".format(sender))\n\n try:\n apps.get_model('contenttypes', 'ContentType')\n except LookupError:\n return\n\n if hasattr(router, \"allow_migrate_model\"):\n if not router.allow_migrate_model(db, ContentType):\n return\n\n ContentType.objects.clear_cache()\n app_models = sender.get_models()\n if not app_models:\n return\n # They all have the same app_label, get the first one.\n app_label = sender.label\n app_models = dict(\n (model._meta.model_name, model)\n for model in app_models\n )\n\n created_or_existing_pks = []\n created_or_existing_by_unique = {}\n\n for (model_name, model) in app_models.items():\n # Go through get_or_create any models that we want to keep\n defaults = {}\n if django.VERSION < (1, 9):\n defaults['name'] = smart_text(model._meta.verbose_name_raw)\n\n ct, created = ContentType.objects.get_or_create(\n app_label=app_label,\n model=model_name,\n defaults=defaults,\n )\n\n if verbosity >= 2 and created:\n print(\"Adding content type '%s | %s'\" % (ct.app_label, ct.model))\n\n created_or_existing_pks.append(ct.pk)\n created_or_existing_by_unique[(app_label, model_name)] = ct.pk\n\n # Now lets see if we should remove any\n\n to_remove = [x for x in ContentType.objects.filter(app_label=app_label) if x.pk not in created_or_existing_pks]\n\n # Now it's possible that our get_or_create failed because of consistency issues and we create a duplicate.\n # Then the original appears in the to_remove and we remove the original. This is bad. So here we go through the\n # to_remove list, and if we created the content type just now, we delete that one, and restore the original in the\n # cache\n for ct in to_remove:\n unique = (ct.app_label, ct.model)\n if unique in created_or_existing_by_unique:\n # We accidentally created a duplicate above due to HRD issues, delete the one we created\n ContentType.objects.get(pk=created_or_existing_by_unique[unique]).delete()\n created_or_existing_by_unique[unique] = ct.pk\n ct.save() # Recache this one in the context cache\n\n to_remove = [ x for x in to_remove if (x.app_label, x.model) not in created_or_existing_by_unique ]\n\n # Now, anything left should actually be a stale thing. It's still possible we missed some but they'll get picked up\n # next time. Confirm that the content type is stale before deletion.\n if to_remove:\n if kwargs.get('interactive', False):\n content_type_display = '\\n'.join([\n ' %s | %s' % (x.app_label, x.model)\n for x in to_remove\n ])\n ok_to_delete = input(\"\"\"The following content types are stale and need to be deleted:\n\n%s\n\nAny objects related to these content types by a foreign key will also\nbe deleted. Are you sure you want to delete these content types?\nIf you're unsure, answer 'no'.\n\n Type 'yes' to continue, or 'no' to cancel: \"\"\" % content_type_display)\n else:\n ok_to_delete = False\n\n if ok_to_delete == 'yes':\n for ct in to_remove:\n if verbosity >= 2:\n print(\"Deleting stale content type '%s | %s'\" % (ct.app_label, ct.model))\n ct.delete()\n else:\n if verbosity >= 2:\n print(\"Stale content types remain.\")",
"def autodiscover():\n import copy\n from django.conf import settings\n from django.utils.importlib import import_module\n from django.utils.module_loading import module_has_submodule\n from modeltranslation.translator import translator\n from modeltranslation.settings import TRANSLATION_FILES\n\n for app in settings.INSTALLED_APPS:\n mod = import_module(app)\n # Attempt to import the app's translation module.\n module = '%s.translation' % app\n before_import_registry = copy.copy(translator._registry)\n try:\n import_module(module)\n except ImportError:\n # Reset the model registry to the state before the last import as\n # this import will have to reoccur on the next request and this\n # could raise NotRegistered and AlreadyRegistered exceptions\n translator._registry = before_import_registry\n # Re-raise ImportError in case it was raised from the\n # 'translation' module.\n if module_has_submodule(mod, 'translation'):\n raise\n\n for module in TRANSLATION_FILES:\n import_module(module)",
"def get_installed_model_modules(core_models=None):\n global _installed_modules_cache\n if _installed_modules_cache is not None:\n return _installed_modules_cache\n _installed_modules_cache = []\n\n # django.models is a special case.\n for submodule in (core_models or []):\n _installed_modules_cache.append(__import__('django.models.%s' % submodule, '', '', ['']))\n for m in get_installed_models():\n for submodule in getattr(m, '__all__', []):\n mod = __import__('django.models.%s' % submodule, '', '', [''])\n try:\n mod._MODELS\n except AttributeError:\n pass # Skip model modules that don't actually have models in them.\n else:\n _installed_modules_cache.append(mod)\n return _installed_modules_cache",
"def check_default_models():\r\n\tfrom django.db import models\r\n\tfrom django.core.exceptions import ObjectDoesNotExist\r\n\tfrom reserver.models import EventCategory, Organization, EmailTemplate\r\n\r\n\t# Check default organization\r\n\ttry:\r\n\t\tgunnerus_org = Organization.objects.get(name=\"R/V Gunnerus\")\r\n\texcept Organization.DoesNotExist:\r\n\t\tgunnerus_org = Organization(name=\"R/V Gunnerus\", is_NTNU=True)\r\n\t\tgunnerus_org.save()\r\n\r\n\t# Check event categories\r\n\tfor ec in default_event_categories:\r\n\t\ttry:\r\n\t\t\tevent_category = EventCategory.objects.get(name=ec[0])\r\n\t\t\tif not event_category.is_default:\r\n\t\t\t\tevent_category.is_default = True\r\n\t\t\t\tevent_category.save()\r\n\t\texcept EventCategory.DoesNotExist:\r\n\t\t\tevent_category = EventCategory(name=ec[0], icon=[1], colour=[2], is_default=True)\r\n\t\t\tevent_category.save()\r\n\r\n\t# Check email templates\r\n\tfor df in default_email_templates:\r\n\t\ttry:\r\n\t\t\ttemplate = EmailTemplate.objects.get(title=df[0])\r\n\t\t\tif not template.is_default:\r\n\t\t\t\ttemplate.is_default = True\r\n\t\t\t\ttemplate.save()\r\n\t\texcept EmailTemplate.DoesNotExist:\r\n\t\t\ttemplate = EmailTemplate(title=df[0], group=df[1], message=df[2], time_before=df[3], date=df[4], is_active=df[5], is_muteable=df[6], is_default=True)\r\n\t\t\ttemplate.save()",
"def _populate(self):\n unique_models = {}\n ambiguous_models = []\n\n all_models = apps.all_models\n\n for app_model in all_models.values():\n for name, model in app_model.items():\n if name not in unique_models:\n unique_models[name] = model\n else:\n ambiguous_models.append(name)\n\n for name in ambiguous_models:\n unique_models.pop(name, None)\n\n self._ambiguous_models = ambiguous_models\n self._unique_models = unique_models",
"def test_same_models(self):\n\t\t\n\t\t# TODO: finish\n\t\tpass",
"def autodiscover():\n global _RACE_PROTECTION\n\n if _RACE_PROTECTION:\n return\n _RACE_PROTECTION = True\n try:\n return filter(None, [find_related_module(app, 'tasks')\n for app in settings.INSTALLED_APPS])\n finally:\n _RACE_PROTECTION = False",
"def models(self):\r\n return self.get_field('model')",
"def models(self):\r\n return self.get_field('model')",
"def check_model_signals(app_configs=None, **kwargs):\n # Avoid circular import\n from django.db import models\n\n errors = []\n for name in dir(models.signals):\n obj = getattr(models.signals, name)\n if isinstance(obj, models.signals.ModelSignal):\n for reference, receivers in obj.unresolved_references.items():\n for receiver, _, _ in receivers:\n # The receiver is either a function or an instance of class\n # defining a `__call__` method.\n if isinstance(receiver, types.FunctionType):\n description = \"The '%s' function\" % receiver.__name__\n else:\n description = \"An instance of the '%s' class\" % receiver.__class__.__name__\n errors.append(\n Error(\n \"%s was connected to the '%s' signal \"\n \"with a lazy reference to the '%s' sender, \"\n \"which has not been installed.\" % (\n description, name, '.'.join(reference)\n ),\n obj=receiver.__module__,\n hint=None,\n id='signals.E001'\n )\n )\n return errors",
"def get_models(self, app_name):\n try:\n models = list(apps.get_app_config(app_name).get_models())\n return models\n except:\n raise LookupError(f\"this is no such app {app_name}\")",
"def reg_admin():\n for model in models.get_models():\n if _check_name(model):\n admin.site.unregister(model)\n admin.site.register(model, LocationAuditAdmin)",
"def get_seo_models():\n seo_models = []\n for model_name in getattr(settings, setting_name_seo_models, ()):\n if \".\" in model_name:\n # TODO: Test this block\n app_label, model_name = model_name.split(\".\", 1)\n model = models.get_model(app_label, model_name)\n if model:\n seo_models.append(model)\n else:\n app = models.get_app(model_name)\n if app:\n seo_models.extend(models.get_models(app))\n\n return seo_models",
"def register_all_models(module=None,path=None):\n if module is None:\n module='models'\n if path is None:\n path=os.path.dirname(os.path.abspath(__file__))\n classes = pyclbr.readmodule(module,[path])\n elif type(path) is str:\n classes = pyclbr.readmodule(module,[path])\n else:\n classes = pyclbr.readmodule(module,path)\n for model in classes:\n # now the dirty part, check that the models are classes that inherit from models.Model\n # if this inhertance is not explicit in the class call it will not be registered\n for superclass in classes[model].super:\n if re.search('models.Model',superclass):\n # this could be a from module import * above this loop\n exec('from %s import %s'%(module,classes[model].name))\n exec('admin.site.register(%s)'%classes[model].name)",
"def autodiscover():\n from django.utils.importlib import import_module\n global LOADED\n if LOADED:\n return\n LOADED = True\n for app in settings.INSTALLED_APPS:\n try:\n import_module(\"%s.page_widgets\" % app)\n except ImportError, e:\n if \"WidgetModel\" in \"%s\" % e:\n traceback.print_exc(file=sys.stdout)\n pass",
"def from_app(cls, app, database):\n app_sig = cls(app_id=get_app_label(app))\n\n for model in get_models(app):\n # Only include those models that can be synced.\n #\n # On Django 1.7 and up, we need to check if the model allows for\n # migrations (using allow_migrate_model).\n #\n # On older versions of Django, we check if the model allows for\n # synchronization to the database (allow_syncdb).\n if (db_router_allows_migrate(database, get_app_label(app),\n model) or\n db_router_allows_syncdb(database, model)):\n app_sig.add_model(model)\n\n return app_sig",
"def load_models():\n vectorizer = ModelStorage.objects.all().values_list(\"vectorizer\", flat = True)[0]\n classifier = ModelStorage.objects.all().values_list(\"classifier\", flat = True)[0]\n\n return vectorizer, classifier",
"def get_models_by_name(class_names: Iterable[str]) -> List[models.Model]:\n matching_models = []\n for model in apps.get_models():\n parent_class_names = set([parent.__name__ for parent in model.mro()])\n if parent_class_names.intersection(class_names):\n matching_models.append(model)\n return matching_models",
"def sort_dependencies(app_list):\n from django.db.models import get_model, get_models\n # Process the list of models, and get the list of dependencies\n model_dependencies = []\n models = set()\n for app, model_list in app_list:\n if model_list is None:\n model_list = get_models(app)\n\n for model in model_list:\n models.add(model)\n # Add any explicitly defined dependencies\n if hasattr(model, 'natural_key'):\n deps = getattr(model.natural_key, 'dependencies', [])\n if deps:\n deps = [get_model(*d.split('.')) for d in deps]\n else:\n deps = []\n\n # Now add a dependency for any FK or M2M relation with\n # a model that defines a natural key\n for field in model._meta.fields:\n if hasattr(field.rel, 'to'):\n rel_model = field.rel.to\n if hasattr(rel_model, 'natural_key') and rel_model != model:\n deps.append(rel_model)\n for field in model._meta.many_to_many:\n rel_model = field.rel.to\n if hasattr(rel_model, 'natural_key') and rel_model != model:\n deps.append(rel_model)\n model_dependencies.append((model, deps))\n\n model_dependencies.reverse()\n # Now sort the models to ensure that dependencies are met. This\n # is done by repeatedly iterating over the input list of models.\n # If all the dependencies of a given model are in the final list,\n # that model is promoted to the end of the final list. This process\n # continues until the input list is empty, or we do a full iteration\n # over the input models without promoting a model to the final list.\n # If we do a full iteration without a promotion, that means there are\n # circular dependencies in the list.\n model_list = []\n while model_dependencies:\n skipped = []\n changed = False\n while model_dependencies:\n model, deps = model_dependencies.pop()\n\n # If all of the models in the dependency list are either already\n # on the final model list, or not on the original serialization list,\n # then we've found another model with all it's dependencies satisfied.\n found = True\n for candidate in ((d not in models or d in model_list) for d in deps):\n if not candidate:\n found = False\n if found:\n model_list.append(model)\n changed = True\n else:\n skipped.append((model, deps))\n if not changed:\n raise CommandError(\"Can't resolve dependencies for %s in serialized app list.\" %\n ', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)\n for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__))\n )\n model_dependencies = skipped\n\n return model_list"
]
| [
"0.637576",
"0.6375394",
"0.630516",
"0.6215255",
"0.61491334",
"0.6054366",
"0.5984944",
"0.58667654",
"0.5846883",
"0.58225566",
"0.5815786",
"0.5760193",
"0.5675553",
"0.5650057",
"0.5580755",
"0.55793256",
"0.5577224",
"0.5542611",
"0.5505628",
"0.5505628",
"0.5504065",
"0.54890245",
"0.5470362",
"0.5468758",
"0.5465486",
"0.54435295",
"0.5441824",
"0.5418061",
"0.5403693",
"0.54033643"
]
| 0.73699176 | 0 |
A post_migrate signal handler which creates revisions for models listed in appropriately annotated migrations. | def create_versions_after_migration(**kwargs):
migrations = [migration
for migration, rollback in kwargs.get('plan', [])
if not rollback]
models: Set[Any] = set()
for migration in migrations:
models.update(getattr(migration, 'REVISED_MODELS', []))
with transaction.atomic():
for model in reversion_models(models):
create_revisions_for(model) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def model_post_migrate(*args, **kwargs):\n global IN_MIGRATIONS\n IN_MIGRATIONS = False",
"def post_migrations(self):",
"def model_pre_migrate(*args, **kwargs):\n global IN_MIGRATIONS\n IN_MIGRATIONS = True",
"def test_migrating_with_post_migrate_function(self):\n yield self.mk_simple_models_old(3)\n model_migrator = self.make_migrator(\n post_migrate_function=fqpn(post_migrate_function))\n loads, stores = self.recorded_loads_and_stores(model_migrator)\n\n yield model_migrator.run()\n self.assertEqual(model_migrator.output, [\n \"Migrating ...\",\n \"Done, 3 objects migrated.\",\n ])\n self.assertEqual(sorted(loads), [u\"key-%d\" % i for i in range(3)])\n self.assertEqual(sorted(stores), [u\"key-%d\" % i for i in range(3)])\n for i in range(3):\n obj = yield self.model.load(u\"key-%d\" % i)\n self.assertEqual(obj.a, u\"value-%d-modified\" % i)",
"def migrate(self):\n\tpass",
"def _on_post_syncdb(app, verbosity=2, db=DEFAULT_DB_ALIAS, **kwargs):\n app_models = [m for m in get_models(app) if issubclass(m, ContentItem)]\n for model in app_models:\n update_model_prefix(model, verbosity=verbosity, db=db)",
"def create_revisions_for(model):\n total = model.objects.count()\n for idx, obj in enumerate(model.objects.iterator()):\n with create_revision():\n obj.save()\n if idx % 100 == 0:\n logger.info('Created revision for %s: %s / %s',\n model._meta.verbose_name, idx + 1, total)",
"def setup_before_migration(self, apps):",
"def migration():",
"def migrate(cr, version):\n pass",
"def run_post_apply(self, migrations, force=False):\n for m in migrations.post_apply:\n self.apply_one(m, mark=False, force=force)",
"def test_migrating_old_and_new_with_post_migrate_function(self):\n yield self.mk_simple_models_old(1)\n yield self.mk_simple_models_new(1, start=1)\n yield self.mk_simple_models_old(1, start=2)\n model_migrator = self.make_migrator(\n post_migrate_function=fqpn(post_migrate_function))\n loads, stores = self.recorded_loads_and_stores(model_migrator)\n\n yield model_migrator.run()\n self.assertEqual(model_migrator.output, [\n \"Migrating ...\",\n \"Done, 3 objects migrated.\",\n ])\n self.assertEqual(sorted(loads), [u\"key-%d\" % i for i in range(3)])\n self.assertEqual(sorted(stores), [u\"key-%d\" % i for i in range(3)])\n for i in range(3):\n obj = yield self.model.load(u\"key-%d\" % i)\n self.assertEqual(obj.a, u\"value-%d-modified\" % i)",
"def post_migrate_function_new_only(obj):\n if obj.was_migrated:\n return post_migrate_function(obj)\n return False",
"def register_migrations(self, migrations):\n with self.internal_db.begin() as conn:\n for migration in migrations:\n conn.execute(\n \"INSERT INTO migration (name) \" \"VALUES ('%s');\" % migration\n )",
"def test_migrating_old_and_new_with_new_only_post_migrate_function(self):\n yield self.mk_simple_models_old(1)\n yield self.mk_simple_models_new(1, start=1)\n yield self.mk_simple_models_old(1, start=2)\n model_migrator = self.make_migrator(\n post_migrate_function=fqpn(post_migrate_function_new_only))\n loads, stores = self.recorded_loads_and_stores(model_migrator)\n\n yield model_migrator.run()\n self.assertEqual(model_migrator.output, [\n \"Migrating ...\",\n \"Done, 3 objects migrated.\",\n ])\n self.assertEqual(sorted(loads), [u\"key-0\", u\"key-1\", u\"key-2\"])\n self.assertEqual(sorted(stores), [u\"key-0\", u\"key-2\"])\n\n obj_0 = yield self.model.load(u\"key-0\")\n self.assertEqual(obj_0.a, u\"value-0-modified\")\n obj_1 = yield self.model.load(u\"key-1\")\n self.assertEqual(obj_1.a, u\"value-1\")\n obj_2 = yield self.model.load(u\"key-2\")\n self.assertEqual(obj_2.a, u\"value-2-modified\")",
"def migrations(request):\n marker = request.node.get_marker('migrations')\n migrations = [] if marker is None else marker.args\n\n # we don't need to test all the variations of migration comments here, only\n # in the parser. If this requirement changes in the future, the statements\n # here should just use the up_stmt and down_stmt fixtures.\n up_cmd = '%s %s' % (TEST_COMMENTS[0], UP_CMD)\n down_cmd = '%s %s' % (TEST_COMMENTS[0], DOWN_CMD)\n\n migrations_dir = py.path.local.mkdtemp()\n\n for name, (up, down) in migrations:\n migration = '\\n'.join([up_cmd, up, down_cmd, down])\n migrations_dir.join(name).write(migration)\n\n yield migrations_dir\n\n migrations_dir.remove()",
"def migrate(args=''):\n run_commands('python manage.py migrate %s' % args)",
"def configure_ext_migrate(app):\n migrate = Migrate(app, models.db)",
"def _walk_versions(self, config, engine, downgrade=True, snake_walk=False):\n\n revisions = self._revisions()\n for dest, curr in revisions:\n self._migrate_up(config, engine, dest, curr, with_data=True)\n\n if snake_walk and dest != 'None':\n # NOTE(I159): Pass reversed arguments into `_migrate_down`\n # method because we have been upgraded to a destination\n # revision and now we going to downgrade back.\n self._migrate_down(config, curr, dest, with_data=True)\n self._migrate_up(config, dest, curr, with_data=True)\n\n if downgrade:\n revisions = self._revisions(downgrade)\n for dest, curr in revisions:\n self._migrate_down(config, engine, dest, curr, with_data=True)\n if snake_walk:\n self._migrate_up(config, engine, curr, dest,\n with_data=True)\n self._migrate_down(config, engine, dest, curr,\n with_data=True)",
"def test_migrating_with_deferred_post_migrate_function(self):\n yield self.mk_simple_models_old(3)\n model_migrator = self.make_migrator(\n post_migrate_function=fqpn(post_migrate_function_deferred))\n loads, stores = self.recorded_loads_and_stores(model_migrator)\n\n yield model_migrator.run()\n self.assertEqual(model_migrator.output, [\n \"Migrating ...\",\n \"Done, 3 objects migrated.\",\n ])\n self.assertEqual(sorted(loads), [u\"key-%d\" % i for i in range(3)])\n self.assertEqual(sorted(stores), [u\"key-%d\" % i for i in range(3)])\n for i in range(3):\n obj = yield self.model.load(u\"key-%d\" % i)\n self.assertEqual(obj.a, u\"value-%d-modified\" % i)",
"def migrate():\n puts(yellow(\"Run South migrations\"))\n django_manage('migrate')",
"def log_m2m_changes(signal, action, **kwargs):\n if action not in [\"post_add\", \"post_clear\", \"post_remove\"]:\n return\n\n if action == \"post_clear\":\n changed_queryset = kwargs[\"model\"].objects.all()\n else:\n changed_queryset = kwargs[\"model\"].objects.filter(pk__in=kwargs[\"pk_set\"])\n\n if action in [\"post_add\"]:\n log_entry = LogEntry.objects.log_m2m_changes(\n changed_queryset,\n kwargs[\"instance\"],\n \"add\",\n field_name,\n )\n m2m_log_created.send(\n sender=LogEntry,\n instance=kwargs[\"instance\"],\n changed_queryset=changed_queryset,\n action=action,\n field_name=field_name,\n log_instance=log_entry,\n )\n elif action in [\"post_remove\", \"post_clear\"]:\n log_entry = LogEntry.objects.log_m2m_changes(\n changed_queryset,\n kwargs[\"instance\"],\n \"delete\",\n field_name,\n )\n m2m_log_created.send(\n sender=LogEntry,\n instance=kwargs[\"instance\"],\n changed_queryset=changed_queryset,\n action=action,\n field_name=field_name,\n log_instance=log_entry,\n )",
"def set_version_db(apps, schema_editor):\n Version = apps.get_model(\"reversion\", \"Version\")\n content_types = Version.objects.values_list(\"content_type\", flat=True).distinct()\n for content_type in content_types:\n model_class = content_type.model_class()\n db = router.db_for_write(model_class)\n Version.objects.filter(content_type=content_type).update(db=db)",
"def migrate(ctx):\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"migrate\")",
"def _revisions(self, downgrade=False):\n\n revisions = list(self.script_dir.walk_revisions(\"base\", \"head\"))\n\n if not downgrade:\n revisions = list(reversed(revisions))\n\n if not revisions:\n raise exc.DbMigrationError('There is no suitable migrations.')\n\n for rev in revisions:\n if downgrade:\n # Destination, current\n yield rev.down_revision, rev.revision\n else:\n # Destination, current\n yield rev.revision, rev.down_revision",
"def post_migrate_function(obj):\n obj.a = obj.a + u\"-modified\"\n return True",
"def PostModelVersionsDeployment(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def do_list_versions(**kwargs):\n mle = MLEngineHook()\n model_name = kwargs['dag_run'].conf.get('model_name')\n model_versions = mle.list_versions(PROJECT, model_name)\n kwargs['ti'].xcom_push(key='model_versions', value=model_versions)",
"def ready(self):\n post_migrate.connect(create_default_options)",
"def makemigration(self):\n template = os.path.join(os.path.dirname(__file__),\n 'migration_template.py')\n ver = self.latest(quiet=True) + 1\n destination = os.path.abspath(self.config.get('migrate', 'location'))\n if not os.path.exists(destination):\n os.makedirs(destination)\n fname = 'version_{}.py'.format(ver)\n shutil.copyfile(template, os.path.join(destination, fname))\n self.logger.info('Migration \\'{}\\' created'.format(fname))\n self.latest()"
]
| [
"0.7168965",
"0.68205076",
"0.60526645",
"0.57672065",
"0.5748166",
"0.5628349",
"0.5577539",
"0.55665785",
"0.5546331",
"0.5457541",
"0.5434504",
"0.541682",
"0.5385622",
"0.5384686",
"0.5284687",
"0.5222851",
"0.51773095",
"0.5169075",
"0.51123387",
"0.51044285",
"0.5022356",
"0.49949622",
"0.49928752",
"0.49754527",
"0.49434066",
"0.49277675",
"0.49247473",
"0.4915185",
"0.49035186",
"0.48895043"
]
| 0.703361 | 1 |
bsort simple sorting algorithm that uses any comparison function seq a list to be sorted cmp a function for comparing two elements of seq | def bsort(seq, cmp):
sorted = False # assume the seq is not sorted to start with
while not sorted:
sorted = True # assume it's already sorted correctly
for index, value in enumerate(seq): # for every element in seq
if index > 0: # past the first..
if not cmp(seq[index-1], value): # if this element is out of order
sorted = False # then the list is not sorted yet
seq[index-1], seq[index] = seq[index], seq[index-1] # and swap it | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sort(values, comp_func):\n\n \"\"\"\n 昇順\n comp_func = lambda a, b: a if a<b else b\n\n 降順\n comp_func = lambda a, b: a if a>b else b\n\n 偶数昇順、奇数昇順\n comp_func = lambda a, b: a if \\\n a % 2 == 0 and b % 2 == 1 else \\\n (b if b%2==0 and a%2==1 else (a if a<b else b))\n \"\"\"\n\n num = len(values)\n for i in range(0, num):\n tmp_value = values[i]\n tmp_index = i\n for j in range(i, num):\n if tmp_value != comp_func(values[j], tmp_value):\n tmp_index = j\n tmp_value = comp_func(values[j], tmp_value)\n values[tmp_index] = values[i]\n values[i] = tmp_value\n \"\"\"\n values.remove(tmp_value)\n values.insert(0, tmp_value)\n new_list.insert(0, tmp_value)\n \"\"\"\n print(values)\n\n return",
"def cmp(a, b):\n return (a > b) - (a < b)",
"def custom_sort(arr):\n pass",
"def mysort(lst: List[T], compare: Callable[[T, T], int]) -> List[T]:\n for i in range(1, len(lst)): #loops through each element starting at the second one\n for j in range(i, 0, -1): #loops through each element coming before i starting at i and going backwards\n if compare(lst[j], lst[j-1]) < 0: #checks to see if the previous element is smaller than the current (by saying <0 we keep the sort stable as well)\n lst[j], lst[j-1] = lst[j-1], lst[j] #if they are, we switch them\n else:\n break #if they are not, we know that the element is in its proper place\n return lst",
"def buble_sort(l):\r\n for i in range(len(l)):\r\n for j in range(i+1, len(l)):\r\n if (l[j-1]>l[j]):\r\n l[j-1], l[j] = l[j], l[j-1]",
"def keysort(*args, **kwargs): # real signature unknown\n pass",
"def bubbleSort(list):",
"def test_sort_sorted():\n assert bubble_sort([1, 2, 3, 4, 5, 6]) == [1, 2, 3, 4, 5, 6]",
"def sort_1(l):\n pass",
"def test_bubblesort_sorts_list():\n from bubblesort import bubblesort\n unsorted_list = [6, 4, 7, 9, 0, 2]\n assert bubblesort(unsorted_list) == [0, 2, 4, 6, 7, 9]",
"def mysort(lst: List[T], compare: Callable[[T, T], int]) -> List[T]:\n temp = lst\n switched = True\n while switched:\n switched = False\n for i in range(len(temp) - 1):\n if compare(temp[i], temp[i + 1]) == 1:\n temp[i], temp[i + 1] = temp[i + 1], temp[i]\n switched = True\n\n return temp",
"def test_sort_all_equal():\n assert bubble_sort([1, 1, 1, 3, 4, 10, 2, 3]) == [1, 1, 1, 2, 3, 3, 4, 10]",
"def custom_sort(x, y):\n if x[1] == y[1]:\n return 1 if x[0] > y[0] else -1\n return cmp(y[1], x[1])",
"def test_sort_sorted():\n sorted_data = [1, 2, 3, 4, 5]\n sorted_list = bubble_sort(sorted_data)\n\n for small, large in zip(sorted_list[:-1], sorted_list[1:]):\n assert small <= large",
"def sort(smth_to_sort, sort_func, reverse=False, cmp=None):\n if sort_func is not radix_sort:\n return sort_func(\n smth_to_sort,\n reverse=reverse,\n cmp=eval(cmp) if cmp is not None else cmp)\n else:\n try:\n return radix_sort(smth_to_sort, reverse=reverse)\n except TypeError as e:\n print(\"Error: only integer and float numbers can be sorted using radix sort.\")",
"def sorter(a, b):\n ret = 0\n if isinstance(a, list):\n for key in args.sort:\n if key >= len(a):\n ret = -1\n break\n elif key >= len(b):\n ret = 1\n break\n elif a[key] != b[key]:\n ret = cmp(to_numeric(a[key]), to_numeric(b[key]))\n break\n else:\n for key in args.sort:\n if (key not in a) and (key in b):\n ret = -1\n break\n elif (key in a) and (key not in b):\n ret = 1\n break\n elif (key in a) and (key in b) and (a[key] != b[key]):\n ret = cmp(to_numeric(a[key]), to_numeric(b[key]))\n break\n return ret",
"def test_two_ordered():\n run_mergesort([1, 2], [1, 2])",
"def sort_by_another(to_sort, basis):\n return [x for (y, x) in sorted(zip(basis, to_sort), key=lambda pair: pair[0])]",
"def bubble_sort(first):\n # iterate len(lst) times\n for i in range(len(first)):\n\n # integrate [len(lst) - i - 1] times\n for j in range(len(first) - i - 1):\n\n # sort two number if not sorted\n if first[j] > first[j + 1]:\n # swap element at j with element at j + 1\n # and element ad j + 1 with element j\n first[j], first[j + 1] = first[j + 1], first[j]",
"def pyargsort(seq,cmp=None,key=lambda x:x):\n return sorted(list(range(len(seq))),key=lambda x:key(seq.__getitem__(x)),cmp=None)",
"def buble_sort(lst):\n lst_sorted = copy.copy(lst)\n for i in range(len(lst_sorted)):\n for j in range(len(lst_sorted)):\n if j == len(lst_sorted) - 1:\n continue\n if lst_sorted[j][1] > lst_sorted[j + 1][1]:\n lst_sorted[j], lst_sorted[j+1] = lst_sorted[j+1], lst_sorted[j]\n\n return lst_sorted",
"def test_bubble_sort():\n\n assert myFunction.bubble_sort(np.array([8, 34, 2, 71, 14, 30])) == array([2, 8, 14, 30, 34, 71])\n assert myFunction.bubble_sort(np.array([10, 1, 12, 9, 2])) ==array([1, 2, 9, 10, 12])\n assert myFunction.bubble_sort(np.array([77, 3, 11, 5])) == array([3, 5, 11, 77])\n\n\n\n\n\n\n\n\n\n\n assert myFunction.quick_sort([1,3,66,7,5,23,17])\n assert myFunction.quick_sort([88,26,16,75,45])\n assert myFunction.quick_sort([-9, 7, 4, 1, -1, -99])",
"def test_bubblesort_on_long_list():\n from bubblesort import bubblesort\n unsorted_list = []\n for i in range(100):\n unsorted_list.append(random.randint(0, 1000))\n\n sorted_list = bubblesort(unsorted_list)\n\n assert sorted_list == sorted(unsorted_list)",
"def partial_sort(seq):\n for i in range(1, int(0.75 * len(seq))):\n key = seq[i]\n low, up = 0, i\n while up > low:\n middle = (low + up) // 2\n if seq[middle] < key:\n low = middle + 1 \n else:\n up = middle\n seq[:] = seq[:low] + [key] + seq[low:i] + seq[i + 1:]",
"def merge_sort(items):\r\n # TODO: Check if list is so small it's already sorted (base case)\r\n # TODO: Split items list into approximately equal halves\r\n # TODO: Sort each half by recursively calling merge sort\r\n # TODO: Merge sorted halves into one list in sorted order\r",
"def uasort(item, func):\n return sort(item, func)",
"def heap_sort(list):\n pass",
"def python_sort(a_list):\n \n start_time = time.time()\n\n a_list.sort()\n\n end_time = time.time()\n\n run_time = end_time - start_time\n\n return (run_time, a_list)",
"def BubbleSort(ulist):\n done = 0 #This variable is used to break the loop when sorting is done\n while not done:\n done = 1\n for i in range(len(ulist) - 1):\n if ulist[i] > ulist[i+1]:\n ulist[i], ulist[i+1] = ulist[i+1], ulist[i]\n done = 0",
"def bubble_sort(lst: list) -> None:\n n = len(lst)\n if n == 0 or n == 1:\n return\n for boundary in range(n, 1, -1):\n swapped = False\n for i in range(1, boundary):\n if lst[i - 1] > lst[i]:\n swap(lst, i - 1, i)\n swapped = True\n if not swapped: # if list is now sorted\n return"
]
| [
"0.69906473",
"0.6801239",
"0.6798643",
"0.669693",
"0.6635075",
"0.6605437",
"0.6601416",
"0.65782595",
"0.6566951",
"0.65586644",
"0.64441836",
"0.6396774",
"0.6370208",
"0.63533443",
"0.63462526",
"0.63443625",
"0.6331016",
"0.6276416",
"0.6274445",
"0.62692845",
"0.62538075",
"0.6249638",
"0.62480354",
"0.6201101",
"0.6196432",
"0.61877507",
"0.6184269",
"0.61695147",
"0.6135082",
"0.6132019"
]
| 0.77790433 | 0 |
Interprets the input line as command line arguments to the ``law`` executable and runs it in a subprocess using bash. Output and error streams are piped to the cell. | def law(self, line):
line = line.strip()
if not line:
logger.error(r"the command passed to %law must not be empty")
return
# build the full command
cmd = "law " + line
if line_cmd:
cmd = "{} && {}".format(line_cmd, cmd)
logger.debug("running law command '{}'".format(cmd))
# run it
return self._run_bash(cmd) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def main():\n\targs = sys.argv[1:]\n\t# If stdin is not empty (being piped to)\n\tif not sys.stdin.isatty():\n\t\targs += sys.stdin.readlines()\n\tcommand = Main()\n\tcatch = lnk.errors.Catch(1)\n\tcatch.catch(command.main, args, standalone_mode=False)",
"def ilaw(self, line):\n line = line.strip()\n if not line:\n logger.error(r\"the command passed to %ilaw must not be empty\")\n return\n\n argv = shlex.split(line)\n prog = argv.pop(0)\n\n # prog must be a valid law cli prog\n if prog not in law.cli.cli.progs:\n raise ValueError(\"'{}' is not a valid law cli program\".format(prog))\n\n # forward to the actual prog, run special case\n try:\n # call the line_fn when set\n if callable(line_fn):\n logger.info(\"calling line function '{}'\".format(line_fn.__name__))\n line_fn(line)\n\n if prog == \"run\":\n # perform the run call interactively\n return law_run(argv)\n else:\n # forward all other progs to the cli interface\n return law.cli.cli.run([prog] + argv)\n except SystemExit as e:\n # reraise when the exit code is non-zero\n if e.code:\n raise",
"def _invoke_blast(cline):\n import subprocess, sys\n blast_cmd = cline.program_name\n if not os.path.exists(blast_cmd):\n raise ValueError(\"BLAST executable does not exist at %s\" % blast_cmd)\n #We don't need to supply any piped input, but we setup the\n #standard input pipe anyway as a work around for a python\n #bug if this is called from a Windows GUI program. For\n #details, see http://bugs.python.org/issue1124861\n blast_process = subprocess.Popen(str(cline),\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n universal_newlines=True,\n shell=(sys.platform!=\"win32\"))\n blast_process.stdin.close()\n return blast_process.stdout, blast_process.stderr",
"def shell():\n parser = argparse.ArgumentParser(\n \n description='pyrpipe diagnostic utility\\nGenerate shell script.',\n \n usage='''pyrpipe_diagnostic report [<args>] <logfile>\n \n ''') \n parser.add_argument('-o', help='out file \\ndefault: same as input logfile',action=\"store\")\n parser.add_argument('-c',help='Dump command options [(a)ll,fa(i)l,(p)ass]\\ndefault: a',default='a',action=\"store\")\n parser.add_argument('-v',help='verbose',action=\"store_true\")\n parser.add_argument('-f',help='Filter by programs. Provide a comma-separated list e.g., prefetch,STAR,bowtie2 \\ndefault None')\n parser.add_argument('logfile', help='The log file generated by pyrpipe',action=\"store\")\n args = parser.parse_args(sys.argv[2:])\n \n logFile=args.logfile \n #parse args\n vFlag=args.v\n if vFlag:\n print(\"Generating report\")\n outFile=\"\"\n if args.o is None:\n outFile=pu.get_file_basename(logFile)\n else:\n outFile=args.o\n outFile+='.sh'\n \n filters=[]\n if args.f is not None:\n filters= args.f.split(',')\n \n reports.generateBashScript(logFile,outFile,filters,args.c)",
"def run_lammps(lammps_executable, input_file, output_file):\n # run lammps\n lammps_command = f\"{lammps_executable} -in {input_file} \"\n print(\"run command:\", lammps_command)\n with open(\"tmp2False.out\", \"w+\") as fout:\n subprocess.call(lammps_command.split(), stdout=fout)",
"def _run(cmd, **kwargs):\n kwargs.setdefault('combine_stdout_stderr', True)\n kwargs.setdefault('capture_output', True)\n kwargs.setdefault('check', False)\n # Make sure hooks run with stdin disconnected to avoid accidentally\n # interactive tools causing pauses.\n kwargs.setdefault('input', '')\n return rh.utils.run(cmd, **kwargs)",
"def main():\n # Define Parser object and add to Toil\n parser = argparse.ArgumentParser(description=main.__doc__, formatter_class=argparse.RawTextHelpFormatter)\n subparsers = parser.add_subparsers(dest='command')\n # Generate subparsers\n subparsers.add_parser('generate-config', help='Generates an editable config in the current working directory.')\n subparsers.add_parser('generate-manifest', help='Generates an editable manifest in the current working directory.')\n subparsers.add_parser('generate', help='Generates a config and manifest in the current working directory.')\n # Run subparser\n parser_run = subparsers.add_parser('run', help='Runs the BWA alignment pipeline')\n group = parser_run.add_mutually_exclusive_group()\n parser_run.add_argument('--config', default='config-toil-bwa.yaml', type=str,\n help='Path to the (filled in) config file, generated with \"generate-config\".')\n group.add_argument('--manifest', default='manifest-toil-bwa.tsv', type=str,\n help='Path to the (filled in) manifest file, generated with \"generate-manifest\". '\n '\\nDefault value: \"%(default)s\".')\n group.add_argument('--sample', nargs='+', action=required_length(2, 3),\n help='Space delimited sample UUID and fastq files in the format: uuid url1 [url2].')\n # Print docstring help if no arguments provided\n if len(sys.argv) == 1:\n parser.print_help()\n sys.exit(1)\n Job.Runner.addToilOptions(parser_run)\n args = parser.parse_args()\n # Parse subparsers related to generation of config and manifest\n cwd = os.getcwd()\n if args.command == 'generate-config' or args.command == 'generate':\n generate_file(os.path.join(cwd, 'config-toil-bwa.yaml'), generate_config)\n if args.command == 'generate-manifest' or args.command == 'generate':\n generate_file(os.path.join(cwd, 'manifest-toil-bwa.tsv'), generate_manifest)\n # Pipeline execution\n elif args.command == 'run':\n require(os.path.exists(args.config), '{} not found. Please run generate-config'.format(args.config))\n if not args.sample:\n args.sample = None\n require(os.path.exists(args.manifest), '{} not found and no sample provided. '\n 'Please run \"generate-manifest\"'.format(args.manifest))\n # Parse config\n parsed_config = {x.replace('-', '_'): y for x, y in yaml.load(open(args.config).read()).iteritems()}\n config = argparse.Namespace(**parsed_config)\n config.maxCores = int(args.maxCores) if args.maxCores else sys.maxint\n samples = [args.sample[0], args.sample[1:]] if args.sample else parse_manifest(args.manifest)\n # Sanity checks\n require(config.ref, 'Missing URL for reference file: {}'.format(config.ref))\n require(config.output_dir, 'No output location specified: {}'.format(config.output_dir))\n # Launch Pipeline\n Job.Runner.startToil(Job.wrapJobFn(download_reference_files, config, samples), args)",
"def exe(self, inp):\n try:\n spl = shlex.split(inp)\n except:\n self.err_print('Mismatched quotations.')\n self.command_event.set()\n return\n\n if not spl:\n self.err_print(\"\")\n elif spl[0] in self.commands:\n self.err_print(\"\")\n self.commands[spl[0]](spl[1:])\n else:\n self.err_print('Invalid command: ' + spl[0])\n\n self.command_event.set()",
"def main():\n\n BASIC.run(PROGRAM)",
"def run(args, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):\n\n parser = argparse.ArgumentParser(description = 'Validate a HXL dataset.')\n parser.add_argument(\n 'infile',\n help='HXL file to read (if omitted, use standard input).',\n nargs='?',\n type=argparse.FileType('r'),\n default=stdin\n )\n parser.add_argument(\n 'outfile',\n help='HXL file to write (if omitted, use standard output).',\n nargs='?',\n type=argparse.FileType('w'),\n default=stdout\n )\n parser.add_argument(\n '-s',\n '--schema',\n help='Schema file for validating the HXL dataset (if omitted, use the default core schema).',\n metavar='schema',\n type=argparse.FileType('r'),\n default=None\n )\n parser.add_argument(\n '-a',\n '--all',\n help='Include all rows in the output, including those without errors',\n action='store_const',\n const=True,\n default=False\n )\n args = parser.parse_args(args)\n\n source = HXLReader(args.infile)\n if args.schema:\n schema = readHXLSchema(HXLReader(args.schema), baseDir=os.path.dirname(args.schema.name))\n else:\n schema = readHXLSchema()\n filter = HXLValidateFilter(source, schema, args.all)\n writeHXL(args.outfile, filter)",
"def __init__(self, args, shell, userns):\n self._waiting_steps = 0\n self._running_steps = 0\n self.shebang = (\"#!{0} \\n\".format(shell)).encode('utf8', 'replace')\n # Cell output\n self.out, self.err = None, None\n self._userns = userns",
"def main() -> None:\n args = docopt.docopt(__doc__, version=__version__)\n # Set up logging\n log_level = args[\"--log-level\"]\n try:\n logging.basicConfig(\n format=\"%(asctime)-15s %(levelname)s %(message)s\", level=log_level.upper()\n )\n except ValueError:\n logging.critical(\n '\"%s\" is not a valid logging level. Possible values '\n \"are debug, info, warning, and error.\",\n log_level,\n )\n sys.exit(1)\n\n # see if the user is providing any external hash blob data\n hashblob = None\n if args[\"--stdin\"] is True:\n logging.debug(\"Reading hashes from stdin\")\n hashblob = sys.stdin.read()\n elif args[\"--file\"] is not None:\n logging.debug(\"Reading hashes from %s\", args[\"--file\"])\n with open(args[\"--file\"]) as f:\n hashblob = f.read()\n\n exit_code = ioc_scanner.ioc_search(hashblob, args[\"--target\"])\n\n # Stop logging and clean up\n logging.shutdown()\n\n if exit_code:\n sys.exit(exit_code)",
"def execute_tool(description, *args):\n command_line = list(args) + files_and_directories\n click.echo(f\"{description}: {' '.join(command_line)}\")\n rv = call(command_line)\n if rv != 0:\n exit(rv)",
"def run(self, stdout=None, stderr=None):",
"def cli(args): # noqa; pylint: disable=unused-argument",
"def cli() -> None:",
"def cli() -> None:",
"def cli():\n print_help()\n while True:\n formula = input('Please enter formula (or type \"exit\"):\\n')\n if formula == \"exit\":\n return\n elif formula == \"help\":\n print_help()\n break\n try:\n print(\";\" + \"=\"*80)\n print(check_formula_and_create_assembly_code(formula))\n print(\";\" + \"=\"*80)\n except Exception as e:\n print(bcolors.FAIL, e, bcolors.ENDC)",
"def run(self) :\n# print \"evaluating with laban\"\n # currently, labanx reads from a preset file\n os.system('labanx '+str(self.rank)+\" \"+self.input+\" \"+self.output)",
"def main(argv: Sequence[Text]) -> None:\n\n\n print(\"TODO\")",
"def run(argv: Optional[list[str]] = None) -> tuple[str, str]:\n argv = argv if argv is not None else []\n\n with PipeStream() as stdin:\n stdin.writer.close()\n\n with PipeStream() as stdout:\n with PipeStream() as stderr:\n gada.main(\n [\"gada\"] + argv,\n stdin=stdin.reader,\n stdout=stdout.writer,\n stderr=stderr.writer,\n )\n stdout.writer.close()\n stderr.writer.close()\n return (\n stdout.reader.read().decode(errors=\"ignore\"),\n stderr.reader.read().decode(errors=\"ignore\"),\n )",
"def main(lims, args):\n try:\n currentStep = Process(lims, id=args.pid)\n\n if \"MinION QC\" in currentStep.type.name:\n\n minknow_samplesheet_file = minknow_samplesheet_for_qc(currentStep)\n upload_file(\n minknow_samplesheet_file,\n \"ONT sample sheet\",\n currentStep,\n lims,\n )\n shutil.copyfile(\n minknow_samplesheet_file,\n f\"/srv/ngi-nas-ns/samplesheets/nanopore/{dt.now().year}/{minknow_samplesheet_file}\",\n )\n os.remove(minknow_samplesheet_file)\n\n anglerfish_samplesheet_file = anglerfish_samplesheet(currentStep)\n upload_file(\n anglerfish_samplesheet_file,\n \"Anglerfish sample sheet\",\n currentStep,\n lims,\n )\n shutil.copyfile(\n anglerfish_samplesheet_file,\n f\"/srv/ngi-nas-ns/samplesheets/anglerfish/{dt.now().year}/{anglerfish_samplesheet_file}\",\n )\n os.remove(anglerfish_samplesheet_file)\n\n else:\n minknow_samplesheet_file = minknow_samplesheet_default(currentStep)\n upload_file(minknow_samplesheet_file, \"ONT sample sheet\", currentStep, lims)\n shutil.copyfile(\n minknow_samplesheet_file,\n f\"/srv/ngi-nas-ns/samplesheets/nanopore/{dt.now().year}/{minknow_samplesheet_file}\",\n )\n os.remove(minknow_samplesheet_file)\n\n except AssertionError as e:\n sys.stderr.write(str(e))\n sys.exit(2)",
"def _call(self, argstr, need_stderr, dry_run, **kwargs):",
"def command(arguments):\n os.system(\"barrnap --kingdom {} {} > {}\".format(arguments.kingdom, arguments.input, arguments.output))",
"def run_pipelining_example(py_args):\n cwd = os.path.dirname(os.path.abspath(inspect.stack()[0][1]))\n cmd = ['python', 'pipelining.py']\n args = [str(item) for sublist in py_args.items()\n for item in sublist if item != \"\"]\n cmd.extend(args)\n try:\n out = subprocess.check_output(\n cmd, cwd=cwd, stderr=subprocess.PIPE, universal_newlines=True)\n except subprocess.CalledProcessError as e:\n print(f\"TEST FAILED\")\n print(f\"stdout={e.stdout.decode('utf-8',errors='ignore')}\")\n print(f\"stderr={e.stderr.decode('utf-8',errors='ignore')}\")\n raise\n return out",
"def run(*args, **kwargs):\n kwargs[\"check\"] = True\n print(\"+\", \" \".join(args[0]))\n return subprocess.run(*args, **kwargs)",
"def executable():\n\n if len(sys.argv) == 1:\n arguments.get_help()\n sys.exit('\\nGive me something to do and I will do it\\n')\n else:\n # Parse the Arguments that have been provided\n args = arguments.get_args()\n\n # Load The System Logger\n log = logger.load_in(log_level=args.get('log_level', 'info'))\n log.debug('Used Arguments %s', args)\n const(log_method=log)\n\n # Begin Work\n start(set_args=args)",
"def test_execute_job_with_inline_input_values(self):\n cwl = {\n \"cwlVersion\": \"v1.0\",\n \"class\": \"CommandLineTool\",\n \"baseCommand\": [\"python3\", \"script.py\"],\n \"inputs\": {\n \"stringInput\": \"string\",\n \"integerInput\": \"int\",\n \"doubleInput\": \"float\",\n \"stringArrayInput\": {\"type\": {\"type\": \"array\", \"items\": \"string\"}},\n \"integerArrayInput\": {\"type\": {\"type\": \"array\", \"items\": \"int\"}},\n \"floatArrayInput\": {\"type\": {\"type\": \"array\", \"items\": \"float\"}},\n \"measureStringInput\": \"string\",\n \"measureIntegerInput\": \"int\",\n \"measureFloatInput\": \"float\",\n \"measureFileInput\": \"File\"\n },\n \"requirements\": {\n CWL_REQUIREMENT_APP_DOCKER: {\n \"dockerPull\": \"python:3.7-alpine\"\n },\n CWL_REQUIREMENT_INIT_WORKDIR: {\n \"listing\": [\n {\n \"entryname\": \"script.py\",\n \"entry\": cleandoc(\"\"\"\n import json\n import os\n import ast\n input = $(inputs)\n try:\n for key, value in input.items():\n if isinstance(value, dict):\n path_ = value.get(\"path\")\n if path_ and os.path.exists(path_):\n with open (path_, \"r\") as file_:\n file_data = file_.read()\n input[key] = ast.literal_eval(file_data.upper())\n json.dump(input, open(\"./tmp.txt\", \"w\"))\n except Exception as exc:\n print(exc)\n raise\n \"\"\")\n }\n ]\n }\n },\n \"outputs\": [{\"id\": \"output_test\", \"type\": \"File\", \"outputBinding\": {\"glob\": \"tmp.txt\"}}],\n }\n body = {\n \"processDescription\": {\n \"process\": {\n \"id\": self._testMethodName,\n \"title\": \"some title\",\n \"abstract\": \"this is a test\",\n },\n },\n \"deploymentProfileName\": \"http://www.opengis.net/profiles/eoc/wpsApplication\",\n \"executionUnit\": [{\"unit\": cwl}],\n }\n try:\n desc, _ = self.deploy_process(body, describe_schema=\"OLD\")\n except colander.Invalid:\n self.fail(\"Test\")\n\n assert desc[\"process\"] is not None\n\n with contextlib.ExitStack() as stack_exec:\n for mock_exec in mocked_execute_process():\n stack_exec.enter_context(mock_exec)\n tmp_file = stack_exec.enter_context(tempfile.NamedTemporaryFile(mode=\"w\", suffix=\".json\")) # noqa\n tmp_file.write(json.dumps({\"value\": {\"ref\": 1, \"measurement\": 10.3, \"uom\": \"m\"}}))\n tmp_file.seek(0)\n\n exec_body = {\n \"mode\": EXECUTE_MODE_ASYNC,\n \"response\": EXECUTE_RESPONSE_DOCUMENT,\n \"inputs\": {\n \"stringInput\": \"string_test\",\n \"integerInput\": 10,\n \"doubleInput\": 3.14159,\n \"stringArrayInput\": [\"1\", \"2\", \"3\", \"4\", \"5\", \"6\"],\n \"integerArrayInput\": [1, 2, 3, 4, 5, 6],\n \"floatArrayInput\": [1.45, 2.65, 3.5322, 4.86, 5.57, 6.02],\n \"measureStringInput\": {\n \"value\": \"this is a test\"\n },\n \"measureIntegerInput\": {\n \"value\": 45\n },\n \"measureFloatInput\": {\n \"value\": 10.2\n },\n \"measureFileInput\": {\n \"href\": \"file://{}\".format(tmp_file.name)\n }\n },\n \"outputs\": [\n {\"id\": \"output_test\", \"type\": \"File\"},\n ]\n }\n\n proc_url = \"/processes/{}/jobs\".format(self._testMethodName)\n resp = mocked_sub_requests(self.app, \"post_json\", proc_url, timeout=5,\n data=exec_body, headers=self.json_headers, only_local=True)\n assert resp.status_code in [200, 201], \"Failed with: [{}]\\nReason:\\n{}\".format(resp.status_code, resp.json)\n status_url = resp.json.get(\"location\")\n\n results = self.monitor_job(status_url)\n\n job_output_path = results.get(\"output_test\")[\"href\"].split(self.settings[\"weaver.wps_output_path\"])[-1]\n tmp_file = \"{}/{}\".format(self.settings[\"weaver.wps_output_dir\"], job_output_path)\n\n try:\n with 
open(tmp_file, \"r\") as f:\n processed_values = json.load(f)\n except FileNotFoundError:\n self.fail(\"Output file [{}] was not found where it was expected to resume test\".format(tmp_file))\n except Exception as exception:\n self.fail(\"An error occurred during the reading of the file: {}\".format(exception))\n assert processed_values[\"stringInput\"] == \"string_test\"\n assert processed_values[\"integerInput\"] == 10\n assert processed_values[\"doubleInput\"] == 3.14159\n assert processed_values[\"stringArrayInput\"] == [\"1\", \"2\", \"3\", \"4\", \"5\", \"6\"]\n assert processed_values[\"integerArrayInput\"] == [1, 2, 3, 4, 5, 6]\n assert processed_values[\"floatArrayInput\"] == [1.45, 2.65, 3.5322, 4.86, 5.57, 6.02]\n assert processed_values[\"measureStringInput\"] == \"this is a test\"\n assert processed_values[\"measureIntegerInput\"] == 45\n assert processed_values[\"measureFloatInput\"] == 10.2\n assert processed_values[\"measureFileInput\"] == {\"VALUE\": {\"REF\": 1, \"MEASUREMENT\": 10.3, \"UOM\": \"M\"}}",
"def start_comp(command_line, log='', env='', foreground='no', no_stdin = 'yes'):\n proc_title_argv = command_line.split()\n\n if proc_title_argv[0] == 'taskset':\n real_program = proc_title_argv[3]\n else:\n real_program = proc_title_argv[0]\n\n # first test shared library link\n try:\n can_find_all_shared_libs(real_program)\n except IOError, e:\n print e;\n raise\n\n my_stdout = None\n my_stderr = None\n my_stdin = None\n if (no_stdin == 'yes'):\n my_stdin = open('/dev/null', 'r')\n\n if log:\n dir = os.path.dirname(log)\n if dir:\n try:\n exist_ok_makedirs(dir, 0777)\n except OSError, (errno, strerror):\n sys.stderr.write('%s: %s\\n' % (dir, strerror))\n raise\n try:\n log_fd = open(log, \"w\")\n except IOError, (errno, strerror):\n print 'cannot open %s: %s' % (log, strerror)\n raise\n else:\n my_stdout = log_fd\n my_stderr = subprocess.STDOUT\n\n #command = [ path ]\n #if options:\n # command += options\n\n my_env = {}\n if env != '':\n env_list = env.split('\\t')\n env_val = '%s:%s' % (env_list[1], env_list[2])\n my_env[env_list[0]] = env_val\n\n try:\n p = subprocess.Popen(proc_title_argv, shell = False,\n # stdin = subprocess.PIPE,\\\n #stdin = None,\n stdin = my_stdin,\n stdout = my_stdout,\n stderr = my_stderr,#)\n env = my_env)\n except OSError, (errno, strerror):\n #sys.exit('cannot execute %s: %s' % (path, strerror))\n print 'cannot execute %s: %s' % (real_program, strerror)\n raise\n except ValueError, strerror:\n #sys.exit('subprocess.Popen value error: %s' %strerror)\n print 'subprocess.Popen value error: %s' % (strerror)\n raise\n\n #proc_name = os.path.basename(path)\n if proc_title_argv[0] == 'taskset':\n try:\n proc_name = os.path.basename(proc_title_argv[3])\n except IndexError,e:\n print \"path: \", path\n sys.exit(e)\n else:\n proc_name = os.path.basename(proc_title_argv[0])\n\n max_retry = 20\n retry = 0\n while True:\n if retry == max_retry:\n sys.exit('cannot exec. %s' % proc_name)\n\n #if kill_proc_exact.lookup_process_exact(proc):\n if get_pids_exact(proc_name):\n break;\n else:\n time.sleep(0.1)\n retry += 1\n\n if foreground == 'yes':\n try:\n p.wait()\n except KeyboardInterrupt, strerror:\n pass",
"def main(config):\n command = config.workflow_utils.command\n try:\n subprocess.run(command, shell=True, check=True)\n except AttributeError as exp:\n # add in some backward compatibility for py2.7\n subprocess.check_call(command, shell=True)"
]
| [
"0.611102",
"0.60783684",
"0.57162184",
"0.5651651",
"0.54892546",
"0.5464681",
"0.5447054",
"0.54450333",
"0.53893256",
"0.5328231",
"0.53135204",
"0.529671",
"0.5282843",
"0.5277602",
"0.52730775",
"0.52728844",
"0.52728844",
"0.52692133",
"0.52558976",
"0.5254265",
"0.5190153",
"0.5185751",
"0.5169298",
"0.51643294",
"0.5144464",
"0.51431113",
"0.51125795",
"0.5111849",
"0.5103159",
"0.50958014"
]
| 0.6947543 | 0 |
Interprets the input line as command line arguments to the ``law`` executable, but rather than invoking it in a subprocess, it is evaluated interactively (or inline, thus the i) within the running process. This is especially useful for programmatically running tasks that were defined e.g. in the current notebook. | def ilaw(self, line):
line = line.strip()
if not line:
logger.error(r"the command passed to %ilaw must not be empty")
return
argv = shlex.split(line)
prog = argv.pop(0)
# prog must be a valid law cli prog
if prog not in law.cli.cli.progs:
raise ValueError("'{}' is not a valid law cli program".format(prog))
# forward to the actual prog, run special case
try:
# call the line_fn when set
if callable(line_fn):
logger.info("calling line function '{}'".format(line_fn.__name__))
line_fn(line)
if prog == "run":
# perform the run call interactively
return law_run(argv)
else:
# forward all other progs to the cli interface
return law.cli.cli.run([prog] + argv)
except SystemExit as e:
# reraise when the exit code is non-zero
if e.code:
raise | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def law(self, line):\n line = line.strip()\n if not line:\n logger.error(r\"the command passed to %law must not be empty\")\n return\n\n # build the full command\n cmd = \"law \" + line\n if line_cmd:\n cmd = \"{} && {}\".format(line_cmd, cmd)\n logger.debug(\"running law command '{}'\".format(cmd))\n\n # run it\n return self._run_bash(cmd)",
"def main(argv: Sequence[Text]) -> None:\n\n\n print(\"TODO\")",
"def main(self):\n cmd = \"self.%s(sys.stdin)\" % sys.argv[1]\n exec(cmd)",
"def process_command_line_input():\n\n input_args = sys.argv\n if input_args[0].find('ipython') >= 0:\n input_args = list()\n else:\n input_args.pop(0)\n\n return input_args",
"def process_command_line():\n\n # Add the command lien arguments\n parser = argparse.ArgumentParser(description=\"test autocontext\", formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n # Arguments\n parser.add_argument(\"--ilastik\", type=str, required=True,\n help=\"path to the file run_ilastik.sh\")\n\n parser.add_argument(\"--train\", type=str,\n help=\"path to the ilastik project that will be used for training\")\n\n parser.add_argument(\"--batch_predict\", type=str,\n help=\"path of the cache folder of a previously trained autocontext that will be used for batch \"\n \"prediction\")",
"def cli(**_) -> None:\n pass",
"def query_cmdline():",
"def hotfix_deepobs_argparse():\n sys.argv = sys.argv[:1]",
"def cli(args): # noqa; pylint: disable=unused-argument",
"def exe(self, inp):\n try:\n spl = shlex.split(inp)\n except:\n self.err_print('Mismatched quotations.')\n self.command_event.set()\n return\n\n if not spl:\n self.err_print(\"\")\n elif spl[0] in self.commands:\n self.err_print(\"\")\n self.commands[spl[0]](spl[1:])\n else:\n self.err_print('Invalid command: ' + spl[0])\n\n self.command_event.set()",
"def external_cmd(cmd):\n cache_argv = sys.argv\n sys.argv = cmd\n args = do_inputs()\n main(args)\n sys.argv = cache_argv",
"def cli(_):\n pass",
"def cli(_):\n pass",
"def main(argv=None):\n # __package__ should be `development.main`\n run_example_local('examples.run_rl', argv)",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():"
]
| [
"0.6847528",
"0.571055",
"0.55922145",
"0.5512899",
"0.55023026",
"0.54896945",
"0.54449004",
"0.54421085",
"0.54222596",
"0.5415045",
"0.53889924",
"0.5385756",
"0.5385756",
"0.53805405",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572",
"0.5373572"
]
| 0.6991963 | 0 |
register_magics(init_cmd=None, init_fn=None, line_cmd=None, line_fn=None, log_level=None) Registers the two IPython magic methods ``%law`` and ``%ilaw`` which execute law commands either via a subprocess in bash (``%law``) or interactively / inline within the running process (``%ilaw``). init_cmd can be a shell command that is called before the magic methods are registered. Similarly, init_fn can be a callable that is invoked prior to the method setup. line_cmd, a shell command, and line_fn, a callable, are executed before a line magic is called. The former is run before ``%law`` is evaluated, while the latter is called before ``%ilaw`` with the line to interpret as the only argument. log_level conveniently sets the level of the law.contrib.ipython.magic logger that is used within the magic methods. It should be a number, or a string denoting a Python log level. | def register_magics(*args, **kwargs):
ipy = None
magics = None
try:
ipy = get_ipython()
except NameError:
logger.error("no running notebook kernel found")
# create the magics
if ipy:
magics = create_magics(*args, **kwargs)
# register it
if ipy and magics:
ipy.register_magics(magics)
names = list(magics.magics["cell"].keys()) + list(magics.magics["line"].keys())
names = ", ".join("%{}".format(name) for name in names)
logger.info("magics successfully registered: {}".format(names))
else:
logger.error("no magics registered") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _register_magics(ipython):\n ipython.register_magic_function(\n _start_magic,\n magic_kind=\"line\",\n magic_name=\"tensorboard\",\n )",
"def load_ipython_extension(ipython):\n\n for module in _MAGICS:\n ipython.register_magic_function(\n getattr(module, 'magic'),\n magic_kind='line_cell',\n magic_name=getattr(module, 'MAGIC_NAME'))",
"def register_interface_magics(self):\n from sage.repl.interface_magic import InterfaceMagic\n InterfaceMagic.register_all(self.shell)",
"def magic_magic(self, parameter_s = ''):\n\n mode = ''\n try:\n if parameter_s.split()[0] == '-latex':\n mode = 'latex'\n except:\n pass\n\n magic_docs = []\n for fname in self.lsmagic():\n mname = 'magic_' + fname\n for space in (Magic,self,self.__class__):\n try:\n fn = space.__dict__[mname]\n except KeyError:\n pass\n else:\n break\n magic_docs.append('@%s:\\n\\t%s\\n' %(fname,fn.__doc__))\n magic_docs = ''.join(magic_docs)\n\n if mode == 'latex':\n print self.format_latex(magic_docs)\n return\n else:\n magic_docs = self.format_screen(magic_docs)\n \n outmsg = \"\"\"\nIPython's 'magic' functions\n===========================\n\nThe magic function system provides a series of functions which allow you to\ncontrol the behavior of IPython itself, plus a lot of system-type\nfeatures. All these functions are prefixed with a @ character, but parameters\nare given without parentheses or quotes.\n\nExample: typing '@cd mydir' (without the quotes) changes you working directory\nto 'mydir', if it exists.\n\nIf you have 'automagic' enabled (via the command line option or with the\n@automagic function), you don't need to type in the @ explicitly.\n\nYou can define your own magic functions to extend the system. See the supplied\nipythonrc and example-magic.py files for details (in your ipython\nconfiguration directory, typically $HOME/.ipython/).\n\nYou can also define your own aliased names for magic functions. In your\nipythonrc file, placing a line like:\n\n execute __IP.magic_cl = __IP.magic_clear\n\nwill define @cl as a new name for @clear.\n\nFor a list of the available magic functions, use @lsmagic. For a description\nof any of them, type @magic_name?.\n\nCurrently the magic system has the following functions:\\n\"\"\"\n\n outmsg = (\"%s\\n%s\\n\\nSummary of magic functions (from @lsmagic):\"\n \"\\n\\n@%s\\n\\n%s\" % (outmsg,\n magic_docs,\n ' @'.join(self.lsmagic()),\n Magic.auto_status[self.rc.automagic] ) )\n\n page(outmsg,screen_lines=self.rc.screen_length)",
"def load_ipython_extension(ipython):\n _register_magics(ipython)",
"def expose_magic(magicname, func):\n \n from IPython import Magic\n import new\n im = new.instancemethod(func,__IP, __IP.__class__)\n setattr(__IP, \"magic_\" + magicname, im)",
"def magic(self, parameter_s=''):\n\n mode = ''\n try:\n mode = parameter_s.split()[0][1:]\n except IndexError:\n pass\n\n brief = (mode == 'brief')\n rest = (mode == 'rest')\n magic_docs = self._magic_docs(brief, rest)\n\n if mode == 'latex':\n print(self.format_latex(magic_docs))\n return\n else:\n magic_docs = format_screen(magic_docs)\n\n out = [\"\"\"\nIPython's 'magic' functions\n===========================\n\nThe magic function system provides a series of functions which allow you to\ncontrol the behavior of IPython itself, plus a lot of system-type\nfeatures. There are two kinds of magics, line-oriented and cell-oriented.\n\nLine magics are prefixed with the % character and work much like OS\ncommand-line calls: they get as an argument the rest of the line, where\narguments are passed without parentheses or quotes. For example, this will\ntime the given statement::\n\n %timeit range(1000)\n\nCell magics are prefixed with a double %%, and they are functions that get as\nan argument not only the rest of the line, but also the lines below it in a\nseparate argument. These magics are called with two arguments: the rest of the\ncall line and the body of the cell, consisting of the lines below the first.\nFor example::\n\n %%timeit x = numpy.random.randn((100, 100))\n numpy.linalg.svd(x)\n\nwill time the execution of the numpy svd routine, running the assignment of x\nas part of the setup phase, which is not timed.\n\nIn a line-oriented client (the terminal or Qt console IPython), starting a new\ninput with %% will automatically enter cell mode, and IPython will continue\nreading input until a blank line is given. In the notebook, simply type the\nwhole cell as one entity, but keep in mind that the %% escape can only be at\nthe very start of the cell.\n\nNOTE: If you have 'automagic' enabled (via the command line option or with the\n%automagic function), you don't need to type in the % explicitly for line\nmagics; cell magics always require an explicit '%%' escape. By default,\nIPython ships with automagic on, so you should only rarely need the % escape.\n\nExample: typing '%cd mydir' (without the quotes) changes your working directory\nto 'mydir', if it exists.\n\nFor a list of the available magic functions, use %lsmagic. For a description\nof any of them, type %magic_name?, e.g. '%cd?'.\n\nCurrently the magic system has the following functions:\"\"\",\n magic_docs,\n \"Summary of magic functions (from %slsmagic):\" % magic_escapes['line'],\n str(self.lsmagic()),\n ]\n page.page('\\n'.join(out))",
"def load_ipython_extension(ip): # pragma: no cover\n from .sql.magic_sql import register_sql_magics\n from .filehelper.magic_file import register_file_magics\n from .graphhelper.magic_graph import register_graph_magics\n from .notebookhelper.magic_notebook import register_notebook_magics\n\n register_sql_magics(ip)\n register_file_magics(ip)\n register_graph_magics(ip)\n register_notebook_magics(ip)",
"def load_ipython_extension(ipython):\n ipython.register_magics(ConductoMagics)",
"def handle_magic(self, line, continue_prompt=None,\n pre=None,iFun=None,theRest=None):\n\n cmd = '%sipmagic(\"%s\")' % (pre,esc_quotes('%s %s' % (iFun,theRest)))\n self.log(cmd,continue_prompt)\n self.update_cache(line)\n #print 'in handle_magic, cmd=<%s>' % cmd # dbg\n return cmd",
"def register(\n self, name='', magic=(), patterns=(),\n funcwrapper=lambda _:_\n ):\n\n def _decorator(converter):\n if not name:\n raise ValueError('No registration name given')\n if name in self._names:\n raise ValueError(f'Registration name `{name}` already in use for {self._names[name]}')\n if not isinstance(magic, (list, tuple)):\n raise TypeError('Registration parameter `magic` must be list or tuple')\n if not isinstance(patterns, (list, tuple)):\n raise TypeError('Registration parameter `patterns` must be list or tuple')\n converter.format = name\n self._names[name] = converter\n ## magic signatures\n for sequence in magic:\n self._magic.append((Magic(sequence), converter))\n # sort the magic registry long to short to manage conflicts\n self._magic = list(sorted(\n self._magic,\n key=lambda _i:len(_i[0]), reverse=True\n )\n )\n ## glob patterns\n for pattern in (*patterns, f'*.{name}'):\n self._patterns.append((to_pattern(pattern), converter))\n return funcwrapper(converter)\n\n return _decorator",
"def load_ipython_extension(ip):\n ip.register_magics(VentureMagics)\n if found_venture_ripl==1: \n print 'loaded VentureMagics with ripl \"ipy_ripl\"'",
"def load_ipython_extension(ip):\n\n # This is equivalent to `ip.register_magics(JuliaMagics)` (but it\n # let us access the instance of `JuliaMagics`):\n magics = JuliaMagics(shell=ip)\n ip.register_magics(magics)\n\n template = \"Incompatible upstream libraries. Got ImportError: {}\"\n if magics.highlight:\n try:\n from .ipy.monkeypatch_interactiveshell import patch_interactiveshell\n except ImportError as err:\n warnings.warn(template.format(err))\n else:\n patch_interactiveshell(ip)\n\n if magics.completion:\n try:\n from .ipy.monkeypatch_completer import patch_ipcompleter\n except ImportError as err:\n warnings.warn(template.format(err))\n else:\n patch_ipcompleter()\n\n if magics.redirect_output_streams is True or (\n magics.redirect_output_streams == \"auto\" and should_redirect_output_streams()\n ):\n redirect_output_streams()\n\n if magics.revise:\n from .ipy.revise import register_revise_hook\n\n register_revise_hook(ip)",
"def register_command(*parse_args, **options):\n def wrapper(function):\n function._is_command = True\n return function\n return wrapper",
"def register(\n command_list: List[Tuple[re.Pattern, callable]], command_str: str\n) -> callable:\n\n def command_decorator(func: callable) -> callable:\n command_patter = build_command_pattern(command_str)\n command_list.append((command_patter, func))\n return func\n\n return command_decorator",
"def register(self, *args):\n def decorate(f):\n if not len(args) == 1:\n full = f.__name__\n else:\n full = args[0]\n\n # Gather some informations about the arguments of the function, to\n # display them in help() and check for the min / max number of\n # arguments on call.\n spec = inspect.getargspec(f)\n fargs = spec.args if spec.args else []\n nbr_args = len(fargs)\n nbr_filled = len(spec.defaults) if spec.defaults else 0\n reqs = fargs[:nbr_args-nbr_filled+1]\n adds = fargs[nbr_args-nbr_filled+1:]\n\n info = {\n 'function' : f,\n 'required' : reqs,\n 'additional': adds,\n }\n\n self.actions[full] = info\n return f\n return decorate",
"def register(self, regex, auth=False, help=\"\"):\n self.regexes.append(re.compile(regex))\n self.helps.append(help)\n self.auths.append(auth)\n\n def _decorator(func):\n self.funcs.append(func)\n return func\n \n return _decorator",
"def add_cmd_handler(self, cmd, func):\n len_args = len(inspect.getargspec(func)[0])\n def add_meta(f):\n def decorator(*args, **kwargs):\n f(*args, **kwargs)\n decorator.bytes_needed = len_args - 1 # exclude self\n decorator.__name__ = f.__name__\n return decorator\n func = add_meta(func)\n self._command_handlers[cmd] = func",
"def ipmagic(arg_s):\n\n args = arg_s.split(' ',1)\n magic_name = args[0]\n if magic_name.startswith(__IPYTHON__.ESC_MAGIC):\n magic_name = magic_name[1:]\n try:\n magic_args = args[1]\n except IndexError:\n magic_args = ''\n fn = getattr(__IPYTHON__,'magic_'+magic_name,None)\n if fn is None:\n error(\"Magic function `%s` not found.\" % magic_name)\n else:\n magic_args = __IPYTHON__.var_expand(magic_args)\n return fn(magic_args)",
"def magic_lsmagic(self, parameter_s = ''):\n print 'Available magic functions:\\n@'+' @'.join(self.lsmagic())\n print '\\n' + Magic.auto_status[self.rc.automagic]\n return None",
"def register_command(self, func):\n self.commands[func.__name__] = func",
"def load_ipython_extension(ip):\n\n # this fails in both Firefox and Chrome for OS X.\n # I get the error: TypeError: IPython.CodeCell.config_defaults is undefined\n\n # js = \"IPython.CodeCell.config_defaults.highlight_modes['magic_kql'] = {'reg':[/^%%kql/]};\"\n # display_javascript(js, raw=True)\n result = ip.register_magics(Magic)\n for alias in Constants.MAGIC_ALIASES:\n ip.magics_manager.register_alias(alias, Constants.MAGIC_NAME, 'cell')\n ip.magics_manager.register_alias(alias, Constants.MAGIC_NAME, 'line')\n # ip.run_line_magic(\"alias_magic\", \"{0} {1}\".format(alias, Constants.MAGIC_NAME))\n return result",
"def load_ipython_extension(ipython):\n ipython.register_magics(CallGraphMagics)",
"def register():\n PLUGINS = dict()\n def decorator(func):\n @functools.wraps(func)\n def wrapper(self, *args, **kwargs):\n value = func(*args, **kwargs)\n PLUGINS[func.__name__] = func\n return value\n return wrapper\n return decorator",
"def register(name, fn=None):\n def _hook_add(func):\n if name not in _hooks:\n logger.debug(\"Creating new hook %s\" % name)\n _hooks[name] = []\n\n logger.debug('Registering hook %s for function %s' % (name, fn))\n _hooks[name].append(func)\n\n if fn is None:\n # Behave like a decorator\n def decorator(func):\n _hook_add(func)\n return func\n return decorator\n else:\n # Behave like a function, just register hook\n _hook_add(fn)",
"def register_command(func):\n supported_commands.append(func.__name__)\n return func",
"def decorate(self, alias, *decorators):\n pfunc = getattr(self, alias)\n method, args, kargs = pfunc.func, pfunc.args, pfunc.keywords\n for decorator in decorators:\n method = decorator(method)\n self.register(alias, method, *args, **kargs)",
"def add_cmd(self, func, is_admin=False):\n method = {}\n vals = func.func_name.split('_')\n if vals[0] == 'void':\n method['template'] = void_template\n elif vals[0] == 'string':\n method['template'] = string_template\n elif vals[0] == 'begin':\n method['template'] = begin_template\n else:\n method['template'] = list_template\n method['cmd'] = vals[1].upper()\n if not vals[0] in self.templates:\n msg = \"The first part of the function name must be %s\" % str(self.templates)\n raise NamingError(msg)\n if is_admin:\n method['perm'] = 'M'\n else:\n method['perm'] = 'r'\n args = inspect.getargspec(func)[0]\n if 'args' in args:\n method['has_arg'] = True\n method['inst'] = \"Syntax %s <sp> args\" % method['cmd']\n else:\n method['has_arg'] = False\n method['inst'] = \"Syntax %s\" % method['cmd']\n made = self.arg_maker(args)\n method['need'] = made[0]\n method['arg'] = made[1]\n method['func'] = func.__name__\n method['mod'] = func.__module__\n self.methods += [method]",
"def register(dmm, typecls):\n def wraps(fn):\n dmm.register(typecls, fn)\n return fn\n\n return wraps",
"def register(check_environ=False):\n from mundi.loader import register\n from mundi.types.region import REGION_PLUGINS\n\n if check_environ:\n import os\n\n if os.environ.get(\"MUNDI_DEMOGRAPHY\", \"on\").lower() in (\"off\", \"false\", \"no\"):\n return\n\n for k, v in FUNCTIONS.items():\n register(k, v)\n\n REGION_PLUGINS[\"population\"] = lambda x: population(x.id)\n REGION_PLUGINS[\"age_distribution\"] = lambda x: age_distribution(x.id)\n REGION_PLUGINS[\"age_pyramid\"] = lambda x: age_pyramid(x.id)"
]
| [
"0.7008458",
"0.6217587",
"0.5964256",
"0.58934474",
"0.56525946",
"0.5644433",
"0.5603719",
"0.5478493",
"0.546413",
"0.5450855",
"0.5428396",
"0.53762233",
"0.52367175",
"0.52102613",
"0.52080494",
"0.5177763",
"0.5166397",
"0.5158223",
"0.5103159",
"0.5101145",
"0.5062271",
"0.5022527",
"0.5002761",
"0.5000513",
"0.49943778",
"0.49793085",
"0.495965",
"0.4955992",
"0.4910438",
"0.48900026"
]
| 0.6953809 | 1 |
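For illustration, a minimal sketch of calling the register_magics entry point documented in the row above from a notebook. The import path follows the law.contrib.ipython.magic module named in the query, but is an assumption, as are the concrete commands and task names shown.

```python
# Hedged sketch: assumes the law package is installed and exposes register_magics
# at law.contrib.ipython.magic (assumed import path); commands are placeholders.
from law.contrib.ipython.magic import register_magics

register_magics(
    init_cmd="source setup.sh",      # bash command run once before registration (placeholder)
    line_cmd="law index --verbose",  # bash command run before every %law call (placeholder)
    log_level="INFO",                # level of the law.contrib.ipython.magic logger
)

# In later cells the registered line magics would then be used, e.g.:
#   %law run MyTask    -> executed via a bash subprocess
#   %ilaw run MyTask   -> executed inline in the running kernel
```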
CSIIsilonSpec defines the desired state of CSIIsilon | def __init__(__self__, *,
driver: 'outputs.CSIIsilonSpecDriver'):
pulumi.set(__self__, "driver", driver) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def driver(self) -> 'outputs.CSIIsilonSpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n common: 'outputs.CSIIsilonSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIIsilonSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIIsilonSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def get_lepsilon_incar(custom_parameters_dictionary=None):\n\n\t\tincar = IncarMaker.get_static_incar()\n\t\tincar['ediff'] = 1.0e-7\n\t\tincar['lepsilon'] = True\n\t\tdel incar['npar'] #lepsilon runs are not parallelizable\n\n\t\tincar.modify_from_dictionary(custom_parameters_dictionary)\n\n\t\treturn incar",
"def common(self) -> 'outputs.CSIIsilonSpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def epsilon():\n return _EPSILON",
"def test_cnot():\n\n program = dedent(\n \"\"\"\\\n register q0[0]\n register q1[1]\n X q0\n CNOT q0 q1\n \"\"\"\n )\n\n result = run(program, run_gate_array, return_distribution=True)\n assert isclose(result, [0.0, 0.0, 0.0, 1.0]).all()",
"def controller(self) -> Optional['outputs.CSIIsilonSpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def node(self) -> Optional['outputs.CSIIsilonSpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverControllerEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverControllerTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverCommonEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def get_initial_epsilon(self):\n return self.epsilon_percentile, True, self.max_rounds == 0",
"def _define_epsilon(n,T,a=1):\n\n return np.sqrt(np.log(n)/T)*a",
"def epsilon_delta(self):",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverNodeEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverNodeTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def test_units(tmpdir, units):\n tmpdir.chdir()\n hz = np.ones((5, 5, 5), dtype=\"float32\") * 100\n data = hz.copy()\n\n if units == \"rad/s\":\n data *= 2.0 * np.pi\n\n nb.Nifti1Image(data, np.eye(4), None).to_filename(\"data.nii.gz\")\n out_data = nb.load(\n CheckB0Units(units=units, in_file=\"data.nii.gz\").run().outputs.out_file\n ).get_fdata(dtype=\"float32\")\n\n assert np.allclose(hz, out_data)",
"def Kepsilon(self):\n kE = 2 + 0.1024 / self.r + (0.1124 + 0.1265 * radians(self.sweep25W) + 0.1766 * radians(self.sweep25W)**2) / \\\n (self.r**2)\n kE0 = 2 + 0.1024 / self.r + 0.1124 / (self.r**2)\n return kE / kE0",
"def _dumbCSI(self):\n # get my renderer\n renderer = self.renderer\n # build the 3 bit color generator\n yield from renderer.set(name=\"csi3\", value=\"\")\n yield from renderer.set(name=\"csi8\", value=\"\")\n yield from renderer.set(name=\"csi24\", value=\"\")\n\n # all done\n return",
"def test_iimi1():\n iimi = interactive_intrinsic_mutual_information(n_mod_m(3, 2), rvs=[[0], [1]], crvs=[2], rounds=1)\n assert iimi == pytest.approx(0.0)",
"def test_spires_syntax_detected_naked_a(self):\n converter = search_engine_query_parser.SpiresToInvenioSyntaxConverter()\n spi_search = converter.is_applicable(\"a ellis\")\n self.assertEqual(spi_search, True)",
"def match_label_expressions(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClassAllowedTopologiesMatchLabelExpressions']]:\n return pulumi.get(self, \"match_label_expressions\")",
"def __init__(self, epsilon=1e-14):\n self.epsilon = epsilon",
"def epsilon(current_episode, num_episodes):\n # return 1 - (current_episode/num_episodes)\n return .5 * .9**current_episode",
"def test_nonlocal_pauli_error_gate_25percent(self):\n qr = QuantumRegister(3, 'qr')\n cr = ClassicalRegister(3, 'cr')\n circuit = QuantumCircuit(qr, cr)\n circuit.cx(qr[0], qr[1])\n circuit.barrier(qr)\n circuit.cx(qr[1], qr[0])\n circuit.barrier(qr)\n circuit.measure(qr, cr)\n backend = QasmSimulator()\n shots = 2000\n # test noise model\n error = pauli_error([('XII', 0.25), ('III', 0.75)])\n noise_model = NoiseModel()\n noise_model.add_nonlocal_quantum_error(error, 'cx', [0, 1], [0, 1, 2])\n # Execute\n target = {'0x0': 3 * shots / 4, '0x4': shots / 4}\n circuit = transpile(circuit, basis_gates=noise_model.basis_gates)\n qobj = assemble([circuit], backend, shots=shots)\n result = backend.run(qobj, noise_model=noise_model).result()\n self.is_completed(result)\n self.compare_counts(result, [circuit], [target], delta=0.05 * shots)",
"def test_number_sci_notation(self):\r\n self.assertEquals(\r\n preview.latex_preview('6.0221413E+23'),\r\n r'6.0221413\\!\\times\\!10^{+23}'\r\n )\r\n self.assertEquals(\r\n preview.latex_preview('-6.0221413E+23'),\r\n r'-6.0221413\\!\\times\\!10^{+23}'\r\n )",
"def _get_lsp_config_isis_ignore_metric(self):\n return self.__lsp_config_isis_ignore_metric",
"def set_epsilon(self,epsilon):\r\n\t\tself.epsilon = epsilon",
"def test_ud_cnot():\n program = dedent(\n \"\"\"\\\n register q0[0]\n register q1[1]\n register q2[2]\n register q3[3]\n X q2\n CNOT q2 q0\n \"\"\"\n )\n\n result = run(program, run_gate_array)\n assert isclose(result, [1.0, 0.0, 1.0, 0.0]).all()",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def isi_calc(self):\n arg = erfinv(0.8)*1.0E6/(self.speedup*self.br_nominal)\n print('arg: ', arg)\n\n # calculate center eye opening with no additional impairments\n self.isi_center = 2.0*erf(arg/self.tc) - self.l_1 # column Z\n\n # calculate center eye opening with residual DJ (DJ - DCD)\n self.isi_dj_center = (erf(arg*(1.0+self.dj_ui)/self.tc) + erf(arg*(1.0-self.dj_ui)/self.tc) - self.l_1) # column AD\n\n # calculate eye closing induced by interferometric effects from link end reflections\n mean_reflection = math.pow(10.0,0.05*(self.rx_reflection + self.tx_reflection)) # cell AB5\n er_lin = math.pow(10.0,0.1*self.er_dB_min) # cell AB7\n\n\n arg1 = np.sqrt(2.0*er_lin*self.isi_dj_center*(er_lin-1.0) + (er_lin+1.0)*self.l_1)\n print('arg1: ', arg1)\n arg2 = np.divide(arg1,self.isi_dj_center)\n arg3 = (2.0*self.ref_nf*np.power(10.0,-0.1*self.chil)*mean_reflection)\n self.isi_reflection = self.l_1-np.multiply(arg2,arg3)\n\n # calculate center eye opening with both residual DJ and reflection degradations included\n self.isi_dj_refl_closed = np.multiply(self.isi_dj_center, self.isi_reflection) # column AA\n print('isi_dj_refl_closed (AA) : ', self.isi_dj_refl_closed)\n \n # calculate eye opening at the corners with no additional impairments\n eff_rx_eye = 2.0*(0.5-self.X2)*self.speedup\n self.isi_corners = (erf(arg*(1.0+eff_rx_eye)/self.tc) + erf(arg*(1.0-eff_rx_eye)/self.tc) - self.l_1) # column AB\n\n # calculate eye opening at the corners with residual DJ impairment\n self.isi_dj_corners = (erf(arg*(1.0+eff_rx_eye+self.dj_ui)/self.tc) + erf(arg*(1.0-eff_rx_eye-self.dj_ui)/self.tc) - self.l_1) # column AC\n self.isi_tp4_rx = (erf(arg*(1.0+eff_rx_eye)/self.rx_1090_rise) + erf(arg*(1.0-eff_rx_eye)/self.rx_1090_rise) - 1) # cell AG5\n\n # end of GbE10.isi_calcdef isi_calc(self):",
"def __init__(__self__, *,\n name: str,\n value: Optional[str] = None,\n value_from: Optional['outputs.CSIIsilonSpecDriverNodeEnvsValueFrom'] = None):\n pulumi.set(__self__, \"name\", name)\n if value is not None:\n pulumi.set(__self__, \"value\", value)\n if value_from is not None:\n pulumi.set(__self__, \"value_from\", value_from)"
]
| [
"0.5846621",
"0.5841295",
"0.51959497",
"0.49822912",
"0.4924128",
"0.49084496",
"0.4894449",
"0.48541084",
"0.4831767",
"0.4813849",
"0.476087",
"0.47559026",
"0.47226313",
"0.46790582",
"0.4671698",
"0.4648874",
"0.46272394",
"0.46069556",
"0.460521",
"0.46032214",
"0.45957366",
"0.4594465",
"0.45812103",
"0.4563926",
"0.45512322",
"0.45474347",
"0.4523813",
"0.45213914",
"0.4509246",
"0.45016047"
]
| 0.6181904 | 0 |
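For illustration, a plain-Python stand-in for the spec/driver nesting shown in the row above (not the real generated pulumi classes). The field names mirror those documented in the surrounding rows (config_version, replicas, auth_secret); the concrete values are placeholders.

```python
from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True)
class DriverSpec:               # stands in for outputs.CSIIsilonSpecDriver
    config_version: str
    replicas: int
    auth_secret: Optional[str] = None

@dataclass(frozen=True)
class IsilonSpec:               # stands in for CSIIsilonSpec: it only wraps a driver block
    driver: DriverSpec

spec = IsilonSpec(driver=DriverSpec(config_version="v2", replicas=2))
print(spec.driver.replicas)     # -> 2
```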
Driver is the specification for the CSI Isilon Driver | def driver(self) -> 'outputs.CSIIsilonSpecDriver':
return pulumi.get(self, "driver") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__, *,\n driver: 'outputs.CSIIsilonSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n driver: 'outputs.CSIVXFlexOSSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self) -> 'outputs.CSIVXFlexOSSpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerStoreSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n common: 'outputs.CSIIsilonSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIIsilonSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIIsilonSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerMaxSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n driver: 'outputs.CSIUnitySpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self) -> 'outputs.CSIUnitySpecDriver':\n return pulumi.get(self, \"driver\")",
"def driver(self) -> 'outputs.CSIPowerMaxSpecDriver':\n return pulumi.get(self, \"driver\")",
"def disk_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileDiskCSIDriverArgs']]:\n return pulumi.get(self, \"disk_csi_driver\")",
"def controller(self) -> Optional['outputs.CSIIsilonSpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def __init__(__self__, *,\n driver: pulumi.Input[str]):\n pulumi.set(__self__, \"driver\", driver)",
"def common(self) -> 'outputs.CSIIsilonSpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverCommonEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverControllerEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverControllerTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def blob_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileBlobCSIDriverArgs']]:\n return pulumi.get(self, \"blob_csi_driver\")",
"def node(self) -> Optional['outputs.CSIIsilonSpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverNodeEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverNodeTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def driver(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"driver\")",
"def file_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileFileCSIDriverArgs']]:\n return pulumi.get(self, \"file_csi_driver\")",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def idn(self):\n\n if self.driver in [drivers.pyvisa, drivers.lgpib]:\n return self.ask('*idn?')",
"def __init__(__self__, *,\n driver: Optional[pulumi.Input[str]] = None):\n if driver is not None:\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n common: 'outputs.CSIPowerMaxSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIPowerMaxSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIPowerMaxSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def get_driver_info(self, aaidee):\n name = create_string_buffer(256)\n guid = GUID()\n system_rate = c_int()\n speaker_mode = c_int()\n channels = c_int()\n ckresult(\n _dll.FMOD_System_GetDriverInfo(\n self._ptr,\n aaidee,\n name,\n 256,\n byref(guid),\n byref(system_rate),\n byref(speaker_mode),\n byref(channels),\n )\n )\n return so(\n name=name.value,\n guid=guid,\n system_rate=system_rate.value,\n speaker_mode=speaker_mode.value,\n speaker_mode_channels=channels.value,\n )",
"def gcp_filestore_csi_driver_config(self) -> Optional[pulumi.Input['GcpFilestoreCsiDriverConfigArgs']]:\n return pulumi.get(self, \"gcp_filestore_csi_driver_config\")",
"def driver(self):\n \n return self.__driver",
"def DRIVER():\n return \"podman\"",
"def gce_persistent_disk_csi_driver_config(self) -> Optional[pulumi.Input['GcePersistentDiskCsiDriverConfigArgs']]:\n return pulumi.get(self, \"gce_persistent_disk_csi_driver_config\")"
]
| [
"0.7755355",
"0.6638118",
"0.659107",
"0.6575005",
"0.6453641",
"0.64318734",
"0.62873507",
"0.6273736",
"0.61924016",
"0.6038243",
"0.5645263",
"0.5640794",
"0.5625848",
"0.5540442",
"0.5512617",
"0.5436864",
"0.5334795",
"0.5277546",
"0.5276694",
"0.5274879",
"0.52567095",
"0.5232512",
"0.52113134",
"0.51968825",
"0.5182981",
"0.5130483",
"0.51203996",
"0.5113016",
"0.50737464",
"0.50704837"
]
| 0.78547376 | 0 |
ConfigVersion is the configuration version of the driver | def config_version(self) -> str:
return pulumi.get(self, "config_version") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def original_config_version(self):\n return self._get_param(\"ConfigVersion\")",
"def get_config_version(config):\n return 2 if is_v2_config(config) else 1",
"def configversion(self, args):\n print(CONFIG_VERSION)",
"def productVersion( self ):\n return Config.ProductVersion",
"def configure_for_version(version, config=config):\n if version == \"red\":\n attrs = configure_for_pokered(config)\n elif version == \"crystal\":\n attrs = configure_for_pokecrystal(config)\n else:\n # TODO: pick a better exception\n raise Exception(\n \"Can't configure for this version.\"\n )\n\n for (key, value) in attrs.iteritems():\n setattr(config, key, value)\n\n # not really needed since it's modifying the same object\n return config",
"def getDriverNameVersion(self):\n return self.driver_name, self.driver_version",
"def get_version(self):\n return self.cur_config['version']['name']",
"def get_version():\n return \".\".join([str(i) for i in config[\"version\"]])",
"def get_cbs_version(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"server_version\"]",
"def getCurrentVersion():\n f_version = configManagement.currentVersion()\n return f_version",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def configure_driver(self, config):\n raise NotImplementedError",
"def _get_cfg_v(self):\n if CONFIG_VERSION_KEY in self[CONFIG_KEY]:\n v_str = self[CONFIG_KEY][CONFIG_VERSION_KEY]\n if not isinstance(v_str, str):\n raise InvalidConfigFileException(\"{} must be a string\".\n format(CONFIG_VERSION_KEY))\n v_bundle = v_str.split(\".\")\n assert len(v_bundle) == 3, \\\n InvalidConfigFileException(\"Version string is not tripartite\")\n try:\n v_bundle = list(map(int, v_bundle))\n except ValueError:\n raise InvalidConfigFileException(\"Version string elements are \"\n \"not coercible to integers\")\n if v_bundle[0] < 2:\n if SAMPLE_MODS_KEY in self[CONFIG_KEY]:\n raise InvalidConfigFileException(\n \"Project configuration file ({p}) subscribes to {c} \"\n \">= 2.0.0, since '{m}' section is defined. Set {c} to \"\n \"2.0.0 in your config\".format(p=self[CONFIG_FILE_KEY],\n c=CONFIG_VERSION_KEY,\n m=SAMPLE_MODS_KEY))\n else:\n self._format_cfg()\n return [\"2\", \"0\", \"0\"]\n return list(map(str, v_bundle))\n else:\n self._format_cfg()\n return [\"2\", \"0\", \"0\"]",
"def upgrade_config_format(self):\n # migrate older config files\n if self.version == 1:\n # capture_init()\n self.version = 3\n\n # If token exists check still valid and can login\n if self.token and self.token != DEFAULT_TOKEN:\n from .api import ping\n\n with suppress(Exception):\n self.username = ping(config=self, cli_login=True, verbose=False)\n\n self.save()\n elif self.version == 2:\n # re-init against new server\n # capture_init()\n self.version = 3\n self.save()",
"def config(self) -> 'outputs.CSIPowerMaxRevProxySpecConfig':\n return pulumi.get(self, \"config\")",
"def version(self):\n return 1",
"def get_version(configuration):\n return hashlib.md5(configuration.SerializeToString()).hexdigest()",
"def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict) -> \"DriverPlatformInterface\":\n raise NotImplementedError",
"def version(self):\n pass",
"def version(self):\n pass",
"def version(self):\n pass",
"def get_sg_version(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"sync_gateway_version\"]",
"def kernel_version(self, kernel_version):\n\n self._kernel_version = kernel_version",
"def get_version(cls):\n if Config.ENV_TYPE == PRD:\n return Config.version + \"/\" + Config.build\n return Config.version + \"/\" + Config.build + \"/\" + Config.generate + ' (' + Config.ENV_NAME + ')'",
"def set_php_version(self, version: str) -> Session:\n data = {\n \"version\": version\n }\n\n return self.configure(data)",
"def version(self):\n report(f\"spd-conf {buildconfig.VERSION}\\n\")\n report(_(\"\"\"Copyright (C) %d-%d Brailcom, o.p.s.\nThis is free software; you can redistribute it and/or modify it\nunder the terms of the GNU General Public License as published by\nthe Free Software Foundation; either version 2, or (at your option)\nany later version. Please see COPYING for more details.\\n\\n\"\"\") % \\\n (2002, 2012))",
"def validate_project_version(config: Dict[str, Any]) -> None:\n spacy_version = config.get(\"spacy_version\", None)\n if spacy_version and not is_compatible_version(about.__version__, spacy_version):\n err = (\n f\"The {PROJECT_FILE} specifies a spaCy version range ({spacy_version}) \"\n f\"that's not compatible with the version of spaCy you're running \"\n f\"({about.__version__}). You can edit version requirement in the \"\n f\"{PROJECT_FILE} to load it, but the project may not run as expected.\"\n )\n msg.fail(err, exits=1)",
"def detect_version(conn):\n try:\n with conn.begin():\n db_version = conn.scalar(text(\n \"SELECT version FROM configuration\"))\n except exc.ProgrammingError:\n with conn.begin():\n packages_exists = bool(conn.scalar(text(\n \"SELECT 1 FROM pg_catalog.pg_tables \"\n \"WHERE schemaname = 'public' AND tablename = 'packages'\")))\n with conn.begin():\n statistics_exists = bool(conn.scalar(text(\n \"SELECT 1 FROM pg_catalog.pg_views \"\n \"WHERE schemaname = 'public' AND viewname = 'statistics'\")))\n with conn.begin():\n files_exists = bool(conn.scalar(text(\n \"SELECT 1 FROM pg_catalog.pg_tables \"\n \"WHERE schemaname = 'public' AND tablename = 'files'\")))\n if not packages_exists:\n # Database is uninitialized\n return None\n elif not files_exists:\n # Database is too ancient to upgrade\n raise RuntimeError(\"Database version older than 0.4; cannot upgrade\")\n elif not statistics_exists:\n return \"0.4\"\n else:\n return \"0.5\"\n else:\n return db_version",
"def provider_version(self):\n raise NotImplementedError",
"def get_version():\n return 1"
]
| [
"0.7028426",
"0.6824842",
"0.68103474",
"0.61671114",
"0.61632943",
"0.5905669",
"0.5839637",
"0.5696008",
"0.5659997",
"0.55798566",
"0.55448544",
"0.5508866",
"0.55024767",
"0.5405574",
"0.5395576",
"0.5393082",
"0.5381021",
"0.5331651",
"0.5284632",
"0.5284632",
"0.5284632",
"0.52731234",
"0.52543426",
"0.5234204",
"0.5232176",
"0.52097297",
"0.51674175",
"0.5159565",
"0.51407516",
"0.51343507"
]
| 0.70424426 | 1 |
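For illustration, how a read-only property like config_version is typically declared in a generated pulumi output type. The @pulumi.output_type/@pulumi.getter decorators and the camelCase wire name are assumptions about the pulumi SDK's codegen conventions, not taken from this row.

```python
# Hedged sketch; requires the pulumi Python SDK to be installed.
import pulumi

@pulumi.output_type
class ExampleDriverOutputs(dict):
    @property
    @pulumi.getter(name="configVersion")  # assumed camelCase wire name
    def config_version(self) -> str:
        # pulumi.get looks the value up under the translated key, as in the row above
        return pulumi.get(self, "config_version")
```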
Replicas is the count of controllers for Controller plugin | def replicas(self) -> int:
return pulumi.get(self, "replicas") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_num_replicas():\n\n tf_replicator = get_tf_replicator()\n\n if tf_replicator:\n return tf_replicator.num_replicas_in_sync\n elif tf.distribute.has_strategy():\n return tf.distribute.get_strategy().num_replicas_in_sync\n else:\n # I'm assuming replicas and shards are always equal until someone tells me\n # different.\n num_replicas = tpu_function.get_tpu_context().number_of_shards\n if num_replicas:\n return num_replicas\n else:\n return 1",
"def n_replicas(self):\n if self._sampler_states is None:\n return 0\n else:\n return len(self._sampler_states)",
"def replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"replicas\")",
"def num_replicas_per_shard(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"num_replicas_per_shard\")",
"def num_replicas_per_shard(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"num_replicas_per_shard\")",
"def num_replicas_per_shard(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"num_replicas_per_shard\")",
"def num_partitions(self): # -> int:\n ...",
"def num_partitions(self): # -> None:\n ...",
"def num_shards(self) -> int:\n return self.db_nodes",
"def get_num_servers():\n return 1",
"def test_redis_increase_replica_count_usual_case():",
"def num_partitions(self): # -> Unknown:\n ...",
"def initialize_replicas(self):\n try:\n self.replicas+1\n except:\n print \"Ensemble MD Toolkit Error: Number of replicas must be \\\n defined for pattern ReplicaExchange!\"\n raise\n\n\n replicas = []\n N = self.replicas\n for k in range(N):\n r = ReplicaP(k)\n replicas.append(r)\n\n return replicas",
"def num_servos(self) -> int:\n return self._num_servos",
"def auto_config(self, num_replicas=1):\n _ = num_replicas\n return {}",
"def num_slaves(self) -> int:\n raise NotImplementedError",
"def get_sg_replicas(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"number_replicas\"]",
"def get_num_instances(self):\n return len( self.get_instances_ids() )",
"def max_replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_replicas\")",
"def max_replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_replicas\")",
"def bigcouch_quorum_count():\n return (3 if not hasattr(settings, 'BIGCOUCH_QUORUM_COUNT')\n else settings.BIGCOUCH_QUORUM_COUNT)",
"def get_control_count(cmd):\n return len(cmd.control_qubits)",
"def num_masters(self) -> int:\n raise NotImplementedError",
"def nclients(self, r):\r\n return len(self.clients(r))",
"def count(cls, client) :\n\t\ttry :\n\t\t\tobj = rewriteaction()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e",
"def client_count(request):\n return request.param",
"def retrieve_num_instances(service):\n instance_counts = service[\"instance-counts\"]\n return instance_counts[\"healthy-instances\"] + instance_counts[\"unhealthy-instances\"]",
"def num_shards(self) -> int:\n return pulumi.get(self, \"num_shards\")",
"def ready_replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ready_replicas\")",
"def min_replica_count(self) -> Optional[int]:\n return pulumi.get(self, \"min_replica_count\")"
]
| [
"0.6779487",
"0.652515",
"0.63903236",
"0.633039",
"0.62465656",
"0.62465656",
"0.6225653",
"0.6089452",
"0.6044615",
"0.6033277",
"0.60150945",
"0.60065347",
"0.5897805",
"0.5868112",
"0.5853733",
"0.5828359",
"0.5775845",
"0.5618973",
"0.5611121",
"0.5611121",
"0.55873203",
"0.557994",
"0.554232",
"0.5541864",
"0.5533263",
"0.5531813",
"0.55096984",
"0.54762614",
"0.54746825",
"0.54689324"
]
| 0.70805544 | 0 |
AuthSecret is the name of the credentials secret for the driver | def auth_secret(self) -> Optional[str]:
return pulumi.get(self, "auth_secret") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def client_secret(self) -> str:",
"def secret(self):\n return self._secret",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def pull_secret(self):\n return self._pull_secret",
"def pull_secret(self):\n return self._pull_secret",
"def __init__(self, auth_key, auth_secret):\n\n self._auth_key = auth_key\n self._auth_secret = auth_secret",
"def authenticate_password(self, secret=\"\"):\r\n #hexstr = binascii.b2a_hex(secret)\r\n self.sendAndRecv(\"AUTHENTICATE \\\"%s\\\"\\r\\n\"%secret)",
"def secret_key(self, val):\n self.__secret_key = val",
"def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n name: Optional[pulumi.Input[str]] = None,\n secret: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"auth_type\", 'secret')\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if secret is not None:\n pulumi.set(__self__, \"secret\", secret)",
"def auth_password(self, auth_password):\n\n self._auth_password = auth_password",
"def password(self):\n return (self._config.get(\"sasl.password\")\n or self._config.get(\"sasl.oauthbearer.client.secret\"))",
"def get_secret(name):\n config = ConfigParser()\n config.read('/srv/oclubs/secrets.ini')\n return config.get('secrets', name)",
"def _v2_auth(self, url):\n return {\"auth\": {\n \"passwordCredentials\": {\"username\": self.user,\n \"password\": self.secret}}}",
"def aws_credentials_secret_name(self) -> Optional[str]:\n return pulumi.get(self, \"aws_credentials_secret_name\")",
"def get_secret(self, secret_name):\n secret = self._sm.access_secret_version(name=secret_name).payload.data.decode()\n try:\n return json.loads(secret)\n except json.decoder.JSONDecodeError:\n return secret",
"def get_client_secret():\n\n return str(get_account().Get(GOA_ACCOUNT_OAUTH2, 'ClientSecret',\n dbus_interface=PROPERTIES))",
"async def read_secret(self, name: str):\n pass",
"def secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret\")",
"def secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret\")",
"def client_secret(self) -> str:\n return self.get_env_var(self.client_secret_var)",
"def client_secret(self) -> str:\n return self.get_env_var(self.client_secret_var)",
"def set_auth_credentials():\n import os\n from passlib.apps import custom_app_context as pwd_context\n\n os.environ[\"AUTH_USERNAME\"] = \"testme\"\n os.environ[\"AUTH_PASSWORD\"] = pwd_context.hash(\"foobar\")",
"def secret(self):\n\n return self.info.get('env', {}).get('APP_SECRET')",
"def _wrap_secret(self, val):\n return {\"SecretString\": val}",
"def secret_name(self) -> str:\n return self._secret_name",
"def secret_name(self, secret_name: str):\n\n self._secret_name = secret_name",
"def getSecret(self):\n\n with open(self._secret_file) as f:\n secret=f.readline().rstrip()\n \n return secret",
"def get_sd_auth(val, sd_auth_pillar_name=\"serverdensity\"):\n sd_pillar = __pillar__.get(sd_auth_pillar_name)\n log.debug(\"Server Density Pillar: %s\", sd_pillar)\n if not sd_pillar:\n log.error(\"Could not load %s pillar\", sd_auth_pillar_name)\n raise CommandExecutionError(\n \"{} pillar is required for authentication\".format(sd_auth_pillar_name)\n )\n\n try:\n return sd_pillar[val]\n except KeyError:\n log.error(\"Could not find value %s in pillar\", val)\n raise CommandExecutionError(\"{} value was not found in pillar\".format(val))",
"def test_secrets() -> Secrets:\n from dotenv import load_dotenv\n from os import getenv\n from pathlib import Path\n env_path = Path('.') / '.env.testing'\n load_dotenv(dotenv_path=env_path)\n return Secrets(\n google_id_token=getenv(\"GOOGLE_ID_TOKEN\"),\n google_user_id=getenv(\"GOOGLE_USER_ID\")\n )"
]
| [
"0.6323842",
"0.62277025",
"0.61372435",
"0.61372435",
"0.6120714",
"0.6120714",
"0.6079962",
"0.60155314",
"0.5946268",
"0.59420717",
"0.58244205",
"0.57821745",
"0.57658476",
"0.5765793",
"0.57578164",
"0.5710769",
"0.56896",
"0.5686426",
"0.56813496",
"0.56813496",
"0.5677389",
"0.5677389",
"0.566386",
"0.5656821",
"0.5653111",
"0.5641872",
"0.5625344",
"0.56100994",
"0.5599374",
"0.55934733"
]
| 0.7148475 | 0 |
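For illustration, one way a consumer might handle the optional auth_secret field documented in the row above. The fallback secret name is a placeholder assumption, not something specified by the driver.

```python
from typing import Optional

def resolve_auth_secret(auth_secret: Optional[str], driver_name: str = "isilon") -> str:
    # Use the configured credentials secret if set, otherwise fall back to a
    # conventional placeholder name (assumption for illustration only).
    return auth_secret if auth_secret else f"{driver_name}-creds"

print(resolve_auth_secret(None))        # -> isilon-creds
print(resolve_auth_secret("my-creds"))  # -> my-creds
```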
SideCars is the specification for CSI sidecar containers | def side_cars(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']]:
return pulumi.get(self, "side_cars") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def side_cars(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def side_cars(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def side_cars(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def side_cars(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def presenetCar():",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def populate_sidecar(self, **kwargs):\n\n # if it's an ecat it's Siemens\n self.sidecar_template['Manufacturer'] = 'Siemens'\n # Siemens model best guess\n self.sidecar_template['ManufacturersModelName'] = self.ecat_header.get('SERIAL_NUMBER', None)\n self.sidecar_template['TracerRadionuclide'] = self.ecat_header.get('ISOTOPE_NAME', None)\n self.sidecar_template['PharmaceuticalName'] = self.ecat_header.get('RADIOPHARAMCEUTICAL', None)\n\n # collect frame time start and populate various subheader fields\n for subheader in self.subheaders:\n self.sidecar_template['DecayCorrectionFactor'].append(subheader.get('DECAY_CORR_FCTR', None))\n self.sidecar_template['FrameTimesStart'].append(subheader.get('FRAME_START_TIME', None))\n self.sidecar_template['FrameDuration'].append(subheader.get('FRAME_DURATION', None))\n self.sidecar_template['ScaleFactor'].append(subheader.get('SCALE_FACTOR', None))\n\n # note some of these values won't be in the subheaders for the standard matrix image\n # need to make sure to clean up arrays and fields filled w/ none during pruning\n self.sidecar_template['ScatterFraction'].append(subheader.get('SCATTER_FRACTION', None))\n self.sidecar_template['PromptRate'].append(subheader.get('PROMPT_RATE', None))\n self.sidecar_template['RandomRate'].append(subheader.get('RANDOM_RATE', None))\n self.sidecar_template['SinglesRate'].append(subheader.get('SINGLES_RATE', None))\n\n # collect possible reconstruction method from subheader\n recon_method = helper_functions.get_recon_method(self.subheaders[0].get('ANNOTATION'))\n if recon_method:\n self.sidecar_template.update(**recon_method)\n\n # collect and convert start times for acquisition/time zero?\n scan_start_time = self.ecat_header.get('SCAN_START_TIME', None)\n\n if scan_start_time:\n scan_start_time = parse_this_date(scan_start_time)\n self.sidecar_template['AcquisitionTime'] = scan_start_time\n self.sidecar_template['ScanStart'] = scan_start_time\n\n # collect dose start time\n dose_start_time = self.ecat_header.get('DOSE_START_TIME', None)\n if dose_start_time:\n parsed_dose_time = parse_this_date(dose_start_time)\n self.sidecar_template['PharmaceuticalDoseTime'] = parsed_dose_time\n\n # if decay correction exists mark decay correction boolean as true\n if len(self.decay_factors) > 0:\n self.sidecar_template['ImageDecayCorrected'] = \"true\"\n\n # calculate scaling factor\n sca = self.data.max() / 32767\n\n self.sidecar_template['DoseCalibrationFactor'] = sca * self.ecat_header.get('ECAT_CALIBRATION_FACTOR')\n self.sidecar_template['Filename'] = os.path.basename(self.nifti_file)\n self.sidecar_template['ImageSize'] = [\n self.subheaders[0]['X_DIMENSION'],\n self.subheaders[0]['Y_DIMENSION'],\n self.subheaders[0]['Z_DIMENSION'],\n self.ecat_header['NUM_FRAMES']\n ]\n\n self.sidecar_template['PixelDimensions'] = [\n self.subheaders[0]['X_PIXEL_SIZE'] * 10,\n self.subheaders[0]['Y_PIXEL_SIZE'] * 10,\n self.subheaders[0]['Z_PIXEL_SIZE'] * 10\n ]\n\n # add tag for conversion software\n self.sidecar_template['ConversionSoftware'] = 'pypet2bids'\n self.sidecar_template['ConversionSoftwareVersion'] = helper_functions.get_version()\n\n\n\n # include any additional values\n if kwargs:\n self.sidecar_template.update(**kwargs)\n\n if not self.sidecar_template.get('TimeZero', None):\n if not self.sidecar_template.get('AcquisitionTime', None):\n logger.warn(f\"Unable to determine TimeZero for {self.ecat_file}, you need will need to provide this\"\n f\" for a valid BIDS sidecar.\")\n else:\n self.sidecar_template['TimeZero'] = 
self.sidecar_template['AcquisitionTime']\n\n # lastly infer radio data if we have it\n meta_radio_inputs = dcm2niix4pet.check_meta_radio_inputs(self.sidecar_template)\n self.sidecar_template.update(**meta_radio_inputs)\n\n # clear any nulls from json sidecar and replace with none's\n self.sidecar_template = helper_functions.replace_nones(self.sidecar_template)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def show_sidecar(self, output_path=None):\n self.prune_sidecar()\n self.sidecar_template = helper_functions.replace_nones(self.sidecar_template)\n if output_path:\n if not isinstance(output_path, pathlib.Path):\n output_path = pathlib.Path(output_path)\n\n if len(output_path.suffixes) > 1:\n temp_output_path = str(output_path)\n for suffix in output_path.suffixes:\n temp_output_path = re.sub(suffix, '', temp_output_path)\n output_path = pathlib.Path(temp_output_path).with_suffix('.json')\n\n with open(output_path, 'w') as outfile:\n json.dump(helper_functions.replace_nones(self.sidecar_template), outfile, indent=4)\n else:\n print(json.dumps(helper_functions.replace_nones(self.sidecar_template), indent=4))",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def _dumbCSI(self):\n # get my renderer\n renderer = self.renderer\n # build the 3 bit color generator\n yield from renderer.set(name=\"csi3\", value=\"\")\n yield from renderer.set(name=\"csi8\", value=\"\")\n yield from renderer.set(name=\"csi24\", value=\"\")\n\n # all done\n return",
"def sidecar_conn():\n \n #Making the sidecar connection\n global _sidecar_\n if not _sidecar_:\n _sidecar_ = client.Client(\n username = getattr(settings, \"SC_USERNAME\"),\n password = getattr(settings, \"SC_PASSWORD\"),\n auth_url = getattr(settings, \"SC_AUTH_URL\"),\n region_name = getattr(settings, \"SC_REGION_NAME\"),\n tenant_name = getattr(settings, \"SC_TENANT_NAME\"),\n timeout = getattr(settings, \"SC_TIMEOUT\"),\n insecure = getattr(settings, \"SC_INSECURE\"))\n return _sidecar_",
"def __init__(__self__, *,\n common: 'outputs.CSIUnitySpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIUnitySpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIUnitySpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIUnitySpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def envs(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def ls():\n # TODO: listing all availabe containers form sequence\n return",
"def volumes(self):",
"def __init__(__self__, *,\n common: 'outputs.CSIVXFlexOSSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIVXFlexOSSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIVXFlexOSSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def envs(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def test_show_container(self):\n pass",
"def envs(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def __init__(self, sides):\n self.sides = sides",
"def dcos_aws() -> None:",
"def side_chain_representatives(self):\n\n sc_atoms = []\n for residue_id in self._residue_ids:\n sc_atom = self._side_chain_representative(residue_id)\n sc_atoms.append([residue_id, sc_atom])\n sc_atoms = pd.DataFrame(sc_atoms, columns=[\"residue.id\", \"sc.atom\"])\n\n # Add vectors\n sc_atom_vectors = []\n for sc_atom in sc_atoms[\"sc.atom\"]:\n try:\n sc_atom_vectors.append(sc_atom.get_vector())\n except AttributeError:\n sc_atom_vectors.append(None)\n sc_atoms[\"sc.vector\"] = sc_atom_vectors\n\n return sc_atoms.astype({\"residue.id\": \"Int32\"})",
"def plot_IVS(self, parent_figure=None):\n nivs = len(FD.figure_AllIVs)\n cprint(\"c\", \"plot_IVS.\")\n rows = nivs\n cols = 5\n height = 1.5 * nivs\n width = 8.5\n PD = PData()\n ymin = -125.0\n ymax = 40.0\n calx = 120.0\n\n self.P = PH.regular_grid(\n rows,\n cols,\n order=\"rowsfirst\",\n figsize=(width, height),\n showgrid=False,\n verticalspacing=0.01,\n horizontalspacing=0.05,\n margins={\n \"bottommargin\": 0.1,\n \"leftmargin\": 0.07,\n \"rightmargin\": 0.05,\n \"topmargin\": 0.08,\n },\n labelposition=(-0.05, 1.06),\n parent_figure=parent_figure,\n # panel_labels=['A', 'B', 'C', 'D', 'E', 'F'],\n )\n cellpath = config[\"cellDataDirectory\"]\n png_path = Path(config[\"baseDataDirectory\"], config[\"pngDirectory\"])\n cprint(\"c\", \"prepping fo run\")\n\n for rax, iv in enumerate(FD.figure_AllIVs.keys()):\n cprint(\"r\", f\"Doing Cell VCN_c{iv:02d} -----------------------------------\")\n celln = Path(png_path, f\"VCN_c{iv:02d}.png\")\n if celln.is_file(): # add images from png files\n img = mpimg.imread(str(celln))\n self.P.axarr[rax, 0].imshow(img, aspect=\"equal\")\n ylim = self.P.axarr[rax, 0].get_ylim()\n self.P.axarr[rax, 0].set_xlim(900, 1500)\n PH.noaxes(self.P.axarr[rax, 0])\n # plot 3 dendrite decorations\n for iax, dendmode in enumerate([\"passive\", \"normal\", \"active\"]):\n dendm = self.get_dendmode(dendmode)\n sfi = Path(\n cellpath,\n f\"VCN_c{iv:02d}\",\n \"Simulations\",\n \"IV\",\n FD.figure_AllIVs[iv][dendm],\n )\n if not sfi.is_dir():\n cprint(\"r\", f\"Unable to find dir: {str(sfi):s}\")\n continue\n fn = list(sfi.glob(\"*\"))\n sfi = Path(sfi, fn[0])\n if rax > 0:\n calx = None # only one cal bar on this plot, top row.\n self.parent.PLT.plot_traces(\n self.P.axarr[rax, iax + 1],\n sfi,\n PD,\n protocol=\"IV\",\n ymin=ymin,\n ymax=ymax,\n iax=iax,\n figure=self.P.figure_handle,\n ivaxis=self.P.axarr[rax, 4], # accumulate IV's in right side\n ivcolor=colors[iax],\n iv_spike_color=spike_colors[dendmode],\n spike_marker_size=1.5,\n spike_marker_color=spike_colors[dendmode],\n calx=calx,\n caly=-10.0,\n )\n if rax == 0:\n self.P.axarr[rax, iax + 1].set_title(dendmode)\n if iax == 0:\n self.P.axarr[rax, 0].text(-0.1, 0.5, str(iv))\n if parent_figure is None:\n fig = FigInfo()\n fig.P = self.P\n fig.filename = f\"Fig_M1A_Supplemental.pdf\"\n timestamp_str = datetime.datetime.now().strftime(\"%Y-%m-%d-%H:%M\")\n fig.title[\n \"title\"\n ] = f\"SBEM Project Figure 1 Modeling (Supplemental A) ({timestamp_str:s})\"\n return fig\n else:\n return self.P",
"def test_vs_docking():\n vs = virtualscreening(n_cpu=-1)\n vs.load_ligands('sdf', os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'))\n vs.dock(engine='autodock_vina',\n protein=os.path.join(test_data_dir, 'data/dude/xiap/receptor_rdkit.pdb'),\n auto_ligand=os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'),\n exhaustiveness=1,\n seed=0)\n mols = list(vs.fetch())\n assert_equal(len(mols), 3)\n mol_data = mols[0].data\n assert_in('vina_affinity', mol_data)\n assert_in('vina_rmsd_lb', mol_data)\n assert_in('vina_rmsd_ub', mol_data)",
"def __init__(__self__, *,\n common: 'outputs.CSIIsilonSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIIsilonSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIIsilonSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def envs(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def _GetOptionDecoderConstructions(cls):\n result = super(ContainerSpec, cls)._GetOptionDecoderConstructions()\n result.update({\n 'image': (option_decoders.StringDecoder, {\n 'default': None\n }),\n 'static_image': (option_decoders.BooleanDecoder, {\n 'default': False\n }),\n 'cpus': (option_decoders.FloatDecoder, {\n 'default': None\n }),\n 'memory': (custom_virtual_machine_spec.MemoryDecoder, {\n 'default': None\n }),\n 'command': (_CommandDecoder, {}),\n 'container_port': (option_decoders.IntDecoder, {\n 'default': 8080\n }),\n })\n return result"
]
| [
"0.7081632",
"0.7079114",
"0.69084525",
"0.6728122",
"0.55585927",
"0.55331933",
"0.55313885",
"0.54547393",
"0.5330515",
"0.52934235",
"0.52900565",
"0.51547766",
"0.51150274",
"0.5092653",
"0.48261854",
"0.48198652",
"0.47952843",
"0.46908915",
"0.46813545",
"0.46591654",
"0.4618514",
"0.46101522",
"0.45597652",
"0.45176354",
"0.44932935",
"0.44790003",
"0.44720083",
"0.44688448",
"0.44633523",
"0.4460341"
]
| 0.71216536 | 0 |
TLSCertSecret is the name of the TLS Cert secret | def tls_cert_secret(self) -> Optional[str]:
return pulumi.get(self, "tls_cert_secret") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_httpstls_secret():\n if 'tls_secret' in DEFINES:\n return DEFINES['tls_secret']\n # The default matches oic-auth-apps flucd manifest defaults\n return DEFAULT_HTTPSTLS_SECRET",
"def get_certificate_from_secret(secret_name, secret_ns):\n kube = kubernetes.KubeOperator()\n secret = kube.kube_get_secret(secret_name, secret_ns)\n\n if not hasattr(secret, 'data'):\n raise Exception('Invalid secret %s\\\\%s' % (secret_ns, secret_name))\n\n data = secret.data\n if 'tls.crt' not in data or 'tls.key' not in data:\n raise Exception('Invalid certificate data from secret %s\\\\%s' %\n (secret_ns, secret_name))\n\n try:\n tls_crt = base64.decode_as_text(data['tls.crt'])\n tls_key = base64.decode_as_text(data['tls.key'])\n except TypeError:\n raise Exception('Certificate secret data is invalid %s\\\\%s' %\n (secret_ns, secret_name))\n\n return tls_crt, tls_key",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def trust_handshake_secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"trust_handshake_secret\")",
"def trust_handshake_secret(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"trust_handshake_secret\")",
"def client_secret(self) -> str:",
"def get_client_key_secret(self):\r\n course = self.get_course()\r\n for lti_passport in course.lti_passports:\r\n try:\r\n lti_id, key, secret = [i.strip() for i in lti_passport.split(':')]\r\n except ValueError:\r\n _ = self.runtime.service(self, \"i18n\").ugettext\r\n msg = _('Could not parse LTI passport: {lti_passport}. Should be \"id:key:secret\" string.').format(\r\n lti_passport='{0!r}'.format(lti_passport)\r\n )\r\n raise LTIError(msg)\r\n\r\n if lti_id == self.lti_id.strip():\r\n return key, secret\r\n return '', ''",
"def trust_handshake_secret(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"trust_handshake_secret\")",
"def get_secret(setting, secrets=secrets):\n return secrets[setting]",
"def get_secret_key():\n return get_config_handler().get_secret_key()",
"def get_ssl_certificate():",
"def secret(self):\n return self._secret",
"def secret_key(self, val):\n self.__secret_key = val",
"def secret_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"secret_key\")",
"def test_secrets() -> Secrets:\n from dotenv import load_dotenv\n from os import getenv\n from pathlib import Path\n env_path = Path('.') / '.env.testing'\n load_dotenv(dotenv_path=env_path)\n return Secrets(\n google_id_token=getenv(\"GOOGLE_ID_TOKEN\"),\n google_user_id=getenv(\"GOOGLE_USER_ID\")\n )",
"def ssl_cipher(self) -> str:\n return pulumi.get(self, \"ssl_cipher\")",
"def get_client_secret(filename):\n with open(filename) as file:\n json_file = json.load(file)\n\n cyphertext = json_file['CiphertextBlob']\n blob = base64.b64decode(cyphertext)\n client = boto3.client('kms')\n secret = client.decrypt(CiphertextBlob=blob)['Plaintext']\n s = secret.decode('ascii')\n return json.loads(s)",
"def get_ssl_certificate() :",
"async def add_secret(app: Sanic, secret: str, passphrase: str, ttl: Optional[int]) -> str:\n\n key = get_fernet_key(app, passphrase)\n\n sign = hmac.digest(key=key, msg=passphrase.encode(), digest='sha512').hex()\n secret_key = secrets.token_hex(16)\n\n cipher = fernet.Fernet(key)\n encrypted = cipher.encrypt(secret.encode()).decode()\n\n expires = None\n if ttl:\n expires = datetime.utcnow() + timedelta(seconds=ttl)\n\n await app.db.secrets.insert_one({\n 'secret': encrypted,\n 'secret_key': secret_key,\n 'signature': sign,\n 'expires': expires, # for mongo index\n 'ttl': ttl, # for fernet check\n })\n\n return secret_key",
"def _wrap_secret(self, val):\n return {\"SecretString\": val}",
"def test_get_secret_from_vault(key, environment, stage, namespace, table, nkey,\n boto3_resource, boto3_client, monkeypatch):\n # Call to the DynamoDB client to retrieve the encrypted secret\n monkeypatch.setattr(\"boto3.resource\", boto3_resource)\n monkeypatch.setattr(\"boto3.client\", boto3_client)\n secret = lambdautils.utils.get_secret(key,\n namespace=namespace,\n environment=environment,\n stage=stage)\n assert secret == \"dummy\"\n boto3_client(\"dynamodb\").get_item.assert_called_with(\n TableName=table,\n Key={\"id\": {\"S\": nkey}})\n\n # Call to the KMS client to decrypt the secret\n boto3_client('kms').decrypt.assert_called_with(CiphertextBlob=\"encrypted\")",
"def get_secret(setting, secrets=secrets):\n try:\n return secrets[setting]\n except KeyError:\n raise Exception(\"Can't find the key in secrets.json. Make sure the file is properly configured\")",
"def google_kms_encrypted_env_secret(secret_key: str) -> str:\n\n key_id = os.getenv('KEY_ID')\n ciphertext = os.getenv(secret_key)\n\n client = kms.KeyManagementServiceClient()\n # Call the API.\n decrypt_response = client.decrypt(\n request={'name': key_id, 'ciphertext': base64.b64decode(ciphertext)})\n return decrypt_response.plaintext.decode()",
"def test_read_namespaced_secret_list_secrets(self):\n pass",
"def get_key_secret():\n \n config = configparser.ConfigParser()\n config.read('dl.cfg')\n KEY = config['AWS']['AWS_ACCESS_KEY_ID']\n SECRET = config['AWS']['AWS_SECRET_ACCESS_KEY']\n return KEY, SECRET",
"def secret_key(self):\n return self._secret_key",
"def secret_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret_key\")",
"def secret_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret_key\")",
"def secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret\")"
]
| [
"0.6616211",
"0.5982307",
"0.5846973",
"0.5846973",
"0.57956576",
"0.5743475",
"0.57429445",
"0.5594936",
"0.54812825",
"0.5443535",
"0.544203",
"0.5437286",
"0.54145914",
"0.53607064",
"0.53595865",
"0.5357208",
"0.5354906",
"0.5345842",
"0.5342153",
"0.53241307",
"0.5321885",
"0.531722",
"0.5314973",
"0.5296881",
"0.5262715",
"0.5261969",
"0.5243939",
"0.5216896",
"0.5216896",
"0.52164096"
]
| 0.7376017 | 1 |
Path of the field to select in the specified API version. | def field_path(self) -> str:
return pulumi.get(self, "field_path") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_field_in_version_json(field_name):\n if not os.environ.get(\"create_version_request\"):\n return None\n request = json.loads(os.environ.get(\"create_version_request\"))\n if not request or not isinstance(request, dict):\n return None\n version = request.get(\"version\")\n if not version or not isinstance(version, dict):\n return None\n\n logging.info(\"Found value: %s, for field: %s from create_version_request\",\n version.get(field_name), field_name)\n return version.get(field_name)",
"def api_revision(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_revision\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def get_api_version(self):\n major, minor, patch = self.client.config['api_version']\n return '%s.%s.%s' % (major, minor, patch)",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")"
]
| [
"0.6015512",
"0.58980846",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.5766664",
"0.572038",
"0.57061654",
"0.57061654",
"0.57061654",
"0.57061654",
"0.57061654"
]
| 0.6314984 | 0 |
Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. | def effect(self) -> Optional[str]:
return pulumi.get(self, "effect") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def testEffects(self):\n \n action = Parser.parse_as(drive.split(\"\\n\"), Action, self.domain)\n self.assert_(isinstance(action.effect, SimpleEffect))",
"def test_effect(self):\n self.check_search(\n dict(similar_to=u'icy wind'),\n [ u'Bubble', u'BubbleBeam', u'Constrict',\n u'Icy Wind', u'Mud Shot', u'Rock Tomb' ],\n 'searching by effect',\n exact=True,\n )\n self.check_search(\n dict(similar_to=u'splash'),\n [u'Splash'],\n 'searching by unique effect',\n exact=True,\n )",
"def testConditionalEffects(self):\n \n action = Parser.parse_as(cond_load.split(\"\\n\"), Action, self.domain)\n\n self.assert_(isinstance(action.effect, ConditionalEffect))\n self.assert_(isinstance(action.effect.condition, conditions.LiteralCondition))\n self.assert_(isinstance(action.effect.effect, SimpleEffect))",
"def effect(self) -> str:\n return self._id_data.get(\"effect\", \"\")",
"def _getEmptyStatement(self, effect):\n statement = {\n 'Action': 'execute-api:Invoke',\n 'Effect': effect[:1].upper() + effect[1:].lower(),\n 'Resource': []\n }\n\n return statement",
"def get_effect(self, label: str) -> Effect:\r\n return self._get_resource(label, self._effects, \"effect\")",
"def _check_effect_match(cls, card, effect):\n\t\tif effect:\n\t\t\tfor act in card.actions:\n\t\t\t\tif act.has_effect(effect):\n\t\t\t\t\treturn True\n\t\t\treturn False\n\t\telse:\n\t\t\treturn True",
"def timestamp(effect):\n\n if effect is None:\n return 'never'\n else:\n return effect.edit_time.isoformat()",
"def setTransitionEffect(self, *args):\n return _libsbml.Input_setTransitionEffect(self, *args)",
"def taints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesLabelArgs']]]]:\n return pulumi.get(self, \"taints\")",
"def parse_effect(str_val):\n if \" ^ \" in str_val:\n effects = list()\n for split in str_val.split(\" ^ \"):\n sub_output = Effect.parse_effect(split)\n effects += sub_output.get_sub_effects()\n return Effect(effects)\n\n else:\n if \"Void\" in str_val:\n return Effect(list())\n\n var = \"\"\n val = \"\"\n\n exclusive = True\n negated = False\n\n if \":=\" in str_val:\n var = str_val.split(\":=\")[0]\n val = str_val.split(\":=\")[1]\n val = \"None\" if \"{}\" in val else val\n elif \"!=\" in str_val:\n var = str_val.split(\"!=\")[0]\n val = str_val.split(\"!=\")[1]\n negated = True\n elif \"+=\" in str_val:\n var = str_val.split(\"+=\")[0]\n val = str_val.split(\"+=\")[1]\n exclusive = False\n\n tvar = Template.create(var)\n tval = Template.create(val)\n if tvar.is_under_specified() or tval.is_under_specified():\n return Effect(TemplateEffect(tvar, tval, 1, exclusive, negated))\n else:\n return Effect(BasicEffect(var, ValueFactory.create(val), 1, exclusive, negated))",
"def setTransitionEffect(self, *args):\n return _libsbml.Output_setTransitionEffect(self, *args)",
"def repair(self, *args, **kwargs):\n return self(AbilityId.EFFECT_REPAIR, *args, **kwargs)",
"def testEffects(self):\n\n state = State.from_problem(self.prob)\n \n fold = Fact(StateVariable(self.prob.functions[\"location-of\"][0], [self.prob[\"tru1\"]]), self.prob[\"pos1\"])\n fnew = Fact(StateVariable(self.prob.functions[\"location-of\"][0], [self.prob[\"tru1\"]]), self.prob[\"apt1\"])\n self.assert_(fold in state)\n\n drive = self.dom.get_action(\"drive\")\n with drive.instantiate([\"agent\", \"tru1\", \"apt1\"], self.prob):\n state.apply_effect(drive.effect)\n\n self.assert_(fnew in state)\n self.assertFalse(fold in state)",
"def testProbabilisticEffects(self):\n\n action = Parser.parse_as(prob_load.split(\"\\n\"), Action, self.domain)\n\n self.assert_(isinstance(action.effect, ConjunctiveEffect))\n self.assert_(isinstance(action.effect.parts[0], ProbabilisticEffect))\n self.assert_(isinstance(action.effect.parts[1], ProbabilisticEffect))\n p1, e1 = action.effect.parts[0].effects[0]\n p2, e2 = action.effect.parts[0].effects[1]\n\n ap1, ae1 = action.effect.parts[1].effects[0]\n ap2, ae2 = action.effect.parts[1].effects[1]\n\n self.assert_(isinstance(p1, FunctionTerm))\n self.assertEqual(p1.function, self.domain.functions[\"load_succ_prob\"][0])\n self.assert_(isinstance(e1.args[0], FunctionTerm))\n self.assert_(isinstance(e1.args[1], VariableTerm))\n self.assertEqual(p2, 0.5)\n self.assert_(isinstance(e2.args[0], FunctionTerm))\n self.assert_(isinstance(e2.args[1], FunctionTerm))\n\n self.assertEqual(ae1, e1)\n self.assertEqual(ae2, e2)\n self.assertEqual(ap1, 0.5)\n self.assertEqual(ap2, None)\n\n # self.assertEqual(action.effect.parts[0].getRandomEffect(0), e2)\n # self.assertEqual(action.effect.parts[0].getRandomEffect(1), e1)\n # self.assertEqual(action.effect.parts[0].getRandomEffect(2), None)\n\n # import random\n # random.seed(42)\n # for r in xrange(30):\n # self.assert_(action.effect.parts[0].getRandomEffect() in (e1,e2,None))",
"def __str_healthrule_affects(self,healthrule):\n if 'affects' not in healthrule:\n Affects=\"\"\n elif healthrule['affects']['affectedEntityType']==\"OVERALL_APPLICATION_PERFORMANCE\":\n Affects=\"Overall application performance\"\n elif healthrule['affects']['affectedEntityType']==\"BUSINESS_TRANSACTION_PERFORMANCE\":\n if healthrule['affects']['affectedBusinessTransactions']['businessTransactionScope']==\"ALL_BUSINESS_TRANSACTIONS\":\n Affects=\"All Business Transactions\"\n elif healthrule['affects']['affectedBusinessTransactions']['businessTransactionScope']==\"BUSINESS_TRANSACTIONS_IN_SPECIFIC_TIERS\":\n \n Affects = \"Business Transactions in Tiers \" + tiers\n elif healthrule['affects']['affectedBusinessTransactions']['businessTransactionScope']==\"SPECIFIC_BUSINESS_TRANSACTIONS\":\n Affects = \"Business Transactions in Tiers \" + BTs\n elif healthrule['affects']['affectedBusinessTransactions']['businessTransactionScope']==\"BUSINESS_TRANSACTIONS_MATCHING_PATTERN\":\n patternMatcher = healthrule['affects']['affectedBusinessTransactions']['patternMatcher']\n if patternMatcher['shouldNot'] == \"true\":\n Affects = \"Business Transactions \" + \"NOT\" + patternMatcher['matchTo'] + \" \" + patternMatcher['matchValue']\n else:\n Affects = \"Business Transactions \" + patternMatcher['matchTo'] + \" \" + patternMatcher['matchValue']\n else: Affects=\"\"\n elif healthrule['affects']['affectedEntityType'] in [\"TIER_NODE_TRANSACTION_PERFORMANCE\",\"TIER_NODE_HARDWARE\",\"ADVANCED_NETWORK\"]:\n if healthrule['affects']['affectedEntities']['tierOrNode']==\"TIER_AFFECTED_ENTITIES\":\n if healthrule['affects']['affectedEntities']['affectedTiers']['affectedTierScope']==\"ALL_TIERS\":\n Affects = \"All Tiers\"\n elif healthrule['affects']['affectedEntities']['affectedTiers']['affectedTierScope']==\"SPECIFIC_TIERS\":\n tierList = healthrule['affects']['affectedEntities']['affectedTiers']['tiers']\n tiers = ','.join(map(lambda x: str(x),tierList)) if (len(tierList) > 0) else \"\"\n Affects = \"Specific Tiers \" + tiers\n elif healthrule['affects']['affectedEntities']['tierOrNode']==\"NODE_AFFECTED_ENTITIES\":\n if healthrule['affects']['affectedEntities']['affectedNodes']['affectedNodeScope']==\"ALL_NODES\":\n Affects = \"All Nodes\"\n elif healthrule['affects']['affectedEntities']['affectedNodes']['affectedNodeScope']==\"NODES_OF_SPECIFIC_TIERS\":\n tierList = healthrule['affects']['affectedEntities']['affectedNodes']['specificTiers']\n tiers = ','.join(map(lambda x: str(x),tierList)) if (len(tierList) > 0) else \"\"\n Affects = \"All nodes from Tiers \" + tiers\n elif healthrule['affects']['affectedEntities']['affectedNodes']['affectedNodeScope']==\"SPECIFIC_NODES\":\n Affects = \"Specific Nodes \" + nodes\n elif healthrule['affects']['affectedEntities']['affectedNodes']['affectedNodeScope']==\"NODES_MATCHING_PATTERN\":\n patternMatcher = healthrule['affects']['affectedEntities']['affectedNodes']['patternMatcher']\n if patternMatcher['shouldNot'] == \"true\":\n Affects = \"Nodes \" + \"NOT\" + patternMatcher['matchTo'] + \" \" + patternMatcher['matchValue']\n else:\n Affects = \"Nodes \" + patternMatcher['matchTo'] + \" \" + patternMatcher['matchValue']\n elif healthrule['affects']['affectedEntities']['affectedNodes']['affectedNodeScope']==\"NODES_MATCHING_PROPERTY\":\n patternDict = healthrule['affects']['affectedEntities']['affectedNodes']['patternMatcher']\n patterns = ','.join(map(lambda x: str(x),patternDict.items())) if (len(patternDict) > 0) else \"\"\n Affects = \"Nodes matching \" + 
patterns\n else: Affects=\"\"\n else: Affects=\"\"\n elif healthrule['affects']['affectedEntityType']==\"ERRORS\":\n if healthrule['affects']['affectedErrors']['errorScope']==\"ALL_ERRORS\":\n Affects = \"All Errors\"\n elif healthrule['affects']['affectedErrors']['errorScope']==\"ERRORS_OF_SPECIFIC_TIERS\":\n tierList = healthrule['affects']['affectedErrors']['specificTiers']\n tiers = ','.join(map(lambda x: str(x),tierList)) if (len(tierList) > 0) else \"\"\n Affects = \"Errors from specific Tiers \" + tiers\n elif healthrule['affects']['affectedErrors']['errorScope']==\"SPECIFIC_ERRORS\":\n errorList = healthrule['affects']['affectedErrors']['errors']\n errors = ','.join(map(lambda x: str(x),errorList)) if (len(errorList) > 0) else \"\"\n Affects = \"Specific errors \" + errors\n elif healthrule['affects']['affectedErrors']['errorScope']==\"ERRORS_MATCHING_PATTERN\":\n patternMatcher = healthrule['affects']['affectedErrors']['patternMatcher']\n if patternMatcher['shouldNot'] == \"true\":\n Affects = \"Errors \" + \"NOT\" + patternMatcher['matchTo'] + \" \" + patternMatcher['matchValue']\n else:\n Affects = \"Errors \" + patternMatcher['matchTo'] + \" \" + patternMatcher['matchValue']\n else: Affects=\"\"\n else: Affects=\"\"\n return Affects",
"def __set_unknown_effect(self, hgvs_str):\n unknown_effect_list = ['c.?', '?']\n if hgvs_str.lower() in unknown_effect_list:\n self.unknown_effect = True\n elif hgvs_str.startswith(\"(\"):\n self.unknown_effect = True\n else:\n self.unknown_effect = False",
"def impact(self, impact):\n if impact is None:\n raise ValueError(\"Invalid value for `impact`, must not be `None`\") # noqa: E501\n if impact is not None and len(impact) < 1:\n raise ValueError(\"Invalid value for `impact`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._impact = impact",
"def test_action_independence_single(self):\n SF1, OUT = ('SET_FIELD', ('IPV4_DST', 0x01010101)), ('OUTPUT', 6)\n DEC_TTL = ('DEC_NW_TTL', None)\n # 0.1.1.0/30 -> ip:1.1.1.1, output:1\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.1/32 -> output:1\n # 1.1.1.0/31 -> ip:1.1.1.1, output:1\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.0/32 -> ip:1.1.1.1, output1\n # 1.1.1.0/31 -> output:1\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, None)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=0)\n ])\n n4 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DEC_TTL, SF1, OUT])),\n Rule(priority=0)\n ])\n self.assertTrue(check_equal(n1, n2))\n self.assertFalse(check_equal(n1, n4))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))",
"def run_side_effects(self, label, *args, **kwargs):\n if self._suppress or settings.TEST_MODE:\n self.suppressed_side_effect.send(Registry, label=label)\n else:\n self._run_side_effects(label, *args, **kwargs)",
"def before_activity_control(context: Activity, target_type: str = None,\n target_names: List[str] = None):\n if target_type and context[\"type\"] == target_type:\n context[\"dry\"] = True\n if target_names and context[\"name\"] in target_names:\n context[\"dry\"] = True",
"def act(self, kb, args):\n # check if the preconditions are satisfied\n if not self.check_precond(kb, args):\n raise Exception(\"Action pre-conditions not satisfied\")\n # remove negative literals\n for clause in self.effect_rem:\n kb.retract(self.substitute(clause, args))\n # add positive literals\n for clause in self.effect_add:\n kb.tell(self.substitute(clause, args))",
"def testUniversalEffects(self):\n \n action = Parser.parse_as(univ_unload.split(\"\\n\"), Action, self.domain)\n\n self.assert_(isinstance(action.effect, UniversalEffect))\n self.assertEqual(len(action.effect.args), 1)\n self.assert_(isinstance(action.effect.effect, ConditionalEffect))",
"def taints(self) -> Optional[Sequence['outputs.KubernetesLabelResponse']]:\n return pulumi.get(self, \"taints\")",
"def normal(self):\n self.delegate.diverging = False\n messages = self.delegate.applyEffect(self.user, self.target, None)\n assert messages == [self.message], \"Should get messages from all the Normal Effects\"",
"def test_action_independence_multiple(self):\n DST1, DST2 = ('SET_FIELD', ('IPV4_DST', 0x1)), ('SET_FIELD', ('IPV4_DST', 0x2))\n SRC1, SRC2 = ('SET_FIELD', ('IPV4_SRC', 0x1)), ('SET_FIELD', ('IPV4_SRC', 0x2))\n OUT1, OUT2 = ('OUTPUT', 1), ('OUTPUT', 2)\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1, src:2 -> output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None),\n ('IPV4_SRC', 2, None)]),\n instructions=inst_from_acts([OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1 -> src:2, output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None)]),\n instructions=inst_from_acts([SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n\n self.assertTrue(check_equal(n1, n2))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))",
"def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)",
"def audit_only(self) -> bool:\n result = True\n for effect in self.allowed_effects:\n if effect not in [\"disabled\", \"audit\", \"auditifnotexists\"]:\n result = False\n return result",
"def test_threat(self):\n metric = verif.metric.Threat()\n obs = np.array([0, 1, 2, 3])\n fcst = np.array([0, 3, 1, 2])\n\n # Hits: 1\n # FA: 1\n # Miss: 1\n # CR: 0\n interval = verif.interval.Interval(1.5, np.inf, True, True)\n f_interval = verif.interval.Interval(1.5, np.inf, True, True)\n value = metric.compute_from_obs_fcst(obs, fcst, interval, f_interval)\n self.assertEqual(value, 1.0/3)",
"def _apply_effects(state, lifted_effects, assignments):\n new_literals = set(state.literals)\n determinized_lifted_effects = []\n # Handle probabilistic effects.\n for lifted_effect in lifted_effects:\n if isinstance(lifted_effect, ProbabilisticEffect):\n chosen_effect = lifted_effect.sample()\n if chosen_effect == \"NOCHANGE\":\n continue\n if isinstance(chosen_effect, LiteralConjunction):\n for lit in chosen_effect.literals:\n determinized_lifted_effects.append(lit)\n else:\n determinized_lifted_effects.append(chosen_effect)\n else:\n determinized_lifted_effects.append(lifted_effect)\n\n for lifted_effect in determinized_lifted_effects:\n effect = ground_literal(lifted_effect, assignments)\n # Negative effect\n if effect.is_anti:\n literal = effect.inverted_anti\n if literal in new_literals:\n new_literals.remove(literal)\n for lifted_effect in determinized_lifted_effects:\n effect = ground_literal(lifted_effect, assignments)\n if not effect.is_anti:\n new_literals.add(effect)\n return state.with_literals(new_literals)"
]
| [
"0.5248231",
"0.5175058",
"0.5093315",
"0.5070223",
"0.46813372",
"0.46715102",
"0.45769897",
"0.45475003",
"0.45334455",
"0.45069087",
"0.44731933",
"0.44663164",
"0.44419047",
"0.4404099",
"0.43879226",
"0.4374986",
"0.43658465",
"0.4363518",
"0.42775276",
"0.42558998",
"0.42469552",
"0.42212865",
"0.4216444",
"0.42159143",
"0.41903147",
"0.41538125",
"0.4153544",
"0.41496065",
"0.41354766",
"0.40902168"
]
| 0.5197251 | 1 |
TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. | def toleration_seconds(self) -> Optional[int]:
return pulumi.get(self, "toleration_seconds") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def toleration(self, toleration: Dict[str, str]):\n\n self._toleration = toleration",
"def toleration(self) -> Dict[str, str]:\n return self._toleration",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def deletion_grace_period_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"deletion_grace_period_seconds\")",
"def termination_grace_period_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"termination_grace_period_seconds\")",
"def effective_lockout_seconds(self):\n return self.lockout_seconds + self.safety_seconds",
"def termination_grace_period_seconds(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"termination_grace_period_seconds\")",
"def Tolerance(self):\n\t\treturn self._get_attribute('tolerance')",
"def secondsLeft(self)->int:\n t = datetime.utcnow()\n if self._scenario == LM_HardDate.Scenario.ValidSince:\n return 0 if t >= self.timeBegin else int((self.timeBegin - t).total_seconds())\n else:\n return 0 if t >= self.timeEnd else int((self.timeEnd - t).total_seconds())",
"def timeout_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"timeout_seconds\")",
"def calculate_timeout(self):\n return self.total_estimated_words() / self.minimum_wpm * 60"
]
| [
"0.714702",
"0.67864156",
"0.6333408",
"0.6319307",
"0.6268101",
"0.6245598",
"0.61886585",
"0.6141688",
"0.61333245",
"0.6113511",
"0.6008451",
"0.59721786",
"0.59702766",
"0.59684473",
"0.5959957",
"0.5933874",
"0.58801407",
"0.5768864",
"0.57512695",
"0.5694877",
"0.5682685",
"0.5630257",
"0.5075545",
"0.49890757",
"0.49889797",
"0.49417287",
"0.4717929",
"0.47171247",
"0.45858958",
"0.44565502"
]
| 0.742108 | 0 |
ReclaimPolicy is the reclaim policy for the storage class | def reclaim_policy(self) -> Optional[str]:
return pulumi.get(self, "reclaim_policy") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def rebalance_policy(self):\n return self._rebalance_policy",
"def rebalance_policy(self, rebalance_policy):\n allowed_values = [\"auto\", \"manual\"]\n if rebalance_policy is not None and rebalance_policy not in allowed_values:\n raise ValueError(\n \"Invalid value for `rebalance_policy`, must be one of {0}\"\n .format(allowed_values)\n )\n\n self._rebalance_policy = rebalance_policy",
"def unclaim(self):\n if self.state == states.UNCLAIMED:\n return\n self._claimer.unclaim(self, self.owner)\n self._change_state(states.UNCLAIMED)",
"def setKeepPolicy(self, policy):\n if not self.__loaded:\n self.__load()\n \n if policy > self.KeepMax:\n return\n if policy == self.__keepCookies:\n return\n \n self.__keepCookies = policy\n self.__saveTimer.changeOccurred()",
"def abort_resource_claim(self, context, claim):\n if self.disabled:\n return\n\n # un-claim the resources:\n if self.claims.pop(claim.claim_id, None):\n LOG.info(_(\"Aborting claim: %s\") % claim)\n values = claim.undo_claim(self.compute_node)\n self.compute_node = self._update(context, values)\n else:\n # can't find the claim. this may mean the claim already timed\n # out or it was already explicitly finished/aborted.\n LOG.info(_(\"Claim %d not found. It either timed out or was \"\n \"already explicitly finished/aborted\"), claim.claim_id)",
"def delete_resource(\n self,\n namespace: str = None,\n propagation_policy: str = \"Foreground\",\n grace_period_seconds: int = 10,\n ):\n names = [\n \"delete_namespaced_csistorage_capacity\",\n \"delete_csistorage_capacity\",\n ]\n\n body = client.V1DeleteOptions(\n propagation_policy=propagation_policy,\n grace_period_seconds=grace_period_seconds,\n )\n\n _kube_api.execute(\n action=\"delete\",\n resource=self,\n names=names,\n namespace=namespace,\n api_client=None,\n api_args={\"name\": self.metadata.name, \"body\": body},\n )",
"def _reclaim_queued_deletes(self, context):\n interval = CONF.reclaim_instance_interval\n if interval <= 0:\n LOG.debug(\"CONF.reclaim_instance_interval <= 0, skipping...\")\n return\n\n # TODO(comstud, jichenjc): Dummy quota object for now See bug 1296414.\n # The only case that the quota might be inconsistent is\n # the cloud node died between set instance state to SOFT_DELETED\n # and quota commit to DB. When cloud node starts again\n # it will have no idea the reservation is committed or not or even\n # expired, since it's a rare case, so marked as todo.\n quotas = objects.Quotas.from_reservations(context, None)\n\n filters = {'vm_state': vm_states.SOFT_DELETED,\n 'task_state': None,\n 'host': self.host}\n instances = objects.InstanceList.get_by_filters(\n context, filters,\n expected_attrs=objects.instance.INSTANCE_DEFAULT_FIELDS,\n use_slave=True)\n for instance in instances:\n if self._deleted_old_enough(instance, interval):\n bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(\n context, instance.uuid)\n LOG.info(_LI('Reclaiming deleted instance'), instance=instance)\n try:\n self._delete_instance(context, instance, bdms, quotas)\n except Exception as e:\n LOG.warning(_LW(\"Periodic reclaim failed to delete \"\n \"instance: %s\"),\n e, instance=instance)",
"def reset(self):\n super().reset()\n self.policy.reset()",
"def reset(self):\n super().reset()\n self.policy.reset()",
"def reload_storage_policies():\n global _POLICIES\n policy_conf = ConfigParser()\n policy_conf.read(SWIFT_CONF_FILE)\n try:\n _POLICIES = parse_storage_policies(policy_conf)\n except PolicyError as e:\n raise SystemExit('ERROR: Invalid Storage Policy Configuration '\n 'in %s (%s)' % (SWIFT_CONF_FILE, e))",
"def __init__(self, name=None, queue_mode=None, description=None, redrive_policy=None, max_consume_count=None, retention_hours=None):\n \n \n\n self._name = None\n self._queue_mode = None\n self._description = None\n self._redrive_policy = None\n self._max_consume_count = None\n self._retention_hours = None\n self.discriminator = None\n\n self.name = name\n if queue_mode is not None:\n self.queue_mode = queue_mode\n if description is not None:\n self.description = description\n if redrive_policy is not None:\n self.redrive_policy = redrive_policy\n if max_consume_count is not None:\n self.max_consume_count = max_consume_count\n if retention_hours is not None:\n self.retention_hours = retention_hours",
"def shrink_cache(cls, target_memory_use_ratio=None):\n cleanup = False\n if not target_memory_use_ratio:\n target_memory_use_ratio = cls.target_memory_use_ratio\n with cls._lock:\n if cls.memory_usage_ratio() > target_memory_use_ratio:\n cleanup = True\n cls._cache = deque(\n sorted(cls._cache, key=lambda i: i.score, reverse=True))\n start = time.time()\n while (cls.memory_usage_ratio() > target_memory_use_ratio\n and time.time() - start < 1 and cls._cache):\n try:\n cls._cache.pop().delete()\n except IndexError:\n break\n if cleanup:\n gc.collect()",
"def resources_gc_prefix(options, policy_config, policy_collection):\n\n # Classify policies by region\n policy_regions = {}\n for p in policy_collection:\n if p.execution_mode == 'poll':\n continue\n policy_regions.setdefault(p.options.region, []).append(p)\n\n regions = get_gc_regions(options.regions, policy_config)\n for r in regions:\n region_gc(options, r, policy_config, policy_regions.get(r, []))",
"def update_policy(self):\n pass",
"def elf_storage_policy(self, elf_storage_policy):\n\n self._elf_storage_policy = elf_storage_policy",
"def reclaim_unschedulable_nodes(self, new_desired_capacity):\n desired_capacity = min(self.max_size, new_desired_capacity)\n num_unschedulable = len(self.unschedulable_nodes)\n num_schedulable = self.actual_capacity - num_unschedulable\n \n if num_schedulable < desired_capacity:\n for node in self.unschedulable_nodes:\n if node.uncordon():\n num_schedulable += 1\n # Uncordon only what we need\n if num_schedulable == desired_capacity:\n break",
"def pre_network_policy_delete(self, resource_id):\n pass",
"def test_patch_hyperflex_ext_fc_storage_policy(self):\n pass",
"def test_patch_hyperflex_cluster_storage_policy(self):\n pass",
"def rebalance(self):\n log.info(\"Rebalancing partitions for group '%s'\", self.group_name)\n members = sorted(self.members)\n partitions = sorted(self.partitions)\n\n self.mapping = self.allocator_fn(members, partitions)\n\n for topic in self.allocation:\n log.debug(\n \"Allocation for topic '%s': partitions %s\",\n topic, \", \".join(map(str, self.allocation[topic]))\n )\n\n if self.on_rebalance:\n self.on_rebalance()",
"def create_storageclass(\n self,\n blockPool,\n sc_name_prefix=\"autotests-sc\",\n allow_volume_expansion=True,\n reclaim_policy=\"Delete\",\n fstype=\"xfs\",\n clusterNamespace=framework.config.ENV_DATA['cluster_namespace'],\n ):\n if self.name:\n sc_name = self.name\n else:\n sc_name = f\"{sc_name_prefix}-{get_random_str()}\"\n\n sc_data = {}\n sc_data['k8s_api_version'] = defaults.STORAGE_API_VERSION\n sc_data['storageclass_name'] = sc_name\n sc_data['volume_expansion'] = allow_volume_expansion\n sc_data['reclaimPolicy'] = reclaim_policy\n sc_data['blockPool'] = blockPool\n sc_data['clusterNamespace'] = clusterNamespace\n sc_data['fstype'] = fstype\n\n data = generate_yaml_from_jinja2_template_with_data(\n self.template_path,\n **sc_data\n )\n self.service_sc.create(body=data)\n\n return sc_name",
"def refresh(self):\n self._policies = self._get_policies()",
"def __init__(self, resource_name, opts=None, provisioner=None, allow_volume_expansion=None, allowed_topologies=None, metadata=None, mount_options=None, parameters=None, reclaim_policy=None, volume_binding_mode=None, __name__=None, __opts__=None):\n if __name__ is not None:\n warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning)\n resource_name = __name__\n if __opts__ is not None:\n warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning)\n opts = __opts__\n if not resource_name:\n raise TypeError('Missing resource name argument (for URN creation)')\n if not isinstance(resource_name, str):\n raise TypeError('Expected resource name to be a string')\n if opts and not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n\n __props__ = dict()\n\n __props__['apiVersion'] = 'storage.k8s.io/v1'\n __props__['kind'] = 'StorageClass'\n if provisioner is None:\n raise TypeError('Missing required property provisioner')\n __props__['provisioner'] = provisioner\n __props__['allowVolumeExpansion'] = allow_volume_expansion\n __props__['allowedTopologies'] = allowed_topologies\n __props__['metadata'] = metadata\n __props__['mountOptions'] = mount_options\n __props__['parameters'] = parameters\n __props__['reclaimPolicy'] = reclaim_policy\n __props__['volumeBindingMode'] = volume_binding_mode\n\n __props__['status'] = None\n\n parent = opts.parent if opts and opts.parent else None\n aliases = [\n pulumi.Alias(type_=\"kubernetes:storage.k8s.io/v1beta1:StorageClass\"),\n ]\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(\n version=version.get_version(),\n aliases=aliases,\n ))\n\n super(StorageClass, self).__init__(\n \"kubernetes:storage.k8s.io/v1:StorageClass\",\n resource_name,\n __props__,\n opts)",
"def reinvocation_policy(self) -> Optional[str]:\n return pulumi.get(self, \"reinvocation_policy\")",
"def reinvocation_policy(self) -> Optional[str]:\n return pulumi.get(self, \"reinvocation_policy\")",
"def test_delete_hyperflex_ext_fc_storage_policy(self):\n pass",
"def normal_policy_class():\n policy_class = ActorCriticCnnPolicy\n _ = locals()\n del _",
"def test_delete_hyperflex_cluster_storage_policy(self):\n pass",
"def pre_qos_forwarding_class_delete(self, resource_id):\n pass",
"def snmpqosqos_policy_reevalrate(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_policy_reevalrate\n\t\texcept Exception as e:\n\t\t\traise e"
]
| [
"0.6216241",
"0.5859391",
"0.5115956",
"0.5060897",
"0.5006874",
"0.49962115",
"0.48800308",
"0.48706514",
"0.48706514",
"0.48609537",
"0.48232886",
"0.48182634",
"0.4793153",
"0.47804967",
"0.47694635",
"0.4755424",
"0.47546563",
"0.475363",
"0.4752715",
"0.47368622",
"0.47350585",
"0.4731967",
"0.473056",
"0.47245854",
"0.47245854",
"0.47098947",
"0.46996152",
"0.4695802",
"0.4689162",
"0.46401563"
]
| 0.6900613 | 1 |
RevProxyConfig represents the reverse proxy configuration | def config(self) -> 'outputs.CSIPowerMaxRevProxySpecConfig':
return pulumi.get(self, "config") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def configure_proxy(self, proxy):\n server_name = self.get_external_domain()\n tls_enabled = self.get_tls()\n ircd_enabled = self.charm_config.get(\"enable-ircd\")\n federation_enabled = self.get_federation()\n\n if tls_enabled:\n self.external_port = 443\n else:\n self.external_port = 80\n\n proxy_config = [\n {\n \"mode\": \"http\",\n \"external_port\": self.external_port,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8008,\n \"subdomain\": server_name,\n },\n ]\n\n if federation_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_federation_mode(),\n \"external_port\": 8448,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8448,\n }\n )\n\n if ircd_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_irc_mode(),\n \"external_port\": self.get_irc_port(),\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": self.irc_internal_port,\n }\n )\n\n proxy.configure(proxy_config)",
"def link_config(self) -> Optional['outputs.CSIPowerMaxRevProxySpecConfigLinkConfig']:\n return pulumi.get(self, \"link_config\")",
"def http_proxy_config(self) -> Optional[pulumi.Input['HttpProxyConfigArgs']]:\n return pulumi.get(self, \"http_proxy_config\")",
"def test_reverse_proxy_config():\n\n class ReverseProxyConfig(TestingConfig):\n REVERSE_PROXY = \"1,2,3,4\"\n\n app = create_ctfd(config=ReverseProxyConfig)\n with app.app_context():\n assert app.wsgi_app.x_for == 1\n assert app.wsgi_app.x_proto == 2\n assert app.wsgi_app.x_host == 3\n assert app.wsgi_app.x_port == 4\n assert app.wsgi_app.x_prefix == 0\n destroy_ctfd(app)\n\n class ReverseProxyConfig(TestingConfig):\n REVERSE_PROXY = \"true\"\n\n app = create_ctfd(config=ReverseProxyConfig)\n with app.app_context():\n assert app.wsgi_app.x_for == 1\n assert app.wsgi_app.x_proto == 1\n assert app.wsgi_app.x_host == 1\n assert app.wsgi_app.x_port == 1\n assert app.wsgi_app.x_prefix == 1\n destroy_ctfd(app)",
"def proxy_settings(self):\n if config.proxy_host is None or config.proxy_host == \"\":\n return\n\n proxy = urllib2.ProxyHandler({\"http\": config.proxy_host})\n opener = urllib2.build_opener(proxy)\n urllib2.install_opener(opener)",
"def configureProxy():\n # config\n port = config.get(\"proxy\", \"port\")\n allowedDomains = config.get(\"proxy\", \"alloweddomains\")\n listeningIP = config.get(\"hotspot\", \"ip\")\n # wan dns\n proxyNSConfig = \"\"\n for dnsServer in wandns:\n proxyNSConfig = f\"{proxyNSConfig}nserver {dnsServer}\\n\"\n # 3proxy configurations\n proxyConfig = f\"\"\"#!/bin/3proxy\n#daemon\npidfile /var/run/3proxy.pid\nchroot /usr/local/3proxy proxy proxy\nnscache 65536\n{proxyNSConfig}\nlog /logs/3proxy-%y%m%d.log D\nrotate 1\ncounter /count/3proxy.3cf\ninclude /conf/counters\ninclude /conf/bandlimiters\nauth iponly\nallow * * {allowedDomains}\ndeny *\nproxy -e{wanip} -i{listeningIP} -p{port}\n\"\"\"\n confFile = open(\"/etc/3proxy/3proxy.cfg\", \"w\")\n confFile.write(proxyConfig)\n confFile.close()",
"def __init__(self, proxy_enabled: ConfigNodePropertyBoolean=None, proxy_host: ConfigNodePropertyString=None, proxy_port: ConfigNodePropertyInteger=None, proxy_user: ConfigNodePropertyString=None, proxy_password: ConfigNodePropertyString=None, proxy_exceptions: ConfigNodePropertyArray=None): # noqa: E501\n self.openapi_types = {\n 'proxy_enabled': ConfigNodePropertyBoolean,\n 'proxy_host': ConfigNodePropertyString,\n 'proxy_port': ConfigNodePropertyInteger,\n 'proxy_user': ConfigNodePropertyString,\n 'proxy_password': ConfigNodePropertyString,\n 'proxy_exceptions': ConfigNodePropertyArray\n }\n\n self.attribute_map = {\n 'proxy_enabled': 'proxy.enabled',\n 'proxy_host': 'proxy.host',\n 'proxy_port': 'proxy.port',\n 'proxy_user': 'proxy.user',\n 'proxy_password': 'proxy.password',\n 'proxy_exceptions': 'proxy.exceptions'\n }\n\n self._proxy_enabled = proxy_enabled\n self._proxy_host = proxy_host\n self._proxy_port = proxy_port\n self._proxy_user = proxy_user\n self._proxy_password = proxy_password\n self._proxy_exceptions = proxy_exceptions",
"def get_server_repo_config_rev(p4, repo_name):\n key = p4gf_const.P4GF_P4KEY_REPO_SERVER_CONFIG_REV.format(\n repo_name=repo_name, server_id=p4gf_util.get_server_id())\n return p4gf_util.get_p4key(p4, key)",
"def proxy_enabled(self) -> ConfigNodePropertyBoolean:\n return self._proxy_enabled",
"def proxy_enabled(self, proxy_enabled: ConfigNodePropertyBoolean):\n\n self._proxy_enabled = proxy_enabled",
"def proxy_url(self):\n return self.__proxy_url",
"def proxy_host(self) -> ConfigNodePropertyString:\n return self._proxy_host",
"def ModifyProxyConfiguration(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyProxyConfiguration\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyProxyConfigurationResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def proxy_port(self) -> ConfigNodePropertyInteger:\n return self._proxy_port",
"def proxy_password(self) -> ConfigNodePropertyString:\n return self._proxy_password",
"def test_302_proxy_server_config(self, auth_api_version=None):\n if self._get_openstack_release() >= self.xenial_queens:\n auth_api_version = auth_api_version or '3'\n else:\n auth_api_version = auth_api_version or '2.0'\n u.log.debug(\"Checking swift proxy-server config auth_api_version={}...\"\n \"\".format(auth_api_version))\n unit = self.swift_proxy_sentry\n conf = '/etc/swift/proxy-server.conf'\n keystone_relation = self.keystone_sentry.relation(\n 'identity-service', 'swift-proxy:identity-service')\n swift_proxy_relation = unit.relation(\n 'identity-service', 'keystone:identity-service')\n swift_proxy_ip = swift_proxy_relation['private-address']\n auth_host = keystone_relation['auth_host']\n auth_protocol = keystone_relation['auth_protocol']\n\n expected = {\n 'DEFAULT': {\n 'bind_port': '8070',\n 'user': 'swift',\n 'log_name': 'swift',\n 'log_facility': 'LOG_LOCAL0',\n 'log_level': 'INFO',\n 'log_headers': 'False',\n 'log_address': '/dev/log'\n },\n 'pipeline:main': {\n 'pipeline': 'gatekeeper healthcheck proxy-logging cache '\n 'swift3 s3token container_sync bulk tempurl '\n 'slo dlo formpost authtoken keystoneauth '\n 'staticweb container-quotas account-quotas '\n 'proxy-logging proxy-server'\n },\n 'app:proxy-server': {\n 'use': 'egg:swift#proxy',\n 'allow_account_management': 'true',\n 'account_autocreate': 'true',\n 'node_timeout': '60',\n 'recoverable_node_timeout': '30'\n },\n 'filter:tempauth': {\n 'use': 'egg:swift#tempauth',\n 'user_system_root': 'testpass .admin https://{}:8080/v1/'\n 'AUTH_system'.format(swift_proxy_ip)\n },\n 'filter:healthcheck': {'use': 'egg:swift#healthcheck'},\n 'filter:cache': {\n 'use': 'egg:swift#memcache',\n 'memcache_servers': '{}:11211'.format(swift_proxy_ip)\n },\n 'filter:account-quotas': {'use': 'egg:swift#account_quotas'},\n 'filter:container-quotas': {'use': 'egg:swift#container_quotas'},\n 'filter:proxy-logging': {'use': 'egg:swift#proxy_logging'},\n 'filter:staticweb': {'use': 'egg:swift#staticweb'},\n 'filter:bulk': {'use': 'egg:swift#bulk'},\n 'filter:slo': {'use': 'egg:swift#slo'},\n 'filter:dlo': {'use': 'egg:swift#dlo'},\n 'filter:formpost': {'use': 'egg:swift#formpost'},\n 'filter:tempurl': {'use': 'egg:swift#tempurl'},\n 'filter:container_sync': {'use': 'egg:swift#container_sync'},\n 'filter:gatekeeper': {'use': 'egg:swift#gatekeeper'},\n 'filter:keystoneauth': {\n 'use': 'egg:swift#keystoneauth',\n 'operator_roles': 'Member,Admin'\n },\n 'filter:authtoken': {\n 'auth_uri': '{}://{}:{}'.format(\n auth_protocol,\n auth_host,\n keystone_relation['service_port']),\n 'delay_auth_decision': 'true',\n 'signing_dir': '/var/cache/swift',\n 'cache': 'swift.cache'\n },\n 'filter:swift3': {'use': 'egg:swift3#swift3'}\n }\n if auth_api_version == '2.0':\n expected['filter:authtoken'].update({\n 'admin_tenant_name': keystone_relation['service_tenant'],\n 'admin_user': keystone_relation['service_username'],\n 'admin_password': keystone_relation['service_password'],\n })\n\n if self._get_openstack_release() >= self.xenial_queens:\n expected['pipeline:main'] = {\n 'pipeline': 'catch_errors gatekeeper healthcheck proxy-logging'\n ' cache authtoken swift3 s3token container_sync bulk tempurl'\n ' slo dlo formpost keystoneauth staticweb'\n ' versioned_writes container-quotas account-quotas'\n ' proxy-logging proxy-server'\n }\n elif self._get_openstack_release() >= self.trusty_mitaka:\n expected['pipeline:main'] = {\n 'pipeline': 'catch_errors gatekeeper healthcheck proxy-logging'\n ' cache swift3 s3token container_sync bulk tempurl slo dlo'\n ' 
formpost authtoken keystoneauth staticweb'\n ' versioned_writes container-quotas account-quotas'\n ' proxy-logging proxy-server'\n }\n\n s3_token_auth_settings_legacy = {\n 'auth_port': keystone_relation['auth_port'],\n 'auth_host': keystone_relation['auth_host'],\n 'service_host': keystone_relation['service_host'],\n 'service_port': keystone_relation['service_port'],\n 'auth_protocol': keystone_relation['auth_protocol'],\n 'auth_token': keystone_relation['admin_token'],\n 'admin_token': keystone_relation['admin_token']\n }\n\n if self._get_openstack_release() >= self.xenial_queens:\n expected['filter:authtoken'].update({\n 'paste.filter_factory': 'keystonemiddleware.auth_token:'\n 'filter_factory',\n })\n expected['filter:authtoken'].update({\n 'auth_url': '{}://{}:{}'.format(\n auth_protocol,\n auth_host,\n keystone_relation['auth_port']),\n 'auth_plugin': 'password',\n 'username': keystone_relation['service_username'],\n 'password': keystone_relation['service_password'],\n 'project_domain_name': keystone_relation['service_domain'],\n 'user_domain_name': keystone_relation['service_domain'],\n 'project_name': keystone_relation['service_tenant'],\n })\n expected['filter:s3token'] = {\n 'use': 'egg:swift3#s3token',\n 'auth_uri': '{}://{}:{}'.format(\n auth_protocol,\n auth_host,\n keystone_relation['auth_port']),\n 'auth_version': '3'\n }\n elif self._get_openstack_release() >= self.trusty_kilo:\n # Kilo and later\n expected['filter:authtoken'].update({\n 'paste.filter_factory': 'keystonemiddleware.auth_token:'\n 'filter_factory',\n })\n if auth_api_version == '3':\n expected['filter:authtoken'].update({\n 'auth_url': '{}://{}:{}'.format(\n auth_protocol,\n auth_host,\n keystone_relation['auth_port']),\n 'auth_plugin': 'password',\n 'username': keystone_relation['service_username'],\n 'password': keystone_relation['service_password'],\n 'project_domain_name': keystone_relation['service_domain'],\n 'user_domain_name': keystone_relation['service_domain'],\n 'project_name': keystone_relation['service_tenant'],\n })\n else:\n expected['filter:authtoken'].update({\n 'identity_uri': '{}://{}:{}'.format(\n auth_protocol,\n auth_host,\n keystone_relation['auth_port']),\n })\n expected['filter:s3token'] = {\n # No section commonality with J and earlier\n 'paste.filter_factory': 'keystoneclient.middleware.s3_token'\n ':filter_factory',\n }\n expected['filter:s3token'].update(s3_token_auth_settings_legacy)\n\n if self._get_openstack_release() >= self.trusty_mitaka:\n expected['filter:s3token']['paste.filter_factory'] = \\\n 'keystonemiddleware.s3_token:filter_factory'\n\n # NOTE(hopem): this will need extending for newer releases once\n # swift-plugin-s3 is updated in UCA. See LP: #1738063\n else:\n # Juno and earlier\n expected['filter:authtoken'].update({\n 'paste.filter_factory': 'keystoneclient.middleware.'\n 'auth_token:filter_factory',\n 'auth_host': auth_host,\n 'auth_port': keystone_relation['auth_port'],\n 'auth_protocol': auth_protocol,\n })\n expected['filter:s3token'] = {\n # No section commonality with K and later\n 'paste.filter_factory': 'keystoneclient.middleware.'\n 's3_token:filter_factory',\n }\n expected['filter:s3token'].update(s3_token_auth_settings_legacy)\n\n for section, pairs in expected.items():\n ret = u.validate_config_data(unit, conf, section, pairs)\n if ret:\n message = \"proxy-server config error: {}\".format(ret)\n amulet.raise_status(amulet.FAIL, msg=message)",
"def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):\r\n self.__proxy = (proxytype,addr,port,rdns,username,password)",
"def refresh(conf):\n try:\n if 'variable' in conf.keys():\n proxy_list = []\n for variable in conf['variable'] :\n proxy_list.extend(parse(conf['url'] % variable, conf))\n else:\n proxy_list = parse(conf)\n\n to_file(proxy_list, conf)\n except Exception , e:\n raise e",
"def _set_advanced_config_for_botocore_client(dbapi):\n config = None\n\n proxies = dbapi.service_parameter_get_all(\n service=constants.SERVICE_TYPE_DOCKER,\n section=constants.SERVICE_PARAM_SECTION_DOCKER_PROXY)\n\n proxies_dict = {}\n for proxy in proxies:\n if proxy.name == constants.SERVICE_PARAM_NAME_DOCKER_HTTP_PROXY:\n proxies_dict.update({'http': str(proxy.value)})\n\n elif proxy.name == constants.SERVICE_PARAM_NAME_DOCKER_HTTPS_PROXY:\n proxies_dict.update({'https': str(proxy.value)})\n\n if proxies_dict:\n config = Config(proxies=proxies_dict)\n return config",
"def proxy(c, path=local.proxy_path):\r\n c = conn(c)\r\n c.run('rm {conf} -rf'.format(conf=local.proxy_conf))\r\n\r\n \"\"\" 这里需要分开安装:先安装 epel-release 之后,才能安装 其他server\r\n \"\"\"\r\n system.install(c, 'source')\r\n system.install(c, 'apt-cacher-ng')\r\n\r\n from common.disk import file_exist\r\n if not file_exist(c, local.proxy_conf):\r\n print(\"conf file {} not exist\".format(local.proxy_conf))\r\n exit(-1)\r\n\r\n c.run('mkdir -p {path}; chmod 777 {path}'.format(path=path))\r\n c.run('''curl https://www.centos.org/download/full-mirrorlist.csv \\\r\n | sed 's/^.*\"http:/http:/' | sed 's/\".*$//' | grep ^http > /etc/apt-cacher-ng/centos_mirrors''')\r\n\r\n \"\"\" 修改配置\r\n \"\"\"\r\n sed.path(local.proxy_conf)\r\n sed.grep(**{'sep': ': '})\r\n sed.append(c, '''VfilePatternEx: ^(/\\\\\\\\?release=[0-9]+&arch=.*|.*/RPM-GPG-KEY-examplevendor)$''', '# WfilePatternEx:')\r\n # sed.append(c, '''VfilePatternEx: ^/\\\\\\\\?release=[0-9]+&arch=''', '# WfilePatternEx:')\r\n sed.append(c, 'Remap-centos: file:centos_mirrors \\/centos', 'Remap-debrep', pos=-1)\r\n sed.append(c, 'PassThroughPattern: (mirrors\\\\\\\\.fedoraproject\\\\\\\\.org|some\\\\\\\\.other\\\\\\\\.repo|yet\\\\\\\\.another\\\\\\\\.repo):443', '# PassThroughPattern: private-ppa', pos=5)\r\n sed.update(c, 'CacheDir', path)\r\n\r\n \"\"\" 启动服务\r\n \"\"\"\r\n if globing.invoke:\r\n c.run('''cat << EOF > /start.sh\r\n#!/bin/bash\r\n\r\necho \"start proxy\"\r\n\r\ntouch /var/log/apt-cacher-ng/a.log\r\n#/etc/init.d/apt-cacher-ng start\r\n\r\n/usr/sbin/apt-cacher-ng -c /etc/apt-cacher-ng pidfile=/var/run/apt-cacher-ng/pid SocketPath=/var/run/apt-cacher-ng/socket foreground=0\r\ntail -f /var/log/apt-cacher-ng/*\r\nEOF''')\r\n else:\r\n c.run('systemctl restart apt-cacher-ng.service')\r\n\r\n system.help(c, '''\r\n http://{host}:3142\r\n http://{host}:3142/acng-report.html\r\n \r\n tail -f /var/log/apt-cacher-ng/*'''.format(host=c.host), 'you can visit')",
"def proxy_dir(self):\n return self.__get_option('proxy_dir')",
"def repo_config(self, repo_config, args=None):\n return repo_config",
"def proxy(self):\n return self.get('proxy', None)",
"def proxy_url(self) -> str:\n return pulumi.get(self, \"proxy_url\")",
"def remove_proxy_config(self):\n self.external_port = 8008",
"def proxy_host(self, proxy_host: ConfigNodePropertyString):\n\n self._proxy_host = proxy_host",
"def proxy_policy(self):\n return self.__get_option('proxy_policy')",
"def proxy_password(self, proxy_password: ConfigNodePropertyString):\n\n self._proxy_password = proxy_password",
"def service_proxy_settings(private_base_url):\n return rawobj.Proxy(private_base_url(\"echo_api\"))",
"def test_proxy_config_default_include(self):\n proxyid = random_string(\"proxy-\")\n root_dir = pathlib.Path(tempfile.mkdtemp(dir=RUNTIME_VARS.TMP))\n self.addCleanup(shutil.rmtree, str(root_dir), ignore_errors=True)\n conf_dir = root_dir / \"conf\"\n conf_file = conf_dir / \"proxy\"\n conf_d_dir = conf_dir / \"proxy.d\"\n proxy_conf_d = conf_d_dir / proxyid\n proxy_conf_d.mkdir(parents=True)\n\n with salt.utils.files.fopen(str(conf_file), \"w\") as wfh:\n wfh.write(\n textwrap.dedent(\n \"\"\"\\\n id: {id}\n root_dir: {root_dir}\n pidfile: run/proxy.pid\n pki_dir: pki\n cachedir: cache\n sock_dir: run/proxy\n log_file: logs/proxy.log\n \"\"\".format(\n id=proxyid, root_dir=root_dir\n )\n )\n )\n\n with salt.utils.files.fopen(str(proxy_conf_d / \"_schedule.conf\"), \"w\") as wfh:\n wfh.write(\n textwrap.dedent(\n \"\"\"\\\n schedule:\n test_job:\n args: [arg1, arg2]\n enabled: true\n function: test.arg\n jid_include: true\n kwargs: {key1: value1, key2: value2}\n maxrunning: 1\n name: test_job\n return_job: false\n \"\"\"\n )\n )\n opts = salt.config.proxy_config(\n str(conf_file),\n minion_id=proxyid,\n cache_minion_id=False,\n )\n self.assertIn(\"schedule\", opts)\n self.assertIn(\"test_job\", opts[\"schedule\"])"
]
| [
"0.59436744",
"0.5851036",
"0.5817962",
"0.5768158",
"0.5693754",
"0.5600076",
"0.55077446",
"0.5401389",
"0.5344445",
"0.52294475",
"0.51776654",
"0.51657885",
"0.5130818",
"0.51041347",
"0.5099259",
"0.50747913",
"0.5042219",
"0.49751547",
"0.49545452",
"0.4948692",
"0.4940408",
"0.49360222",
"0.49359345",
"0.49304548",
"0.4920842",
"0.4911556",
"0.48824486",
"0.48684013",
"0.4839458",
"0.48323017"
]
| 0.623585 | 0 |
LinkConfig is one of the configuration modes for reverse proxy | def link_config(self) -> Optional['outputs.CSIPowerMaxRevProxySpecConfigLinkConfig']:
return pulumi.get(self, "link_config") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_link(self, conf, link_id):\n\t\tpass",
"def configure(self):\n self.configureLinks()\n self.isConfigured = True\n return",
"def server_link_setup(self):\n pass",
"def __init__(__self__, *,\n link_config: Optional['outputs.CSIPowerMaxRevProxySpecConfigLinkConfig'] = None,\n mode: Optional[str] = None,\n port: Optional[int] = None,\n stand_alone_config: Optional['outputs.CSIPowerMaxRevProxySpecConfigStandAloneConfig'] = None):\n if link_config is not None:\n pulumi.set(__self__, \"link_config\", link_config)\n if mode is not None:\n pulumi.set(__self__, \"mode\", mode)\n if port is not None:\n pulumi.set(__self__, \"port\", port)\n if stand_alone_config is not None:\n pulumi.set(__self__, \"stand_alone_config\", stand_alone_config)",
"def test_reverse_proxy_config():\n\n class ReverseProxyConfig(TestingConfig):\n REVERSE_PROXY = \"1,2,3,4\"\n\n app = create_ctfd(config=ReverseProxyConfig)\n with app.app_context():\n assert app.wsgi_app.x_for == 1\n assert app.wsgi_app.x_proto == 2\n assert app.wsgi_app.x_host == 3\n assert app.wsgi_app.x_port == 4\n assert app.wsgi_app.x_prefix == 0\n destroy_ctfd(app)\n\n class ReverseProxyConfig(TestingConfig):\n REVERSE_PROXY = \"true\"\n\n app = create_ctfd(config=ReverseProxyConfig)\n with app.app_context():\n assert app.wsgi_app.x_for == 1\n assert app.wsgi_app.x_proto == 1\n assert app.wsgi_app.x_host == 1\n assert app.wsgi_app.x_port == 1\n assert app.wsgi_app.x_prefix == 1\n destroy_ctfd(app)",
"def extra_links(request):\n return {'zentral_extra_links': settings.get('extra_links', [])}",
"def configure_proxy(self, proxy):\n server_name = self.get_external_domain()\n tls_enabled = self.get_tls()\n ircd_enabled = self.charm_config.get(\"enable-ircd\")\n federation_enabled = self.get_federation()\n\n if tls_enabled:\n self.external_port = 443\n else:\n self.external_port = 80\n\n proxy_config = [\n {\n \"mode\": \"http\",\n \"external_port\": self.external_port,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8008,\n \"subdomain\": server_name,\n },\n ]\n\n if federation_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_federation_mode(),\n \"external_port\": 8448,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8448,\n }\n )\n\n if ircd_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_irc_mode(),\n \"external_port\": self.get_irc_port(),\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": self.irc_internal_port,\n }\n )\n\n proxy.configure(proxy_config)",
"def copyAndLinkConfig(config):\n\n basename = os.path.basename(config)\n new_config_path = os.path.join(basedefs.DIR_CONFIG, basename)\n\n # Verify destination dir exists, create it if necessary\n if not os.path.isdir(basedefs.DIR_CONFIG):\n try:\n logging.debug(\"Creating ovirt-engine config directory\")\n os.makedirs(basedefs.DIR_CONFIG)\n except:\n logging.error(traceback.format_exc())\n raise Exception(output_messages.ERR_EXP_FAILED_CREATE_RHEVM_CONFIG_DIR % basedefs.DIR_CONFIG)\n\n # Verify original config is not already linked\n if os.path.islink(config):\n if (os.readlink(config) == new_config_path):\n logging.debug(\"%s is already linked to %s\"%(config, new_config_path))\n return(os.path.join(basedefs.DIR_CONFIG, basename))\n else:\n raise Exception(output_messages.ERR_EXP_LINK_EXISTS%(config, new_config_path))\n\n # Verify original config is a normal file, and copy it to the new location\n elif os.path.isfile(config):\n try:\n utils.copyFile(config, basedefs.DIR_CONFIG)\n\n # Remove old file\n logging.debug(\"Removing %s\" %(config))\n os.remove(config)\n\n # Linking\n logging.debug(\"Linking %s to %s/%s\" %(config, basedefs.DIR_CONFIG, config))\n os.symlink(new_config_path, config)\n except:\n logging.error(traceback.format_exc())\n raise Exception(output_messages.ERR_EXP_CPY_RHEVM_CFG % (config, \"%s/%s\" % (basedefs.DIR_CONFIG, config)))\n # return new path\n return new_config_path",
"def add_link_setting(self, key, link, default):\n\n setting = self.settings().new_link(key, link, default)\n self._add_to_list_field(\"settings\", setting)",
"def setAddLinks(self,value):\n self.PDFreactorConfiguration.in1[\"addLinks\"] = value",
"def dynamic_links(self) -> bool:\n return pulumi.get(self, \"dynamic_links\")",
"def add_link():\n return True",
"def proxy_settings(self):\n if config.proxy_host is None or config.proxy_host == \"\":\n return\n\n proxy = urllib2.ProxyHandler({\"http\": config.proxy_host})\n opener = urllib2.build_opener(proxy)\n urllib2.install_opener(opener)",
"def getLink(self):",
"def link(self):\n return 'http://{}:{}'.format(self.basic_url, self.port)",
"def config(isamAppliance, instance_id, junction=\"/ivg\", mmfa=True, check_mode=False):\n warnings = []\n json_data = {\n \"junction\": junction,\n \"mmfa\": mmfa\n }\n if check_mode is True:\n return isamAppliance.create_return_object(changed=True, warnings=warnings)\n else:\n return isamAppliance.invoke_post(\n \"IVG configuration for a reverse proxy instance\",\n \"/wga/reverseproxy/{0}/verify_gateway_config\".format(instance_id), json_data, warnings=warnings,\n requires_modules=requires_modules, requires_version=requires_version)\n\n return isamAppliance.create_return_object(warnings=warnings)",
"def config_url(config):\n if 'url' not in config:\n raise Exception('The config file does not contain \"url\"')\n return config['url']",
"def traffic_mirror_config(self) -> Optional['outputs.RuleRuleActionTrafficMirrorConfig']:\n return pulumi.get(self, \"traffic_mirror_config\")",
"def load_gateways_config(self):\n raise NotImplementedError",
"def link_config_files():\n\n require('environment', provided_by=env.environments)\n with settings(warn_only=True):\n sudo('rm /etc/nginx/sites-enabled/default')\n sudo('rm /etc/nginx/sites-enabled/%(project)s-*.conf' % env)\n sudo('rm /etc/supervisor/conf.d/%(project)s-*.conf' % env)\n sudo('ln -s /home/%(deploy_user)s/services/nginx/%(environment)s.conf /etc/nginx/sites-enabled/%(project)s-%(environment)s.conf' % env)\n sudo('ln -s /home/%(deploy_user)s/services/supervisor/%(environment)s.conf /etc/supervisor/conf.d/%(project)s-%(environment)s.conf' % env)",
"def setMergeURLs(self,value):\n self.PDFreactorConfiguration.in1[\"mergeURLs\"] = value",
"def write_last_link(last_link):\n\tconfig = ConfigParser.ConfigParser()\n\tconfig.set(\"DEFAULT\", \"last_link\", last_link)\n\twith open(CONFIG_FILE_NAME, \"w\") as config_file:\n\t\tconfig.write(config_file)",
"def config( **kwargs ):",
"def main(global_config, **settings):\n config = Configurator(settings=settings)\n config.include('clldmpg')\n config.registry.registerUtility(link_attrs, interfaces.ILinkAttrs)\n home_comp = config.registry.settings['home_comp']\n home_comp.append('software')\n home_comp.append('contribute')\n config.add_settings(home_comp=home_comp)\n config.add_route('software', '/software')\n config.add_route('contribute', '/contribute')\n return config.make_wsgi_app()",
"def url(request):\n return request.config.getoption(\"--url\")",
"def configureProxy():\n # config\n port = config.get(\"proxy\", \"port\")\n allowedDomains = config.get(\"proxy\", \"alloweddomains\")\n listeningIP = config.get(\"hotspot\", \"ip\")\n # wan dns\n proxyNSConfig = \"\"\n for dnsServer in wandns:\n proxyNSConfig = f\"{proxyNSConfig}nserver {dnsServer}\\n\"\n # 3proxy configurations\n proxyConfig = f\"\"\"#!/bin/3proxy\n#daemon\npidfile /var/run/3proxy.pid\nchroot /usr/local/3proxy proxy proxy\nnscache 65536\n{proxyNSConfig}\nlog /logs/3proxy-%y%m%d.log D\nrotate 1\ncounter /count/3proxy.3cf\ninclude /conf/counters\ninclude /conf/bandlimiters\nauth iponly\nallow * * {allowedDomains}\ndeny *\nproxy -e{wanip} -i{listeningIP} -p{port}\n\"\"\"\n confFile = open(\"/etc/3proxy/3proxy.cfg\", \"w\")\n confFile.write(proxyConfig)\n confFile.close()",
"def update_link(self, link):\n if self not in (link.endpoint_a, link.endpoint_b):\n return False\n\n if self.link is None or self.link != link:\n self.link = link\n\n if link.endpoint_a == self:\n endpoint = link.endpoint_b\n else:\n endpoint = link.endpoint_a\n\n if endpoint.link is None or endpoint.link != link:\n endpoint.link = link\n\n return True",
"def pullnlink(self,config):\n \n pull = []; link = []\n \n # choose files to pull and link\n for key,value in self.FILES.iteritems():\n \n # link big files\n if key == 'MESH':\n # mesh (merged or partitioned)\n value = expand_part(value,config)\n link.extend(value)\n elif key == 'DIRECT':\n # direct solution\n value = expand_time(value,config)\n link.extend(value)\n elif 'ADJOINT_' in key:\n # adjoint solution\n value = expand_time(value,config)\n link.extend(value)\n #elif key == 'STABILITY':\n #pass\n # copy all other files\n else:\n pull.append(value)\n \n #: for each filename\n \n return pull,link",
"def get_as_link(self, key):\n\n s = self.get(key)\n data = s.get_json()\n data.pop(\"property\", \"\")\n return LinkSetting(self, data)",
"def _add_link(self, linkdesc, linktype=\"link\"):\n # Check the proper lexic has been specified\n link_keys = list(linkdesc.keys())\n issubset = set(link_keys).issubset(self.link_attributes)\n if len(link_keys) != 2 or not issubset:\n raise ValueError(\n \"Box attribute definition: '{0}' defined in '{1}' is \"\n \"not supported. Supported attributes are \"\n \"'{2}'.\".format(\n json.dumps(list(linkdesc.keys())), self._xmlfile,\n self.link_attributes))\n\n # Deal with input/output pipeline link\n # In this case the inner box control is registered as an input/output\n # control of the pipeline\n source = linkdesc[self.link_attributes[0]]\n destination = linkdesc[self.link_attributes[1]]\n linkrep = \"{0}->{1}\".format(source, destination)\n if linktype == \"output\":\n setattr(\n self.outputs, destination, self._get_control(source, False))\n elif linktype == \"input\":\n if source not in self.inputs.controls:\n setattr(\n self.inputs, source, self._get_control(destination, True))\n else:\n src_control = self._get_control(source, False)\n dest_control = self._get_control(destination, True)\n src_control.add_observer(\"value\", dest_control._update_value)\n # Deal with inner pipeline link\n # In this case an observer is registered on the source control that\n # updates the output control when some changes occured.\n elif linktype == \"link\":\n src_control = self._get_control(source, False)\n dest_control = self._get_control(destination, True)\n src_control.add_observer(\"value\", dest_control._update_value)\n else:\n raise ValueError(\"Unrecognized link type '{0}'.\".format(linktype))\n\n # Save the link description\n self._links.append(linkrep)"
]
| [
"0.65031165",
"0.6175159",
"0.59685415",
"0.578361",
"0.56551373",
"0.5577402",
"0.5557687",
"0.5499407",
"0.54680276",
"0.5453428",
"0.5423965",
"0.5338431",
"0.5318767",
"0.51326674",
"0.5110213",
"0.5096941",
"0.50561696",
"0.5044307",
"0.5036249",
"0.5030324",
"0.5007305",
"0.49986774",
"0.49801004",
"0.4963355",
"0.49621904",
"0.49450803",
"0.49307838",
"0.49187914",
"0.49158096",
"0.491048"
]
| 0.6454943 | 1 |
StandAloneConfig is one of the configuration modes for reverse proxy | def stand_alone_config(self) -> Optional['outputs.CSIPowerMaxRevProxySpecConfigStandAloneConfig']:
return pulumi.get(self, "stand_alone_config") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def config():\n config_django()\n config_svisor()",
"def test_reverse_proxy_config():\n\n class ReverseProxyConfig(TestingConfig):\n REVERSE_PROXY = \"1,2,3,4\"\n\n app = create_ctfd(config=ReverseProxyConfig)\n with app.app_context():\n assert app.wsgi_app.x_for == 1\n assert app.wsgi_app.x_proto == 2\n assert app.wsgi_app.x_host == 3\n assert app.wsgi_app.x_port == 4\n assert app.wsgi_app.x_prefix == 0\n destroy_ctfd(app)\n\n class ReverseProxyConfig(TestingConfig):\n REVERSE_PROXY = \"true\"\n\n app = create_ctfd(config=ReverseProxyConfig)\n with app.app_context():\n assert app.wsgi_app.x_for == 1\n assert app.wsgi_app.x_proto == 1\n assert app.wsgi_app.x_host == 1\n assert app.wsgi_app.x_port == 1\n assert app.wsgi_app.x_prefix == 1\n destroy_ctfd(app)",
"def to_standby():\n if env.is_staging:\n print \"Redirecting to STANDBY is only allowed for PRODUCTION!\"\n return\n with cd(env.code_dir):\n run('ln -sf celeryconfig-gostandby.py ./api/celeryconfig.py')\n restart_api()",
"def antenny_config_make_default(self):\n return self.antenny_config.save_as_default_config()",
"def load_gateways_config(self):\n raise NotImplementedError",
"def proxy_settings(self):\n if config.proxy_host is None or config.proxy_host == \"\":\n return\n\n proxy = urllib2.ProxyHandler({\"http\": config.proxy_host})\n opener = urllib2.build_opener(proxy)\n urllib2.install_opener(opener)",
"def default_config():\n return {\n MESSAGE: 'reply -> send*',\n REPLY: 'transitiveReply -> send*',\n FORWARD: 'none*'\n }",
"def test_proxy_config_default_include(self):\n proxyid = random_string(\"proxy-\")\n root_dir = pathlib.Path(tempfile.mkdtemp(dir=RUNTIME_VARS.TMP))\n self.addCleanup(shutil.rmtree, str(root_dir), ignore_errors=True)\n conf_dir = root_dir / \"conf\"\n conf_file = conf_dir / \"proxy\"\n conf_d_dir = conf_dir / \"proxy.d\"\n proxy_conf_d = conf_d_dir / proxyid\n proxy_conf_d.mkdir(parents=True)\n\n with salt.utils.files.fopen(str(conf_file), \"w\") as wfh:\n wfh.write(\n textwrap.dedent(\n \"\"\"\\\n id: {id}\n root_dir: {root_dir}\n pidfile: run/proxy.pid\n pki_dir: pki\n cachedir: cache\n sock_dir: run/proxy\n log_file: logs/proxy.log\n \"\"\".format(\n id=proxyid, root_dir=root_dir\n )\n )\n )\n\n with salt.utils.files.fopen(str(proxy_conf_d / \"_schedule.conf\"), \"w\") as wfh:\n wfh.write(\n textwrap.dedent(\n \"\"\"\\\n schedule:\n test_job:\n args: [arg1, arg2]\n enabled: true\n function: test.arg\n jid_include: true\n kwargs: {key1: value1, key2: value2}\n maxrunning: 1\n name: test_job\n return_job: false\n \"\"\"\n )\n )\n opts = salt.config.proxy_config(\n str(conf_file),\n minion_id=proxyid,\n cache_minion_id=False,\n )\n self.assertIn(\"schedule\", opts)\n self.assertIn(\"test_job\", opts[\"schedule\"])",
"def configure_proxy(self, proxy):\n server_name = self.get_external_domain()\n tls_enabled = self.get_tls()\n ircd_enabled = self.charm_config.get(\"enable-ircd\")\n federation_enabled = self.get_federation()\n\n if tls_enabled:\n self.external_port = 443\n else:\n self.external_port = 80\n\n proxy_config = [\n {\n \"mode\": \"http\",\n \"external_port\": self.external_port,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8008,\n \"subdomain\": server_name,\n },\n ]\n\n if federation_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_federation_mode(),\n \"external_port\": 8448,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8448,\n }\n )\n\n if ircd_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_irc_mode(),\n \"external_port\": self.get_irc_port(),\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": self.irc_internal_port,\n }\n )\n\n proxy.configure(proxy_config)",
"def default_configs(cls):\n return {\n 'redirect_path': None,\n 'nif_page_structure': None,\n 'nif_text_links': None,\n }",
"def is_server_default(self):\n ...",
"def load_gateways(self):\n from django.conf import settings\n return getattr(settings, self.settings_name, [])",
"async def test_manual_configuration_dont_update_configuration(opp, aioclient_mock):\n await setup_deconz_integration(opp, aioclient_mock)\n\n aioclient_mock.get(\n pydeconz.utils.URL_DISCOVER,\n json=[],\n headers={\"content-type\": CONTENT_TYPE_JSON},\n )\n\n result = await opp.config_entries.flow.async_init(\n DECONZ_DOMAIN, context={\"source\": SOURCE_USER}\n )\n\n assert result[\"type\"] == RESULT_TYPE_FORM\n assert result[\"step_id\"] == \"manual_input\"\n\n result = await opp.config_entries.flow.async_configure(\n result[\"flow_id\"],\n user_input={CONF_HOST: \"1.2.3.4\", CONF_PORT: 80},\n )\n\n assert result[\"type\"] == RESULT_TYPE_FORM\n assert result[\"step_id\"] == \"link\"\n\n aioclient_mock.post(\n \"http://1.2.3.4:80/api\",\n json=[{\"success\": {\"username\": API_KEY}}],\n headers={\"content-type\": CONTENT_TYPE_JSON},\n )\n\n aioclient_mock.get(\n f\"http://1.2.3.4:80/api/{API_KEY}/config\",\n json={\"bridgeid\": BRIDGEID},\n headers={\"content-type\": CONTENT_TYPE_JSON},\n )\n\n result = await opp.config_entries.flow.async_configure(\n result[\"flow_id\"], user_input={}\n )\n\n assert result[\"type\"] == RESULT_TYPE_ABORT\n assert result[\"reason\"] == \"already_configured\"",
"def default_empty_config_env(monkeypatch):\n monkeypatch.delenv(\"SOPEL_CONFIG\", raising=False)\n monkeypatch.delenv(\"SOPEL_CONFIG_DIR\", raising=False)",
"def override_config(self):\n super(AuthedConfigFixture, self).override_config()\n self.conf.register_opts(auth_token._OPTS, group='keystone_authtoken')\n self.conf.set_override('auth_uri', 'http://127.0.0.1:35357',\n group='keystone_authtoken')",
"def antenny_config_load_default(self):\n return self.antenny_config.load_default_config()",
"def use_remote_gateways(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_remote_gateways\")",
"def use_remote_gateways(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_remote_gateways\")",
"def use_remote_gateways(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"use_remote_gateways\")",
"def build_server_conf(self, is_default=False, env=None, server_name_list=[], location_config=None,\n default_config_identifier=None, default_port=None, default_root_directory=None):\n server_conf = Server()\n\n if is_default is False:\n logger.info(\"Building the server section for {}\".format(env))\n if server_name_list:\n for server in server_name_list:\n server_conf.add(\n Key(\n 'server_name', server\n )\n )\n else:\n logger.warning(\"Server Name for env {} are not set\".format(env))\n server_conf.add(\n Key('listen', '[::]:' + str(\n self.default_catch_all_map[default_config_identifier]['port']) + 'default_server ipv6only=on'),\n Key('listen',\n '0.0.0.0:' + str(self.default_catch_all_map[default_config_identifier]['port']) + 'default_server'))\n\n if location_config:\n for key in location_config:\n loc = Location(key)\n loc.add(Key('proxy_pass', 'http://' + env)),\n\n if location_config[key]['ipfilter'] == 'myfilter':\n for cidr in self.cidr_filter_list:\n loc.add(Key('allow', cidr))\n elif location_config[key]['ipfilter'] == 'allowall':\n for cidr in self.cidr_allow_all_list:\n loc.add(Key('allow', cidr))\n loc.add(Key('deny', 'all'))\n\n server_conf.add(loc)\n else:\n logger.warning(\"Location is/are not specified for the env:{}\".format(env))\n\n elif is_default is True:\n if str(default_port) is None or len(str(default_port)) == 0:\n logger.warning(\"Default port not set, Nginx config for default host might now work properly !!\")\n server_conf.add(\n Key('listen', '[::]:' + str(default_port) + 'default_server ipv6only=on'),\n Key('listen', '0.0.0.0:' + str(default_port) + 'default_server'),\n Key('root', default_root_directory)\n )\n if not server_name_list:\n server_conf.add(\n Key('server_name', '_')\n )\n\n loc = Location(next(iter(location_config)))\n key = str(next(iter(location_config)))\n for k, v in location_config[key].items():\n loc.add(Key(k, v))\n\n server_conf.add(loc)\n\n return server_conf",
"def bootstrap_default():\n\treturn default_configuration",
"def configure(self, conf):\n return False",
"def setup_config():\n if CONFIG.get(\"environment\", \"server\") == 'production':\n return 'config.ProductionConfig'\n else:\n return 'config.TestingConfig'",
"def setup_remote_site(self):\n raise NotImplementedError",
"def global_settings(request):\n return {\n 'OFFLINE_MODE_CAPABILITY_ENABLED': settings.OFFLINE_MODE_CAPABILITY_ENABLED\n }",
"def config(self, options):\n group = options.group(\"Nomos settings\")\n _ = group.define\n _('-u', '--url', default='http://localhost', help='The host of the http server url prefix (default %(default)r)')\n _('-d', '--debug', help='Open debug mode (default %(default)r)', action='store_true', default=False)\n _('-p', '--path', help='The test nomos path (default %(default)r)', default=\"\")\n _('-r', '--resource', help='The test nomos resource path (default %(default)r)', default=None)\n _('-m', '--minix', help='The test minix path list (default %(default)r)', default=[])\n _('-c', '--config', default=self.conf_path, help=\"config path (default %(default)r)\", metavar=\"FILE\")\n _(\"-v\", \"--version\", help=\"Show nomos version 0.1\")\n\n group = options.group(\"http settings\")\n _ = group.define\n\n _('--http.timeout', default=30,\n help='http timeout setting: (default %(default)r)')\n _('--http.verify', help='The server TLS certificate (default %(default)r)', default=False)\n _('--http.cert.client_cert', default=None,\n help='Specify a local cert (default %(default)r)')\n _('--http.cert.client_key', default=None,\n help='Specify a local cert (default %(default)r)')",
"def defaultConf():\n from config import lwbdUrl, userAndPass\n baseUrl = lwbdUrl\n lucidAuth = userAndPass\n return LucidSdaConfiguration(baseUrl,\n lucidAuth)",
"def configuration():",
"def config_mode(self, config_command=\"config\", pattern=\">config\"):\n return super().config_mode(config_command=config_command, pattern=pattern)",
"def config():"
]
| [
"0.5734231",
"0.5700284",
"0.56778556",
"0.548069",
"0.54663",
"0.54478127",
"0.54054224",
"0.535424",
"0.5339631",
"0.5334389",
"0.5333183",
"0.52794164",
"0.51880765",
"0.5180448",
"0.51594317",
"0.51446605",
"0.51168114",
"0.51168114",
"0.50815207",
"0.50673157",
"0.5061227",
"0.50230896",
"0.50104934",
"0.5010276",
"0.50068694",
"0.50043136",
"0.49975023",
"0.49927866",
"0.49888423",
"0.49837446"
]
| 0.60077494 | 0 |
Driver is the specification for the CSI PowerMax Driver | def driver(self) -> 'outputs.CSIPowerMaxSpecDriver':
return pulumi.get(self, "driver") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerMaxSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerStoreSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self) -> 'outputs.CSIVXFlexOSSpecDriver':\n return pulumi.get(self, \"driver\")",
"def driver(self) -> 'outputs.CSIUnitySpecDriver':\n return pulumi.get(self, \"driver\")",
"def driver(self) -> 'outputs.CSIIsilonSpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIVXFlexOSSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self):\n driver = c_int()\n ckresult(_dll.FMOD_System_GetDriver(self._ptr, byref(driver)))\n return driver.value",
"def __init__(__self__, *,\n common: 'outputs.CSIPowerMaxSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIPowerMaxSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIPowerMaxSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def common(self) -> 'outputs.CSIPowerMaxSpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIUnitySpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def controller(self) -> Optional['outputs.CSIPowerMaxSpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def node(self) -> Optional['outputs.CSIPowerMaxSpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIIsilonSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def get_driver_info(self, aaidee):\n name = create_string_buffer(256)\n guid = GUID()\n system_rate = c_int()\n speaker_mode = c_int()\n channels = c_int()\n ckresult(\n _dll.FMOD_System_GetDriverInfo(\n self._ptr,\n aaidee,\n name,\n 256,\n byref(guid),\n byref(system_rate),\n byref(speaker_mode),\n byref(channels),\n )\n )\n return so(\n name=name.value,\n guid=guid,\n system_rate=system_rate.value,\n speaker_mode=speaker_mode.value,\n speaker_mode_channels=channels.value,\n )",
"def chipset_driver_modules(self):\n\t\treturn self.__info_dict['info']['chipset_driver_modules']['value']",
"def limit_max_power(self, params=None):\n if self.inv is None:\n raise der.DERError('DER not initialized')\n\n try:\n if params is not None:\n ena = params.get('Ena')\n if ena is not None:\n if ena is True:\n self.inv.device.write(0xf100, util.u16_to_data(1)) # F142 R/W AdvancedPwrControlEn Int32 0-1\n else:\n self.inv.device.write(0xf100, util.u16_to_data(0)) # F142 R/W AdvancedPwrControlEn Int32 0-1\n wmax = params.get('WMaxPct')\n if wmax is not None:\n self.ts.log('Changing power to %d' % params.get('WMaxPct'))\n self.inv.device.write(0xf002, util.u16_to_data(params.get('WMaxPct')))\n else:\n params = {}\n if util.data_to_u16(self.inv.device.read(0xf100, 1)) == 0:\n params['Ena'] = False\n else:\n params['Ena'] = True\n params['WMaxPct'] = util.data_to_u16(self.inv.device.read(0xf001, 1))\n\n except Exception, e:\n raise der.DERError(str(e))\n\n return params",
"def driver(self):\n \n return self.__driver",
"def max_power(self):\r\n est_max_power = self.model * self.max_pwm / 100\r\n return est_max_power",
"def device_class(self):\n return DEVICE_CLASS_POWER",
"def driver(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"driver\")",
"def driverName(self):\n return Cbytestring2Python(\n self.Rf2Scor.mVehicles[self.__playersDriverNum()].mDriverName)",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def DRIVER():\n return \"podman\"",
"def _open_device(self):\r\n self._lib = windll.LoadLibrary(\"lib\\\\ps2000a.dll\")\r\n c_handle = c_int16()\r\n with self._driver_lock:\r\n m = self._lib.ps2000aOpenUnit(byref(c_handle),None)\r\n if m == 286:\r\n m = self._lib.ps2000aChangePowerSource(c_handle,\r\n c_int32(m))\r\n check_result(m)\r\n self._handle = c_handle\r\n\r\n return True",
"def driver(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"driver\")",
"def get_max_speed(self):\n if self.mot_type == 'ims':\n return self.get_par(\"max_speed\")\n elif self.mot_type == 'xps8p':\n return self.get_par(\"max_speed_xps\")\n else:\n return self.get_par(\"max_speed\")",
"def high_sensitivity():\n\n nav._cap1166._write_byte(0x00, 0b01000000)\n nav._cap1166._write_byte(0x1f, 0b00100000)",
"def power_mode(self) -> str:\n return self._device_info[\"PowerMode\"]",
"def max_pwm(self):\r\n return self._max_pwm"
]
| [
"0.7835924",
"0.66870284",
"0.66729796",
"0.6314486",
"0.6233547",
"0.6169987",
"0.6137801",
"0.6074996",
"0.58587897",
"0.5742326",
"0.57361585",
"0.5728146",
"0.5667005",
"0.5643347",
"0.54994494",
"0.545136",
"0.54307276",
"0.5390475",
"0.537022",
"0.5338836",
"0.5310073",
"0.52522",
"0.5251005",
"0.5136096",
"0.51286685",
"0.5079112",
"0.5034671",
"0.5026988",
"0.50267243",
"0.50029755"
]
| 0.79797435 | 0 |
Path of the field to select in the specified API version. | def field_path(self) -> str:
return pulumi.get(self, "field_path") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_field_in_version_json(field_name):\n if not os.environ.get(\"create_version_request\"):\n return None\n request = json.loads(os.environ.get(\"create_version_request\"))\n if not request or not isinstance(request, dict):\n return None\n version = request.get(\"version\")\n if not version or not isinstance(version, dict):\n return None\n\n logging.info(\"Found value: %s, for field: %s from create_version_request\",\n version.get(field_name), field_name)\n return version.get(field_name)",
"def api_revision(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_revision\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[str]:\n return pulumi.get(self, \"api_version\")",
"def get_api_version(self):\n major, minor, patch = self.client.config['api_version']\n return '%s.%s.%s' % (major, minor, patch)",
"def get_pkg_meta_field(self, pkg, field, version=None):\n pass",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")",
"def api_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"api_version\")"
]
| [
"0.6015247",
"0.5895961",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.57634807",
"0.5717113",
"0.5705715",
"0.57031804",
"0.57031804",
"0.57031804",
"0.57031804"
]
| 0.6319652 | 1 |
CSIPowerStoreSpec defines the desired state of CSIPowerStore | def __init__(__self__, *,
driver: 'outputs.CSIPowerStoreSpecDriver'):
pulumi.set(__self__, "driver", driver) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def common(self) -> 'outputs.CSIPowerStoreSpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def setWindowStore(s=0):\n sdict = {0:'NOSTORE',1:'STORE'}\n dislin.x11mod(sdict[s])",
"def active_power(self, params=None):\n if self.inv is None:\n raise der.DERError('DER not initialized')\n\n try:\n if params is not None:\n ena = params.get('Ena')\n if ena is not None:\n if ena is True:\n # no enable for sunspec\n pass\n else:\n # no enable for sunspec\n pass\n\n storage_params = {}\n if params['P'] <= 0: # \"charging\"\n storage_params['InWRte'] = params['P']\n else: # \"discharging\"\n storage_params['OutWRte'] = params['P']\n\n storage_params['InOutWRte_WinTms'] = params['WinTms']\n storage_params['InOutWRte_RmpTms'] = params['RmpTms']\n storage_params['InOutWRte_RvrtTms'] = params['RvrtTms']\n\n self.inv.storage(params=storage_params)\n\n else:\n params = {}\n storage_params = self.inv.storage()\n if storage_params['ChaSt'] == 1: # \"off\"\n params['Ena'] = False\n else:\n params['Ena'] = True\n\n if storage_params['StorCtl_Mod'] == 0: # \"charging\"\n params['P'] = storage_params['InWRte']\n else: # \"discharging\"\n params['P'] = storage_params['OutWRte']\n\n params['WinTms'] = storage_params['InOutWRte_WinTms']\n params['RmpTms'] = storage_params['InOutWRte_RmpTms']\n params['RvrtTms'] = storage_params['InOutWRte_RvrtTms']\n\n except Exception, e:\n raise der.DERError(str(e))\n\n return params",
"def test_regulation_mode():\n with expected_protocol(\n DCXS,\n [\n (\"D0\", None),\n (\"c\", 0),\n ],\n ) as inst:\n inst.regulation_mode = \"power\"\n assert \"power\" == inst.regulation_mode",
"def test_str_power(self):\n xknx = XKNX(loop=self.loop)\n sensor = Sensor(\n xknx,\n 'TestSensor',\n group_address_state='1/2/3',\n value_type=\"power\")\n sensor.sensor_value.payload = DPTArray((0x43, 0xC6, 0x80, 00))\n\n self.assertEqual(sensor.resolve_state(), 397)\n self.assertEqual(sensor.unit_of_measurement(), \"W\")\n self.assertEqual(sensor.ha_device_class(), \"power\")",
"def test_str_power(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"power\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x45,\n 0xCB,\n 0xE2,\n 0x5C,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 6524.294921875)\n self.assertEqual(sensor.unit_of_measurement(), \"W\")\n self.assertEqual(sensor.ha_device_class(), \"power\")",
"def test_api_ucs_power(self):\n # first power off all servers\n self.set_all_server_power_state(\"off\")\n # verify power state is down\n self.check_all_server_power_state(\"down\")\n # now power on the servers\n self.set_all_server_power_state(\"on\")\n # verify power state is up\n self.check_all_server_power_state(\"up\")",
"def node(self) -> Optional['outputs.CSIPowerStoreSpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def get_power_state(self):\n\n doc = self.client.enumerate(uris.CIM_ComputerSystem)\n\n enabled_state = doc.find(\n './/s:Body/wsen:EnumerateResponse/wsman:Items/wsinst:CIM_HostComputerSystem/wsinst:EnabledState', wsman.NS_MAP_COMPUTER_SYSTEM)\n return constants._get_enabled_state(enabled_state.text)",
"def controller(self) -> Optional['outputs.CSIPowerStoreSpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def power_mode(self) -> str:\n return self._device_info[\"PowerMode\"]",
"def sstcp_enabled():\n return common.POWER_CAP in SYSTEM_CAPS",
"def get_power_state(self, node):",
"def fusion_api_edit_server_hardware_power_state(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/powerState')",
"def set_power_dbm(self, power=None):\n if power is None:\n power = self.def_power\n self.instr.write('L1 ' + str(power + ' DM'))\n time.sleep(self.sleep_time)",
"def supported_power_interfaces(self):\n return [fake.FakePower]",
"def __init__(__self__, *,\n name: str,\n allow_volume_expansion: Optional[bool] = None,\n allowed_topologies: Optional[Sequence['outputs.CSIPowerStoreSpecDriverStorageClassAllowedTopologies']] = None,\n default: Optional[bool] = None,\n parameters: Optional[Mapping[str, str]] = None,\n reclaim_policy: Optional[str] = None):\n pulumi.set(__self__, \"name\", name)\n if allow_volume_expansion is not None:\n pulumi.set(__self__, \"allow_volume_expansion\", allow_volume_expansion)\n if allowed_topologies is not None:\n pulumi.set(__self__, \"allowed_topologies\", allowed_topologies)\n if default is not None:\n pulumi.set(__self__, \"default\", default)\n if parameters is not None:\n pulumi.set(__self__, \"parameters\", parameters)\n if reclaim_policy is not None:\n pulumi.set(__self__, \"reclaim_policy\", reclaim_policy)",
"def test_skip_store(self):\n machine_uuid = \"955429e1-2125-478a-869c-3b3ce5549c38\"\n\n # Try to create machine's snapshot. The operation fails because\n # there is no reference to the machine in the state store.\n status = self.proxy.snapshot.create(PROVIDER_ID, machine_uuid)\n self.check_xmlrpc_command_result(status, has_error=True)\n\n # Create machine's snapshot with the --skip_store=True.\n status = self.proxy.snapshot.create(\n PROVIDER_ID, machine_uuid, True, False\n )\n self.check_xmlrpc_command_result(status)\n\n # Try to destroy machine's snapshot(s). The operation fails because\n # there is no reference to the machine in the state store.\n status = self.proxy.snapshot.destroy(PROVIDER_ID, machine_uuid)\n self.check_xmlrpc_command_result(status, has_error=True)\n\n # Destroy machine's snapshots with the --skip_store=True.\n status = self.proxy.snapshot.destroy(\n PROVIDER_ID, machine_uuid, True\n )\n self.check_xmlrpc_command_result(status)",
"def get_power_management() -> int:",
"def test_str_power_2byte(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"power_2byte\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x6D,\n 0x91,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 116736.0)\n self.assertEqual(sensor.unit_of_measurement(), \"kW\")\n self.assertEqual(sensor.ha_device_class(), \"power\")",
"def set_power(self, dbm=-30):\r\n self.write(\"SOURce1:POWer:POWer \"+str(dbm))",
"def set_power(self, dbm=-30):\r\n self.write(\"SOURce1:POWer:POWer \"+str(dbm))",
"def test_backend_specs_sampling(self):\n dev = qml.device(\n \"orquestra.qiskit\", backend=\"statevector_simulator\", wires=1, shots=1000, analytic=False\n )\n assert dev.backend_specs == qiskit_sampler_specs",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def power_state(self) -> const.PowerState:\n return self.relay(\"power_state\")",
"def fusion_api_get_power_device_power_state(self, uri=None, api=None, headers=None):\n return self.pd.get(uri=uri, api=api, headers=headers, param='/powerState')",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerMaxSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def state(self):\n if not self._product:\n return\n if self._type == \"power\":\n return self._product.get_key(\"PWR\")\n elif self._type == \"setpoint\":\n return self._product.get_key(\"SETP\")\n elif self._type == \"fan1\":\n return self._product.get_key(\"F2L\")\n elif self._type == \"fan2\":\n return self._product.get_key(\"F3L\")\n elif self._type == \"fan3\":\n return self._product.get_key(\"F4L\")",
"def test_current_power_w_no_data(self):\n self.port.data = {\"notpower\": 123}\n assert 0 == self.switch.current_power_w",
"def fusion_api_set_power_device_power_state(self, body, uri, api=None, headers=None):\n return self.pd.update(body=body, uri=uri, api=api, headers=headers, param='/powerState')"
]
| [
"0.55531",
"0.53814274",
"0.5287721",
"0.51716346",
"0.5164215",
"0.5054662",
"0.49531534",
"0.49034643",
"0.4887106",
"0.48683786",
"0.48633137",
"0.48585188",
"0.480452",
"0.4790361",
"0.4751951",
"0.47489735",
"0.47005275",
"0.4693841",
"0.4692008",
"0.46839023",
"0.46533865",
"0.46533865",
"0.46525654",
"0.4648574",
"0.4645036",
"0.46198586",
"0.46131927",
"0.46102336",
"0.46097818",
"0.46056733"
]
| 0.6055389 | 0 |
SnapshotClass is the specification for Snapshot Classes | def snapshot_class(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSnapshotClass']]:
return pulumi.get(self, "snapshot_class") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def name(cls):\n return 'Snapshot'",
"def snapshot_type(self) -> str:\n return pulumi.get(self, \"snapshot_type\")",
"def test_snapshot(self):\n converter = InstructionToQobjConverter(PulseQobjInstruction, meas_level=2)\n instruction = Snapshot(name='label', snap_type='type')\n\n valid_qobj = PulseQobjInstruction(\n name='snapshot',\n t0=0,\n label='label',\n type='type'\n )\n\n self.assertEqual(converter(0, instruction), valid_qobj)",
"def test_class_hierarchy(self):\n\n assert issubclass(TIMESTAMP, sqltypes._Binary)\n assert issubclass(ROWVERSION, sqltypes._Binary)",
"def is_snapshot(self):\n return self.proto.display_type == DISPLAY_TYPE.Snapshot.value",
"def serialize_snapshot(self, snapshot, fields=None, version=None):\n fields = fields or self.snapshot_fields\n version = version or self.snapshot_version\n serialized_snapshot = serializers.serialize(\n 'python', [snapshot], fields=fields\n )[0]\n serialized_snapshot['version'] = version\n serialized_snapshot['extra_fields'] = {}\n return serialized_snapshot",
"def snapshot(self) -> Snapshot:\n snapshot = self.open(Snapshot.type).signed\n if not isinstance(snapshot, Snapshot):\n raise RuntimeError(\"Unexpected snapshot type\")\n return snapshot",
"def resource_type(self):\n return 'volume-snapshot'",
"def snapshot(self):\n pass",
"def _get_revision_class(self):\n return perf_revision_state.PerfRevisionState",
"def snapshot(self, snapshot_id):\r\n return self.connection.create_dbsnapshot(snapshot_id, self.id)",
"def test_snapshot(self):\n cmd = Snapshot(name='label', snap_type='type')\n instruction = cmd << 10\n\n qobj = PulseQobjInstruction(name='snapshot', t0=10, label='label', type='type')\n converted_instruction = self.converter(qobj)\n\n self.assertEqual(converted_instruction.timeslots, instruction.timeslots)\n self.assertEqual(converted_instruction.instructions[0][-1], cmd)",
"def snapshot_info(self) -> MetaFile:\n raise NotImplementedError",
"def __init__(self):\n self.id = None\n self.typeInfo['id'] = 'string'\n \"\"\"the account associated with the disk volume\"\"\"\n self.account = None\n self.typeInfo['account'] = 'string'\n \"\"\"the create date of the vm snapshot\"\"\"\n self.created = None\n self.typeInfo['created'] = 'date'\n \"\"\"indiates if this is current snapshot\"\"\"\n self.current = None\n self.typeInfo['current'] = 'boolean'\n \"\"\"the description of the vm snapshot\"\"\"\n self.description = None\n self.typeInfo['description'] = 'string'\n \"\"\"the display name of the vm snapshot\"\"\"\n self.displayname = None\n self.typeInfo['displayname'] = 'string'\n \"\"\"the domain associated with the disk volume\"\"\"\n self.domain = None\n self.typeInfo['domain'] = 'string'\n \"\"\"the ID of the domain associated with the disk volume\"\"\"\n self.domainid = None\n self.typeInfo['domainid'] = 'string'\n \"\"\"the name of the vm snapshot\"\"\"\n self.name = None\n self.typeInfo['name'] = 'string'\n \"\"\"the parent ID of the vm snapshot\"\"\"\n self.parent = None\n self.typeInfo['parent'] = 'string'\n \"\"\"the parent displayName of the vm snapshot\"\"\"\n self.parentName = None\n self.typeInfo['parentName'] = 'string'\n \"\"\"the project name of the vpn\"\"\"\n self.project = None\n self.typeInfo['project'] = 'string'\n \"\"\"the project id of the vpn\"\"\"\n self.projectid = None\n self.typeInfo['projectid'] = 'string'\n \"\"\"the state of the vm snapshot\"\"\"\n self.state = None\n self.typeInfo['state'] = 'state'\n \"\"\"VM Snapshot type\"\"\"\n self.type = None\n self.typeInfo['type'] = 'string'\n \"\"\"the vm ID of the vm snapshot\"\"\"\n self.virtualmachineid = None\n self.typeInfo['virtualmachineid'] = 'string'\n \"\"\"the Zone ID of the vm snapshot\"\"\"\n self.zoneid = None\n self.typeInfo['zoneid'] = 'string'",
"def get_snapshot_object(session, key, snapshot=None):\n # type: (Session, Text, Optional[Text]) -> Any\n url_tail = \"/{}/{}/{}/{}/{}\".format(\n CoordConstsV2.RSC_NETWORKS,\n session.network,\n CoordConstsV2.RSC_SNAPSHOTS,\n session.get_snapshot(snapshot),\n CoordConstsV2.RSC_OBJECTS,\n )\n return _get_stream(session, url_tail, {CoordConstsV2.QP_KEY: key})",
"def snapshot_gen(self):\n \n # Generate snapshot\n snapshot, snapshot_param = make_snapshot.snapshot_gen(self._parent)\n # Save to ICobj\n self._parent.snapshot = snapshot\n self._parent.snapshot_param = snapshot_param",
"def snapshot(self, snapshot):\n self._context[\"snapshot\"] = snapshot",
"def create_snapshot(self, snap_description=None):\n raise NotImplementedError()",
"def test_class_name(self):\n r = Review()\n r_dictionary = r.to_dict()\n self.assertIn('__class__', r_dictionary)",
"def create_snapshot(store, dataset, snapshot, description_fields, snapshot_changes):\n validate_snapshot_name(store, dataset, snapshot)\n validate_datalad_config(store, dataset)\n update_description(store, dataset, description_fields)\n update_changes(store, dataset, snapshot, snapshot_changes)\n save_snapshot(store, dataset, snapshot)\n return get_snapshot(store, dataset, snapshot)",
"def snapshot(snapshot_type, result_q, time_delta):",
"def snapshot_identification(snapshot):\n\t\treturn {\n\t\t\t'user_id': snapshot['user_id'],\n\t\t\t'timestamp': snapshot['timestamp'],\n\t\t\t'snapshot_id': snapshot['snapshot_id']}",
"def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.gain = self.snapshot[0]\r\n self.block = self.snapshot[1]\r\n self.locked = self.snapshot[2]\r\n self.bucket_num = self.snapshot[3]",
"def record_class_examined(self, cls):\n serialized = self.serialize_type(cls)\n if serialized is not None:\n self.classes_examined.add(serialized)",
"def deserialize_snapshot(self, serialized_snapshot):\n snapshot = list(serializers.deserialize(\n 'python', [serialized_snapshot]\n ))[0].object\n snapshot.__version__ = serialized_snapshot['version']\n snapshot.__extra_fields__ = serialized_snapshot['extra_fields']\n # override extra fields\n for name, value in serialized_snapshot['extra_fields'].items():\n if value:\n if isinstance(value, dict):\n value = self.deserialize_snapshot(value)\n setattr(snapshot, name, value)\n return snapshot",
"def restore_from_snapshot(self, snapshot_state: Mapping[str, jnp.ndarray]):\n def clear(attributes):\n for attr_name in attributes:\n if hasattr(self, attr_name):\n delattr(self, attr_name)\n\n def write(attributes, broadcast=False):\n for attr_name, chk_name in attributes.items():\n value = snapshot_state[chk_name]\n if broadcast:\n value = utils.bcast_local_devices(value)\n setattr(self, attr_name, value)\n\n # Explicitly clear existing attributes first, this (potentially) allows\n # broadcast values to reuse previous allocations leading to reduced\n # fragmentation of device memory.\n clear(self.CHECKPOINT_ATTRS)\n clear(self.NON_BROADCAST_CHECKPOINT_ATTRS)\n write(self.CHECKPOINT_ATTRS, broadcast=True)\n write(self.NON_BROADCAST_CHECKPOINT_ATTRS)"
]
| [
"0.7336533",
"0.72760487",
"0.7070211",
"0.7056704",
"0.669828",
"0.65507406",
"0.5934098",
"0.59296554",
"0.5928252",
"0.58405566",
"0.5718811",
"0.5634103",
"0.56048465",
"0.5574448",
"0.55006576",
"0.5482949",
"0.53952694",
"0.5382935",
"0.5382102",
"0.5359684",
"0.53471845",
"0.53255814",
"0.53096217",
"0.5309208",
"0.526724",
"0.5254382",
"0.525098",
"0.5246751",
"0.5215199",
"0.5195794"
]
| 0.73012376 | 1 |
TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. | def toleration_seconds(self) -> Optional[int]:
return pulumi.get(self, "toleration_seconds") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def toleration(self, toleration: Dict[str, str]):\n\n self._toleration = toleration",
"def toleration(self) -> Dict[str, str]:\n return self._toleration",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def deletion_grace_period_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"deletion_grace_period_seconds\")",
"def termination_grace_period_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"termination_grace_period_seconds\")",
"def effective_lockout_seconds(self):\n return self.lockout_seconds + self.safety_seconds",
"def termination_grace_period_seconds(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"termination_grace_period_seconds\")",
"def Tolerance(self):\n\t\treturn self._get_attribute('tolerance')",
"def secondsLeft(self)->int:\n t = datetime.utcnow()\n if self._scenario == LM_HardDate.Scenario.ValidSince:\n return 0 if t >= self.timeBegin else int((self.timeBegin - t).total_seconds())\n else:\n return 0 if t >= self.timeEnd else int((self.timeEnd - t).total_seconds())",
"def timeout_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"timeout_seconds\")",
"def calculate_timeout(self):\n return self.total_estimated_words() / self.minimum_wpm * 60"
]
| [
"0.7147842",
"0.67870635",
"0.6334559",
"0.6320272",
"0.6269226",
"0.62464815",
"0.6189818",
"0.6143222",
"0.6134462",
"0.6114868",
"0.60094",
"0.59734225",
"0.5971313",
"0.59695727",
"0.5961522",
"0.5935294",
"0.58811635",
"0.5770352",
"0.57524425",
"0.5696225",
"0.56837475",
"0.5631572",
"0.5076096",
"0.498951",
"0.49888638",
"0.49421757",
"0.4719646",
"0.4717082",
"0.45849764",
"0.4456973"
]
| 0.7421432 | 1 |
NodeSelector is a selector which must be true for the pod to fit on a node. Selector which must match a node's labels for the pod to be scheduled on that node. | def node_selector(self) -> Optional[Mapping[str, str]]:
return pulumi.get(self, "node_selector") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def node_selector(self, node_selector: Dict[str, str]):\n\n self._node_selector = node_selector",
"def list_of_pods_in_a_node(self, node_name, namespace: Optional[str] = None, label_selector: Optional[str] = None):\n pod_list = []\n if namespace is None:\n api_response = self.list_all_pods_in_all_namespaces(label_selector=label_selector)\n else:\n api_response = self.list_pods_in_a_namespace(namespace=namespace, label_selector=label_selector)\n if api_response:\n for item in api_response.items:\n if node_name == item[\"spec\"][\"nodeName\"]:\n pod_name = item[\"metadata\"][\"name\"]\n pod_list.append(pod_name)\n return pod_list",
"def node_topology(self) -> \"LabelSelector\":\n return typing.cast(\n \"LabelSelector\",\n self._properties.get(\"nodeTopology\"),\n )",
"def selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorPatchArgs']]:\n return pulumi.get(self, \"selector\")",
"def selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"selector\")",
"def node_selector(self) -> Dict[str, str]:\n return self._node_selector",
"def selector(self) -> Optional[pulumi.Input['_meta.v1.LabelSelectorArgs']]:\n return pulumi.get(self, \"selector\")",
"def selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"selector\")",
"def namespace_selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"namespace_selector\")",
"def namespace_selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"namespace_selector\")",
"def namespace_selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"namespace_selector\")",
"def namespace_selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"namespace_selector\")",
"def namespace_selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"namespace_selector\")",
"def namespace_selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"namespace_selector\")",
"def get_selector(mode):\n selector = 'redis.io/role=node'\n if mode == MODE_RESTRICTED:\n selector = '{},{}'.format(selector, OPERATOR_LABEL)\n return selector",
"def object_selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"object_selector\")",
"def object_selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"object_selector\")",
"def object_selector(self) -> Optional['_meta.v1.outputs.LabelSelectorPatch']:\n return pulumi.get(self, \"object_selector\")",
"def object_selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"object_selector\")",
"def object_selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"object_selector\")",
"def object_selector(self) -> Optional['_meta.v1.outputs.LabelSelector']:\n return pulumi.get(self, \"object_selector\")",
"def node_topology(self, value: typing.Union[\"LabelSelector\", dict]):\n if isinstance(value, dict):\n value = typing.cast(\n LabelSelector,\n LabelSelector().from_dict(value),\n )\n self._properties[\"nodeTopology\"] = value",
"def get_nodes(self, label_selector=None):\n return self.core_client.list_node(label_selector=label_selector)",
"def getSelector(self, node):\n self.checkModelOpen()\n calcEngine = CalcEngine.factory(self.client_session)\n return calcEngine.getSelector(node)",
"def service_selector(self, service_selector: ConfigNodePropertyString):\n\n self._service_selector = service_selector",
"def select_node(self, *args):\n node = super().select_node(*args)\n self.subscribe_mqtt(self.generate_node_topic(node, '#', type='responses'))\n\n return node",
"def setNodeClassSelector(self, class_selecting_function: callable):\n self.node_class_selector = class_selecting_function",
"def wait_for_pods(self, label_selector, namespace, timeout=300):\n waiting_for_pods = True\n timeout_interval = 0\n while waiting_for_pods:\n if timeout_interval >= timeout:\n raise TimeoutError()\n pods = self.get_pods(label_selector, namespace).items\n if len(pods) == 0 and timeout_interval < 60:\n continue\n\n if len(pods) == 0 and timeout_interval > 60:\n raise PodsNotFoundError(f\"Found no pods with label selector {label_selector}\")\n\n _ = [\n logger.info(f\"{pod.metadata.namespace}\\t{pod.metadata.name}\\t{pod.status.phase}\")\n for pod in pods\n ]\n waiting_for_pods = any(pod.status.phase != \"Running\" for pod in pods)\n\n time.sleep(DEFAULT_WAIT_TIME)\n timeout_interval += DEFAULT_WAIT_TIME",
"def get_pod_node(self, namespace, pod_name):\n api_response = None\n node_name = None\n try:\n api_response = self.ocp_pods.get(namespace=namespace, name=pod_name)\n except ApiException as e:\n logger.error(\"Exception while getting pods: %s\\n\", e)\n if api_response is not None:\n if api_response.spec[\"nodeName\"]:\n node_name = api_response.spec[\"nodeName\"]\n return node_name",
"def selector(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"selector\")"
]
| [
"0.6502156",
"0.5918389",
"0.59019166",
"0.56973386",
"0.56566566",
"0.5647969",
"0.56320477",
"0.5579677",
"0.5537389",
"0.5537389",
"0.5537389",
"0.5397992",
"0.5397992",
"0.5397992",
"0.53265005",
"0.52993214",
"0.52993214",
"0.52993214",
"0.52291834",
"0.52291834",
"0.52291834",
"0.518106",
"0.5090993",
"0.5030758",
"0.49438456",
"0.48901623",
"0.48704374",
"0.48188865",
"0.4778634",
"0.47426236"
]
| 0.62425923 | 1 |
Restrict the node topologies where volumes can be dynamically provisioned. | def allowed_topologies(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverStorageClassAllowedTopologies']]:
return pulumi.get(self, "allowed_topologies") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topology_access_create(user):\n return user.has_perm(\"vnswww.add_topology\")",
"def _get_allowable_node_list(si, vol_name=None):\n anl = []\n try:\n for hostname in si.keys():\n # Volumes can only be placed on nodes that are ok and are part of\n # the storage pool\n if si[hostname][\"node_status\"] != 0 or si[hostname][\"in_cluster\"] == False:\n continue\n if vol_name and (vol_name in si[hostname][\"volume_list\"]):\n continue\n anl.append(hostname)\n except Exception, e:\n return None, 'Error building allowable node list: %s' % str(e)\n else:\n return anl, None",
"def get_allowed_topologies(user):\n try:\n up = user.get_profile()\n except AttributeError:\n return db.Topology.objects.none()\n\n if user.has_perm(\"vnswww.topology_use_any\"):\n # We can view and use any templates\n topos = db.Topology.objects.filter()\n else:\n q_own = Q(owner=user)\n q_permitted = Q(allowed_users=user)\n q_org = Q(org=user.get_profile().org)\n q_public = Q(public=True)\n if user.has_perm(\"vnswww.topology_use_org\"):\n print \"Allowed all topos in own org\"\n # We can view and use any from the user's organization\n topos = db.Topology.objects.filter(q_permitted | q_org | q_own)\n else:\n print \"NOT allowed all topos in own org\"\n # We can view any from our own organization which are protected\n topos = db.Topology.objects.filter(q_permitted | q_own)\n\n return topos",
"def allowed_volumes(context, requested_volumes, size):\n project_id = context.project_id\n context = context.elevated()\n size = int(size)\n requested_gigabytes = requested_volumes * size\n used_volumes, used_gigabytes = db.volume_data_get_for_project(context,\n project_id)\n quota = get_project_quotas(context, project_id)\n allowed_volumes = _get_request_allotment(requested_volumes, used_volumes,\n quota['volumes'])\n allowed_gigabytes = _get_request_allotment(requested_gigabytes,\n used_gigabytes,\n quota['gigabytes'])\n allowed_volumes = min(allowed_volumes,\n int(allowed_gigabytes // size))\n return min(requested_volumes, allowed_volumes)",
"def volumes(self):",
"def dvs_volume(self):\n self.show_step(1)\n self.show_step(2)\n self.env.revert_snapshot(\"ready_with_5_slaves\")\n plugin.install_dvs_plugin(self.ssh_manager.admin_ip)\n\n self.show_step(3)\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=DEPLOYMENT_MODE,\n settings={\n \"net_provider\": 'neutron',\n \"net_segment_type\": NEUTRON_SEGMENT_TYPE\n }\n )\n self.show_step(4)\n plugin.enable_plugin(cluster_id, self.fuel_web)\n\n self.show_step(5)\n self.show_step(6)\n self.show_step(7)\n self.fuel_web.update_nodes(cluster_id,\n {'slave-01': ['controller'],\n 'slave-02': ['compute'],\n 'slave-03': ['cinder'],\n 'slave-04': ['cinder-vmware'],\n 'slave-05': ['compute-vmware']})\n\n self.show_step(8)\n self.show_step(9)\n logger.info('Configure VMware vCenter Settings.')\n target_node_2 = self.node_name('slave-05')\n self.fuel_web.vcenter_configure(cluster_id,\n target_node_2=target_node_2,\n multiclusters=True)\n\n self.show_step(10)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.fuel_web.run_ostf(cluster_id=cluster_id, test_sets=['smoke'])\n\n # Create connection to openstack\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n # Get default security group\n _s_groups = os_conn.neutron.list_security_groups()['security_groups']\n _srv_tenant = os_conn.get_tenant(SERVTEST_TENANT).id\n default_sg = [sg for sg in _s_groups\n if sg['tenant_id'] == _srv_tenant and\n sg['name'] == 'default'][0]\n\n self.show_step(11)\n network = os_conn.nova.networks.find(label=self.inter_net_name)\n openstack.create_instances(\n os_conn=os_conn,\n nics=[{'net-id': network.id}],\n vm_count=1,\n security_groups=[default_sg['name']])\n openstack.verify_instance_state(os_conn)\n\n self.show_step(12)\n volume_vcenter = openstack.create_volume(os_conn, 'vcenter-cinder')\n volume_nova = openstack.create_volume(os_conn, 'nova')\n instances = os_conn.nova.servers.list()\n _az = 'OS-EXT-AZ:availability_zone'\n instance_vcenter = [inst for inst in instances\n if inst.to_dict()[_az] == 'vcenter'][0]\n instance_nova = [inst for inst in instances\n if inst.to_dict()[_az] == 'nova'][0]\n\n self.show_step(13)\n os_conn.attach_volume(volume_vcenter, instance_vcenter)\n os_conn.attach_volume(volume_nova, instance_nova)\n\n self.show_step(14)\n assert_true(os_conn.cinder.volumes.get(volume_nova.id).status ==\n 'in-use')\n\n assert_true(os_conn.cinder.volumes.get(volume_vcenter.id).status ==\n 'in-use')",
"def allowed_topologytemplate_access_create(user):\n return user.has_perm(\"vnswww.add_topologytemplate\")",
"def volume(nodes, graph):\n ###TODO\n pass",
"def min_system_resources(node):\n\n min_sys_res = True\n\n # CPUs\n if \"layout\" in node[\"cpu\"]:\n total_cpus = len(node[\"cpu\"][\"layout\"])\n if total_cpus < 2:\n print(\n \"\\nThere is only {} CPU(s) available on this system. \"\n \"This is not enough to run VPP.\".format(total_cpus)\n )\n min_sys_res = False\n\n # System Memory\n if (\n \"free\" in node[\"hugepages\"]\n and \"memfree\" in node[\"hugepages\"]\n and \"size\" in node[\"hugepages\"]\n ):\n free = node[\"hugepages\"][\"free\"]\n memfree = float(node[\"hugepages\"][\"memfree\"].split(\" \")[0])\n hugesize = float(node[\"hugepages\"][\"size\"].split(\" \")[0])\n\n memhugepages = MIN_TOTAL_HUGE_PAGES * hugesize\n percentmemhugepages = (memhugepages / memfree) * 100\n if free is \"0\" and percentmemhugepages > MAX_PERCENT_FOR_HUGE_PAGES:\n print(\n \"\\nThe System has only {} of free memory. You will not \"\n \"be able to allocate enough Huge Pages for VPP.\".format(\n int(memfree)\n )\n )\n min_sys_res = False\n\n return min_sys_res",
"def test_volumes_get(self):\n pass",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumeArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def setupVolumeNodeViewLayout(self):\n layoutNodes = slicer.mrmlScene.GetNodesByClass('vtkMRMLLayoutNode')\n layoutNodes.SetReferenceCount(layoutNodes.GetReferenceCount()-1)\n layoutNodes.InitTraversal()\n layoutNode = layoutNodes.GetNextItemAsObject()\n layoutNode.SetViewArrangement(slicer.vtkMRMLLayoutNode.SlicerLayoutTwoOverTwoView)",
"def test_create_hyperflex_cluster_network_policy(self):\n pass",
"def deploy_ocp(self, log_cli_level='DEBUG'):\n super(AWSIPI, self).deploy_ocp(log_cli_level)\n if not self.ocs_operator_deployment:\n volume_size = int(\n config.ENV_DATA.get('device_size', defaults.DEVICE_SIZE)\n )\n self.add_volume(volume_size)",
"def is_sys(self):\n for lv in getattr(self, 'logical_volumes', []):\n if lv.is_sys():\n return True\n return False",
"def test_create_hyperflex_cluster_storage_policy(self):\n pass",
"def list_volumes(self):\n print '# Listing existing volumes'\n self.compute.list_volumes()",
"def dvs_vcenter_security(self):\n # constants\n wait_to_update_rules_on_dvs_ports = 30\n\n self.show_step(1)\n self.env.revert_snapshot(\"dvs_vcenter_systest_setup\")\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n tenant = os_conn.get_tenant(SERVTEST_TENANT)\n\n self.show_step(2)\n net_1 = os_conn.create_network(\n network_name=self.net_data[0].keys()[0],\n tenant_id=tenant.id)['network']\n\n subnet = os_conn.create_subnet(subnet_name=net_1['name'],\n network_id=net_1['id'],\n cidr=self.net_data[0]['net_1'],\n ip_version=4)\n\n logger.info(\"Check that network is created.\")\n assert_true(os_conn.get_network(net_1['name'])['id'] == net_1['id'])\n\n logger.info(\"Add net_1 to default router\")\n router = os_conn.get_router(os_conn.get_network(self.ext_net_name))\n os_conn.add_router_interface(router_id=router[\"id\"],\n subnet_id=subnet[\"id\"])\n\n self.show_step(3)\n openstack.create_instances(os_conn=os_conn,\n nics=[{'net-id': net_1['id']}],\n vm_count=1)\n openstack.verify_instance_state(os_conn)\n\n self.show_step(4)\n net_1 = os_conn.nova.networks.find(label=self.inter_net_name)\n openstack.create_instances(os_conn=os_conn,\n nics=[{'net-id': net_1.id}],\n vm_count=1)\n openstack.verify_instance_state(os_conn)\n\n # Remove default security group\n srv_list = os_conn.get_servers()\n for srv in srv_list:\n srv.remove_security_group(srv.security_groups[0]['name'])\n os_conn.goodbye_security()\n\n self.show_step(5)\n sg1 = os_conn.nova.security_groups.create('SG1', \"descr\")\n self.show_step(6)\n self.tcp[\"security_group_rule\"][\"security_group_id\"] = sg1.id\n os_conn.neutron.create_security_group_rule(self.tcp)\n\n self.show_step(7)\n sg2 = os_conn.nova.security_groups.create('SG2', \"descr\")\n self.show_step(8)\n self.icmp[\"security_group_rule\"][\"security_group_id\"] = sg2.id\n os_conn.neutron.create_security_group_rule(self.icmp)\n\n logger.info(\"Attach SG_1 and SG2 to instances\")\n for srv in srv_list:\n srv.add_security_group(sg1.id)\n srv.add_security_group(sg2.id)\n\n fip = openstack.create_and_assign_floating_ips(os_conn, srv_list)\n\n self.show_step(9)\n ip_pair = dict.fromkeys(fip)\n for key in ip_pair:\n ip_pair[key] = [value for value in fip if key != value]\n openstack.check_connection_vms(ip_pair)\n self.show_step(10)\n openstack.check_connection_vms(ip_pair, command='ssh')\n\n self.show_step(11)\n _sg_rules = os_conn.neutron.list_security_group_rules()[\n 'security_group_rules']\n sg_rules = [sg_rule for sg_rule in _sg_rules\n if sg_rule['security_group_id'] in [sg1.id, sg2.id]]\n for rule in sg_rules:\n os_conn.neutron.delete_security_group_rule(rule['id'])\n\n time.sleep(wait_to_update_rules_on_dvs_ports)\n\n self.show_step(12)\n for ip in fip:\n try:\n openstack.get_ssh_connection(\n ip, self.instance_creds[0], self.instance_creds[1])\n except Exception as e:\n logger.info('{}'.format(e))\n\n self.show_step(13)\n self.tcp[\"security_group_rule\"][\"security_group_id\"] = sg2.id\n os_conn.neutron.create_security_group_rule(self.tcp)\n\n self.tcp[\"security_group_rule\"][\"direction\"] = \"egress\"\n os_conn.neutron.create_security_group_rule(self.tcp)\n\n self.show_step(14)\n openstack.check_connection_vms(\n ip_pair, command='ssh', timeout=wait_to_update_rules_on_dvs_ports)\n\n self.show_step(15)\n openstack.check_connection_vms(ip_pair, result_of_command=1)\n\n self.show_step(16)\n 
self.icmp[\"security_group_rule\"][\"security_group_id\"] = sg1.id\n os_conn.neutron.create_security_group_rule(self.icmp)\n\n self.icmp[\"security_group_rule\"][\"direction\"] = \"egress\"\n os_conn.neutron.create_security_group_rule(self.icmp)\n\n self.show_step(17)\n openstack.check_connection_vms(\n ip_pair, timeout=wait_to_update_rules_on_dvs_ports)\n\n self.show_step(18)\n self.show_step(19)\n self.show_step(20)\n\n self.show_step(21)\n srv_list = os_conn.get_servers()\n for srv in srv_list:\n for sg in srv.security_groups:\n srv.remove_security_group(sg['name'])\n\n self.show_step(22)\n for srv in srv_list:\n srv.add_security_group('default')\n\n self.show_step(23)\n openstack.check_connection_vms(\n ip_pair, timeout=wait_to_update_rules_on_dvs_ports)\n\n self.show_step(24)\n openstack.check_connection_vms(ip_pair, command='ssh')",
"def test_get_node_partitions(self):\n pass",
"def deploy_group(group_name):\n group=get_entity_by_cond(Group,'group_name==\"%s\"'%group_name)\n nodes=session.query(Node).filter('group_name==\"%s\"'%group_name)\n\n machines_filename = os.path.join(\n spl.config.paths['headnode-config'],\n str(group.network_id),\n 'machines.txt',\n )\n\n machines_dirname = os.path.dirname(machines_filename)\n if not os.path.isdir(machines_dirname):\n os.mkdir(machines_dirname)\n\n with open(machines_filename, 'w') as f:\n for node in nodes:\n f.write((\"%s %s\\n\"%(node.mac_addr,node.manage_ip)))\n\n\n switches=[]\n ports='';\n for node in nodes:\n switches.append(node.port.switch_id)\n ports+=str(node.port.port_no)+','\n #Check all the nodes in the group are connected to the same switch\n switch_id=check_same_non_empty_list(switches)\n if switch_id==False:\n # TODO: raise an exception\n print \"error: ports not in same switch\"\n return\n\n switch=get_entity_by_cond(Switch,'switch_id==%d'%switch_id)\n\n import cisco_snmp\n switch_drivers = {'cisco_snmp.py':cisco_snmp}\n driver = switch_drivers[switch.script]\n\n\n\n\n print group.network_id\n driver.make_remove_vlans(str(group.network_id),True)\n print 'ports'+ports\n driver.edit_ports_on_vlan(ports,str(group.network_id),True)\n\n os.system(('../vm-vlan up %s %s' % (group.network_id,group.vm_name)))\n\n group.deployed = True",
"def test_create_hyperflex_node_config_policy(self):\n pass",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumePatchArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def creation_validation(**_):\n\n for property_key in constants.VOLUME_REQUIRED_PROPERTIES:\n utils.validate_node_property(property_key, ctx.node.properties)\n\n volume_object = _get_volumes_from_id(utils.get_resource_id())\n\n if ctx.node.properties['use_external_resource'] and not volume_object:\n raise NonRecoverableError(\n 'External resource, but the supplied '\n 'EBS volume does not exist in the account.')\n\n if not ctx.node.properties['use_external_resource'] and volume_object:\n raise NonRecoverableError(\n 'Not external resource, but the supplied '\n 'EBS volume exists in the account.')",
"def _get_pvds(self):\n pvds = []\n for path in self.paths():\n if path.reqstate == ReqState.enabled and path.provider.name not in pvds:\n pvds += [path.provider.name]\n return pvds"
]
| [
"0.5891469",
"0.5813511",
"0.58032465",
"0.56135035",
"0.54997087",
"0.5374481",
"0.5219399",
"0.51760525",
"0.5159506",
"0.5103449",
"0.49977416",
"0.4946259",
"0.49090728",
"0.4903071",
"0.48948416",
"0.48678818",
"0.48345947",
"0.48270988",
"0.48091453",
"0.4797037",
"0.47909364",
"0.47896647",
"0.4788123",
"0.4784509",
"0.4778314",
"0.47700468",
"0.47460952",
"0.47460952",
"0.47341287",
"0.4709187"
]
| 0.5830229 | 1 |
DriverHash is a hash of the driver specification | def driver_hash(self) -> Optional[int]:
return pulumi.get(self, "driver_hash") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def calc_statistics_hash(self) -> bytes:\n return b\"somehash\"",
"def hash(self):\n hash_length = self.conf.get(\"hash_length\", 7)\n if hash_length != 0:\n return self.spec.dag_hash(length=hash_length)\n return None",
"def hash(self) -> str:\n return pulumi.get(self, \"hash\")",
"def get_hash(self):\r\n return",
"def get_hash(self, params):\n return self.sha",
"def hash(self) -> bytes:",
"def hash(self):\n return self.block_header.hash",
"def get_hash(self):\n return self.__hash",
"def _build_driver_dict(self):\n self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, False)",
"def hash(self):\n return self._hash",
"def get_hash(self):\n return freeze_dict(self.get_hash_params())",
"def hash(self):\n return Hash.dhash(bytes(self))",
"def get_hash(self) -> str:\n return self.__hash.hexdigest()",
"def calc_statistics_hash(self) -> bytes:\n raise NotImplementedError()",
"def hash_key(self):",
"def hash(self) -> str:\r\n ...",
"def get_hash(self):\r\n block_data = self.prev_hash\r\n block_data += bytearray(struct.pack(\"!f\", self.time))\r\n block_data += self.user_id.encode()\r\n block_data += self.signature.encode()\r\n block_data += self.choice.encode()\r\n\r\n digest = hashes.Hash(hashes.SHA256())\r\n digest.update(block_data)\r\n return digest.finalize()",
"def current_hash(self):",
"def hash_string(self):\n return self._hash_string",
"def _calculate_hash(self) -> str:\n data_str = str(self.version) + str(self.index) + self.pre_hash + str(self.timestamp) + str(self.data)\n return sha256(data_str.encode('utf-8')).hexdigest()",
"def hash_algo(self) -> str:\n return self._hash_algo",
"def calc_info_hash(self):\n return \"infohash\"",
"def get_hash(self):\n return \"%03d_%03d_%03d\" % (self.chest_region, self.chest_type, self.feature_type)",
"def hash(self):\n return xxhash.xxh64(self._pwm_to_str(3)).hexdigest()",
"def _Hash(self):\n out = [self.key.string_id()]\n properties = self._PropList()\n for prop in properties:\n out.append(unicode(getattr(self, prop, '')))\n to_hash = ''.join(out)\n return hashlib.md5(to_hash.encode('utf-8')).hexdigest()",
"def get_info_hash(self):\n return self.info_hash",
"def get_hash(self) -> str:\n if self.call_hash:\n # Derived state from a call_node.\n return hash_struct([\"Handle\", self.fullname, \"call_hash\", self.key, self.call_hash])\n else:\n # Initial state.\n return hash_struct([\"Handle\", self.fullname, \"init\", self.key, self.args, self.kwargs])",
"def _Hash(self):\n fullhash = util.PrefixHash(self.key_bytes)\n return util.Base64WSEncode(fullhash[:constants.KEY_HASH_SIZE])",
"def distro_hash(self):\n return self._distro_hash",
"def get_hash_from_model(model):\n hyperparams = get_hyperparams_from_model(model)\n model_hash = hyperparams['hash']\n return model_hash"
]
| [
"0.61757976",
"0.6066933",
"0.60375184",
"0.5997764",
"0.594634",
"0.5918709",
"0.5837613",
"0.5741857",
"0.5730527",
"0.57251143",
"0.571917",
"0.56923187",
"0.56780314",
"0.5644666",
"0.56291103",
"0.5626307",
"0.55769527",
"0.55643994",
"0.5558305",
"0.55445856",
"0.55284196",
"0.5527374",
"0.5520205",
"0.54801124",
"0.54710567",
"0.5457799",
"0.5451174",
"0.54487157",
"0.54481775",
"0.54448426"
]
| 0.75255907 | 0 |
CSIUnitySpec defines the desired state of CSIUnity | def __init__(__self__, *,
driver: 'outputs.CSIUnitySpecDriver'):
pulumi.set(__self__, "driver", driver) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__, *,\n common: 'outputs.CSIUnitySpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIUnitySpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIUnitySpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIUnitySpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def driver(self) -> 'outputs.CSIUnitySpecDriver':\n return pulumi.get(self, \"driver\")",
"def _dumbCSI(self):\n # get my renderer\n renderer = self.renderer\n # build the 3 bit color generator\n yield from renderer.set(name=\"csi3\", value=\"\")\n yield from renderer.set(name=\"csi8\", value=\"\")\n yield from renderer.set(name=\"csi24\", value=\"\")\n\n # all done\n return",
"def controller(self) -> Optional['outputs.CSIUnitySpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIUnitySpecDriverControllerEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIUnitySpecDriverControllerTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def common(self) -> 'outputs.CSIUnitySpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def starts_with_unity(self):\n raise NotImplementedError",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIUnitySpecDriverCommonEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIUnitySpecDriverCommonTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n driver: 'outputs.CSIVXFlexOSSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n name: str,\n value: Optional[str] = None,\n value_from: Optional['outputs.CSIUnitySpecDriverControllerEnvsValueFrom'] = None):\n pulumi.set(__self__, \"name\", name)\n if value is not None:\n pulumi.set(__self__, \"value\", value)\n if value_from is not None:\n pulumi.set(__self__, \"value_from\", value_from)",
"def controller_status(self) -> Optional['outputs.CSIUnityStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"async def test_vocolinc_vp3_setup(hass):\n accessories = await setup_accessories_from_file(hass, \"vocolinc_vp3.json\")\n await setup_test_accessories(hass, accessories)\n\n await assert_devices_and_entities_created(\n hass,\n DeviceTestInfo(\n unique_id=HUB_TEST_ACCESSORY_ID,\n name=\"VOCOlinc-VP3-123456\",\n model=\"VP3\",\n manufacturer=\"VOCOlinc\",\n sw_version=\"1.101.2\",\n hw_version=\"1.0.3\",\n serial_number=\"EU0121203xxxxx07\",\n devices=[],\n entities=[\n EntityTestInfo(\n entity_id=\"switch.vocolinc_vp3_123456\",\n friendly_name=\"VOCOlinc-VP3-123456\",\n unique_id=\"homekit-EU0121203xxxxx07-48\",\n state=\"on\",\n ),\n EntityTestInfo(\n entity_id=\"sensor.vocolinc_vp3_123456_power\",\n friendly_name=\"VOCOlinc-VP3-123456 Power\",\n unique_id=\"homekit-EU0121203xxxxx07-aid:1-sid:48-cid:97\",\n unit_of_measurement=POWER_WATT,\n capabilities={\"state_class\": SensorStateClass.MEASUREMENT},\n state=\"0\",\n ),\n ],\n ),\n )",
"def __init__(__self__, *,\n name: str,\n value: Optional[str] = None,\n value_from: Optional['outputs.CSIUnitySpecDriverCommonEnvsValueFrom'] = None):\n pulumi.set(__self__, \"name\", name)\n if value is not None:\n pulumi.set(__self__, \"value\", value)\n if value_from is not None:\n pulumi.set(__self__, \"value_from\", value_from)",
"def __init__(__self__, *,\n common: 'outputs.CSIIsilonSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIIsilonSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIIsilonSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def node(self) -> Optional['outputs.CSIUnitySpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def __init__(__self__, *,\n name: str,\n value: Optional[str] = None,\n value_from: Optional['outputs.CSIUnitySpecDriverSideCarsEnvsValueFrom'] = None):\n pulumi.set(__self__, \"name\", name)\n if value is not None:\n pulumi.set(__self__, \"value\", value)\n if value_from is not None:\n pulumi.set(__self__, \"value_from\", value_from)",
"def uCSIsOpticalCharacterRecognition(code):\n ret = libxml2mod.xmlUCSIsOpticalCharacterRecognition(code)\n return ret",
"def __init__(__self__, *,\n common: 'outputs.CSIVXFlexOSSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIVXFlexOSSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIVXFlexOSSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def __init__(__self__, *,\n common: 'outputs.CSIPowerMaxSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIPowerMaxSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIPowerMaxSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def __init__(__self__, *,\n controller_status: Optional['outputs.CSIUnityStatusControllerStatus'] = None,\n driver_hash: Optional[int] = None,\n last_update: Optional['outputs.CSIUnityStatusLastUpdate'] = None,\n node_status: Optional['outputs.CSIUnityStatusNodeStatus'] = None,\n state: Optional[str] = None):\n if controller_status is not None:\n pulumi.set(__self__, \"controller_status\", controller_status)\n if driver_hash is not None:\n pulumi.set(__self__, \"driver_hash\", driver_hash)\n if last_update is not None:\n pulumi.set(__self__, \"last_update\", last_update)\n if node_status is not None:\n pulumi.set(__self__, \"node_status\", node_status)\n if state is not None:\n pulumi.set(__self__, \"state\", state)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIUnitySpecDriverNodeEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIUnitySpecDriverNodeTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n driver: 'outputs.CSIIsilonSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def __init__(__self__, *,\n name: str,\n value: Optional[str] = None,\n value_from: Optional['outputs.CSIUnitySpecDriverNodeEnvsValueFrom'] = None):\n pulumi.set(__self__, \"name\", name)\n if value is not None:\n pulumi.set(__self__, \"value\", value)\n if value_from is not None:\n pulumi.set(__self__, \"value_from\", value_from)",
"def status(cls):\n return {'type': 'Emulated camera'}",
"def _ansiCSI(self):\n # get my renderer\n renderer = self.renderer\n # build the 3 bit color generator\n yield from renderer.setq(name=\"csi3\", value=f'\"$(esc)[$(1)m\"')\n yield from renderer.setq(name=\"csi8\", value=f'\"$(esc)[$(1);5;$(2)m\"')\n yield from renderer.setq(name=\"csi24\", value=f'\"$(esc)[$(1);2;$(2);$(3);$(4)m\"')\n\n # all done\n return",
"def _component_specs(self):\n specs = dict(pretransformed_input=self._input_spec)\n if self._transform_is_composite:\n specs['bijector'] = self.transform_or_spec\n return specs",
"def _component_specs(self):\n specs = dict(pretransformed_input=self._input_spec)\n if self._transform_is_composite:\n specs['transform_fn'] = self.transform_or_spec\n if self._also_track_spec is not None:\n specs['also_track'] = self._also_track_spec\n return specs",
"def vga_session(self):\n gpu_driver = None\n if self.user['gpu_driver'] is True:\n\n # NVIDIA controller - append packages\n if 'nvidia' in self.user['vga_controller'].lower():\n\n if self.user['gpu_proprietary'] is True:\n hardvideo = self.packages['hardvideo'][3]\n\n if self.user['kernel'] == 'linux':\n gpu_driver = self.packages['gpu_driver'][3]\n\n elif self.user['kernel'] == 'linux-lts':\n gpu_driver = self.packages['gpu_driver'][4]\n\n else:\n gpu_driver = self.packages['gpu_driver'][5]\n\n else:\n gpu_driver = self.packages['gpu_driver'][2]\n hardvideo = self.packages['hardvideo'][2]\n\n # AMD Controller - append packages\n elif ('ATI' in self.user['vga_controller']) or \\\n ('AMD' in self.user['vga_controller']):\n\n gpu_driver = self.packages['gpu_driver'][1]\n hardvideo = self.packages['hardvideo'][1]\n\n # Intel controller - append packages\n elif 'intel' in self.user['vga_controller'].lower():\n gpu_driver = self.packages['gpu_driver'][0]\n hardvideo = self.packages['hardvideo'][0]\n\n # Unreconized controller - append packages\n else:\n gpu_driver = self.packages['gpu_driver'][6]\n hardvideo = self.packages['hardvideo'][4]\n\n # Set model with corresponding driver\n self.user['gpu'] = {'model': self.user['vga_controller'],\n 'driver': gpu_driver,\n 'hardvideo': self.user['hardvideo']}\n\n # Set hardware video acceleration\n if self.user['hardvideo'] is True:\n self.user['gpu']['hardvideo'] = hardvideo"
]
| [
"0.6342253",
"0.58682024",
"0.54167765",
"0.53242034",
"0.5032858",
"0.4980972",
"0.49724",
"0.4922244",
"0.4900498",
"0.4853054",
"0.48514065",
"0.48020718",
"0.47460636",
"0.47310558",
"0.4716699",
"0.4690882",
"0.46598062",
"0.46575856",
"0.46110857",
"0.4596909",
"0.4586633",
"0.45555335",
"0.45228636",
"0.45215985",
"0.45061672",
"0.44892782",
"0.44856593",
"0.44539818",
"0.4449657",
"0.4438326"
]
| 0.64504963 | 0 |
Driver is the specification for the CSI Unity Driver | def driver(self) -> 'outputs.CSIUnitySpecDriver':
return pulumi.get(self, "driver") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__, *,\n driver: 'outputs.CSIUnitySpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self) -> 'outputs.CSIVXFlexOSSpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIVXFlexOSSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n common: 'outputs.CSIUnitySpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIUnitySpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIUnitySpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIUnitySpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerStoreSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerMaxSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self) -> 'outputs.CSIPowerMaxSpecDriver':\n return pulumi.get(self, \"driver\")",
"def driver(self) -> 'outputs.CSIIsilonSpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIIsilonSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def vga_session(self):\n gpu_driver = None\n if self.user['gpu_driver'] is True:\n\n # NVIDIA controller - append packages\n if 'nvidia' in self.user['vga_controller'].lower():\n\n if self.user['gpu_proprietary'] is True:\n hardvideo = self.packages['hardvideo'][3]\n\n if self.user['kernel'] == 'linux':\n gpu_driver = self.packages['gpu_driver'][3]\n\n elif self.user['kernel'] == 'linux-lts':\n gpu_driver = self.packages['gpu_driver'][4]\n\n else:\n gpu_driver = self.packages['gpu_driver'][5]\n\n else:\n gpu_driver = self.packages['gpu_driver'][2]\n hardvideo = self.packages['hardvideo'][2]\n\n # AMD Controller - append packages\n elif ('ATI' in self.user['vga_controller']) or \\\n ('AMD' in self.user['vga_controller']):\n\n gpu_driver = self.packages['gpu_driver'][1]\n hardvideo = self.packages['hardvideo'][1]\n\n # Intel controller - append packages\n elif 'intel' in self.user['vga_controller'].lower():\n gpu_driver = self.packages['gpu_driver'][0]\n hardvideo = self.packages['hardvideo'][0]\n\n # Unreconized controller - append packages\n else:\n gpu_driver = self.packages['gpu_driver'][6]\n hardvideo = self.packages['hardvideo'][4]\n\n # Set model with corresponding driver\n self.user['gpu'] = {'model': self.user['vga_controller'],\n 'driver': gpu_driver,\n 'hardvideo': self.user['hardvideo']}\n\n # Set hardware video acceleration\n if self.user['hardvideo'] is True:\n self.user['gpu']['hardvideo'] = hardvideo",
"def driverName(self):\n return Cbytestring2Python(\n self.Rf2Scor.mVehicles[self.__playersDriverNum()].mDriverName)",
"def controller(self) -> Optional['outputs.CSIUnitySpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def __init__(__self__, *,\n driver: pulumi.Input[str]):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self):\n driver = c_int()\n ckresult(_dll.FMOD_System_GetDriver(self._ptr, byref(driver)))\n return driver.value",
"def DRIVER():\n return \"podman\"",
"def disk_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileDiskCSIDriverArgs']]:\n return pulumi.get(self, \"disk_csi_driver\")",
"def blob_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileBlobCSIDriverArgs']]:\n return pulumi.get(self, \"blob_csi_driver\")",
"def common(self) -> 'outputs.CSIUnitySpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def driver(self):\n \n return self.__driver",
"def get_driver_info(self, aaidee):\n name = create_string_buffer(256)\n guid = GUID()\n system_rate = c_int()\n speaker_mode = c_int()\n channels = c_int()\n ckresult(\n _dll.FMOD_System_GetDriverInfo(\n self._ptr,\n aaidee,\n name,\n 256,\n byref(guid),\n byref(system_rate),\n byref(speaker_mode),\n byref(channels),\n )\n )\n return so(\n name=name.value,\n guid=guid,\n system_rate=system_rate.value,\n speaker_mode=speaker_mode.value,\n speaker_mode_channels=channels.value,\n )",
"def __init__(__self__, *,\n common: 'outputs.CSIVXFlexOSSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIVXFlexOSSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIVXFlexOSSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def __init__(__self__, *,\n common: 'outputs.CSIPowerMaxSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIPowerMaxSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIPowerMaxSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def controller(self) -> Optional['outputs.CSIVXFlexOSSpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def __init__(self, driver):\n\n # Set the command handler attributes\n self.name = driver.id \n self.driver = driver",
"def driver(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"driver\")",
"def driver(self):\n return '<static-vmedia>'",
"def node(self) -> Optional['outputs.CSIUnitySpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def file_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileFileCSIDriverArgs']]:\n return pulumi.get(self, \"file_csi_driver\")",
"def __init__(__self__, *,\n driver: Optional[pulumi.Input[str]] = None):\n if driver is not None:\n pulumi.set(__self__, \"driver\", driver)"
]
| [
"0.77165973",
"0.6981358",
"0.68887204",
"0.6443961",
"0.6386162",
"0.63490826",
"0.6150103",
"0.60056",
"0.5816774",
"0.57003725",
"0.56489235",
"0.5635592",
"0.5631918",
"0.5620819",
"0.5620501",
"0.5566235",
"0.5545292",
"0.55352056",
"0.5488729",
"0.547208",
"0.54156655",
"0.5393467",
"0.5391813",
"0.53618085",
"0.5341825",
"0.5340554",
"0.53360265",
"0.53066313",
"0.52815455",
"0.524419"
]
| 0.78425765 | 0 |
ConfigVersion is the configuration version of the driver | def config_version(self) -> str:
return pulumi.get(self, "config_version") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def original_config_version(self):\n return self._get_param(\"ConfigVersion\")",
"def get_config_version(config):\n return 2 if is_v2_config(config) else 1",
"def configversion(self, args):\n print(CONFIG_VERSION)",
"def productVersion( self ):\n return Config.ProductVersion",
"def configure_for_version(version, config=config):\n if version == \"red\":\n attrs = configure_for_pokered(config)\n elif version == \"crystal\":\n attrs = configure_for_pokecrystal(config)\n else:\n # TODO: pick a better exception\n raise Exception(\n \"Can't configure for this version.\"\n )\n\n for (key, value) in attrs.iteritems():\n setattr(config, key, value)\n\n # not really needed since it's modifying the same object\n return config",
"def getDriverNameVersion(self):\n return self.driver_name, self.driver_version",
"def get_version(self):\n return self.cur_config['version']['name']",
"def get_version():\n return \".\".join([str(i) for i in config[\"version\"]])",
"def get_cbs_version(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"server_version\"]",
"def getCurrentVersion():\n f_version = configManagement.currentVersion()\n return f_version",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def configure_driver(self, config):\n raise NotImplementedError",
"def _get_cfg_v(self):\n if CONFIG_VERSION_KEY in self[CONFIG_KEY]:\n v_str = self[CONFIG_KEY][CONFIG_VERSION_KEY]\n if not isinstance(v_str, str):\n raise InvalidConfigFileException(\"{} must be a string\".\n format(CONFIG_VERSION_KEY))\n v_bundle = v_str.split(\".\")\n assert len(v_bundle) == 3, \\\n InvalidConfigFileException(\"Version string is not tripartite\")\n try:\n v_bundle = list(map(int, v_bundle))\n except ValueError:\n raise InvalidConfigFileException(\"Version string elements are \"\n \"not coercible to integers\")\n if v_bundle[0] < 2:\n if SAMPLE_MODS_KEY in self[CONFIG_KEY]:\n raise InvalidConfigFileException(\n \"Project configuration file ({p}) subscribes to {c} \"\n \">= 2.0.0, since '{m}' section is defined. Set {c} to \"\n \"2.0.0 in your config\".format(p=self[CONFIG_FILE_KEY],\n c=CONFIG_VERSION_KEY,\n m=SAMPLE_MODS_KEY))\n else:\n self._format_cfg()\n return [\"2\", \"0\", \"0\"]\n return list(map(str, v_bundle))\n else:\n self._format_cfg()\n return [\"2\", \"0\", \"0\"]",
"def upgrade_config_format(self):\n # migrate older config files\n if self.version == 1:\n # capture_init()\n self.version = 3\n\n # If token exists check still valid and can login\n if self.token and self.token != DEFAULT_TOKEN:\n from .api import ping\n\n with suppress(Exception):\n self.username = ping(config=self, cli_login=True, verbose=False)\n\n self.save()\n elif self.version == 2:\n # re-init against new server\n # capture_init()\n self.version = 3\n self.save()",
"def config(self) -> 'outputs.CSIPowerMaxRevProxySpecConfig':\n return pulumi.get(self, \"config\")",
"def version(self):\n return 1",
"def get_version(configuration):\n return hashlib.md5(configuration.SerializeToString()).hexdigest()",
"def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict) -> \"DriverPlatformInterface\":\n raise NotImplementedError",
"def version(self):\n pass",
"def version(self):\n pass",
"def version(self):\n pass",
"def get_sg_version(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"sync_gateway_version\"]",
"def kernel_version(self, kernel_version):\n\n self._kernel_version = kernel_version",
"def get_version(cls):\n if Config.ENV_TYPE == PRD:\n return Config.version + \"/\" + Config.build\n return Config.version + \"/\" + Config.build + \"/\" + Config.generate + ' (' + Config.ENV_NAME + ')'",
"def set_php_version(self, version: str) -> Session:\n data = {\n \"version\": version\n }\n\n return self.configure(data)",
"def version(self):\n report(f\"spd-conf {buildconfig.VERSION}\\n\")\n report(_(\"\"\"Copyright (C) %d-%d Brailcom, o.p.s.\nThis is free software; you can redistribute it and/or modify it\nunder the terms of the GNU General Public License as published by\nthe Free Software Foundation; either version 2, or (at your option)\nany later version. Please see COPYING for more details.\\n\\n\"\"\") % \\\n (2002, 2012))",
"def validate_project_version(config: Dict[str, Any]) -> None:\n spacy_version = config.get(\"spacy_version\", None)\n if spacy_version and not is_compatible_version(about.__version__, spacy_version):\n err = (\n f\"The {PROJECT_FILE} specifies a spaCy version range ({spacy_version}) \"\n f\"that's not compatible with the version of spaCy you're running \"\n f\"({about.__version__}). You can edit version requirement in the \"\n f\"{PROJECT_FILE} to load it, but the project may not run as expected.\"\n )\n msg.fail(err, exits=1)",
"def detect_version(conn):\n try:\n with conn.begin():\n db_version = conn.scalar(text(\n \"SELECT version FROM configuration\"))\n except exc.ProgrammingError:\n with conn.begin():\n packages_exists = bool(conn.scalar(text(\n \"SELECT 1 FROM pg_catalog.pg_tables \"\n \"WHERE schemaname = 'public' AND tablename = 'packages'\")))\n with conn.begin():\n statistics_exists = bool(conn.scalar(text(\n \"SELECT 1 FROM pg_catalog.pg_views \"\n \"WHERE schemaname = 'public' AND viewname = 'statistics'\")))\n with conn.begin():\n files_exists = bool(conn.scalar(text(\n \"SELECT 1 FROM pg_catalog.pg_tables \"\n \"WHERE schemaname = 'public' AND tablename = 'files'\")))\n if not packages_exists:\n # Database is uninitialized\n return None\n elif not files_exists:\n # Database is too ancient to upgrade\n raise RuntimeError(\"Database version older than 0.4; cannot upgrade\")\n elif not statistics_exists:\n return \"0.4\"\n else:\n return \"0.5\"\n else:\n return db_version",
"def provider_version(self):\n raise NotImplementedError",
"def get_version():\n return 1"
]
| [
"0.7029714",
"0.6827348",
"0.6811454",
"0.616742",
"0.61624855",
"0.590572",
"0.58411497",
"0.5696876",
"0.5662444",
"0.55812186",
"0.5544786",
"0.55088234",
"0.550328",
"0.540622",
"0.5396156",
"0.53930455",
"0.5383128",
"0.5330674",
"0.5284666",
"0.5284666",
"0.5284666",
"0.52755564",
"0.5253297",
"0.5234637",
"0.52309656",
"0.52085614",
"0.5166787",
"0.516165",
"0.51411116",
"0.5134631"
]
| 0.7043955 | 0 |
Replicas is the count of controllers for Controller plugin | def replicas(self) -> int:
return pulumi.get(self, "replicas") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_num_replicas():\n\n tf_replicator = get_tf_replicator()\n\n if tf_replicator:\n return tf_replicator.num_replicas_in_sync\n elif tf.distribute.has_strategy():\n return tf.distribute.get_strategy().num_replicas_in_sync\n else:\n # I'm assuming replicas and shards are always equal until someone tells me\n # different.\n num_replicas = tpu_function.get_tpu_context().number_of_shards\n if num_replicas:\n return num_replicas\n else:\n return 1",
"def n_replicas(self):\n if self._sampler_states is None:\n return 0\n else:\n return len(self._sampler_states)",
"def replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"replicas\")",
"def num_replicas_per_shard(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"num_replicas_per_shard\")",
"def num_replicas_per_shard(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"num_replicas_per_shard\")",
"def num_replicas_per_shard(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"num_replicas_per_shard\")",
"def num_partitions(self): # -> int:\n ...",
"def num_partitions(self): # -> None:\n ...",
"def num_shards(self) -> int:\n return self.db_nodes",
"def get_num_servers():\n return 1",
"def test_redis_increase_replica_count_usual_case():",
"def num_partitions(self): # -> Unknown:\n ...",
"def initialize_replicas(self):\n try:\n self.replicas+1\n except:\n print \"Ensemble MD Toolkit Error: Number of replicas must be \\\n defined for pattern ReplicaExchange!\"\n raise\n\n\n replicas = []\n N = self.replicas\n for k in range(N):\n r = ReplicaP(k)\n replicas.append(r)\n\n return replicas",
"def num_servos(self) -> int:\n return self._num_servos",
"def auto_config(self, num_replicas=1):\n _ = num_replicas\n return {}",
"def num_slaves(self) -> int:\n raise NotImplementedError",
"def get_sg_replicas(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"number_replicas\"]",
"def get_num_instances(self):\n return len( self.get_instances_ids() )",
"def max_replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_replicas\")",
"def max_replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_replicas\")",
"def bigcouch_quorum_count():\n return (3 if not hasattr(settings, 'BIGCOUCH_QUORUM_COUNT')\n else settings.BIGCOUCH_QUORUM_COUNT)",
"def get_control_count(cmd):\n return len(cmd.control_qubits)",
"def num_masters(self) -> int:\n raise NotImplementedError",
"def nclients(self, r):\r\n return len(self.clients(r))",
"def count(cls, client) :\n\t\ttry :\n\t\t\tobj = rewriteaction()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e",
"def client_count(request):\n return request.param",
"def retrieve_num_instances(service):\n instance_counts = service[\"instance-counts\"]\n return instance_counts[\"healthy-instances\"] + instance_counts[\"unhealthy-instances\"]",
"def num_shards(self) -> int:\n return pulumi.get(self, \"num_shards\")",
"def ready_replicas(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ready_replicas\")",
"def min_replica_count(self) -> Optional[int]:\n return pulumi.get(self, \"min_replica_count\")"
]
| [
"0.67781794",
"0.65245944",
"0.6388949",
"0.63295215",
"0.62453663",
"0.62453663",
"0.62261033",
"0.608981",
"0.60434526",
"0.60335743",
"0.60149395",
"0.6006961",
"0.58982265",
"0.5868115",
"0.58531505",
"0.58276963",
"0.5775041",
"0.56192803",
"0.56107664",
"0.56107664",
"0.5587167",
"0.5579596",
"0.55423456",
"0.5542045",
"0.5535965",
"0.55330443",
"0.5510386",
"0.5474942",
"0.54729205",
"0.5467612"
]
| 0.7079593 | 1 |
AuthSecret is the name of the credentials secret for the driver | def auth_secret(self) -> Optional[str]:
return pulumi.get(self, "auth_secret") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def client_secret(self) -> str:",
"def secret(self):\n return self._secret",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def pull_secret(self):\n return self._pull_secret",
"def pull_secret(self):\n return self._pull_secret",
"def __init__(self, auth_key, auth_secret):\n\n self._auth_key = auth_key\n self._auth_secret = auth_secret",
"def authenticate_password(self, secret=\"\"):\r\n #hexstr = binascii.b2a_hex(secret)\r\n self.sendAndRecv(\"AUTHENTICATE \\\"%s\\\"\\r\\n\"%secret)",
"def secret_key(self, val):\n self.__secret_key = val",
"def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n name: Optional[pulumi.Input[str]] = None,\n secret: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"auth_type\", 'secret')\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if secret is not None:\n pulumi.set(__self__, \"secret\", secret)",
"def auth_password(self, auth_password):\n\n self._auth_password = auth_password",
"def password(self):\n return (self._config.get(\"sasl.password\")\n or self._config.get(\"sasl.oauthbearer.client.secret\"))",
"def get_secret(name):\n config = ConfigParser()\n config.read('/srv/oclubs/secrets.ini')\n return config.get('secrets', name)",
"def _v2_auth(self, url):\n return {\"auth\": {\n \"passwordCredentials\": {\"username\": self.user,\n \"password\": self.secret}}}",
"def aws_credentials_secret_name(self) -> Optional[str]:\n return pulumi.get(self, \"aws_credentials_secret_name\")",
"def get_secret(self, secret_name):\n secret = self._sm.access_secret_version(name=secret_name).payload.data.decode()\n try:\n return json.loads(secret)\n except json.decoder.JSONDecodeError:\n return secret",
"async def read_secret(self, name: str):\n pass",
"def get_client_secret():\n\n return str(get_account().Get(GOA_ACCOUNT_OAUTH2, 'ClientSecret',\n dbus_interface=PROPERTIES))",
"def secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret\")",
"def secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret\")",
"def client_secret(self) -> str:\n return self.get_env_var(self.client_secret_var)",
"def client_secret(self) -> str:\n return self.get_env_var(self.client_secret_var)",
"def set_auth_credentials():\n import os\n from passlib.apps import custom_app_context as pwd_context\n\n os.environ[\"AUTH_USERNAME\"] = \"testme\"\n os.environ[\"AUTH_PASSWORD\"] = pwd_context.hash(\"foobar\")",
"def secret(self):\n\n return self.info.get('env', {}).get('APP_SECRET')",
"def _wrap_secret(self, val):\n return {\"SecretString\": val}",
"def secret_name(self) -> str:\n return self._secret_name",
"def secret_name(self, secret_name: str):\n\n self._secret_name = secret_name",
"def getSecret(self):\n\n with open(self._secret_file) as f:\n secret=f.readline().rstrip()\n \n return secret",
"def get_sd_auth(val, sd_auth_pillar_name=\"serverdensity\"):\n sd_pillar = __pillar__.get(sd_auth_pillar_name)\n log.debug(\"Server Density Pillar: %s\", sd_pillar)\n if not sd_pillar:\n log.error(\"Could not load %s pillar\", sd_auth_pillar_name)\n raise CommandExecutionError(\n \"{} pillar is required for authentication\".format(sd_auth_pillar_name)\n )\n\n try:\n return sd_pillar[val]\n except KeyError:\n log.error(\"Could not find value %s in pillar\", val)\n raise CommandExecutionError(\"{} value was not found in pillar\".format(val))",
"def test_secrets() -> Secrets:\n from dotenv import load_dotenv\n from os import getenv\n from pathlib import Path\n env_path = Path('.') / '.env.testing'\n load_dotenv(dotenv_path=env_path)\n return Secrets(\n google_id_token=getenv(\"GOOGLE_ID_TOKEN\"),\n google_user_id=getenv(\"GOOGLE_USER_ID\")\n )"
]
| [
"0.6322999",
"0.6228721",
"0.6138825",
"0.6138825",
"0.61205673",
"0.61205673",
"0.6077916",
"0.6013005",
"0.59475607",
"0.59419864",
"0.58220655",
"0.57813203",
"0.5767015",
"0.5762318",
"0.57585716",
"0.5713077",
"0.5689188",
"0.5688631",
"0.56837416",
"0.56837416",
"0.56768715",
"0.56768715",
"0.565918",
"0.5657281",
"0.5654369",
"0.5644473",
"0.56276417",
"0.56110996",
"0.55986995",
"0.55930156"
]
| 0.7147245 | 1 |
SideCars is the specification for CSI sidecar containers | def side_cars(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']]:
return pulumi.get(self, "side_cars") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def side_cars(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def side_cars(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def side_cars(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def side_cars(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']]:\n return pulumi.get(self, \"side_cars\")",
"def presenetCar():",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def populate_sidecar(self, **kwargs):\n\n # if it's an ecat it's Siemens\n self.sidecar_template['Manufacturer'] = 'Siemens'\n # Siemens model best guess\n self.sidecar_template['ManufacturersModelName'] = self.ecat_header.get('SERIAL_NUMBER', None)\n self.sidecar_template['TracerRadionuclide'] = self.ecat_header.get('ISOTOPE_NAME', None)\n self.sidecar_template['PharmaceuticalName'] = self.ecat_header.get('RADIOPHARAMCEUTICAL', None)\n\n # collect frame time start and populate various subheader fields\n for subheader in self.subheaders:\n self.sidecar_template['DecayCorrectionFactor'].append(subheader.get('DECAY_CORR_FCTR', None))\n self.sidecar_template['FrameTimesStart'].append(subheader.get('FRAME_START_TIME', None))\n self.sidecar_template['FrameDuration'].append(subheader.get('FRAME_DURATION', None))\n self.sidecar_template['ScaleFactor'].append(subheader.get('SCALE_FACTOR', None))\n\n # note some of these values won't be in the subheaders for the standard matrix image\n # need to make sure to clean up arrays and fields filled w/ none during pruning\n self.sidecar_template['ScatterFraction'].append(subheader.get('SCATTER_FRACTION', None))\n self.sidecar_template['PromptRate'].append(subheader.get('PROMPT_RATE', None))\n self.sidecar_template['RandomRate'].append(subheader.get('RANDOM_RATE', None))\n self.sidecar_template['SinglesRate'].append(subheader.get('SINGLES_RATE', None))\n\n # collect possible reconstruction method from subheader\n recon_method = helper_functions.get_recon_method(self.subheaders[0].get('ANNOTATION'))\n if recon_method:\n self.sidecar_template.update(**recon_method)\n\n # collect and convert start times for acquisition/time zero?\n scan_start_time = self.ecat_header.get('SCAN_START_TIME', None)\n\n if scan_start_time:\n scan_start_time = parse_this_date(scan_start_time)\n self.sidecar_template['AcquisitionTime'] = scan_start_time\n self.sidecar_template['ScanStart'] = scan_start_time\n\n # collect dose start time\n dose_start_time = self.ecat_header.get('DOSE_START_TIME', None)\n if dose_start_time:\n parsed_dose_time = parse_this_date(dose_start_time)\n self.sidecar_template['PharmaceuticalDoseTime'] = parsed_dose_time\n\n # if decay correction exists mark decay correction boolean as true\n if len(self.decay_factors) > 0:\n self.sidecar_template['ImageDecayCorrected'] = \"true\"\n\n # calculate scaling factor\n sca = self.data.max() / 32767\n\n self.sidecar_template['DoseCalibrationFactor'] = sca * self.ecat_header.get('ECAT_CALIBRATION_FACTOR')\n self.sidecar_template['Filename'] = os.path.basename(self.nifti_file)\n self.sidecar_template['ImageSize'] = [\n self.subheaders[0]['X_DIMENSION'],\n self.subheaders[0]['Y_DIMENSION'],\n self.subheaders[0]['Z_DIMENSION'],\n self.ecat_header['NUM_FRAMES']\n ]\n\n self.sidecar_template['PixelDimensions'] = [\n self.subheaders[0]['X_PIXEL_SIZE'] * 10,\n self.subheaders[0]['Y_PIXEL_SIZE'] * 10,\n self.subheaders[0]['Z_PIXEL_SIZE'] * 10\n ]\n\n # add tag for conversion software\n self.sidecar_template['ConversionSoftware'] = 'pypet2bids'\n self.sidecar_template['ConversionSoftwareVersion'] = helper_functions.get_version()\n\n\n\n # include any additional values\n if kwargs:\n self.sidecar_template.update(**kwargs)\n\n if not self.sidecar_template.get('TimeZero', None):\n if not self.sidecar_template.get('AcquisitionTime', None):\n logger.warn(f\"Unable to determine TimeZero for {self.ecat_file}, you need will need to provide this\"\n f\" for a valid BIDS sidecar.\")\n else:\n self.sidecar_template['TimeZero'] = 
self.sidecar_template['AcquisitionTime']\n\n # lastly infer radio data if we have it\n meta_radio_inputs = dcm2niix4pet.check_meta_radio_inputs(self.sidecar_template)\n self.sidecar_template.update(**meta_radio_inputs)\n\n # clear any nulls from json sidecar and replace with none's\n self.sidecar_template = helper_functions.replace_nones(self.sidecar_template)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def show_sidecar(self, output_path=None):\n self.prune_sidecar()\n self.sidecar_template = helper_functions.replace_nones(self.sidecar_template)\n if output_path:\n if not isinstance(output_path, pathlib.Path):\n output_path = pathlib.Path(output_path)\n\n if len(output_path.suffixes) > 1:\n temp_output_path = str(output_path)\n for suffix in output_path.suffixes:\n temp_output_path = re.sub(suffix, '', temp_output_path)\n output_path = pathlib.Path(temp_output_path).with_suffix('.json')\n\n with open(output_path, 'w') as outfile:\n json.dump(helper_functions.replace_nones(self.sidecar_template), outfile, indent=4)\n else:\n print(json.dumps(helper_functions.replace_nones(self.sidecar_template), indent=4))",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def _dumbCSI(self):\n # get my renderer\n renderer = self.renderer\n # build the 3 bit color generator\n yield from renderer.set(name=\"csi3\", value=\"\")\n yield from renderer.set(name=\"csi8\", value=\"\")\n yield from renderer.set(name=\"csi24\", value=\"\")\n\n # all done\n return",
"def sidecar_conn():\n \n #Making the sidecar connection\n global _sidecar_\n if not _sidecar_:\n _sidecar_ = client.Client(\n username = getattr(settings, \"SC_USERNAME\"),\n password = getattr(settings, \"SC_PASSWORD\"),\n auth_url = getattr(settings, \"SC_AUTH_URL\"),\n region_name = getattr(settings, \"SC_REGION_NAME\"),\n tenant_name = getattr(settings, \"SC_TENANT_NAME\"),\n timeout = getattr(settings, \"SC_TIMEOUT\"),\n insecure = getattr(settings, \"SC_INSECURE\"))\n return _sidecar_",
"def __init__(__self__, *,\n common: 'outputs.CSIUnitySpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIUnitySpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIUnitySpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIUnitySpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def envs(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def ls():\n # TODO: listing all availabe containers form sequence\n return",
"def volumes(self):",
"def __init__(__self__, *,\n common: 'outputs.CSIVXFlexOSSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIVXFlexOSSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIVXFlexOSSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def envs(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def test_show_container(self):\n pass",
"def envs(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def __init__(self, sides):\n self.sides = sides",
"def dcos_aws() -> None:",
"def side_chain_representatives(self):\n\n sc_atoms = []\n for residue_id in self._residue_ids:\n sc_atom = self._side_chain_representative(residue_id)\n sc_atoms.append([residue_id, sc_atom])\n sc_atoms = pd.DataFrame(sc_atoms, columns=[\"residue.id\", \"sc.atom\"])\n\n # Add vectors\n sc_atom_vectors = []\n for sc_atom in sc_atoms[\"sc.atom\"]:\n try:\n sc_atom_vectors.append(sc_atom.get_vector())\n except AttributeError:\n sc_atom_vectors.append(None)\n sc_atoms[\"sc.vector\"] = sc_atom_vectors\n\n return sc_atoms.astype({\"residue.id\": \"Int32\"})",
"def plot_IVS(self, parent_figure=None):\n nivs = len(FD.figure_AllIVs)\n cprint(\"c\", \"plot_IVS.\")\n rows = nivs\n cols = 5\n height = 1.5 * nivs\n width = 8.5\n PD = PData()\n ymin = -125.0\n ymax = 40.0\n calx = 120.0\n\n self.P = PH.regular_grid(\n rows,\n cols,\n order=\"rowsfirst\",\n figsize=(width, height),\n showgrid=False,\n verticalspacing=0.01,\n horizontalspacing=0.05,\n margins={\n \"bottommargin\": 0.1,\n \"leftmargin\": 0.07,\n \"rightmargin\": 0.05,\n \"topmargin\": 0.08,\n },\n labelposition=(-0.05, 1.06),\n parent_figure=parent_figure,\n # panel_labels=['A', 'B', 'C', 'D', 'E', 'F'],\n )\n cellpath = config[\"cellDataDirectory\"]\n png_path = Path(config[\"baseDataDirectory\"], config[\"pngDirectory\"])\n cprint(\"c\", \"prepping fo run\")\n\n for rax, iv in enumerate(FD.figure_AllIVs.keys()):\n cprint(\"r\", f\"Doing Cell VCN_c{iv:02d} -----------------------------------\")\n celln = Path(png_path, f\"VCN_c{iv:02d}.png\")\n if celln.is_file(): # add images from png files\n img = mpimg.imread(str(celln))\n self.P.axarr[rax, 0].imshow(img, aspect=\"equal\")\n ylim = self.P.axarr[rax, 0].get_ylim()\n self.P.axarr[rax, 0].set_xlim(900, 1500)\n PH.noaxes(self.P.axarr[rax, 0])\n # plot 3 dendrite decorations\n for iax, dendmode in enumerate([\"passive\", \"normal\", \"active\"]):\n dendm = self.get_dendmode(dendmode)\n sfi = Path(\n cellpath,\n f\"VCN_c{iv:02d}\",\n \"Simulations\",\n \"IV\",\n FD.figure_AllIVs[iv][dendm],\n )\n if not sfi.is_dir():\n cprint(\"r\", f\"Unable to find dir: {str(sfi):s}\")\n continue\n fn = list(sfi.glob(\"*\"))\n sfi = Path(sfi, fn[0])\n if rax > 0:\n calx = None # only one cal bar on this plot, top row.\n self.parent.PLT.plot_traces(\n self.P.axarr[rax, iax + 1],\n sfi,\n PD,\n protocol=\"IV\",\n ymin=ymin,\n ymax=ymax,\n iax=iax,\n figure=self.P.figure_handle,\n ivaxis=self.P.axarr[rax, 4], # accumulate IV's in right side\n ivcolor=colors[iax],\n iv_spike_color=spike_colors[dendmode],\n spike_marker_size=1.5,\n spike_marker_color=spike_colors[dendmode],\n calx=calx,\n caly=-10.0,\n )\n if rax == 0:\n self.P.axarr[rax, iax + 1].set_title(dendmode)\n if iax == 0:\n self.P.axarr[rax, 0].text(-0.1, 0.5, str(iv))\n if parent_figure is None:\n fig = FigInfo()\n fig.P = self.P\n fig.filename = f\"Fig_M1A_Supplemental.pdf\"\n timestamp_str = datetime.datetime.now().strftime(\"%Y-%m-%d-%H:%M\")\n fig.title[\n \"title\"\n ] = f\"SBEM Project Figure 1 Modeling (Supplemental A) ({timestamp_str:s})\"\n return fig\n else:\n return self.P",
"def test_vs_docking():\n vs = virtualscreening(n_cpu=-1)\n vs.load_ligands('sdf', os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'))\n vs.dock(engine='autodock_vina',\n protein=os.path.join(test_data_dir, 'data/dude/xiap/receptor_rdkit.pdb'),\n auto_ligand=os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'),\n exhaustiveness=1,\n seed=0)\n mols = list(vs.fetch())\n assert_equal(len(mols), 3)\n mol_data = mols[0].data\n assert_in('vina_affinity', mol_data)\n assert_in('vina_rmsd_lb', mol_data)\n assert_in('vina_rmsd_ub', mol_data)",
"def __init__(__self__, *,\n common: 'outputs.CSIIsilonSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIIsilonSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIIsilonSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def envs(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsEnvs']]:\n return pulumi.get(self, \"envs\")",
"def _GetOptionDecoderConstructions(cls):\n result = super(ContainerSpec, cls)._GetOptionDecoderConstructions()\n result.update({\n 'image': (option_decoders.StringDecoder, {\n 'default': None\n }),\n 'static_image': (option_decoders.BooleanDecoder, {\n 'default': False\n }),\n 'cpus': (option_decoders.FloatDecoder, {\n 'default': None\n }),\n 'memory': (custom_virtual_machine_spec.MemoryDecoder, {\n 'default': None\n }),\n 'command': (_CommandDecoder, {}),\n 'container_port': (option_decoders.IntDecoder, {\n 'default': 8080\n }),\n })\n return result"
]
| [
"0.7122332",
"0.7079939",
"0.69095534",
"0.67289656",
"0.55598956",
"0.5531892",
"0.5531867",
"0.5453014",
"0.5329077",
"0.5292361",
"0.52911955",
"0.51534843",
"0.5113812",
"0.50935465",
"0.48243824",
"0.4818962",
"0.4794707",
"0.46911767",
"0.46801996",
"0.4657909",
"0.4617334",
"0.4609073",
"0.45621413",
"0.45178345",
"0.44949907",
"0.44781542",
"0.44719204",
"0.44677553",
"0.44625506",
"0.4459933"
]
| 0.70821893 | 1 |
TLSCertSecret is the name of the TLS Cert secret | def tls_cert_secret(self) -> Optional[str]:
return pulumi.get(self, "tls_cert_secret") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_httpstls_secret():\n if 'tls_secret' in DEFINES:\n return DEFINES['tls_secret']\n # The default matches oic-auth-apps flucd manifest defaults\n return DEFAULT_HTTPSTLS_SECRET",
"def get_certificate_from_secret(secret_name, secret_ns):\n kube = kubernetes.KubeOperator()\n secret = kube.kube_get_secret(secret_name, secret_ns)\n\n if not hasattr(secret, 'data'):\n raise Exception('Invalid secret %s\\\\%s' % (secret_ns, secret_name))\n\n data = secret.data\n if 'tls.crt' not in data or 'tls.key' not in data:\n raise Exception('Invalid certificate data from secret %s\\\\%s' %\n (secret_ns, secret_name))\n\n try:\n tls_crt = base64.decode_as_text(data['tls.crt'])\n tls_key = base64.decode_as_text(data['tls.key'])\n except TypeError:\n raise Exception('Certificate secret data is invalid %s\\\\%s' %\n (secret_ns, secret_name))\n\n return tls_crt, tls_key",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def secret(self) -> str:\n return pulumi.get(self, \"secret\")",
"def trust_handshake_secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"trust_handshake_secret\")",
"def trust_handshake_secret(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"trust_handshake_secret\")",
"def client_secret(self) -> str:",
"def get_client_key_secret(self):\r\n course = self.get_course()\r\n for lti_passport in course.lti_passports:\r\n try:\r\n lti_id, key, secret = [i.strip() for i in lti_passport.split(':')]\r\n except ValueError:\r\n _ = self.runtime.service(self, \"i18n\").ugettext\r\n msg = _('Could not parse LTI passport: {lti_passport}. Should be \"id:key:secret\" string.').format(\r\n lti_passport='{0!r}'.format(lti_passport)\r\n )\r\n raise LTIError(msg)\r\n\r\n if lti_id == self.lti_id.strip():\r\n return key, secret\r\n return '', ''",
"def trust_handshake_secret(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"trust_handshake_secret\")",
"def get_secret(setting, secrets=secrets):\n return secrets[setting]",
"def get_secret_key():\n return get_config_handler().get_secret_key()",
"def get_ssl_certificate():",
"def secret(self):\n return self._secret",
"def secret_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"secret_key\")",
"def secret_key(self, val):\n self.__secret_key = val",
"def test_secrets() -> Secrets:\n from dotenv import load_dotenv\n from os import getenv\n from pathlib import Path\n env_path = Path('.') / '.env.testing'\n load_dotenv(dotenv_path=env_path)\n return Secrets(\n google_id_token=getenv(\"GOOGLE_ID_TOKEN\"),\n google_user_id=getenv(\"GOOGLE_USER_ID\")\n )",
"def ssl_cipher(self) -> str:\n return pulumi.get(self, \"ssl_cipher\")",
"def get_client_secret(filename):\n with open(filename) as file:\n json_file = json.load(file)\n\n cyphertext = json_file['CiphertextBlob']\n blob = base64.b64decode(cyphertext)\n client = boto3.client('kms')\n secret = client.decrypt(CiphertextBlob=blob)['Plaintext']\n s = secret.decode('ascii')\n return json.loads(s)",
"def get_ssl_certificate() :",
"async def add_secret(app: Sanic, secret: str, passphrase: str, ttl: Optional[int]) -> str:\n\n key = get_fernet_key(app, passphrase)\n\n sign = hmac.digest(key=key, msg=passphrase.encode(), digest='sha512').hex()\n secret_key = secrets.token_hex(16)\n\n cipher = fernet.Fernet(key)\n encrypted = cipher.encrypt(secret.encode()).decode()\n\n expires = None\n if ttl:\n expires = datetime.utcnow() + timedelta(seconds=ttl)\n\n await app.db.secrets.insert_one({\n 'secret': encrypted,\n 'secret_key': secret_key,\n 'signature': sign,\n 'expires': expires, # for mongo index\n 'ttl': ttl, # for fernet check\n })\n\n return secret_key",
"def _wrap_secret(self, val):\n return {\"SecretString\": val}",
"def test_get_secret_from_vault(key, environment, stage, namespace, table, nkey,\n boto3_resource, boto3_client, monkeypatch):\n # Call to the DynamoDB client to retrieve the encrypted secret\n monkeypatch.setattr(\"boto3.resource\", boto3_resource)\n monkeypatch.setattr(\"boto3.client\", boto3_client)\n secret = lambdautils.utils.get_secret(key,\n namespace=namespace,\n environment=environment,\n stage=stage)\n assert secret == \"dummy\"\n boto3_client(\"dynamodb\").get_item.assert_called_with(\n TableName=table,\n Key={\"id\": {\"S\": nkey}})\n\n # Call to the KMS client to decrypt the secret\n boto3_client('kms').decrypt.assert_called_with(CiphertextBlob=\"encrypted\")",
"def get_secret(setting, secrets=secrets):\n try:\n return secrets[setting]\n except KeyError:\n raise Exception(\"Can't find the key in secrets.json. Make sure the file is properly configured\")",
"def google_kms_encrypted_env_secret(secret_key: str) -> str:\n\n key_id = os.getenv('KEY_ID')\n ciphertext = os.getenv(secret_key)\n\n client = kms.KeyManagementServiceClient()\n # Call the API.\n decrypt_response = client.decrypt(\n request={'name': key_id, 'ciphertext': base64.b64decode(ciphertext)})\n return decrypt_response.plaintext.decode()",
"def get_key_secret():\n \n config = configparser.ConfigParser()\n config.read('dl.cfg')\n KEY = config['AWS']['AWS_ACCESS_KEY_ID']\n SECRET = config['AWS']['AWS_SECRET_ACCESS_KEY']\n return KEY, SECRET",
"def test_read_namespaced_secret_list_secrets(self):\n pass",
"def secret_key(self):\n return self._secret_key",
"def secret_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret_key\")",
"def secret_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret_key\")",
"def secret(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"secret\")"
]
| [
"0.6617524",
"0.5983317",
"0.58468336",
"0.58468336",
"0.5796945",
"0.57450837",
"0.5744361",
"0.55957747",
"0.548282",
"0.5444516",
"0.54431623",
"0.54390496",
"0.5414196",
"0.5360056",
"0.535964",
"0.53580487",
"0.5354628",
"0.53463376",
"0.5343948",
"0.5324922",
"0.53217083",
"0.5317211",
"0.53159493",
"0.5297392",
"0.52632535",
"0.52619237",
"0.5243628",
"0.5217051",
"0.5217051",
"0.5216614"
]
| 0.73764014 | 0 |
ImagePullPolicy is the image pull policy for the image | def image_pull_policy(self) -> Optional[str]:
return pulumi.get(self, "image_pull_policy") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _determine_image_pull_policy(self) -> ImagePullPolicy:\n if not self.image_pull_policy:\n _, tag = self._get_image_and_tag()\n if tag == \"latest\" or not tag:\n return ImagePullPolicy.ALWAYS\n return ImagePullPolicy.IF_NOT_PRESENT\n return self.image_pull_policy",
"def image_pull_policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"image_pull_policy\")",
"def _should_pull_image(self, docker_client: \"DockerClient\") -> bool:\n image_pull_policy = self._determine_image_pull_policy()\n\n if image_pull_policy is ImagePullPolicy.ALWAYS:\n return True\n elif image_pull_policy is ImagePullPolicy.NEVER:\n return False\n elif image_pull_policy is ImagePullPolicy.IF_NOT_PRESENT:\n try:\n # NOTE: images.get() wants the tag included with the image\n # name, while images.pull() wants them split.\n docker_client.images.get(self.image)\n except docker.errors.ImageNotFound:\n self.logger.debug(f\"Could not find Docker image locally: {self.image}\")\n return True\n return False",
"def pull(self, action, image_name, **kwargs):\n config_id = action.config_id\n registry, __, image = config_id.config_name.rpartition('/')\n login_kwargs = {}\n if _check_insecure_registry(kwargs):\n login_kwargs['insecure_registry'] = True\n if registry and '.' in registry and registry not in self._login_registries:\n self.login(action, registry, **login_kwargs)\n log.info(\"Pulling image %s:%s.\", config_id.config_name, config_id.instance_name)\n res = action.client.pull(repository=config_id.config_name, tag=config_id.instance_name, **kwargs)\n log.debug(\"Done pulling image %s:%s.\", config_id.config_name, config_id.instance_name)\n self._policy.images[action.client_name].refresh_repo(config_id.config_name)\n log.debug(\"Refreshed image cache for repo %s.\", config_id.config_name)\n return res",
"def pull(self, data):\n required = {'token', 'source'}\n api.validate(data, required)\n token = data['token']\n repo = data['source']\n self.credentials_module.authorize(token)\n result = self.docker_module.pull_image(repo)\n # credentials_module.add_image(token, result['image_id'])\n return result",
"def _load(self, resource_handler):\n log.info('[%s] Loading Docker image origin=%r image=%r tag=%r',\n resource_handler.name, self.origin, self.image, self.tag)\n if self.origin == 'dockerhub':\n resource_handler.cli.pull(repository=self.image, tag=self.tag)\n elif self.origin == 'local':\n pass\n else:\n resource_handler.cli.import_image_from_url(\n url=self.origin,\n repository=self.image,\n tag=self.tag\n )",
"def pull_image(image):\n\n subprocess.check_call(['docker', 'pull', image])",
"def _pull_image(self, docker_client: \"DockerClient\"):\n image, tag = self._get_image_and_tag()\n\n return docker_client.images.pull(image, tag)",
"def fetch_image(self, image: Image):\n self.log.info(\"Fetching image %s\", image)\n for line in image.repository.registry.client.pull(str(image.repository), image.tag, stream=True, decode=True):\n self.log.debug(line)",
"def test_pull_logic(self):\n # Assume the image is already recently pulled\n record = self.good_record()\n tag = record['tag'][0]\n basepr = {\n 'system': record['system'],\n 'itype': record['itype'],\n 'tag': tag,\n 'remotetype': 'dockerv2',\n }\n id = self.images.insert(record)\n self.assertIsNotNone(id)\n session = self.m.new_session(self.auth, self.system)\n pr = basepr.copy()\n rec = self.m.pull(session, pr) # ,delay=False)\n self.assertEqual(rec['status'], 'READY')\n\n # reset and test a re-pull of an old image\n self.images.remove({})\n record['last_pull'] = record['last_pull'] - 36000\n id = self.images.insert(record)\n self.assertIsNotNone(id)\n session = self.m.new_session(self.auth, self.system)\n rec = self.m.pull(session, pr) # ,delay=False)\n self.assertEqual(rec['status'], 'INIT')\n\n # Re-pull of new image with ACL change\n self.images.remove({})\n pr = basepr.copy()\n id = self.images.insert(record)\n self.assertIsNotNone(id)\n pr['userACL'] = [1001]\n session = self.m.new_session(self.auth, self.system)\n rec = self.m.pull(session, pr) # ,delay=False)\n self.assertEqual(rec['status'], 'INIT')\n\n # reset and test a re-pull of an old image\n self.images.remove({})\n pr = basepr.copy()\n record['last_pull'] = record['last_pull'] - 36000\n id = self.images.insert(record)\n self.assertIsNotNone(id)\n session = self.m.new_session(self.auth, self.system)\n rec = self.m.pull(session, pr) # ,delay=False)\n self.assertEqual(rec['status'], 'INIT')\n # Now let's do a re-pull with ACL change. We should\n # get back the prev rec. The status will now be\n # pending because we do an update status\n pr['userACL'] = [1001]\n session = self.m.new_session(self.auth, self.system)\n rec2 = self.m.pull(session, pr) # ,delay=False)\n self.assertEqual(rec2['_id'], rec['_id'])\n # TODO: Need to find a way to trigger this test now.\n # self.assertEquals(rec2['status'], 'PENDING')",
"def pull_image(self, tag):\n image_name = self.build_image_name(tag)\n image = self.client.images.pull(image_name)\n return image",
"def pull_image_version(self, app, image_name, image_tag, parent_task, fail_silently=False):\n start_time = datetime.datetime.now().replace(microsecond=0)\n\n assert isinstance(image_name, str)\n assert isinstance(image_tag, str)\n\n # The string \"local\" has a special meaning which means the most recent\n # local image of that name, so we skip the remote call/check.\n if image_tag == \"local\":\n if fail_silently:\n return None\n else:\n raise ImagePullFailure(\n \"Cannot pull a local image\",\n remote_name=None,\n image_tag=image_tag\n )\n\n # Check if the image already exists locally\n # This is an optimization to save a trip to the registry: 1-2 sec per image\n if image_tag != \"latest\":\n try:\n self.host.images.image_version(image_name, image_tag)\n return None\n except ImageNotFoundException:\n # The image will be pulled from the registry\n pass\n\n registry = self.get_registry(app)\n\n # See if the registry is willing to give us a URL (it's logged in)\n if registry:\n registry_url = registry.url(self.host)\n else:\n registry_url = None\n if registry_url is None:\n if fail_silently:\n return None\n else:\n raise ImagePullFailure(\n \"No registry configured\",\n remote_name=None,\n image_tag=image_tag\n )\n\n task = Task(\n \"Pulling remote image {}:{}\".format(image_name, image_tag),\n parent=parent_task,\n progress_formatter=lambda x: \"{} MB\".format(x // (1024 ** 2)),\n )\n\n remote_name = \"{registry_url}/{image_name}\".format(\n registry_url=registry_url,\n image_name=image_name,\n )\n\n stream = self._pull(app, task, remote_name, image_tag)\n\n layer_status = {}\n current = None\n total = None\n for json_line in convert_to_json_stream(stream):\n if 'error' in json_line:\n task.finish(status=\"Failed\", status_flavor=Task.FLAVOR_WARNING)\n if fail_silently:\n return\n else:\n raise ImagePullFailure(\n json_line['error'],\n remote_name=remote_name,\n image_tag=image_tag\n )\n elif 'id' in json_line:\n if json_line['status'].lower() == \"downloading\":\n layer_status[json_line['id']] = json_line['progressDetail']\n\n elif \"complete\" in json_line['status'].lower() and json_line['id'] in layer_status:\n layer_status[json_line['id']]['current'] = layer_status[json_line['id']]['total']\n\n if layer_status:\n statuses = [x for x in layer_status.values()\n if \"current\" in x and \"total\" in x]\n current = sum(x['current'] for x in statuses)\n total = sum(x['total'] for x in statuses)\n\n if total is not None:\n task.update(progress=(current, total))\n\n end_time = datetime.datetime.now().replace(microsecond=0)\n time_delta_str = str(end_time - start_time)\n if time_delta_str.startswith('0:'):\n time_delta_str = time_delta_str[2:]\n task.finish(status='Done [{}]'.format(time_delta_str), status_flavor=Task.FLAVOR_GOOD)\n\n # Tag the remote image as the right name\n self._tag_image(remote_name, image_tag, image_name, image_tag, fail_silently)\n self._tag_image(remote_name, image_tag, image_name, \"latest\", fail_silently)",
"def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None\n if '@' not in image and ':' not in image:\n display.info('Skipping pull of image without tag or digest: %s' % image, verbosity=2)\n return\n\n if docker_image_exists(args, image):\n display.info('Skipping pull of existing image: %s' % image, verbosity=2)\n return\n\n for _iteration in range(1, 10):\n try:\n docker_command(args, ['pull', image], capture=False)\n return\n except SubprocessError:\n display.warning('Failed to pull docker image \"%s\". Waiting a few seconds before trying again.' % image)\n time.sleep(3)\n\n raise ApplicationError('Failed to pull docker image \"%s\".' % image)",
"def pull_image(self):\n status = []\n for key, container in self.containers.items():\n result = container.pull()\n status.append(result)\n return status",
"def singularity_pull(self, image):\n Client.pull(image)",
"def pull_policy(self, target, policy=DEFAULT_POLICY_FILE):\n self.pull_file(policy, target)",
"def cli(ctx, image_file):\n if not image_file:\n return\n for pull_image in image_file.readline():\n pull_image = pull_image.rstrip('\\n')\n if len(pull_image) == 0:\n continue\n docker.pull(pull_image)\n push_image = '%s/%s/%s' % (DEFAULT_REGISTRY,\n DEFAULR_NAMESPACE,\n pull_image.split('/')[-1])\n docker.tag(pull_image, push_image)\n docker.push(push_image)",
"def image_pull_secret(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"image_pull_secret\")",
"def _update_image(name):\n\n if not name or name == 'None':\n return None, None\n\n old = docker.image_digest(name)\n for _ in range(3):\n new = docker.pull(name)\n if new:\n break\n time.sleep(5)\n\n return name, old != new",
"def pull(self, repo, tag):\n check_blacklist(repo)\n logger.info(\"Pulling Docker image {}:{}\".format(repo, tag))\n with SimpleFlock(self.FLOCKFILE, timeout=1200):\n stream = self.client.pull(repo, tag=tag, stream=True, insecure_registry=True)\n log_output(stream)",
"def pull_image(image):\n docker_client = _docker_client()\n response = docker_client.pull(image)\n lines = [line for line in response.splitlines() if line]\n\n # The last line of the response contains the overall result of the pull\n # operation.\n pull_result = json.loads(lines[-1])\n if \"error\" in pull_result:\n raise Exception(\"Could not pull {}: {}\".format(\n image, pull_result[\"error\"]))",
"def registration_image_pull_spec(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"registration_image_pull_spec\")",
"def work_image_pull_spec(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"work_image_pull_spec\")",
"def test_pull(self, mock_docker_environment, snapshot, capsys):\n mock_client = mock_docker_environment\n pull_image(TEST_IMAGE_NAME)\n mock_client.api.pull.assert_called_with(\n TEST_IMAGE_NAME, \"latest\", stream=True, decode=True\n )\n out, err = capsys.readouterr()\n snapshot.assert_match(out)",
"def get_matching_image_from_repository(self, repo: Repository, tag: str) -> Optional[Image]:\n\n # those are default values, and can be candidate on a per-image case.\n distribution = self.distribution\n openstack_release = self.openstack_release\n source_tag_tpl = self.source_tag_tpl\n release_tags_tpl = self.release_tags_tpl\n registries = [r for r in self.registries.values() if not r.source]\n\n override = {}\n # apply any per-image overrides\n for candidate in self.image_overrides:\n if \"image_matcher\" in candidate:\n matcher = candidate[\"image_matcher\"]\n if not re.match(matcher, repo.name):\n continue\n if \"tag_matcher\" in candidate:\n matcher = candidate[\"tag_matcher\"]\n if not re.match(matcher, tag):\n continue\n #self.log.warning(\"Overrides for %s:%s found.\", repo, tag)\n override = candidate\n break\n\n if \"source_tag\" in override:\n source_tag_tpl = override['source_tag']\n\n if \"distribution\" in override:\n distribution = override[\"distribution\"]\n\n if \"registries\" in override:\n registry_names = override[\"registries\"]\n registries = [r for r in self.registries.values() if r.name in registry_names]\n\n if \"release_tags\" in override:\n release_tags_tpl = override[\"release_tags\"]\n\n # image overrides explicitly disabled processing.\n if not registries:\n self.log.info(\"Image %s:%s disabled.\", repo, tag)\n return\n\n source_tag_context = {\n \"openstack_release_lhs\": openstack_release + \"-\",\n \"openstack_release_rhs\": \"-\" + openstack_release,\n \"distribution_lhs\": distribution + \"-\",\n \"distribution_rhs\": \"-\" + distribution,\n \"release\": self.release,\n \"build_no\": self.build_no,\n }\n\n release_tag_context = {\n \"openstack_release_lhs\": openstack_release + \"-\",\n \"openstack_release_rhs\": \"-\" + openstack_release,\n \"distribution_lhs\": distribution + \"-\",\n \"distribution_rhs\": \"-\" + distribution,\n \"release\": self.get_release(),\n \"build_no\": self.get_build_no(),\n }\n\n def render_tag(template, context):\n template = jinja2.Template(template)\n return template.render(context)\n\n source_tag = render_tag(source_tag_tpl, source_tag_context)\n release_tags = [render_tag(release_tag, release_tag_context) for release_tag in release_tags_tpl]\n\n # This method is called for every tag in repository, but we want to process only images that\n # match requested release, build_no and openstack version.\n if source_tag == tag:\n image = Image(repository=repo, tag=source_tag, release_tags=release_tags, push_registries=registries)\n return image\n else:\n return None",
"def pull(release):\n image = f\"breqwatr/rsyslog:{release}\"\n ecr.pull(image)",
"def pull_base_images(ctx, dockerfile, signed_pull=True):\n images = set()\n stages = set()\n\n with open(dockerfile, \"r\") as f:\n for line in f:\n words = line.split()\n # Get source images\n if len(words) < 2 or words[0].lower() != \"from\":\n continue\n images.add(words[1])\n # Get stage names to remove them from pull\n if len(words) < 4 or words[2].lower() != \"as\":\n continue\n stages.add(words[3])\n\n if stages:\n print(\"Ignoring intermediate stage names: {}\".format(\", \".join(stages)))\n images -= stages\n\n print(\"Pulling following base images: {}\".format(\", \".join(images)))\n\n pull_env = {}\n if signed_pull:\n pull_env[\"DOCKER_CONTENT_TRUST\"] = \"1\"\n\n for i in images:\n ctx.run(\"docker pull {}\".format(i), env=pull_env)",
"def __init__(__self__, *,\n config: 'outputs.CSIPowerMaxRevProxySpecConfig',\n image: str,\n tls_secret: str,\n image_pull_policy: Optional[str] = None):\n pulumi.set(__self__, \"config\", config)\n pulumi.set(__self__, \"image\", image)\n pulumi.set(__self__, \"tls_secret\", tls_secret)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)",
"def force_pull(self):\n return self._force_pull",
"def pull_image_then_retag(docker_client=None, repository=None, tag='latest', retag_repository=None):\n try:\n Logger.debug(f\"ready to pull the image {repository}:{tag}\")\n image = docker_client.images.pull(repository, tag)\n Logger.info(f\"pull the image {image.attrs['RepoTags'][0]} completed,ready to re-tag.\")\n return image.tag(retag_repository, tag)\n except docker.errors.APIError as error:\n Logger.error(error)\n return False"
]
| [
"0.8125533",
"0.7769158",
"0.68589365",
"0.62248176",
"0.59526604",
"0.5950211",
"0.5819258",
"0.5774122",
"0.56765014",
"0.5624425",
"0.56165045",
"0.5529986",
"0.54840046",
"0.5459022",
"0.54292417",
"0.54169023",
"0.5408534",
"0.53826547",
"0.53813297",
"0.5363186",
"0.5355253",
"0.5279573",
"0.5239943",
"0.5175288",
"0.5172295",
"0.5152659",
"0.5136572",
"0.510526",
"0.5096656",
"0.50666785"
]
| 0.7893667 | 1 |
AllowVolumeExpansion is a boolean flag which indicates if volumes can be expanded | def allow_volume_expansion(self) -> Optional[bool]:
return pulumi.get(self, "allow_volume_expansion") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def expand_volume(self, vol, new_size):\n self.authenticate_user()\n volume_name = self._get_vipr_volume_name(vol)\n size_in_bytes = vipr_utils.to_bytes(str(new_size) + \"G\")\n\n try:\n self.volume_obj.expand(\n self.configuration.vipr_tenant +\n \"/\" +\n self.configuration.vipr_project +\n \"/\" +\n volume_name,\n size_in_bytes,\n True)\n except vipr_utils.SOSError as e:\n if e.err_code == vipr_utils.SOSError.SOS_FAILURE_ERR:\n raise vipr_utils.SOSError(\n vipr_utils.SOSError.SOS_FAILURE_ERR,\n \"Volume \" + volume_name + \": expand failed\\n\" + e.err_text)\n else:\n with excutils.save_and_reraise_exception():\n LOG.exception(_(\"Volume : %s expand failed\") % volume_name)",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumeArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def expand_volume_helper(self, vol, size_in_gb, existing_vol_size):\n vol_id = vol['volumeId']\n try:\n if size_in_gb < existing_vol_size:\n self.show_error_exit(msg='Current volume size {0} GB is '\n 'greater than {1} GB specified.'.\n format(existing_vol_size, size_in_gb))\n elif size_in_gb > existing_vol_size:\n if 'rdfGroupId' in vol:\n array_id = self.module.params['serial_no']\n array_details = self.common.get_array(array_id=array_id)\n if utils.parse_version(array_details['ucode'])\\\n < utils.parse_version(self.foxtail_version):\n msg = (\"Expansion of SRDF protected volume is\"\n \" supported from v5978.444.444 onward. Please\"\n \" upgrade the array for this support.\")\n self.show_error_exit(msg=msg)\n return self.srdf_volume_expansion(vol, size_in_gb,\n existing_vol_size)\n return self.expand_volume(vol_id, size_in_gb,\n existing_vol_size)\n\n LOG.info('Current volume size and specified volume size'\n ' are equal')\n return False\n except Exception as e:\n error_message = 'Expand volume %s failed with error: %s' \\\n % (vol_id, str(e))\n self.show_error_exit(msg=error_message)",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumePatchArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def assign_volume_letters():\n remove_volume_letters()\n\n # Write script\n script = []\n for vol in get_volumes():\n script.append('select volume {}'.format(vol['Number']))\n script.append('assign')\n\n # Run\n run_diskpart(script)",
"def test_volume_extend(self, volume, volumes_steps):\n volumes_steps.extend_volume(volume.name)",
"async def expand(self, job, id, options):\n pool = await self.middleware.call('pool.get_instance', id)\n if osc.IS_LINUX:\n if options.get('passphrase'):\n raise CallError('Passphrase should not be supplied for this platform.')\n # FIXME: We have issues in ZoL where when pool is created with partition uuids, we are unable\n # to expand pool where all pool related options error out saying I/O error\n # https://github.com/zfsonlinux/zfs/issues/9830\n raise CallError('Expand is not supported on this platform yet because of underlying ZFS issues.')\n else:\n if pool['encrypt']:\n if not pool['is_decrypted']:\n raise CallError('You can only expand decrypted pool')\n\n for error in (\n await self.middleware.call('pool.pool_lock_pre_check', pool, options['geli']['passphrase'])\n ).errors:\n raise CallError(error.errmsg)\n\n all_partitions = {p['name']: p for p in await self.middleware.call('disk.list_all_partitions')}\n\n try:\n if osc.IS_FREEBSD:\n sysctl.filter('kern.geom.debugflags')[0].value = 16\n geli_resize = []\n try:\n for vdev in sum(pool['topology'].values(), []):\n if vdev['type'] != 'DISK':\n logger.debug('Not expanding vdev of type %r', vdev['type'])\n continue\n\n if vdev['status'] != 'ONLINE':\n logger.debug('Not expanding vdev that is %r', vdev['status'])\n continue\n\n part_data = all_partitions.get(vdev['device'])\n if not part_data:\n logger.debug('Unable to find partition data for %s', vdev['device'])\n\n partition_number = part_data['partition_number']\n if not partition_number:\n logger.debug('Could not parse partition number from %r', vdev['device'])\n continue\n\n assert part_data['disk'] == vdev['disk']\n\n if osc.IS_LINUX:\n await run(\n 'sgdisk', '-d', str(partition_number), '-n', f'{partition_number}:0:0',\n '-c', '2:', '-u', f'{partition_number}:{part_data[\"partition_uuid\"]}',\n '-t', f'{partition_number}:BF01', part_data['path']\n )\n await run('partprobe', os.path.join('/dev', part_data['disk']))\n else:\n await run('camcontrol', 'reprobe', vdev['disk'])\n await run('gpart', 'recover', vdev['disk'])\n await run('gpart', 'resize', '-i', str(partition_number), vdev['disk'])\n\n if osc.IS_FREEBSD and pool['encrypt']:\n geli_resize_cmd = (\n 'geli', 'resize', '-s', str(part_data['size']), vdev['device']\n )\n rollback_cmd = (\n 'gpart', 'resize', '-i', str(partition_number), '-s', str(part_data['size']), vdev['disk']\n )\n\n logger.warning('It will be obligatory to notify GELI that the provider has been resized: %r',\n join_commandline(geli_resize_cmd))\n logger.warning('Or to resize provider back: %r',\n join_commandline(rollback_cmd))\n geli_resize.append((geli_resize_cmd, rollback_cmd))\n finally:\n if osc.IS_FREEBSD and geli_resize:\n await self.__geli_resize(pool, geli_resize, options)\n finally:\n if osc.IS_FREEBSD:\n sysctl.filter('kern.geom.debugflags')[0].value = 0\n\n for vdev in sum(pool['topology'].values(), []):\n if vdev['type'] != 'DISK' or vdev['status'] != 'ONLINE':\n continue\n\n await self.middleware.call('zfs.pool.online', pool['name'], vdev['guid'], True)",
"def test_extend_volume_noextend(self):\n ctxt = context.get_admin_context()\n extra_specs = {}\n type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)\n volume = {'id': '1', 'name': 'volume1',\n 'display_name': '',\n 'volume_type_id': type_ref['id'],\n 'size': 10,\n 'provider_id': 'volume10'}\n self.extended = {'name': '', 'size': '0',\n 'storageserver': ''}\n self.driver.extend_volume(volume, 10)\n expected = {'name': '', 'size': '0',\n 'storageserver': ''}\n self.assertDictMatch(expected, self.extended)",
"def enable_block_storage_management(self):\n self._request({\"enable-block-storage-management\": True})",
"def test_extend_volume(self):\n ctxt = context.get_admin_context()\n extra_specs = {}\n type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)\n volume = {'id': '1', 'name': 'volume1',\n 'display_name': '',\n 'volume_type_id': type_ref['id'],\n 'size': 10,\n 'provider_id': 'volume10'}\n self.extended = {'name': '', 'size': '0',\n 'storageserver': ''}\n self.driver.extend_volume(volume, 12)\n expected = {'name': 'volume10', 'size': '2',\n 'storageserver': 'stor1:gbd0,stor2:gbd0,'}\n self.assertDictMatch(expected, self.extended)",
"def allowed_volumes(context, requested_volumes, size):\n project_id = context.project_id\n context = context.elevated()\n size = int(size)\n requested_gigabytes = requested_volumes * size\n used_volumes, used_gigabytes = db.volume_data_get_for_project(context,\n project_id)\n quota = get_project_quotas(context, project_id)\n allowed_volumes = _get_request_allotment(requested_volumes, used_volumes,\n quota['volumes'])\n allowed_gigabytes = _get_request_allotment(requested_gigabytes,\n used_gigabytes,\n quota['gigabytes'])\n allowed_volumes = min(allowed_volumes,\n int(allowed_gigabytes // size))\n return min(requested_volumes, allowed_volumes)",
"def set_volume_options(cd):\n\n try:\n vol_name = cd[\"vol_name\"]\n auth_allow = cd[\"auth_allow\"]\n auth_reject = cd[\"auth_reject\"]\n if \"nfs_disable\" in cd:\n nfs_disable = cd[\"nfs_disable\"]\n else:\n nfs_disable = False\n if \"enable_worm\" in cd:\n enable_worm = cd[\"enable_worm\"]\n else:\n enable_worm = False\n readonly = cd[\"readonly\"]\n nfs_volume_access = cd[\"nfs_volume_access\"]\n\n vol_info_dict, err = get_basic_volume_info(vol_name)\n if err:\n raise Exception(err)\n\n # set defaults first\n _auth_allow = \"*\"\n _auth_reject = \"NONE\"\n _readonly = \"off\"\n _nfs_disable = False\n _enable_worm = False\n _nfs_volume_access = \"read-write\"\n\n if \"options\" in vol_info_dict:\n for option in vol_info_dict[\"options\"]:\n if option[\"name\"] == \"auth.allow\":\n _auth_allow = option[\"value\"]\n if option[\"name\"] == \"auth.reject\":\n _auth_reject = option[\"value\"]\n if option[\"name\"] == \"nfs.disable\":\n if option[\"value\"].lower() == \"off\":\n _nfs_disable = False\n else:\n _nfs_disable = True\n if option[\"name\"] == \"nfs.volume-access\":\n _nfs_volume_access = option[\"value\"]\n if option[\"name\"] == \"features.read-only\":\n _readonly = option[\"value\"]\n if option[\"name\"] == \"features.worm\":\n if option[\"value\"].lower() == \"enable\":\n _enable_worm = True\n else:\n _enable_worm = False\n\n # Now, for each option that has changed, set the parameter\n ret_list = []\n\n if _auth_allow != auth_allow:\n d, err = _set_volume_option(vol_name, \"auth.allow\", auth_allow)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting option for permitted access IP addresses for %s to \\'%s\\'\" % (\n vol_name, auth_allow)\n ret_list.append(d)\n\n if _auth_reject != auth_reject:\n d, err = _set_volume_option(vol_name, \"auth.reject\", auth_reject)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting option for denied access IP addresses for %s to \\'%s\\'\" % (\n vol_name, auth_reject)\n ret_list.append(d)\n\n if _readonly != readonly:\n d, err = _set_volume_option(\n vol_name, \"features.read-only\", readonly)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting readonly mount access(for all access methods) for %s to \\'%s\\'\" % (\n vol_name, readonly)\n ret_list.append(d)\n\n if readonly == \"off\":\n\n # All the rest applies only if volume access is read-write\n if _nfs_disable != nfs_disable:\n if nfs_disable:\n p = \"on\"\n else:\n p = \"off\"\n d, err = _set_volume_option(vol_name, \"nfs.disable\", p)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting NFS disable for %s to \\'%s\\'\" % (\n vol_name, p)\n ret_list.append(d)\n\n if not nfs_disable:\n # print \"in\"\n if nfs_volume_access and _nfs_volume_access != nfs_volume_access:\n d, err = _set_volume_option(\n vol_name, \"nfs.volume-access\", nfs_volume_access)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting NFS access type for %s to \\'%s\\'\" % (\n vol_name, nfs_volume_access)\n ret_list.append(d)\n\n if _enable_worm != enable_worm:\n if enable_worm:\n p = \"enable\"\n else:\n p = \"disable\"\n d, err = _set_volume_option(vol_name, \"features.worm\", p)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting feature WORM for %s to \\'%s\\'\" % (\n vol_name, p)\n ret_list.append(d)\n\n except Exception, e:\n return None, 'Error setting volume options: %s' % str(e)\n else:\n return ret_list, None",
"def filterExpand(*args, expand: bool=True, fullPath: bool=False, selectionMask: Union[int,\n List[int]]=0, symActive: bool=True, symNegative: bool=True, symPositive:\n bool=True, symSeam: bool=True, **kwargs)->List[AnyStr]:\n pass",
"def test_ocs_upgrade_with_allowexpansion_false(\n self, project_factory, storageclass_factory, multi_pvc_factory\n ):\n size_list = [\"1\", \"3\", \"5\"]\n\n access_modes_cephfs = [constants.ACCESS_MODE_RWO, constants.ACCESS_MODE_RWX]\n access_modes_rbd = [\n f\"{constants.ACCESS_MODE_RWO}-Block\",\n f\"{constants.ACCESS_MODE_RWX}-Block\",\n ]\n\n # Create custom storage class\n\n custom_cephfs_sc = storageclass_factory(\n interface=constants.CEPHFILESYSTEM, allow_volume_expansion=False\n )\n custom_rbd_sc = storageclass_factory(\n interface=constants.CEPHBLOCKPOOL, allow_volume_expansion=False\n )\n\n # Appending all the pvc obj to base case param for cleanup and evaluation\n self.all_sc_obj.append(custom_cephfs_sc)\n self.all_sc_obj.append(custom_rbd_sc)\n\n log.info(\"Create pvcs for custom sc as well as for default sc\")\n project_obj = project_factory()\n for size in size_list:\n rbd_pvcs = multi_pvc_factory(\n interface=constants.CEPHBLOCKPOOL,\n access_modes=access_modes_rbd,\n project=project_obj,\n size=size,\n num_of_pvc=2,\n )\n log.info(f\"rbd_pvc created for size {size}\")\n assert rbd_pvcs, f\"Failed to create rbd_pvcs of size {size}\"\n\n cephfs_pvcs = multi_pvc_factory(\n interface=constants.CEPHFILESYSTEM,\n project=project_obj,\n access_modes=access_modes_cephfs,\n size=size,\n num_of_pvc=2,\n )\n assert cephfs_pvcs, \"Failed to create cephfs_pvcs PVC\"\n\n custom_rbd_pvcs = multi_pvc_factory(\n interface=constants.CEPHBLOCKPOOL,\n project=project_obj,\n access_modes=access_modes_rbd,\n storageclass=custom_rbd_sc,\n size=size,\n num_of_pvc=2,\n )\n assert custom_rbd_pvcs, \"Failed to create custom_rbd_pvcs PVC\"\n\n custom_cephfs_pvcs = multi_pvc_factory(\n interface=constants.CEPHFILESYSTEM,\n project=project_obj,\n access_modes=access_modes_cephfs,\n storageclass=custom_cephfs_sc,\n size=size,\n num_of_pvc=2,\n )\n assert custom_cephfs_pvcs, \"Failed to create custom_cephfs_pvcs PVC\"",
"def extend_volume(self, volume, new_size):\n if isinstance(new_size, dict):\n new_size = random.randint(new_size[\"min\"], new_size[\"max\"])\n\n aname = \"cinder_v%s.extend_volume\" % self.version\n with atomic.ActionTimer(self, aname):\n self._get_client().volumes.extend(volume, new_size)\n return self._wait_available_volume(volume)",
"def booted_from_volume(volumes_list):\n if any('/dev/vda' in volume['attachments'] for volume in\n volumes_list):\n return True\n return False",
"def setup_volumes():\n\n fix_path()\n\n # Get what we want from the ZK tree\n logical_volumes = {}\n expected_sdvols = set()\n f = open('/etc/zim/volumes')\n for line in f:\n line = line.strip()\n if not line:\n continue\n sdvols = line.split()\n mount_point = sdvols.pop(0)\n if len(sdvols) == 1:\n dev = sdvols[0]\n if dev[0] == '/':\n ln(mount_point, dev)\n else:\n single(mount_point, '/dev/'+dev)\n continue\n\n if len(sdvols) < 1:\n raise ValueError(line)\n\n if lvname(sdvols[0]):\n lvm(mount_point, sdvols)\n continue\n\n\n # RAID10:\n assert len(set(sdvol[:3] for sdvol in sdvols)) == 1, (\n \"Multiple device prefixes\")\n sdprefix = sdvols[0][:3]\n logical_volumes[sdprefix] = LogicalVolume(\n sdprefix, sdvols, mount_point)\n expected_sdvols.update(sdvols)\n\n if logical_volumes:\n\n # Wait for all of our expected sd volumes to appear. (They may be\n # attaching.)\n for v in expected_sdvols:\n wait_for_device('/dev/' + v)\n\n # The volumes may have been set up before on a previous machine.\n # Scan for them:\n s('mdadm --examine --scan >>/etc/mdadm.conf')\n f = open('/etc/mdadm.conf')\n if f.read().strip():\n s('mdadm -A --scan')\n f.close()\n\n # Read /proc/mdstat to find out about existing raid volumes:\n mdstat = re.compile(r'md(\\w+) : (\\w+) (\\w+) (.+)$').match\n mdstatsd = re.compile(r'(sd(\\w+))\\[\\d+\\](\\(F\\))?$').match\n for line in open('/proc/mdstat'):\n if not line.strip():\n continue\n m = mdstat(line)\n if not m:\n assert (line.startswith('Personalities') or\n line.startswith(' ') or\n line.startswith('unused devices')), (\n \"unexpected line\", line\n )\n continue\n mdnum, status, rtype, data = m.group(1, 2, 3, 4)\n data = [mdstatsd(d).groups() for d in data.strip().split()]\n\n assert not [d for d in data if d[2]], (\n \"Failed volume\", line\n )\n\n data = [d[0] for d in data]\n if not [d for d in data if d in expected_sdvols]:\n # Hm, not one weore interested in.\n print 'skipping', line\n continue\n\n assert not [d for d in data if d not in expected_sdvols], (\n \"Unexpected volume\", data\n )\n\n assert status == 'active', status\n assert rtype == 'raid10', rtype\n\n logical_volumes[data[0][:3]].add_md(mdnum, data)\n\n # Scan for logical volumes:\n lv_pat = re.compile('Found volume group \"vg_(sd\\w+)\"').search\n for line in p('vgscan'):\n m = lv_pat(line)\n if not m:\n continue\n name = m.group(1)\n if name in logical_volumes:\n logical_volumes[name].has_logical_volume()\n\n # Record the physical volums in each logical_volume so we can see\n # if any are missing:\n PV = re.compile(\"PV /dev/md(\\w+) +VG vg_(sd\\w+) \").search\n for line in p(\"pvscan\"):\n m = PV(line)\n if not m:\n continue\n mdnum, vgname = m.groups()\n logical_volumes[vgname].pvs.add(mdnum)\n\n # Finally, create any missing raid volumes and logical volumes\n for lv in logical_volumes.values():\n lv.setup()\n\n os.rename('/etc/zim/volumes', '/etc/zim/volumes-setup')",
"def _attach_volume(self):\n return []",
"def extend_volume(self, volume, new_size):\n spdk_name = self._get_spdk_volume_name(volume.name)\n params = {'name': spdk_name, 'size': new_size * units.Gi}\n self._rpc_call('bdev_lvol_resize', params)",
"def build_expand_volume_command(vol_info_dict, si):\n\n return_dict = None\n try:\n # First get all the node/disk combinations where the volume is not\n # present\n anl = []\n num_nodes = 0\n\n ondisk_storage = \"normal\"\n if \"compressed\" in vol_info_dict['bricks'][0]:\n ondisk_storage = \"compressed\"\n elif \"deduplicated\" in vol_info_dict['bricks'][0]:\n ondisk_storage = \"deduplicated\"\n\n anl, err = _get_allowable_node_list(si, vol_info_dict['name'])\n if err:\n raise Exception(err)\n\n cmd = 'gluster volume add-brick %s ' % vol_info_dict[\"name\"]\n\n repl_count = 0\n\n if 'replicate' in vol_info_dict['type'].lower():\n vol_type = \"replicated\"\n repl_count = int(vol_info_dict[\"replica_count\"])\n else:\n vol_type = \"distributed\"\n\n return_dict, err = build_create_or_expand_volume_command(\n cmd, si, anl, vol_type, ondisk_storage, repl_count, vol_info_dict[\"name\"])\n if err:\n raise Exception(err)\n\n if \"cmd\" in return_dict:\n return_dict[\"cmd\"] = return_dict[\"cmd\"] + \" force --xml\"\n except Exception, e:\n return None, 'Error building expand volume command: %s' % str(e)\n else:\n return return_dict, None",
"def extend_volume(self, volume, new_size):\n LOG.info('Extending volume: %(id)s New size: %(size)s GB',\n {'id': volume['id'], 'size': new_size})\n nfs_share = volume['provider_location']\n nms = self.share2nms[nfs_share]\n volume_path = self.remote_path(volume)\n if getattr(self.configuration,\n self.driver_prefix + '_sparsed_volumes'):\n self._create_sparsed_file(nms, volume_path, new_size)\n else:\n block_size_mb = 1\n block_count = ((new_size - volume['size']) * units.Gi /\n (block_size_mb * units.Mi))\n\n nms.appliance.execute(\n 'dd if=/dev/zero seek=%(seek)d of=%(path)s'\n ' bs=%(bs)dM count=%(count)d' % {\n 'seek': volume['size'] * units.Gi / block_size_mb,\n 'path': volume_path,\n 'bs': block_size_mb,\n 'count': block_count\n }\n )",
"def list_volumes(self):\n print '# Listing existing volumes'\n self.compute.list_volumes()",
"def attach_volume(self, instance_name, device_path, mountpoint):\n return True",
"def test_volumes_complex(self):\n with open(\".scuba.yml\", \"w\") as f:\n f.write(\n r\"\"\"\n image: na\n volumes:\n /foo: /host/foo\n /bar:\n hostpath: /host/bar\n /snap:\n hostpath: /host/snap\n options: z,ro\n \"\"\"\n )\n\n config = scuba.config.load_config(\".scuba.yml\")\n vols = config.volumes\n assert len(vols) == 3\n\n v = vols[\"/foo\"]\n assert isinstance(v, scuba.config.ScubaVolume)\n assert v.container_path == \"/foo\"\n assert v.host_path == \"/host/foo\"\n assert v.options == []\n\n v = vols[\"/bar\"]\n assert isinstance(v, scuba.config.ScubaVolume)\n assert v.container_path == \"/bar\"\n assert v.host_path == \"/host/bar\"\n assert v.options == []\n\n v = vols[\"/snap\"]\n assert isinstance(v, scuba.config.ScubaVolume)\n assert v.container_path == \"/snap\"\n assert v.host_path == \"/host/snap\"\n assert v.options == [\"z\", \"ro\"]",
"def attach_volume(self):\n\n # Choose volume\n volume_id = self._choose_among_available_volumes()\n\n # Cancel\n if not volume_id:\n print 'Operation cancelled'\n return\n\n # Choose instance\n instance_id = self._choose_among_running_instances()\n\n # Cancel\n if not instance_id:\n print 'Operation cancelled'\n return\n\n # Attach the volume\n print '# Attaching volume \"%s\"!' % volume_id\n if self.compute.attach_volume(volume_id, instance_id):\n print 'The volume has been attached!'\n else:\n print 'The volume could not been attached'",
"def set_volume(cls, newVolume: float) -> bool:\n raise NotImplementedError",
"def guest_grow_root_volume(self, userid, os_version):\n LOG.debug('Begin to punch grow partition commands to guest: %s',\n userid)\n linuxdist = self._dist_manager.get_linux_dist(os_version)()\n # get configuration commands\n config_cmds = linuxdist.get_extend_partition_cmds()\n # Creating tmp file with these cmds\n temp_folder = self._pathutils.get_guest_temp_path(userid)\n file_path = os.path.join(temp_folder, 'gpartvol.sh')\n LOG.debug('Creating file %s to contain root partition extension '\n 'commands' % file_path)\n with open(file_path, \"w\") as f:\n f.write(config_cmds)\n try:\n self._smtclient.punch_file(userid, file_path, \"X\")\n finally:\n LOG.debug('Removing the folder %s ', temp_folder)\n shutil.rmtree(temp_folder)",
"def check_volume(obj, char, quiet=False):\n vol = obj.item_data.size\n if vol is None:\n raise ValueError(f\"Object {obj} has an undefined size\")\n v_max = char.item_data.capacity\n if char.used_capacity + vol > v_max:\n if not quiet:\n char.msg(\"You can't carry %s.\" % obj)\n return False\n return True",
"def bootable_volume(volumes):\n for volume in volumes:\n if '/dev/vda' in volume['attachments']:\n return volume",
"def setVolume(self, *args):\n return _libsbml.Compartment_setVolume(self, *args)"
]
| [
"0.5745551",
"0.54371166",
"0.5398411",
"0.5398006",
"0.5301903",
"0.5236258",
"0.5219399",
"0.5200871",
"0.51637703",
"0.51509345",
"0.512159",
"0.5095982",
"0.50772727",
"0.506262",
"0.50104576",
"0.49561206",
"0.4911384",
"0.49091676",
"0.4906915",
"0.48999748",
"0.4882017",
"0.4869149",
"0.48545307",
"0.4842121",
"0.47673637",
"0.47602555",
"0.47504058",
"0.47201544",
"0.4711843",
"0.4708398"
]
| 0.80991673 | 0 |
ReclaimPolicy is the reclaim policy for the storage class | def reclaim_policy(self) -> Optional[str]:
return pulumi.get(self, "reclaim_policy") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def rebalance_policy(self):\n return self._rebalance_policy",
"def rebalance_policy(self, rebalance_policy):\n allowed_values = [\"auto\", \"manual\"]\n if rebalance_policy is not None and rebalance_policy not in allowed_values:\n raise ValueError(\n \"Invalid value for `rebalance_policy`, must be one of {0}\"\n .format(allowed_values)\n )\n\n self._rebalance_policy = rebalance_policy",
"def unclaim(self):\n if self.state == states.UNCLAIMED:\n return\n self._claimer.unclaim(self, self.owner)\n self._change_state(states.UNCLAIMED)",
"def setKeepPolicy(self, policy):\n if not self.__loaded:\n self.__load()\n \n if policy > self.KeepMax:\n return\n if policy == self.__keepCookies:\n return\n \n self.__keepCookies = policy\n self.__saveTimer.changeOccurred()",
"def abort_resource_claim(self, context, claim):\n if self.disabled:\n return\n\n # un-claim the resources:\n if self.claims.pop(claim.claim_id, None):\n LOG.info(_(\"Aborting claim: %s\") % claim)\n values = claim.undo_claim(self.compute_node)\n self.compute_node = self._update(context, values)\n else:\n # can't find the claim. this may mean the claim already timed\n # out or it was already explicitly finished/aborted.\n LOG.info(_(\"Claim %d not found. It either timed out or was \"\n \"already explicitly finished/aborted\"), claim.claim_id)",
"def delete_resource(\n self,\n namespace: str = None,\n propagation_policy: str = \"Foreground\",\n grace_period_seconds: int = 10,\n ):\n names = [\n \"delete_namespaced_csistorage_capacity\",\n \"delete_csistorage_capacity\",\n ]\n\n body = client.V1DeleteOptions(\n propagation_policy=propagation_policy,\n grace_period_seconds=grace_period_seconds,\n )\n\n _kube_api.execute(\n action=\"delete\",\n resource=self,\n names=names,\n namespace=namespace,\n api_client=None,\n api_args={\"name\": self.metadata.name, \"body\": body},\n )",
"def _reclaim_queued_deletes(self, context):\n interval = CONF.reclaim_instance_interval\n if interval <= 0:\n LOG.debug(\"CONF.reclaim_instance_interval <= 0, skipping...\")\n return\n\n # TODO(comstud, jichenjc): Dummy quota object for now See bug 1296414.\n # The only case that the quota might be inconsistent is\n # the cloud node died between set instance state to SOFT_DELETED\n # and quota commit to DB. When cloud node starts again\n # it will have no idea the reservation is committed or not or even\n # expired, since it's a rare case, so marked as todo.\n quotas = objects.Quotas.from_reservations(context, None)\n\n filters = {'vm_state': vm_states.SOFT_DELETED,\n 'task_state': None,\n 'host': self.host}\n instances = objects.InstanceList.get_by_filters(\n context, filters,\n expected_attrs=objects.instance.INSTANCE_DEFAULT_FIELDS,\n use_slave=True)\n for instance in instances:\n if self._deleted_old_enough(instance, interval):\n bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(\n context, instance.uuid)\n LOG.info(_LI('Reclaiming deleted instance'), instance=instance)\n try:\n self._delete_instance(context, instance, bdms, quotas)\n except Exception as e:\n LOG.warning(_LW(\"Periodic reclaim failed to delete \"\n \"instance: %s\"),\n e, instance=instance)",
"def reset(self):\n super().reset()\n self.policy.reset()",
"def reset(self):\n super().reset()\n self.policy.reset()",
"def reload_storage_policies():\n global _POLICIES\n policy_conf = ConfigParser()\n policy_conf.read(SWIFT_CONF_FILE)\n try:\n _POLICIES = parse_storage_policies(policy_conf)\n except PolicyError as e:\n raise SystemExit('ERROR: Invalid Storage Policy Configuration '\n 'in %s (%s)' % (SWIFT_CONF_FILE, e))",
"def __init__(self, name=None, queue_mode=None, description=None, redrive_policy=None, max_consume_count=None, retention_hours=None):\n \n \n\n self._name = None\n self._queue_mode = None\n self._description = None\n self._redrive_policy = None\n self._max_consume_count = None\n self._retention_hours = None\n self.discriminator = None\n\n self.name = name\n if queue_mode is not None:\n self.queue_mode = queue_mode\n if description is not None:\n self.description = description\n if redrive_policy is not None:\n self.redrive_policy = redrive_policy\n if max_consume_count is not None:\n self.max_consume_count = max_consume_count\n if retention_hours is not None:\n self.retention_hours = retention_hours",
"def shrink_cache(cls, target_memory_use_ratio=None):\n cleanup = False\n if not target_memory_use_ratio:\n target_memory_use_ratio = cls.target_memory_use_ratio\n with cls._lock:\n if cls.memory_usage_ratio() > target_memory_use_ratio:\n cleanup = True\n cls._cache = deque(\n sorted(cls._cache, key=lambda i: i.score, reverse=True))\n start = time.time()\n while (cls.memory_usage_ratio() > target_memory_use_ratio\n and time.time() - start < 1 and cls._cache):\n try:\n cls._cache.pop().delete()\n except IndexError:\n break\n if cleanup:\n gc.collect()",
"def resources_gc_prefix(options, policy_config, policy_collection):\n\n # Classify policies by region\n policy_regions = {}\n for p in policy_collection:\n if p.execution_mode == 'poll':\n continue\n policy_regions.setdefault(p.options.region, []).append(p)\n\n regions = get_gc_regions(options.regions, policy_config)\n for r in regions:\n region_gc(options, r, policy_config, policy_regions.get(r, []))",
"def update_policy(self):\n pass",
"def elf_storage_policy(self, elf_storage_policy):\n\n self._elf_storage_policy = elf_storage_policy",
"def reclaim_unschedulable_nodes(self, new_desired_capacity):\n desired_capacity = min(self.max_size, new_desired_capacity)\n num_unschedulable = len(self.unschedulable_nodes)\n num_schedulable = self.actual_capacity - num_unschedulable\n \n if num_schedulable < desired_capacity:\n for node in self.unschedulable_nodes:\n if node.uncordon():\n num_schedulable += 1\n # Uncordon only what we need\n if num_schedulable == desired_capacity:\n break",
"def pre_network_policy_delete(self, resource_id):\n pass",
"def test_patch_hyperflex_ext_fc_storage_policy(self):\n pass",
"def test_patch_hyperflex_cluster_storage_policy(self):\n pass",
"def rebalance(self):\n log.info(\"Rebalancing partitions for group '%s'\", self.group_name)\n members = sorted(self.members)\n partitions = sorted(self.partitions)\n\n self.mapping = self.allocator_fn(members, partitions)\n\n for topic in self.allocation:\n log.debug(\n \"Allocation for topic '%s': partitions %s\",\n topic, \", \".join(map(str, self.allocation[topic]))\n )\n\n if self.on_rebalance:\n self.on_rebalance()",
"def create_storageclass(\n self,\n blockPool,\n sc_name_prefix=\"autotests-sc\",\n allow_volume_expansion=True,\n reclaim_policy=\"Delete\",\n fstype=\"xfs\",\n clusterNamespace=framework.config.ENV_DATA['cluster_namespace'],\n ):\n if self.name:\n sc_name = self.name\n else:\n sc_name = f\"{sc_name_prefix}-{get_random_str()}\"\n\n sc_data = {}\n sc_data['k8s_api_version'] = defaults.STORAGE_API_VERSION\n sc_data['storageclass_name'] = sc_name\n sc_data['volume_expansion'] = allow_volume_expansion\n sc_data['reclaimPolicy'] = reclaim_policy\n sc_data['blockPool'] = blockPool\n sc_data['clusterNamespace'] = clusterNamespace\n sc_data['fstype'] = fstype\n\n data = generate_yaml_from_jinja2_template_with_data(\n self.template_path,\n **sc_data\n )\n self.service_sc.create(body=data)\n\n return sc_name",
"def refresh(self):\n self._policies = self._get_policies()",
"def __init__(self, resource_name, opts=None, provisioner=None, allow_volume_expansion=None, allowed_topologies=None, metadata=None, mount_options=None, parameters=None, reclaim_policy=None, volume_binding_mode=None, __name__=None, __opts__=None):\n if __name__ is not None:\n warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning)\n resource_name = __name__\n if __opts__ is not None:\n warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning)\n opts = __opts__\n if not resource_name:\n raise TypeError('Missing resource name argument (for URN creation)')\n if not isinstance(resource_name, str):\n raise TypeError('Expected resource name to be a string')\n if opts and not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n\n __props__ = dict()\n\n __props__['apiVersion'] = 'storage.k8s.io/v1'\n __props__['kind'] = 'StorageClass'\n if provisioner is None:\n raise TypeError('Missing required property provisioner')\n __props__['provisioner'] = provisioner\n __props__['allowVolumeExpansion'] = allow_volume_expansion\n __props__['allowedTopologies'] = allowed_topologies\n __props__['metadata'] = metadata\n __props__['mountOptions'] = mount_options\n __props__['parameters'] = parameters\n __props__['reclaimPolicy'] = reclaim_policy\n __props__['volumeBindingMode'] = volume_binding_mode\n\n __props__['status'] = None\n\n parent = opts.parent if opts and opts.parent else None\n aliases = [\n pulumi.Alias(type_=\"kubernetes:storage.k8s.io/v1beta1:StorageClass\"),\n ]\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(\n version=version.get_version(),\n aliases=aliases,\n ))\n\n super(StorageClass, self).__init__(\n \"kubernetes:storage.k8s.io/v1:StorageClass\",\n resource_name,\n __props__,\n opts)",
"def reinvocation_policy(self) -> Optional[str]:\n return pulumi.get(self, \"reinvocation_policy\")",
"def reinvocation_policy(self) -> Optional[str]:\n return pulumi.get(self, \"reinvocation_policy\")",
"def test_delete_hyperflex_ext_fc_storage_policy(self):\n pass",
"def normal_policy_class():\n policy_class = ActorCriticCnnPolicy\n _ = locals()\n del _",
"def test_delete_hyperflex_cluster_storage_policy(self):\n pass",
"def pre_qos_forwarding_class_delete(self, resource_id):\n pass",
"def snmpqosqos_policy_reevalrate(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_policy_reevalrate\n\t\texcept Exception as e:\n\t\t\traise e"
]
| [
"0.6216241",
"0.5859391",
"0.5115956",
"0.5060897",
"0.5006874",
"0.49962115",
"0.48800308",
"0.48706514",
"0.48706514",
"0.48609537",
"0.48232886",
"0.48182634",
"0.4793153",
"0.47804967",
"0.47694635",
"0.4755424",
"0.47546563",
"0.475363",
"0.4752715",
"0.47368622",
"0.47350585",
"0.4731967",
"0.473056",
"0.47245854",
"0.47245854",
"0.47098947",
"0.46996152",
"0.4695802",
"0.4689162",
"0.46401563"
]
| 0.6900613 | 0 |
ControllerStatus is the status of Controller pods | def controller_status(self) -> Optional['outputs.CSIUnityStatusControllerStatus']:
return pulumi.get(self, "controller_status") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def controller_status(self) -> Optional['outputs.CSIVXFlexOSStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def status(self) -> ControllerStatus:\n return self._status",
"def controller_status(self) -> Optional['outputs.CSIPowerStoreStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def controller_status(self) -> Optional['outputs.CSIIsilonStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def controller_status(self) -> Optional['outputs.CSIPowerMaxStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def status_controller(cls, args, config):\n logging.debug(\"MOLNSController.status_controller(args={0})\".format(args))\n if len(args) > 0:\n try:\n controller_obj = cls._get_controllerobj(args, config)\n except MOLNSException:\n return {}\n if controller_obj is None: return {}\n # Check if any instances are assigned to this controller\n instance_list = config.get_controller_instances(controller_id=controller_obj.id)\n table_data = []\n if len(instance_list) > 0:\n for i in instance_list:\n # provider_name = config.get_object_by_id(i.provider_id, 'Provider').name\n try:\n p = config.get_object_by_id(i.provider_id, 'Provider')\n provider_name = p.name\n except DatastoreException as e:\n provider_name = 'ERROR: {0}'.format(e)\n controller_name = config.get_object_by_id(i.controller_id, 'Controller').name\n status = controller_obj.get_instance_status(i)\n table_data.append(\n [controller_name, status, 'controller', provider_name, i.provider_instance_identifier,\n i.ip_address])\n\n else:\n return {'msg': \"No instance running for this controller\"}\n # Check if any worker instances are assigned to this controller\n instance_list = config.get_worker_instances(controller_id=controller_obj.id)\n if len(instance_list) > 0:\n for i in instance_list:\n worker_name = config.get_object_by_id(i.worker_group_id, 'WorkerGroup').name\n worker_obj = cls._get_workerobj([worker_name], config)\n # provider_name = config.get_object_by_id(i.provider_id, 'Provider').name\n try:\n p = config.get_object_by_id(i.provider_id, 'Provider')\n provider_name = p.name\n except DatastoreException as e:\n provider_name = 'ERROR: {0}'.format(e)\n status = worker_obj.get_instance_status(i)\n table_data.append(\n [worker_name, status, 'worker', provider_name, i.provider_instance_identifier, i.ip_address])\n # table_print(['name','status','type','provider','instance id', 'IP address'],table_data)\n r = {'type': 'table', 'column_names': ['name', 'status', 'type', 'provider', 'instance id', 'IP address'],\n 'data': table_data}\n return r\n else:\n instance_list = config.get_all_instances()\n if len(instance_list) > 0:\n table_data = []\n for i in instance_list:\n provider_obj = config.get_object_by_id(i.provider_id, 'Provider')\n if provider_obj is None:\n continue\n provider_name = provider_obj.name\n controller_name = config.get_object_by_id(i.controller_id, 'Controller').name\n if i.worker_group_id is not None:\n worker_name = config.get_object_by_id(i.worker_group_id, 'WorkerGroup').name\n table_data.append([worker_name, 'worker', provider_name, i.provider_instance_identifier])\n else:\n table_data.append(\n [controller_name, 'controller', provider_name, i.provider_instance_identifier])\n\n r = {'type': 'table', 'column_names': ['name', 'type', 'provider', 'instance id'], 'data': table_data}\n r['msg'] = \"\\n\\tUse 'molns status NAME' to see current status of each instance.\"\n return r\n else:\n return {'msg': \"No instance found\"}",
"def getStatus(self, request, context):\n \n statusDrone = str(self.vehicle.system_status).rpartition(':')[2]\n\t \n return droneconnect_pb2.Status(status = statusDrone)",
"def status(self):\n return status_dict[self._get_property_(self.STATUS).upper()]",
"def getStatus():\n return json.dumps({'camera': Camera.status(), 'rover': rover.status()}), 200",
"def status(self):\n return STATUS[self.fields['status']]",
"def model_status():\n return juju.CLIENT.Client(request=\"FullStatus\")",
"def status(self):\n return self.get(self._names[\"status\"])",
"def get_status():\n # TODO tie this in with requests that can fetch the status of the pod from the cluster\n\n if request.method == \"GET\":\n \"\"\"\n request looks like:\n {\n \"workflow_name\": \"test-workflow\"\n }\n \"\"\"\n\n req = request.get_json(force=True)\n if workflow_exists(req['workflow_name']):\n # TODO fit into database\n # Get the pod by workflow and read the status\n # status = RUNNING_JOBS[req['workflow_name']].get_pod_status()\n response = {\n \"status\": 'Still running'\n }\n else:\n app.logger.error(\n f\"Received request asking the pod status in {req['workflow_name']} \"\n f\"but this workflow is not present in running jobs\"\n f\"record. Nothing to do.\")\n response = {\n \"status\": \"Not running\"\n }\n\n return jsonify(response)",
"def getStatus(self):\n return self.__status",
"async def _status():\n # TODO(Deepankar): should we add versions of executors?\n return {\n 'status_code': status.HTTP_200_OK,\n 'jina_version': jina_version\n }",
"def getStatus(self):\n return self._status",
"def initStatus(status):\n if status == 0 :\n print(\"Supported controller connected\")\n elif status < 0 :\n print(\"No supported controller detected\")\n else:\n print(\"Waiting for controller {}\".format(status) )",
"def status(self):\n return {\n 'hawkular_services': self._hawkular.status(),\n 'alerts': self.alert.status(),\n 'inventory': self.inventory.status(),\n 'metrics': self.metric.status()\n }",
"def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status",
"def _getCurrentComponentStatus(self):\n resOverall = self.sysAdminClient.getOverallStatus()\n if not resOverall['OK']:\n return resOverall\n currentStatus = {'Down': set(), 'Run': set(), 'All': set()}\n informationDict = resOverall['Value']\n for systemsDict in informationDict.values():\n for system, instancesDict in systemsDict.items():\n for instanceName, instanceInfoDict in instancesDict.items():\n identifier = '%s__%s' % (system, instanceName)\n runitStatus = instanceInfoDict.get('RunitStatus')\n if runitStatus in ('Run', 'Down'):\n currentStatus[runitStatus].add(identifier)\n\n currentStatus['All'] = currentStatus['Run'] | currentStatus['Down']\n return S_OK(currentStatus)",
"def status(self):\n return self._data['status']",
"def _get_status_obj(self):\n\n status = Status(self._config.dirout, name=self._config.name,\n hardware=self._config.hardware)\n return status",
"def get_status(self):\n return self._status",
"def status(self):\n\t\treturn self._status",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")"
]
| [
"0.78950554",
"0.7656238",
"0.7582138",
"0.74687314",
"0.7435508",
"0.6915362",
"0.68638337",
"0.5934879",
"0.5923136",
"0.5888573",
"0.58432204",
"0.58387125",
"0.58308405",
"0.5828534",
"0.58209366",
"0.5818346",
"0.5816",
"0.5798976",
"0.57974327",
"0.57905334",
"0.5790449",
"0.57745886",
"0.5757075",
"0.5755403",
"0.57445693",
"0.57413834",
"0.57413834",
"0.57413834",
"0.57413834",
"0.57413834"
]
| 0.7717445 | 1 |
DriverHash is a hash of the driver specification | def driver_hash(self) -> Optional[int]:
return pulumi.get(self, "driver_hash") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def calc_statistics_hash(self) -> bytes:\n return b\"somehash\"",
"def hash(self):\n hash_length = self.conf.get(\"hash_length\", 7)\n if hash_length != 0:\n return self.spec.dag_hash(length=hash_length)\n return None",
"def hash(self) -> str:\n return pulumi.get(self, \"hash\")",
"def get_hash(self):\r\n return",
"def get_hash(self, params):\n return self.sha",
"def hash(self) -> bytes:",
"def hash(self):\n return self.block_header.hash",
"def get_hash(self):\n return self.__hash",
"def _build_driver_dict(self):\n self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, False)",
"def hash(self):\n return self._hash",
"def get_hash(self):\n return freeze_dict(self.get_hash_params())",
"def hash(self):\n return Hash.dhash(bytes(self))",
"def get_hash(self) -> str:\n return self.__hash.hexdigest()",
"def calc_statistics_hash(self) -> bytes:\n raise NotImplementedError()",
"def hash_key(self):",
"def hash(self) -> str:\r\n ...",
"def get_hash(self):\r\n block_data = self.prev_hash\r\n block_data += bytearray(struct.pack(\"!f\", self.time))\r\n block_data += self.user_id.encode()\r\n block_data += self.signature.encode()\r\n block_data += self.choice.encode()\r\n\r\n digest = hashes.Hash(hashes.SHA256())\r\n digest.update(block_data)\r\n return digest.finalize()",
"def current_hash(self):",
"def hash_string(self):\n return self._hash_string",
"def _calculate_hash(self) -> str:\n data_str = str(self.version) + str(self.index) + self.pre_hash + str(self.timestamp) + str(self.data)\n return sha256(data_str.encode('utf-8')).hexdigest()",
"def hash_algo(self) -> str:\n return self._hash_algo",
"def calc_info_hash(self):\n return \"infohash\"",
"def get_hash(self):\n return \"%03d_%03d_%03d\" % (self.chest_region, self.chest_type, self.feature_type)",
"def hash(self):\n return xxhash.xxh64(self._pwm_to_str(3)).hexdigest()",
"def _Hash(self):\n out = [self.key.string_id()]\n properties = self._PropList()\n for prop in properties:\n out.append(unicode(getattr(self, prop, '')))\n to_hash = ''.join(out)\n return hashlib.md5(to_hash.encode('utf-8')).hexdigest()",
"def get_info_hash(self):\n return self.info_hash",
"def get_hash(self) -> str:\n if self.call_hash:\n # Derived state from a call_node.\n return hash_struct([\"Handle\", self.fullname, \"call_hash\", self.key, self.call_hash])\n else:\n # Initial state.\n return hash_struct([\"Handle\", self.fullname, \"init\", self.key, self.args, self.kwargs])",
"def distro_hash(self):\n return self._distro_hash",
"def _Hash(self):\n fullhash = util.PrefixHash(self.key_bytes)\n return util.Base64WSEncode(fullhash[:constants.KEY_HASH_SIZE])",
"def get_hash_from_model(model):\n hyperparams = get_hyperparams_from_model(model)\n model_hash = hyperparams['hash']\n return model_hash"
]
| [
"0.6173768",
"0.6065773",
"0.6036141",
"0.59962916",
"0.59433484",
"0.59169525",
"0.5836508",
"0.57406753",
"0.5732935",
"0.5724046",
"0.5717286",
"0.56912756",
"0.5676674",
"0.5643813",
"0.56277317",
"0.5624637",
"0.5575219",
"0.55626106",
"0.5556711",
"0.5542709",
"0.5527764",
"0.5526823",
"0.5519025",
"0.54790527",
"0.5469677",
"0.54574543",
"0.5450425",
"0.54475355",
"0.5446643",
"0.5442817"
]
| 0.7525444 | 1 |
NodeStatus is the status of Controller pods | def node_status(self) -> Optional['outputs.CSIUnityStatusNodeStatus']:
return pulumi.get(self, "node_status") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def node_status(self) -> Optional['outputs.CSIVXFlexOSStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIIsilonStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_statuses(self) -> pulumi.Output[Sequence['outputs.NodeBalancerConfigNodeStatus']]:\n return pulumi.get(self, \"node_statuses\")",
"def node_status(self) -> Optional['outputs.CSIPowerStoreStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIPowerMaxStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def status(self):\n url = API_PATH[\"node_status\"].format(tuneUuid=self._parentTune.uuid())\n rsp_json = self._parse(self._get(url))\n\n for status_obj in rsp_json:\n if status_obj[\"nodeUuid\"] == self.uuid():\n return self._new_instance(NodeStatus, status_obj, node=self)\n return None",
"def node_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NodeBalancerConfigNodeStatusArgs']]]]:\n return pulumi.get(self, \"node_statuses\")",
"def getNodeStatus(self,status = 0):\n if status:\n self.node_status = status\n return self.node_status",
"def status(self) -> NodeStatus:\n return self._status",
"def controller_status(self) -> Optional['outputs.CSIVXFlexOSStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def test_get_node_status(self):\n pass",
"def controller_status(self) -> Optional['outputs.CSIIsilonStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def getNodeStatus(self,node):\n data = self.connect('get','nodes/%s/status' % (node),None)\n return data",
"def handle_status(self, request):\n \"\"\"\n @api {get} /status Get node status\n @apiName GetNodeStatus\n @apiGroup Node\n @apiVersion 1.1.0\n\n @apiSuccess {Boolean} execution_enabled Task execution is enabled on the node.\n @apiSuccess {Boolean} leader Node is the leader.\n @apiSuccess {String} name Node name.\n @apiSuccess {Boolean} scheduler_running The scheduler is running on the node.\n @apiSuccess {String} address Node IP address.\n @apiSuccess {String[]} pools Pools in which the node is registered.\n @apiSuccess {Object} running_processes Processes running on the host.\n @apiSuccess {Object} running_processes.process Process.\n @apiSuccess {String} running_processes.process.start_time Time the process started, ISO 8601 formatted.\n @apiSuccess {String} running_processes.process.task ID of the task.\n @apiSuccess {Boolean} cluster_joined Node has joined the cluster.\n @apiSuccess {Boolean} contending_for_lead Node is contending for lead.\n @apiSuccess {Boolean} pools_joined Node has joined its pools.\n\n @apiSuccessExample {json} Example response:\n {\n \"execution_enabled\": true,\n \"leader\": false,\n \"name\": \"node2\",\n \"scheduler_running\": false,\n \"address\": \"127.0.0.1:32002\",\n \"pools\": [\"pool1\", \"pool2\"],\n \"running_processes\": {\n \"b26e5cc2ef3f11e4817b0026b951c045\": {\n \"start_time\": \"2015-04-30T13:49:18.351494+00:00\",\n \"task\": \"508b4b72e44611e49e76c81f66cd0cca\"\n }\n },\n \"cluster_joined\": true,\n \"contending_for_lead\": true,\n \"pools_joined\": true\n }\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n status = {\n 'name': self.cluster.nodename,\n 'address': self.cluster.addr,\n 'pools': self.cluster.mypools,\n 'leader': self.cluster.is_leader,\n 'cluster_joined': self.cluster.cluster_joined,\n 'pools_joined': self.cluster.pools_joined,\n 'contending_for_lead': self.cluster.contending_for_lead,\n\n 'execution_enabled': self.manager.enabled,\n 'running_processes': dict([ (execid, { 'task': details['task'], 'start_time': details['start_time'].isoformat() }) for (execid, details) in self.manager.running_processes.items() ]),\n\n 'scheduler_running': self.cluster.scheduler.running\n }\n\n return HTTPReply(body = json.dumps(status), headers = headers)",
"def controller_status(self) -> Optional['outputs.CSIUnityStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def get_status():\n # TODO tie this in with requests that can fetch the status of the pod from the cluster\n\n if request.method == \"GET\":\n \"\"\"\n request looks like:\n {\n \"workflow_name\": \"test-workflow\"\n }\n \"\"\"\n\n req = request.get_json(force=True)\n if workflow_exists(req['workflow_name']):\n # TODO fit into database\n # Get the pod by workflow and read the status\n # status = RUNNING_JOBS[req['workflow_name']].get_pod_status()\n response = {\n \"status\": 'Still running'\n }\n else:\n app.logger.error(\n f\"Received request asking the pod status in {req['workflow_name']} \"\n f\"but this workflow is not present in running jobs\"\n f\"record. Nothing to do.\")\n response = {\n \"status\": \"Not running\"\n }\n\n return jsonify(response)",
"def test_get_node_status_batterystatus(self):\n pass",
"def controller_status(self) -> Optional['outputs.CSIPowerStoreStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def controller_status(self) -> Optional['outputs.CSIPowerMaxStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def status(self):\n if Daemon.status(self) != 0:\n return 1\n \n # Load decoy logger\n self.load_outputs(decoy=True)\n\n # Load node pool & print status\n try:\n self.pool = PLNodePool(self)\n sys.stdout.write(self.status_str())\n except PLNodePoolException:\n sys.stdout.write(\"No node found.\\n\")\n\n return 0",
"def status(self) -> ControllerStatus:\n return self._status",
"def status(self):\n if self.error is not None:\n return Node.STATUS_ERROR\n if not self.exists():\n return Node.STATUS_UNDEFINED\n state_code_map = {\n libvirt.VIR_DOMAIN_NOSTATE: Node.STATUS_UNDEFINED,\n libvirt.VIR_DOMAIN_RUNNING: Node.STATUS_UP,\n libvirt.VIR_DOMAIN_BLOCKED: Node.STATUS_UP,\n libvirt.VIR_DOMAIN_PAUSED: Node.STATUS_UP,\n libvirt.VIR_DOMAIN_SHUTDOWN: Node.STATUS_DOWN,\n libvirt.VIR_DOMAIN_SHUTOFF: Node.STATUS_DOWN,\n libvirt.VIR_DOMAIN_CRASHED: Node.STATUS_ERROR,\n libvirt.VIR_DOMAIN_PMSUSPENDED: Node.STATUS_DOWN,\n }\n try:\n dom = self._get_domain()\n return state_code_map[dom.info()[0]]\n except libvirt.libvirtError as err:\n err_code = err.get_error_code()\n if err_code == libvirt.VIR_ERR_NO_DOMAIN:\n # The domains for sandbox nodes are temporal, so there's\n # no real mapping of \"no domain found\" other than the\n # node should be considered not started.\n return Node.STATUS_DOWN\n else:\n return Node.STATUS_ERROR\n except Exception as err:\n self.LOG.error(err)\n return Node.STATUS_ERROR",
"def iteration(self, node_status=True):\n self.clean_initial_status(list(self.available_statuses.values()))\n actual_status = {node: nstatus for node, nstatus in future.utils.iteritems(self.status)}\n\n # can remove this step\n # only tells us that the activated nodes are active while the other nodes are inactive\n '''\n {'iteration': 0, 'status': {0: 0, 1: 1, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0, 11: 0, 12: 0, 13: 0, 14: 0, 15: 0, 16: 0, 17: 0, 18: 0, 19: 0, 20: 0, 21: 0, 22: 0, 23: 0, 24: 0, 25: 0, 26: 0, 27: 0, 28: 0, 29: 0, 30: 0, 31: 0, 32: 0, 33: 0, 34: 0, 35: 0, 36: 0, 37: 0, 38: 0, 39: 0, 40: 0, 41: 0, 42: 0, 43: 0, 44: 0, 45: 0, 46: 0, 47: 0, 48: 0, 49: 0, 50: 0, 51: 0, 52: 0, 53: 0, 54: 0, 55: 0, 56: 0, 57: 0, 58: 0, 59: 0, 60: 0, 61: 0, 62: 0, 63: 0, 64: 0, 65: 0, 66: 0, 67: 0, 68: 0, 69: 0, 70: 0, 71: 0, 72: 0, 73: 0, 74: 0, 75: 0, 76: 0, 77: 0, 78: 0, 79: 0, 80: 0, 81: 0, 82: 0, 83: 0, 84: 0, 85: 0, 86: 0, 87: 0, 88: 0, 89: 0, 90: 0, 91: 0, 92: 0, 93: 0, 94: 0, 95: 0, 96: 0, 97: 0, 98: 0, 99: 0, 100: 0, 101: 0, 102: 0, 103: 0, 104: 0, 105: 0, 106: 0, 107: 0, 108: 0, 109: 0, 110: 0, 111: 0, 112: 0, 113: 0, 114: 0, 115: 0, 116: 0, 117: 0, 118: 0, 119: 0, 120: 0, 121: 0, 122: 0, 123: 0, 124: 0, 125: 0, 126: 0, 127: 0, 128: 0, 129: 0, 130: 0, 131: 0, 132: 0, 133: 0, 134: 0, 135: 0, 136: 0, 137: 0, 138: 0, 139: 0, 140: 0, 141: 0, 142: 0, 143: 0, 144: 0, 145: 0, 146: 0, 147: 0, 148: 0, 149: 0}, \n 'node_count': {0: 149, 1: 1}, 'status_delta': {0: 0, 1: 0}}\n '''\n # if self.actual_iteration == 0:\n # self.actual_iteration += 1\n # delta, node_count, status_delta = self.status_delta(actual_status)\n # if node_status:\n # return {\"iteration\": 0, \"status\": actual_status.copy(),\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy()}\n # else:\n # return {\"iteration\": 0, \"status\": {},\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy()}\n\n count_attempts = 0\n # print(self.params['nodes']['attempt'])\n # can consider \"for u in activated_nodes:\"\n # saves time especially in a big graph\n for u in self.graph.nodes: # for each node\n if self.status[u] != 1: # only select nodes with status = 1 (infected/active)\n continue\n if self.params['nodes']['attempt'][u] != 0: # and attempt = 0 (no previous attempts)\n continue\n # print(\"go\")\n\n neighbors = list(self.graph.neighbors(u)) # neighbors and successors (in DiGraph) produce the same result\n # get neighbors of this infected/active node\n\n # Standard threshold\n if len(neighbors) > 0: \n for v in neighbors: # for each neighbor\n if actual_status[v] == 0: # if their status = 0 (susceptible/inactive)\n key = (u, v) # key = (active node, inactive node) or (infected node, susceptible node)\n\n # Individual specified thresholds\n if 'threshold' in self.params['edges']: # if edge has a threshold\n if key in self.params['edges']['threshold']: # if key (u, v) in params... but why would it be in here? oh cos edges if from node to node so tuple (u , v)\n threshold = self.params['edges']['threshold'][key] # replace key?\n elif (v, u) in self.params['edges']['threshold'] and not self.graph.directed: # direction affects this. v to u instead of u to v. 
yup this\n threshold = self.params['edges']['threshold'][(v, u)] # similarly put in key in (but opposite direction)\n # oh this is the actual threshold used below for the flip i think\n \n flip = np.random.random_sample() # random float in half-open interval [0.0, 1.0)\n if flip <= threshold: # if less than threshold \n actual_status[v] = 1 # neighbor becomes infected/active\n # actual_status[v] = 1 # probability activated is 1\n self.params['nodes']['attempt'][u] = 1\n count_attempts += 1\n\n \n delta, node_count, status_delta = self.status_delta(actual_status)\n self.status = actual_status\n # print(\"self status\")\n # print(self.status)\n self.actual_iteration += 1\n if count_attempts == 0:\n self.stop = True\n\n # this one remains but change to ensure the output fits to what we want\n # we only want the active set size at the end of the iteration\n # can get from the last 'node_count': {0: inactive_set_size, 1: active_set_size}\n # if node_status:\n # return {\"iteration\": self.actual_iteration - 1, \"status\": delta.copy(),\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy(), 'active_set_size': node_count.copy()[1]}\n # else:\n # return {\"iteration\": self.actual_iteration - 1, \"status\": {},\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy()}\n \n active_set_size = node_count.copy()[1]\n status = delta.copy()\n # print(status)\n \n return active_set_size, status",
"def compute_single_node_status(\n node_name_message_map: Dict[str, Node], node_name: str\n) -> \"StatusValue\":\n\n node = node_name_message_map[node_name]\n\n if (\n node.status != Status.STATUS_UNSPECIFIED\n ): # if the current node's status was already computed\n return node.status\n\n status_count_map: Dict[\"StatusValue\", int] = defaultdict(int)\n for child_name in node.child_names:\n status_count_map[\n compute_single_node_status(node_name_message_map, child_name)\n ] += 1\n\n try:\n for dependency in node.dependencies:\n status_count_map[\n compute_single_node_status(\n node_name_message_map, dependency.target_name\n )\n ] += 1\n except AttributeError:\n pass\n\n try:\n for sli in node.slis:\n status_count_map[compute_sli_status(sli)] += 1\n except AttributeError:\n pass\n\n node.status = compute_status_from_count_map(status_count_map)\n\n if (\n node.override_status != Status.STATUS_UNSPECIFIED\n ): # if the current node's status was manually overwritten\n # notice we place this at the end, since we still want to compute the node's status\n # to display in the dropdown menu (regardless of the override)\n return node.override_status\n\n return node.status",
"def getContainerStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/lxc/%s/status/current' % (node,vmid),None)\n return data",
"def handle_cluster_status(self, request):\n \"\"\"\n @api {get} /cluster/status Get cluster status\n @apiName GetClusterStatus\n @apiGroup Cluster\n @apiVersion 1.0.0\n\n @apiSuccess {Object} nodes Nodes in the cluster.\n @apiSuccess {Object} nodes.node Node.\n @apiSuccess {String[]} nodes.node.pools Pools in which the node is registered.\n @apiSuccess {String} nodes.node.address IP address of the node.\n @apiSuccess {String} leader Leader node.\n\n @apiSuccessExample {json} Example response:\n {\n \"nodes\": {\n \"node1\": {\n \"pools\": [\"pool1\", \"pool2\"],\n \"address\": \"127.0.0.1:32001\"\n },\n \"node2\": {\n \"pools\": [\"pool1\"],\n \"address\": \"127.0.0.1:32002\"\n },\n \"node3\": {\n \"pools\": [\"pool2\"],\n \"address\": \"127.0.0.1:32003\"\n },\n },\n \"leader\": \"node1\"\n }\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n status = {\n 'nodes': self.cluster.nodes,\n 'leader': self.cluster.leader\n }\n\n return HTTPReply(body = json.dumps(status), headers = headers)",
"def status(self):\n return {\n 'id': 'status',\n 'protocol_version': 'PV62',\n 'network': self.origin_node.network.name,\n 'td': self.origin_node.chain.head.header.difficulty,\n 'best_hash': self.origin_node.chain.head.header.hash,\n 'genesis_hash': self.origin_node.chain.genesis.header.hash,\n 'size': kB_to_MB(self._message_size['status'])\n }",
"def getClusterStatus(self):\n data = self.connect('get','cluster/status', None)\n return data",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def node_num_cpu(self) -> int:\n stdout, _, _ = RunKubectlCommand(\n ['get', 'nodes', '-o', 'jsonpath={.items[0].status.capacity.cpu}'])\n return int(stdout)"
]
| [
"0.75052977",
"0.7269733",
"0.71536577",
"0.7088996",
"0.70575774",
"0.70387757",
"0.6946383",
"0.6911487",
"0.68622947",
"0.6709298",
"0.6686322",
"0.65567106",
"0.6555048",
"0.6541361",
"0.6439678",
"0.63545966",
"0.63124776",
"0.62752545",
"0.62047577",
"0.6166106",
"0.60819995",
"0.60423917",
"0.60028344",
"0.5986068",
"0.59765726",
"0.59678155",
"0.59529984",
"0.59340745",
"0.5865184",
"0.5859063"
]
| 0.73668116 | 1 |
CSIVXFlexOSSpec defines the desired state of CSIVXFlexOS | def __init__(__self__, *,
driver: 'outputs.CSIVXFlexOSSpecDriver'):
pulumi.set(__self__, "driver", driver) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__, *,\n common: 'outputs.CSIVXFlexOSSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIVXFlexOSSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIVXFlexOSSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def driver(self) -> 'outputs.CSIVXFlexOSSpecDriver':\n return pulumi.get(self, \"driver\")",
"def test_set_st_to_vx(self, cpu):\n cpu.V_register = bytearray([1, 5, 8, 12, 15, 18, 29, 53,\n 78, 102, 158, 183, 202, 234, 255, 0])\n for x in range(0x0, 0xF):\n cpu.opcode = 0xF018 | (x << 8)\n cpu.set_st_to_vx()\n assert(cpu.sound_timer == cpu.V_register[x])",
"def test_controller_status_from_knx_other_bits_set(self):\n assert DPTControllerStatus.from_knx((0x21,)) == HVACOperationMode.COMFORT\n assert DPTControllerStatus.from_knx((0x23,)) == HVACOperationMode.STANDBY\n assert DPTControllerStatus.from_knx((0x27,)) == HVACOperationMode.NIGHT\n assert (\n DPTControllerStatus.from_knx((0x2F,)) == HVACOperationMode.FROST_PROTECTION\n )",
"def test_feat_parity_cpuid_mpx(vm_builder, cpu_template, microvm, guest_kernel, disk):\n # fmt: off\n must_be_set = []\n must_be_unset = [\n (0x7, 0x0, \"ebx\",\n (1 << 14) # MPX\n ),\n ]\n # fmt: on\n\n check_cpuid_feat_flags(\n vm_builder,\n cpu_template,\n microvm,\n guest_kernel,\n disk,\n must_be_set,\n must_be_unset,\n )",
"def test_controller_status_from_knx(self):\n assert DPTControllerStatus.from_knx((0x21,)) == HVACOperationMode.COMFORT\n assert DPTControllerStatus.from_knx((0x22,)) == HVACOperationMode.STANDBY\n assert DPTControllerStatus.from_knx((0x24,)) == HVACOperationMode.NIGHT\n assert (\n DPTControllerStatus.from_knx((0x28,)) == HVACOperationMode.FROST_PROTECTION\n )",
"def test_st_facets00401m15_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m15.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m15_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_fc(self):\n self.assertEqual(self.nhf.metadata[\"ndim\"], 3)\n self.assertEqual(self.nhf.metadata[\"ngroup\"], 4)\n self.assertEqual(self.nhf.metadata[\"ninti\"], 5)\n self.assertEqual(self.nhf.metadata[\"nintj\"], 5)\n self.assertEqual(self.nhf.metadata[\"nintk\"], 6)\n self.assertEqual(self.nhf.metadata[\"nSurf\"], 6)\n self.assertEqual(self.nhf.metadata[\"nMom\"], 5)\n self.assertEqual(self.nhf.metadata[\"nintxy\"], 19)\n self.assertEqual(self.nhf.metadata[\"npcxy\"], 144)\n self.assertEqual(self.nhf.metadata[\"iaprx\"], 4)\n self.assertEqual(self.nhf.metadata[\"iaprxz\"], 3)\n\n variantControlInfo = nhflux.FILE_SPEC_1D_KEYS_VARIANT11\n for info in variantControlInfo:\n self.assertTrue(info not in self.nhf.metadata)",
"def test_st_facets00401m14_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m14.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m14_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00401m16_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m16.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m16_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_find_dispersion_axis():\n dm = SlitModel()\n\n dm.meta.wcsinfo.dispersion_direction = 1 # horizontal\n assert find_dispersion_axis(dm) == 0 # X axis for wcs functions\n\n dm.meta.wcsinfo.dispersion_direction = 2 # vertical\n assert find_dispersion_axis(dm) == 1 # Y axis for wcs functions",
"def test_st_facets00401m11_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m11.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m11_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_set_vx_to_vx_shl(self, cpu):\n for x in range(0x0, 0xF):\n for v in range(0x0, 0xFF):\n cpu.V_register[x] = v\n cpu.opcode = 0x800E | (x << 8)\n cpu.set_vx_to_vx_shl()\n if v <= 0x7F:\n assert(cpu.V_register[0xF] == 0)\n assert(cpu.V_register[x] == 2*v)\n else:\n assert(cpu.V_register[0xF] == 1)\n assert(cpu.V_register[x] == 2*v & 0xFF)",
"def test_st_facets00401m8_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m8.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m8_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00401m17_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m17.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m17_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00401m13_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m13.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m13_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00201m14_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m14.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m14_p.xml\",\n class_name=\"Test\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00401m18_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m18.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00401m/ST_facets00401m18_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00201m16_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m16.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m16_p.xml\",\n class_name=\"Test\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_controller_status_to_knx(self):\n with pytest.raises(ConversionError):\n DPTControllerStatus.to_knx(HVACOperationMode.AUTO)\n assert DPTControllerStatus.to_knx(HVACOperationMode.COMFORT) == (0x21,)\n assert DPTControllerStatus.to_knx(HVACOperationMode.STANDBY) == (0x22,)\n assert DPTControllerStatus.to_knx(HVACOperationMode.NIGHT) == (0x24,)\n assert DPTControllerStatus.to_knx(HVACOperationMode.FROST_PROTECTION) == (0x28,)",
"def test_st_facets00201m15_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m15.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m15_p.xml\",\n class_name=\"Test\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def __init__(__self__, *,\n common: 'outputs.CSIPowerMaxSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIPowerMaxSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIPowerMaxSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def test_st_facets00201m8_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m8.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m8_p.xml\",\n class_name=\"Test\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def getInputSpecification(cls):\n specs = super(NuSVR, cls).getInputSpecification()\n specs.description = r\"\"\"The \\xmlNode{NuSVR} \\textit{Nu-Support Vector Regression} is an Nu-Support Vector Regressor.\n It is very similar to SVC but with the addition of the hyper-parameter Nu for controlling the\n number of support vectors. However, unlike NuSVC, where nu replaces C,\n here nu replaces the parameter epsilon of epsilon-SVR.\n \\zNormalizationPerformed{NuSVR}\n \"\"\"\n specs.addSub(InputData.parameterInputFactory('nu', contentType=InputTypes.FloatType,\n descr=r\"\"\"An upper bound on the fraction of margin errors and\n a lower bound of the fraction of support vectors. Should be in the interval $(0, 1]$.\"\"\", default=0.5))\n specs.addSub(InputData.parameterInputFactory('C', contentType=InputTypes.FloatType,\n descr=r\"\"\"Regularization parameter. The strength of the regularization is inversely\n proportional to C.\n Must be strictly positive. The penalty is a squared l2 penalty.\"\"\", default=1.0))\n specs.addSub(InputData.parameterInputFactory(\"kernel\", contentType=InputTypes.makeEnumType(\"kernel\", \"kernelType\",['linear','poly',\n 'rbf','sigmoid']),\n descr=r\"\"\"Specifies the kernel type to be used in the algorithm. It must be one of\n ``linear'', ``poly'', ``rbf'' or ``sigmoid''.\"\"\", default='rbf'))\n specs.addSub(InputData.parameterInputFactory(\"degree\", contentType=InputTypes.IntegerType,\n descr=r\"\"\"Degree of the polynomial kernel function ('poly').Ignored by all other kernels.\"\"\",\n default=3))\n specs.addSub(InputData.parameterInputFactory(\"gamma\", contentType=InputTypes.FloatType,\n descr=r\"\"\"Kernel coefficient for ``poly'', ``rbf'' or ``sigmoid''. If not input, then it uses\n $1 / (n_features * X.var())$ as value of gamma\"\"\", default=\"scale\"))\n specs.addSub(InputData.parameterInputFactory(\"coef0\", contentType=InputTypes.FloatType,\n descr=r\"\"\"Independent term in kernel function\"\"\", default=0.0))\n specs.addSub(InputData.parameterInputFactory(\"tol\", contentType=InputTypes.FloatType,\n descr=r\"\"\"Tolerance for stopping criterion\"\"\", default=1e-3))\n specs.addSub(InputData.parameterInputFactory(\"cache_size\", contentType=InputTypes.FloatType,\n descr=r\"\"\"Size of the kernel cache (in MB)\"\"\", default=200.))\n specs.addSub(InputData.parameterInputFactory(\"shrinking\", contentType=InputTypes.BoolType,\n descr=r\"\"\"Whether to use the shrinking heuristic.\"\"\", default=True))\n specs.addSub(InputData.parameterInputFactory(\"max_iter\", contentType=InputTypes.IntegerType,\n descr=r\"\"\"Hard limit on iterations within solver.``-1'' for no limit\"\"\", default=-1))\n return specs",
"def test_st_facets00201m11_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m11.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m11_p.xml\",\n class_name=\"Test\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00505m8_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00505m/ST_facets00505m8.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00505m/ST_facets00505m8_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00501m14_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00501m/ST_facets00501m14.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00501m/ST_facets00501m14_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_header_update8(self, capsys):\n\n # Prepare input files.\n self.get_data(\"input\", \"ocu252cmq_raw.fits\")\n self.get_data(\"input\", \"ocu252cmq_spt.fits\")\n\n capsys.readouterr()\n\n tastis('ocu252cmq_raw.fits', update=True)\n\n captured = capsys.readouterr()\n assert captured.out == \"===============================================================================\\n\" \\\n \"ocu252cmq HST/STIS MIRVIS F28X50OII ACQ/POINT\\n\" \\\n \"prop: 14143 visit: 52 line: 1 target: BD+41-3306\\n\" \\\n \"obs date, time: 2016-06-06 08:30:05 exposure time: 2.10\\n\" \\\n \"dom GS/FGS: N2JU001340F2 sub-dom GS/FGS: N2K1001229F1\\n\" \\\n \"ACQ params: bias sub: 1510 checkbox: 3 method: FLUX CENTROID\\n\" \\\n \"subarray (axis1,axis2): size=(100,100) corner=(487,466)\\n\" \\\n \"-------------------------------------------------------------------------------\\n\" \\\n \"Coarse locate phase: Target flux in max checkbox (DN): 1442\\n\" \\\n \"\\n\" \\\n \" global local\\n\" \\\n \" axis1 axis2 axis1 axis2\\n\" \\\n \"Target location: 527.8 513.1 41.8 48.1\\n\" \\\n \"\\n\" \\\n \" axis1 axis2 axis1 axis2 V2 V3\\n\" \\\n \" (pixels) (arcsec) (arcsec)\\n\" \\\n \"Estimated slew: -7.9 -2.9 -0.400 -0.147 -0.387 -0.179\\n\" \\\n \"-------------------------------------------------------------------------------\\n\" \\\n \"Fine locate phase: Target flux in max checkbox (DN): 611\\n\" \\\n \"\\n\" \\\n \" global local\\n\" \\\n \" axis1 axis2 axis1 axis2\\n\" \\\n \"Target location: 534.1 516.1 48.1 51.1\\n\" \\\n \"Ref ap location: 537.5 516.5 19.5 16.5\\n\" \\\n \"\\n\" \\\n \" axis1 axis2 axis1 axis2 V2 V3\\n\" \\\n \" (pixels) (arcsec) (arcsec)\\n\" \\\n \"Estimated slew: -2.1 -0.4 -0.106 -0.020 -0.089 -0.061\\n\" \\\n \"-------------------------------------------------------------------------------\\n\" \\\n \"Total est. slew: -10.0 -3.3 -0.506 -0.168 -0.477 -0.239\\n\" \\\n \"-------------------------------------------------------------------------------\\n\" \\\n \"The fluxes in the maximum checkbox in the fine and coarse stages differ\\n\" \\\n \"by more than 25%. This may indicate a problem with your acquisition.\\n\" \\\n \"\\n\" \\\n \"===============================================================================\\n\"\n\n # Compare results\n outputs = [(\"ocu252cmq_raw.fits\", \"ocu252cmq_raw_ref.fits\")]\n self.compare_outputs(outputs)",
"def test_st_facets00501m16_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00501m/ST_facets00501m16.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00501m/ST_facets00501m16_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )",
"def test_st_facets00501m15_positive(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/SType/ST_facets/ST_facets00501m/ST_facets00501m15.xsd\",\n instance=\"sunData/SType/ST_facets/ST_facets00501m/ST_facets00501m15_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )"
]
| [
"0.60012674",
"0.5062758",
"0.4951055",
"0.4939356",
"0.49062017",
"0.48738027",
"0.48640075",
"0.48322517",
"0.48254272",
"0.48190284",
"0.48021835",
"0.47678328",
"0.47568431",
"0.47492415",
"0.4741626",
"0.4740513",
"0.47390306",
"0.4738941",
"0.47331664",
"0.47286186",
"0.4718354",
"0.4705684",
"0.4705473",
"0.46729243",
"0.46715498",
"0.46618342",
"0.46480402",
"0.46452633",
"0.46445113",
"0.46415555"
]
| 0.5843422 | 1 |
Driver is the specification for the CSI VxFlexOS Driver | def driver(self) -> 'outputs.CSIVXFlexOSSpecDriver':
return pulumi.get(self, "driver") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__, *,\n driver: 'outputs.CSIVXFlexOSSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver(self) -> 'outputs.CSIUnitySpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIUnitySpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n common: 'outputs.CSIVXFlexOSSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIVXFlexOSSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIVXFlexOSSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerStoreSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n driver: 'outputs.CSIPowerMaxSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def driver(self) -> 'outputs.CSIPowerMaxSpecDriver':\n return pulumi.get(self, \"driver\")",
"def disk_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileDiskCSIDriverArgs']]:\n return pulumi.get(self, \"disk_csi_driver\")",
"def __init__(__self__, *,\n common: 'outputs.CSIPowerMaxSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIPowerMaxSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIPowerMaxSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def driver(self) -> 'outputs.CSIIsilonSpecDriver':\n return pulumi.get(self, \"driver\")",
"def __init__(__self__, *,\n driver: 'outputs.CSIIsilonSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)",
"def file_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileFileCSIDriverArgs']]:\n return pulumi.get(self, \"file_csi_driver\")",
"def controller(self) -> Optional['outputs.CSIVXFlexOSSpecDriverController']:\n return pulumi.get(self, \"controller\")",
"def vga_session(self):\n gpu_driver = None\n if self.user['gpu_driver'] is True:\n\n # NVIDIA controller - append packages\n if 'nvidia' in self.user['vga_controller'].lower():\n\n if self.user['gpu_proprietary'] is True:\n hardvideo = self.packages['hardvideo'][3]\n\n if self.user['kernel'] == 'linux':\n gpu_driver = self.packages['gpu_driver'][3]\n\n elif self.user['kernel'] == 'linux-lts':\n gpu_driver = self.packages['gpu_driver'][4]\n\n else:\n gpu_driver = self.packages['gpu_driver'][5]\n\n else:\n gpu_driver = self.packages['gpu_driver'][2]\n hardvideo = self.packages['hardvideo'][2]\n\n # AMD Controller - append packages\n elif ('ATI' in self.user['vga_controller']) or \\\n ('AMD' in self.user['vga_controller']):\n\n gpu_driver = self.packages['gpu_driver'][1]\n hardvideo = self.packages['hardvideo'][1]\n\n # Intel controller - append packages\n elif 'intel' in self.user['vga_controller'].lower():\n gpu_driver = self.packages['gpu_driver'][0]\n hardvideo = self.packages['hardvideo'][0]\n\n # Unreconized controller - append packages\n else:\n gpu_driver = self.packages['gpu_driver'][6]\n hardvideo = self.packages['hardvideo'][4]\n\n # Set model with corresponding driver\n self.user['gpu'] = {'model': self.user['vga_controller'],\n 'driver': gpu_driver,\n 'hardvideo': self.user['hardvideo']}\n\n # Set hardware video acceleration\n if self.user['hardvideo'] is True:\n self.user['gpu']['hardvideo'] = hardvideo",
"def blob_csi_driver(self) -> Optional[pulumi.Input['ManagedClusterStorageProfileBlobCSIDriverArgs']]:\n return pulumi.get(self, \"blob_csi_driver\")",
"def __init__(__self__, *,\n common: 'outputs.CSIUnitySpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIUnitySpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIUnitySpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIUnitySpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIUnitySpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def driver(self):\n return '<static-vmedia>'",
"def driver(self):\n driver = c_int()\n ckresult(_dll.FMOD_System_GetDriver(self._ptr, byref(driver)))\n return driver.value",
"def __init__(__self__, *,\n common: 'outputs.CSIIsilonSpecDriverCommon',\n config_version: str,\n replicas: int,\n auth_secret: Optional[str] = None,\n controller: Optional['outputs.CSIIsilonSpecDriverController'] = None,\n force_update: Optional[bool] = None,\n node: Optional['outputs.CSIIsilonSpecDriverNode'] = None,\n side_cars: Optional[Sequence['outputs.CSIIsilonSpecDriverSideCars']] = None,\n snapshot_class: Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']] = None,\n storage_class: Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClass']] = None,\n tls_cert_secret: Optional[str] = None):\n pulumi.set(__self__, \"common\", common)\n pulumi.set(__self__, \"config_version\", config_version)\n pulumi.set(__self__, \"replicas\", replicas)\n if auth_secret is not None:\n pulumi.set(__self__, \"auth_secret\", auth_secret)\n if controller is not None:\n pulumi.set(__self__, \"controller\", controller)\n if force_update is not None:\n pulumi.set(__self__, \"force_update\", force_update)\n if node is not None:\n pulumi.set(__self__, \"node\", node)\n if side_cars is not None:\n pulumi.set(__self__, \"side_cars\", side_cars)\n if snapshot_class is not None:\n pulumi.set(__self__, \"snapshot_class\", snapshot_class)\n if storage_class is not None:\n pulumi.set(__self__, \"storage_class\", storage_class)\n if tls_cert_secret is not None:\n pulumi.set(__self__, \"tls_cert_secret\", tls_cert_secret)",
"def node(self) -> Optional['outputs.CSIVXFlexOSSpecDriverNode']:\n return pulumi.get(self, \"node\")",
"def register():\n global DRIVER\n if not DRIVER:\n DRIVER = NSXv3QosDriver.create(agent_rpc=None)\n LOG.debug('NSXv3QosDriver QoS driver registered')",
"def DRIVER():\n return \"podman\"",
"def common(self) -> 'outputs.CSIVXFlexOSSpecDriverCommon':\n return pulumi.get(self, \"common\")",
"def init_vesc_driver(port, ser, l):\n to_int = lambda x: int.from_bytes(x, byteorder='big')\n length = ser.read(to_int(l) - 1)\n packet = l + length + ser.read(to_int(length) + 3)\n\n msg, _ = pyvesc.decode(packet)\n log.debug(msg)\n if isinstance(msg, pyvesc.ReqSubscription):\n log.debug('Creating VESC driver')\n ser.close()\n driver = VESCDriver(port.device)\n if msg.subscription not in ['armShoulder', 'armElbow', 'armWristPitch']: # hacks for position request\n driver.start_reader(handle_vesc_message)\n sub = msg.subscription\n manager.storage.drivers[sub] = driver\n manager.storage.sub_map[port.device] = sub\n # subscribe\n @manager.on('*/'+sub)\n async def _write_to_device(event, data):\n if 'USBManager' in event:\n # So we don't listen to ourself TODO: use negation in globbing instead?\n return\n subscription = event.split('/')[-1]\n dvr = manager.storage.drivers[subscription]\n vesc_message = dict_to_vesc(data)\n await manager.loop.run_in_executor(\n manager.executor, dvr.write, vesc_message)\n\n # request handler\n @manager.on_request(msg.subscription)\n async def _request_from_vesc(vesc_msg):\n dvr = manager.storage.drivers[msg.subscription]\n vesc_msg = dict_to_vesc(vesc_msg)\n await manager.loop.run_in_executor(\n manager.executor, dvr.write_request, vesc_msg)\n resp = await manager.loop.run_in_executor(\n manager.executor, dvr.read)\n resp = vesc_to_dict(resp)\n return resp\n return True\n else:\n log.warning('Got bad subscription')\n return False",
"def init_VI():\n\n\tprint 'Setting VI'\n\tvi = UsbVehicleInterface(payload_format=\"json\")\n\n\treturn vi",
"def driverName(self):\n return Cbytestring2Python(\n self.Rf2Scor.mVehicles[self.__playersDriverNum()].mDriverName)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)",
"def __init__(__self__, *,\n driver: pulumi.Input[str]):\n pulumi.set(__self__, \"driver\", driver)",
"def __init__(__self__, *,\n args: Optional[Sequence[str]] = None,\n envs: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonEnvs']] = None,\n image: Optional[str] = None,\n image_pull_policy: Optional[str] = None,\n name: Optional[str] = None,\n node_selector: Optional[Mapping[str, str]] = None,\n tolerations: Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonTolerations']] = None):\n if args is not None:\n pulumi.set(__self__, \"args\", args)\n if envs is not None:\n pulumi.set(__self__, \"envs\", envs)\n if image is not None:\n pulumi.set(__self__, \"image\", image)\n if image_pull_policy is not None:\n pulumi.set(__self__, \"image_pull_policy\", image_pull_policy)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if node_selector is not None:\n pulumi.set(__self__, \"node_selector\", node_selector)\n if tolerations is not None:\n pulumi.set(__self__, \"tolerations\", tolerations)"
]
| [
"0.7499845",
"0.65094316",
"0.64233506",
"0.6400509",
"0.6386327",
"0.62878305",
"0.60758394",
"0.6042327",
"0.5886265",
"0.58776003",
"0.58217376",
"0.5741687",
"0.57011986",
"0.5674426",
"0.5655592",
"0.56050086",
"0.55793077",
"0.5455731",
"0.54092747",
"0.5309138",
"0.52798814",
"0.52721363",
"0.5270156",
"0.52473736",
"0.5221499",
"0.5193635",
"0.51580834",
"0.5156102",
"0.50447655",
"0.50281674"
]
| 0.7507063 | 0 |
SnapshotClass is the specification for Snapshot Classes | def snapshot_class(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSnapshotClass']]:
return pulumi.get(self, "snapshot_class") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def snapshot_class(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSnapshotClass']]:\n return pulumi.get(self, \"snapshot_class\")",
"def name(cls):\n return 'Snapshot'",
"def snapshot_type(self) -> str:\n return pulumi.get(self, \"snapshot_type\")",
"def test_snapshot(self):\n converter = InstructionToQobjConverter(PulseQobjInstruction, meas_level=2)\n instruction = Snapshot(name='label', snap_type='type')\n\n valid_qobj = PulseQobjInstruction(\n name='snapshot',\n t0=0,\n label='label',\n type='type'\n )\n\n self.assertEqual(converter(0, instruction), valid_qobj)",
"def test_class_hierarchy(self):\n\n assert issubclass(TIMESTAMP, sqltypes._Binary)\n assert issubclass(ROWVERSION, sqltypes._Binary)",
"def is_snapshot(self):\n return self.proto.display_type == DISPLAY_TYPE.Snapshot.value",
"def serialize_snapshot(self, snapshot, fields=None, version=None):\n fields = fields or self.snapshot_fields\n version = version or self.snapshot_version\n serialized_snapshot = serializers.serialize(\n 'python', [snapshot], fields=fields\n )[0]\n serialized_snapshot['version'] = version\n serialized_snapshot['extra_fields'] = {}\n return serialized_snapshot",
"def snapshot(self) -> Snapshot:\n snapshot = self.open(Snapshot.type).signed\n if not isinstance(snapshot, Snapshot):\n raise RuntimeError(\"Unexpected snapshot type\")\n return snapshot",
"def resource_type(self):\n return 'volume-snapshot'",
"def snapshot(self):\n pass",
"def _get_revision_class(self):\n return perf_revision_state.PerfRevisionState",
"def snapshot(self, snapshot_id):\r\n return self.connection.create_dbsnapshot(snapshot_id, self.id)",
"def test_snapshot(self):\n cmd = Snapshot(name='label', snap_type='type')\n instruction = cmd << 10\n\n qobj = PulseQobjInstruction(name='snapshot', t0=10, label='label', type='type')\n converted_instruction = self.converter(qobj)\n\n self.assertEqual(converted_instruction.timeslots, instruction.timeslots)\n self.assertEqual(converted_instruction.instructions[0][-1], cmd)",
"def snapshot_info(self) -> MetaFile:\n raise NotImplementedError",
"def __init__(self):\n self.id = None\n self.typeInfo['id'] = 'string'\n \"\"\"the account associated with the disk volume\"\"\"\n self.account = None\n self.typeInfo['account'] = 'string'\n \"\"\"the create date of the vm snapshot\"\"\"\n self.created = None\n self.typeInfo['created'] = 'date'\n \"\"\"indiates if this is current snapshot\"\"\"\n self.current = None\n self.typeInfo['current'] = 'boolean'\n \"\"\"the description of the vm snapshot\"\"\"\n self.description = None\n self.typeInfo['description'] = 'string'\n \"\"\"the display name of the vm snapshot\"\"\"\n self.displayname = None\n self.typeInfo['displayname'] = 'string'\n \"\"\"the domain associated with the disk volume\"\"\"\n self.domain = None\n self.typeInfo['domain'] = 'string'\n \"\"\"the ID of the domain associated with the disk volume\"\"\"\n self.domainid = None\n self.typeInfo['domainid'] = 'string'\n \"\"\"the name of the vm snapshot\"\"\"\n self.name = None\n self.typeInfo['name'] = 'string'\n \"\"\"the parent ID of the vm snapshot\"\"\"\n self.parent = None\n self.typeInfo['parent'] = 'string'\n \"\"\"the parent displayName of the vm snapshot\"\"\"\n self.parentName = None\n self.typeInfo['parentName'] = 'string'\n \"\"\"the project name of the vpn\"\"\"\n self.project = None\n self.typeInfo['project'] = 'string'\n \"\"\"the project id of the vpn\"\"\"\n self.projectid = None\n self.typeInfo['projectid'] = 'string'\n \"\"\"the state of the vm snapshot\"\"\"\n self.state = None\n self.typeInfo['state'] = 'state'\n \"\"\"VM Snapshot type\"\"\"\n self.type = None\n self.typeInfo['type'] = 'string'\n \"\"\"the vm ID of the vm snapshot\"\"\"\n self.virtualmachineid = None\n self.typeInfo['virtualmachineid'] = 'string'\n \"\"\"the Zone ID of the vm snapshot\"\"\"\n self.zoneid = None\n self.typeInfo['zoneid'] = 'string'",
"def get_snapshot_object(session, key, snapshot=None):\n # type: (Session, Text, Optional[Text]) -> Any\n url_tail = \"/{}/{}/{}/{}/{}\".format(\n CoordConstsV2.RSC_NETWORKS,\n session.network,\n CoordConstsV2.RSC_SNAPSHOTS,\n session.get_snapshot(snapshot),\n CoordConstsV2.RSC_OBJECTS,\n )\n return _get_stream(session, url_tail, {CoordConstsV2.QP_KEY: key})",
"def snapshot_gen(self):\n \n # Generate snapshot\n snapshot, snapshot_param = make_snapshot.snapshot_gen(self._parent)\n # Save to ICobj\n self._parent.snapshot = snapshot\n self._parent.snapshot_param = snapshot_param",
"def snapshot(self, snapshot):\n self._context[\"snapshot\"] = snapshot",
"def create_snapshot(self, snap_description=None):\n raise NotImplementedError()",
"def test_class_name(self):\n r = Review()\n r_dictionary = r.to_dict()\n self.assertIn('__class__', r_dictionary)",
"def create_snapshot(store, dataset, snapshot, description_fields, snapshot_changes):\n validate_snapshot_name(store, dataset, snapshot)\n validate_datalad_config(store, dataset)\n update_description(store, dataset, description_fields)\n update_changes(store, dataset, snapshot, snapshot_changes)\n save_snapshot(store, dataset, snapshot)\n return get_snapshot(store, dataset, snapshot)",
"def snapshot(snapshot_type, result_q, time_delta):",
"def snapshot_identification(snapshot):\n\t\treturn {\n\t\t\t'user_id': snapshot['user_id'],\n\t\t\t'timestamp': snapshot['timestamp'],\n\t\t\t'snapshot_id': snapshot['snapshot_id']}",
"def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.gain = self.snapshot[0]\r\n self.block = self.snapshot[1]\r\n self.locked = self.snapshot[2]\r\n self.bucket_num = self.snapshot[3]",
"def record_class_examined(self, cls):\n serialized = self.serialize_type(cls)\n if serialized is not None:\n self.classes_examined.add(serialized)",
"def deserialize_snapshot(self, serialized_snapshot):\n snapshot = list(serializers.deserialize(\n 'python', [serialized_snapshot]\n ))[0].object\n snapshot.__version__ = serialized_snapshot['version']\n snapshot.__extra_fields__ = serialized_snapshot['extra_fields']\n # override extra fields\n for name, value in serialized_snapshot['extra_fields'].items():\n if value:\n if isinstance(value, dict):\n value = self.deserialize_snapshot(value)\n setattr(snapshot, name, value)\n return snapshot",
"def restore_from_snapshot(self, snapshot_state: Mapping[str, jnp.ndarray]):\n def clear(attributes):\n for attr_name in attributes:\n if hasattr(self, attr_name):\n delattr(self, attr_name)\n\n def write(attributes, broadcast=False):\n for attr_name, chk_name in attributes.items():\n value = snapshot_state[chk_name]\n if broadcast:\n value = utils.bcast_local_devices(value)\n setattr(self, attr_name, value)\n\n # Explicitly clear existing attributes first, this (potentially) allows\n # broadcast values to reuse previous allocations leading to reduced\n # fragmentation of device memory.\n clear(self.CHECKPOINT_ATTRS)\n clear(self.NON_BROADCAST_CHECKPOINT_ATTRS)\n write(self.CHECKPOINT_ATTRS, broadcast=True)\n write(self.NON_BROADCAST_CHECKPOINT_ATTRS)"
]
| [
"0.73012376",
"0.72760487",
"0.7070211",
"0.7056704",
"0.669828",
"0.65507406",
"0.5934098",
"0.59296554",
"0.5928252",
"0.58405566",
"0.5718811",
"0.5634103",
"0.56048465",
"0.5574448",
"0.55006576",
"0.5482949",
"0.53952694",
"0.5382935",
"0.5382102",
"0.5359684",
"0.53471845",
"0.53255814",
"0.53096217",
"0.5309208",
"0.526724",
"0.5254382",
"0.525098",
"0.5246751",
"0.5215199",
"0.5195794"
]
| 0.7336533 | 0 |
Tolerations is the list of tolerations for the driver pods | def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerTolerations']]:
return pulumi.get(self, "tolerations") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def toleration(self) -> Dict[str, str]:\n return self._toleration",
"def toleration(self, toleration: Dict[str, str]):\n\n self._toleration = toleration",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")"
]
| [
"0.7678924",
"0.7636232",
"0.7588812",
"0.7569768",
"0.75446975",
"0.7522246",
"0.75063044",
"0.74759024",
"0.7431494",
"0.7366254",
"0.73646927",
"0.73607546",
"0.73571926",
"0.7320017",
"0.7289272",
"0.7288363",
"0.7259748",
"0.7181028",
"0.71584857",
"0.67214155",
"0.66434145",
"0.5552369",
"0.5552369",
"0.5552369",
"0.5552369",
"0.5552369",
"0.5552369",
"0.5552369",
"0.5552369",
"0.5552369"
]
| 0.77393144 | 0 |
Tolerations is the list of tolerations for the driver pods | def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverNodeTolerations']]:
return pulumi.get(self, "tolerations") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverControllerTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverNodeTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverSideCarsTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def tolerations(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverCommonTolerations']]:\n return pulumi.get(self, \"tolerations\")",
"def toleration(self) -> Dict[str, str]:\n return self._toleration",
"def toleration(self, toleration: Dict[str, str]):\n\n self._toleration = toleration",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")",
"def toleration_seconds(self) -> Optional[int]:\n return pulumi.get(self, \"toleration_seconds\")"
]
| [
"0.77384454",
"0.7635416",
"0.7587556",
"0.7568399",
"0.7544048",
"0.7521143",
"0.75056195",
"0.7474809",
"0.7430536",
"0.73656464",
"0.7363599",
"0.735969",
"0.7356401",
"0.73185813",
"0.7287663",
"0.7287038",
"0.7258097",
"0.7179613",
"0.7157681",
"0.67209274",
"0.6642811",
"0.5551738",
"0.5551738",
"0.5551738",
"0.5551738",
"0.5551738",
"0.5551738",
"0.5551738",
"0.5551738",
"0.5551738"
]
| 0.76782155 | 1 |
AllowVolumeExpansion is a boolean flag which indicates if volumes can be expanded | def allow_volume_expansion(self) -> Optional[bool]:
return pulumi.get(self, "allow_volume_expansion") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def expand_volume(self, vol, new_size):\n self.authenticate_user()\n volume_name = self._get_vipr_volume_name(vol)\n size_in_bytes = vipr_utils.to_bytes(str(new_size) + \"G\")\n\n try:\n self.volume_obj.expand(\n self.configuration.vipr_tenant +\n \"/\" +\n self.configuration.vipr_project +\n \"/\" +\n volume_name,\n size_in_bytes,\n True)\n except vipr_utils.SOSError as e:\n if e.err_code == vipr_utils.SOSError.SOS_FAILURE_ERR:\n raise vipr_utils.SOSError(\n vipr_utils.SOSError.SOS_FAILURE_ERR,\n \"Volume \" + volume_name + \": expand failed\\n\" + e.err_text)\n else:\n with excutils.save_and_reraise_exception():\n LOG.exception(_(\"Volume : %s expand failed\") % volume_name)",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumeArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def expand_volume_helper(self, vol, size_in_gb, existing_vol_size):\n vol_id = vol['volumeId']\n try:\n if size_in_gb < existing_vol_size:\n self.show_error_exit(msg='Current volume size {0} GB is '\n 'greater than {1} GB specified.'.\n format(existing_vol_size, size_in_gb))\n elif size_in_gb > existing_vol_size:\n if 'rdfGroupId' in vol:\n array_id = self.module.params['serial_no']\n array_details = self.common.get_array(array_id=array_id)\n if utils.parse_version(array_details['ucode'])\\\n < utils.parse_version(self.foxtail_version):\n msg = (\"Expansion of SRDF protected volume is\"\n \" supported from v5978.444.444 onward. Please\"\n \" upgrade the array for this support.\")\n self.show_error_exit(msg=msg)\n return self.srdf_volume_expansion(vol, size_in_gb,\n existing_vol_size)\n return self.expand_volume(vol_id, size_in_gb,\n existing_vol_size)\n\n LOG.info('Current volume size and specified volume size'\n ' are equal')\n return False\n except Exception as e:\n error_message = 'Expand volume %s failed with error: %s' \\\n % (vol_id, str(e))\n self.show_error_exit(msg=error_message)",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumePatchArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def assign_volume_letters():\n remove_volume_letters()\n\n # Write script\n script = []\n for vol in get_volumes():\n script.append('select volume {}'.format(vol['Number']))\n script.append('assign')\n\n # Run\n run_diskpart(script)",
"def test_volume_extend(self, volume, volumes_steps):\n volumes_steps.extend_volume(volume.name)",
"async def expand(self, job, id, options):\n pool = await self.middleware.call('pool.get_instance', id)\n if osc.IS_LINUX:\n if options.get('passphrase'):\n raise CallError('Passphrase should not be supplied for this platform.')\n # FIXME: We have issues in ZoL where when pool is created with partition uuids, we are unable\n # to expand pool where all pool related options error out saying I/O error\n # https://github.com/zfsonlinux/zfs/issues/9830\n raise CallError('Expand is not supported on this platform yet because of underlying ZFS issues.')\n else:\n if pool['encrypt']:\n if not pool['is_decrypted']:\n raise CallError('You can only expand decrypted pool')\n\n for error in (\n await self.middleware.call('pool.pool_lock_pre_check', pool, options['geli']['passphrase'])\n ).errors:\n raise CallError(error.errmsg)\n\n all_partitions = {p['name']: p for p in await self.middleware.call('disk.list_all_partitions')}\n\n try:\n if osc.IS_FREEBSD:\n sysctl.filter('kern.geom.debugflags')[0].value = 16\n geli_resize = []\n try:\n for vdev in sum(pool['topology'].values(), []):\n if vdev['type'] != 'DISK':\n logger.debug('Not expanding vdev of type %r', vdev['type'])\n continue\n\n if vdev['status'] != 'ONLINE':\n logger.debug('Not expanding vdev that is %r', vdev['status'])\n continue\n\n part_data = all_partitions.get(vdev['device'])\n if not part_data:\n logger.debug('Unable to find partition data for %s', vdev['device'])\n\n partition_number = part_data['partition_number']\n if not partition_number:\n logger.debug('Could not parse partition number from %r', vdev['device'])\n continue\n\n assert part_data['disk'] == vdev['disk']\n\n if osc.IS_LINUX:\n await run(\n 'sgdisk', '-d', str(partition_number), '-n', f'{partition_number}:0:0',\n '-c', '2:', '-u', f'{partition_number}:{part_data[\"partition_uuid\"]}',\n '-t', f'{partition_number}:BF01', part_data['path']\n )\n await run('partprobe', os.path.join('/dev', part_data['disk']))\n else:\n await run('camcontrol', 'reprobe', vdev['disk'])\n await run('gpart', 'recover', vdev['disk'])\n await run('gpart', 'resize', '-i', str(partition_number), vdev['disk'])\n\n if osc.IS_FREEBSD and pool['encrypt']:\n geli_resize_cmd = (\n 'geli', 'resize', '-s', str(part_data['size']), vdev['device']\n )\n rollback_cmd = (\n 'gpart', 'resize', '-i', str(partition_number), '-s', str(part_data['size']), vdev['disk']\n )\n\n logger.warning('It will be obligatory to notify GELI that the provider has been resized: %r',\n join_commandline(geli_resize_cmd))\n logger.warning('Or to resize provider back: %r',\n join_commandline(rollback_cmd))\n geli_resize.append((geli_resize_cmd, rollback_cmd))\n finally:\n if osc.IS_FREEBSD and geli_resize:\n await self.__geli_resize(pool, geli_resize, options)\n finally:\n if osc.IS_FREEBSD:\n sysctl.filter('kern.geom.debugflags')[0].value = 0\n\n for vdev in sum(pool['topology'].values(), []):\n if vdev['type'] != 'DISK' or vdev['status'] != 'ONLINE':\n continue\n\n await self.middleware.call('zfs.pool.online', pool['name'], vdev['guid'], True)",
"def test_extend_volume_noextend(self):\n ctxt = context.get_admin_context()\n extra_specs = {}\n type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)\n volume = {'id': '1', 'name': 'volume1',\n 'display_name': '',\n 'volume_type_id': type_ref['id'],\n 'size': 10,\n 'provider_id': 'volume10'}\n self.extended = {'name': '', 'size': '0',\n 'storageserver': ''}\n self.driver.extend_volume(volume, 10)\n expected = {'name': '', 'size': '0',\n 'storageserver': ''}\n self.assertDictMatch(expected, self.extended)",
"def enable_block_storage_management(self):\n self._request({\"enable-block-storage-management\": True})",
"def test_extend_volume(self):\n ctxt = context.get_admin_context()\n extra_specs = {}\n type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)\n volume = {'id': '1', 'name': 'volume1',\n 'display_name': '',\n 'volume_type_id': type_ref['id'],\n 'size': 10,\n 'provider_id': 'volume10'}\n self.extended = {'name': '', 'size': '0',\n 'storageserver': ''}\n self.driver.extend_volume(volume, 12)\n expected = {'name': 'volume10', 'size': '2',\n 'storageserver': 'stor1:gbd0,stor2:gbd0,'}\n self.assertDictMatch(expected, self.extended)",
"def allowed_volumes(context, requested_volumes, size):\n project_id = context.project_id\n context = context.elevated()\n size = int(size)\n requested_gigabytes = requested_volumes * size\n used_volumes, used_gigabytes = db.volume_data_get_for_project(context,\n project_id)\n quota = get_project_quotas(context, project_id)\n allowed_volumes = _get_request_allotment(requested_volumes, used_volumes,\n quota['volumes'])\n allowed_gigabytes = _get_request_allotment(requested_gigabytes,\n used_gigabytes,\n quota['gigabytes'])\n allowed_volumes = min(allowed_volumes,\n int(allowed_gigabytes // size))\n return min(requested_volumes, allowed_volumes)",
"def set_volume_options(cd):\n\n try:\n vol_name = cd[\"vol_name\"]\n auth_allow = cd[\"auth_allow\"]\n auth_reject = cd[\"auth_reject\"]\n if \"nfs_disable\" in cd:\n nfs_disable = cd[\"nfs_disable\"]\n else:\n nfs_disable = False\n if \"enable_worm\" in cd:\n enable_worm = cd[\"enable_worm\"]\n else:\n enable_worm = False\n readonly = cd[\"readonly\"]\n nfs_volume_access = cd[\"nfs_volume_access\"]\n\n vol_info_dict, err = get_basic_volume_info(vol_name)\n if err:\n raise Exception(err)\n\n # set defaults first\n _auth_allow = \"*\"\n _auth_reject = \"NONE\"\n _readonly = \"off\"\n _nfs_disable = False\n _enable_worm = False\n _nfs_volume_access = \"read-write\"\n\n if \"options\" in vol_info_dict:\n for option in vol_info_dict[\"options\"]:\n if option[\"name\"] == \"auth.allow\":\n _auth_allow = option[\"value\"]\n if option[\"name\"] == \"auth.reject\":\n _auth_reject = option[\"value\"]\n if option[\"name\"] == \"nfs.disable\":\n if option[\"value\"].lower() == \"off\":\n _nfs_disable = False\n else:\n _nfs_disable = True\n if option[\"name\"] == \"nfs.volume-access\":\n _nfs_volume_access = option[\"value\"]\n if option[\"name\"] == \"features.read-only\":\n _readonly = option[\"value\"]\n if option[\"name\"] == \"features.worm\":\n if option[\"value\"].lower() == \"enable\":\n _enable_worm = True\n else:\n _enable_worm = False\n\n # Now, for each option that has changed, set the parameter\n ret_list = []\n\n if _auth_allow != auth_allow:\n d, err = _set_volume_option(vol_name, \"auth.allow\", auth_allow)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting option for permitted access IP addresses for %s to \\'%s\\'\" % (\n vol_name, auth_allow)\n ret_list.append(d)\n\n if _auth_reject != auth_reject:\n d, err = _set_volume_option(vol_name, \"auth.reject\", auth_reject)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting option for denied access IP addresses for %s to \\'%s\\'\" % (\n vol_name, auth_reject)\n ret_list.append(d)\n\n if _readonly != readonly:\n d, err = _set_volume_option(\n vol_name, \"features.read-only\", readonly)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting readonly mount access(for all access methods) for %s to \\'%s\\'\" % (\n vol_name, readonly)\n ret_list.append(d)\n\n if readonly == \"off\":\n\n # All the rest applies only if volume access is read-write\n if _nfs_disable != nfs_disable:\n if nfs_disable:\n p = \"on\"\n else:\n p = \"off\"\n d, err = _set_volume_option(vol_name, \"nfs.disable\", p)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting NFS disable for %s to \\'%s\\'\" % (\n vol_name, p)\n ret_list.append(d)\n\n if not nfs_disable:\n # print \"in\"\n if nfs_volume_access and _nfs_volume_access != nfs_volume_access:\n d, err = _set_volume_option(\n vol_name, \"nfs.volume-access\", nfs_volume_access)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting NFS access type for %s to \\'%s\\'\" % (\n vol_name, nfs_volume_access)\n ret_list.append(d)\n\n if _enable_worm != enable_worm:\n if enable_worm:\n p = \"enable\"\n else:\n p = \"disable\"\n d, err = _set_volume_option(vol_name, \"features.worm\", p)\n if err:\n raise Exception(err)\n d['audit_str'] = \"Setting feature WORM for %s to \\'%s\\'\" % (\n vol_name, p)\n ret_list.append(d)\n\n except Exception, e:\n return None, 'Error setting volume options: %s' % str(e)\n else:\n return ret_list, None",
"def filterExpand(*args, expand: bool=True, fullPath: bool=False, selectionMask: Union[int,\n List[int]]=0, symActive: bool=True, symNegative: bool=True, symPositive:\n bool=True, symSeam: bool=True, **kwargs)->List[AnyStr]:\n pass",
"def test_ocs_upgrade_with_allowexpansion_false(\n self, project_factory, storageclass_factory, multi_pvc_factory\n ):\n size_list = [\"1\", \"3\", \"5\"]\n\n access_modes_cephfs = [constants.ACCESS_MODE_RWO, constants.ACCESS_MODE_RWX]\n access_modes_rbd = [\n f\"{constants.ACCESS_MODE_RWO}-Block\",\n f\"{constants.ACCESS_MODE_RWX}-Block\",\n ]\n\n # Create custom storage class\n\n custom_cephfs_sc = storageclass_factory(\n interface=constants.CEPHFILESYSTEM, allow_volume_expansion=False\n )\n custom_rbd_sc = storageclass_factory(\n interface=constants.CEPHBLOCKPOOL, allow_volume_expansion=False\n )\n\n # Appending all the pvc obj to base case param for cleanup and evaluation\n self.all_sc_obj.append(custom_cephfs_sc)\n self.all_sc_obj.append(custom_rbd_sc)\n\n log.info(\"Create pvcs for custom sc as well as for default sc\")\n project_obj = project_factory()\n for size in size_list:\n rbd_pvcs = multi_pvc_factory(\n interface=constants.CEPHBLOCKPOOL,\n access_modes=access_modes_rbd,\n project=project_obj,\n size=size,\n num_of_pvc=2,\n )\n log.info(f\"rbd_pvc created for size {size}\")\n assert rbd_pvcs, f\"Failed to create rbd_pvcs of size {size}\"\n\n cephfs_pvcs = multi_pvc_factory(\n interface=constants.CEPHFILESYSTEM,\n project=project_obj,\n access_modes=access_modes_cephfs,\n size=size,\n num_of_pvc=2,\n )\n assert cephfs_pvcs, \"Failed to create cephfs_pvcs PVC\"\n\n custom_rbd_pvcs = multi_pvc_factory(\n interface=constants.CEPHBLOCKPOOL,\n project=project_obj,\n access_modes=access_modes_rbd,\n storageclass=custom_rbd_sc,\n size=size,\n num_of_pvc=2,\n )\n assert custom_rbd_pvcs, \"Failed to create custom_rbd_pvcs PVC\"\n\n custom_cephfs_pvcs = multi_pvc_factory(\n interface=constants.CEPHFILESYSTEM,\n project=project_obj,\n access_modes=access_modes_cephfs,\n storageclass=custom_cephfs_sc,\n size=size,\n num_of_pvc=2,\n )\n assert custom_cephfs_pvcs, \"Failed to create custom_cephfs_pvcs PVC\"",
"def extend_volume(self, volume, new_size):\n if isinstance(new_size, dict):\n new_size = random.randint(new_size[\"min\"], new_size[\"max\"])\n\n aname = \"cinder_v%s.extend_volume\" % self.version\n with atomic.ActionTimer(self, aname):\n self._get_client().volumes.extend(volume, new_size)\n return self._wait_available_volume(volume)",
"def booted_from_volume(volumes_list):\n if any('/dev/vda' in volume['attachments'] for volume in\n volumes_list):\n return True\n return False",
"def setup_volumes():\n\n fix_path()\n\n # Get what we want from the ZK tree\n logical_volumes = {}\n expected_sdvols = set()\n f = open('/etc/zim/volumes')\n for line in f:\n line = line.strip()\n if not line:\n continue\n sdvols = line.split()\n mount_point = sdvols.pop(0)\n if len(sdvols) == 1:\n dev = sdvols[0]\n if dev[0] == '/':\n ln(mount_point, dev)\n else:\n single(mount_point, '/dev/'+dev)\n continue\n\n if len(sdvols) < 1:\n raise ValueError(line)\n\n if lvname(sdvols[0]):\n lvm(mount_point, sdvols)\n continue\n\n\n # RAID10:\n assert len(set(sdvol[:3] for sdvol in sdvols)) == 1, (\n \"Multiple device prefixes\")\n sdprefix = sdvols[0][:3]\n logical_volumes[sdprefix] = LogicalVolume(\n sdprefix, sdvols, mount_point)\n expected_sdvols.update(sdvols)\n\n if logical_volumes:\n\n # Wait for all of our expected sd volumes to appear. (They may be\n # attaching.)\n for v in expected_sdvols:\n wait_for_device('/dev/' + v)\n\n # The volumes may have been set up before on a previous machine.\n # Scan for them:\n s('mdadm --examine --scan >>/etc/mdadm.conf')\n f = open('/etc/mdadm.conf')\n if f.read().strip():\n s('mdadm -A --scan')\n f.close()\n\n # Read /proc/mdstat to find out about existing raid volumes:\n mdstat = re.compile(r'md(\\w+) : (\\w+) (\\w+) (.+)$').match\n mdstatsd = re.compile(r'(sd(\\w+))\\[\\d+\\](\\(F\\))?$').match\n for line in open('/proc/mdstat'):\n if not line.strip():\n continue\n m = mdstat(line)\n if not m:\n assert (line.startswith('Personalities') or\n line.startswith(' ') or\n line.startswith('unused devices')), (\n \"unexpected line\", line\n )\n continue\n mdnum, status, rtype, data = m.group(1, 2, 3, 4)\n data = [mdstatsd(d).groups() for d in data.strip().split()]\n\n assert not [d for d in data if d[2]], (\n \"Failed volume\", line\n )\n\n data = [d[0] for d in data]\n if not [d for d in data if d in expected_sdvols]:\n # Hm, not one weore interested in.\n print 'skipping', line\n continue\n\n assert not [d for d in data if d not in expected_sdvols], (\n \"Unexpected volume\", data\n )\n\n assert status == 'active', status\n assert rtype == 'raid10', rtype\n\n logical_volumes[data[0][:3]].add_md(mdnum, data)\n\n # Scan for logical volumes:\n lv_pat = re.compile('Found volume group \"vg_(sd\\w+)\"').search\n for line in p('vgscan'):\n m = lv_pat(line)\n if not m:\n continue\n name = m.group(1)\n if name in logical_volumes:\n logical_volumes[name].has_logical_volume()\n\n # Record the physical volums in each logical_volume so we can see\n # if any are missing:\n PV = re.compile(\"PV /dev/md(\\w+) +VG vg_(sd\\w+) \").search\n for line in p(\"pvscan\"):\n m = PV(line)\n if not m:\n continue\n mdnum, vgname = m.groups()\n logical_volumes[vgname].pvs.add(mdnum)\n\n # Finally, create any missing raid volumes and logical volumes\n for lv in logical_volumes.values():\n lv.setup()\n\n os.rename('/etc/zim/volumes', '/etc/zim/volumes-setup')",
"def _attach_volume(self):\n return []",
"def extend_volume(self, volume, new_size):\n spdk_name = self._get_spdk_volume_name(volume.name)\n params = {'name': spdk_name, 'size': new_size * units.Gi}\n self._rpc_call('bdev_lvol_resize', params)",
"def build_expand_volume_command(vol_info_dict, si):\n\n return_dict = None\n try:\n # First get all the node/disk combinations where the volume is not\n # present\n anl = []\n num_nodes = 0\n\n ondisk_storage = \"normal\"\n if \"compressed\" in vol_info_dict['bricks'][0]:\n ondisk_storage = \"compressed\"\n elif \"deduplicated\" in vol_info_dict['bricks'][0]:\n ondisk_storage = \"deduplicated\"\n\n anl, err = _get_allowable_node_list(si, vol_info_dict['name'])\n if err:\n raise Exception(err)\n\n cmd = 'gluster volume add-brick %s ' % vol_info_dict[\"name\"]\n\n repl_count = 0\n\n if 'replicate' in vol_info_dict['type'].lower():\n vol_type = \"replicated\"\n repl_count = int(vol_info_dict[\"replica_count\"])\n else:\n vol_type = \"distributed\"\n\n return_dict, err = build_create_or_expand_volume_command(\n cmd, si, anl, vol_type, ondisk_storage, repl_count, vol_info_dict[\"name\"])\n if err:\n raise Exception(err)\n\n if \"cmd\" in return_dict:\n return_dict[\"cmd\"] = return_dict[\"cmd\"] + \" force --xml\"\n except Exception, e:\n return None, 'Error building expand volume command: %s' % str(e)\n else:\n return return_dict, None",
"def extend_volume(self, volume, new_size):\n LOG.info('Extending volume: %(id)s New size: %(size)s GB',\n {'id': volume['id'], 'size': new_size})\n nfs_share = volume['provider_location']\n nms = self.share2nms[nfs_share]\n volume_path = self.remote_path(volume)\n if getattr(self.configuration,\n self.driver_prefix + '_sparsed_volumes'):\n self._create_sparsed_file(nms, volume_path, new_size)\n else:\n block_size_mb = 1\n block_count = ((new_size - volume['size']) * units.Gi /\n (block_size_mb * units.Mi))\n\n nms.appliance.execute(\n 'dd if=/dev/zero seek=%(seek)d of=%(path)s'\n ' bs=%(bs)dM count=%(count)d' % {\n 'seek': volume['size'] * units.Gi / block_size_mb,\n 'path': volume_path,\n 'bs': block_size_mb,\n 'count': block_count\n }\n )",
"def list_volumes(self):\n print '# Listing existing volumes'\n self.compute.list_volumes()",
"def attach_volume(self, instance_name, device_path, mountpoint):\n return True",
"def test_volumes_complex(self):\n with open(\".scuba.yml\", \"w\") as f:\n f.write(\n r\"\"\"\n image: na\n volumes:\n /foo: /host/foo\n /bar:\n hostpath: /host/bar\n /snap:\n hostpath: /host/snap\n options: z,ro\n \"\"\"\n )\n\n config = scuba.config.load_config(\".scuba.yml\")\n vols = config.volumes\n assert len(vols) == 3\n\n v = vols[\"/foo\"]\n assert isinstance(v, scuba.config.ScubaVolume)\n assert v.container_path == \"/foo\"\n assert v.host_path == \"/host/foo\"\n assert v.options == []\n\n v = vols[\"/bar\"]\n assert isinstance(v, scuba.config.ScubaVolume)\n assert v.container_path == \"/bar\"\n assert v.host_path == \"/host/bar\"\n assert v.options == []\n\n v = vols[\"/snap\"]\n assert isinstance(v, scuba.config.ScubaVolume)\n assert v.container_path == \"/snap\"\n assert v.host_path == \"/host/snap\"\n assert v.options == [\"z\", \"ro\"]",
"def attach_volume(self):\n\n # Choose volume\n volume_id = self._choose_among_available_volumes()\n\n # Cancel\n if not volume_id:\n print 'Operation cancelled'\n return\n\n # Choose instance\n instance_id = self._choose_among_running_instances()\n\n # Cancel\n if not instance_id:\n print 'Operation cancelled'\n return\n\n # Attach the volume\n print '# Attaching volume \"%s\"!' % volume_id\n if self.compute.attach_volume(volume_id, instance_id):\n print 'The volume has been attached!'\n else:\n print 'The volume could not been attached'",
"def set_volume(cls, newVolume: float) -> bool:\n raise NotImplementedError",
"def guest_grow_root_volume(self, userid, os_version):\n LOG.debug('Begin to punch grow partition commands to guest: %s',\n userid)\n linuxdist = self._dist_manager.get_linux_dist(os_version)()\n # get configuration commands\n config_cmds = linuxdist.get_extend_partition_cmds()\n # Creating tmp file with these cmds\n temp_folder = self._pathutils.get_guest_temp_path(userid)\n file_path = os.path.join(temp_folder, 'gpartvol.sh')\n LOG.debug('Creating file %s to contain root partition extension '\n 'commands' % file_path)\n with open(file_path, \"w\") as f:\n f.write(config_cmds)\n try:\n self._smtclient.punch_file(userid, file_path, \"X\")\n finally:\n LOG.debug('Removing the folder %s ', temp_folder)\n shutil.rmtree(temp_folder)",
"def check_volume(obj, char, quiet=False):\n vol = obj.item_data.size\n if vol is None:\n raise ValueError(f\"Object {obj} has an undefined size\")\n v_max = char.item_data.capacity\n if char.used_capacity + vol > v_max:\n if not quiet:\n char.msg(\"You can't carry %s.\" % obj)\n return False\n return True",
"def bootable_volume(volumes):\n for volume in volumes:\n if '/dev/vda' in volume['attachments']:\n return volume",
"def setVolume(self, *args):\n return _libsbml.Compartment_setVolume(self, *args)"
]
| [
"0.5748133",
"0.5436244",
"0.54004693",
"0.53974086",
"0.53028864",
"0.52384055",
"0.52220774",
"0.5203464",
"0.51631445",
"0.5153655",
"0.5122452",
"0.5096057",
"0.5078894",
"0.50637805",
"0.50124866",
"0.4956496",
"0.4913379",
"0.49104056",
"0.49088234",
"0.49020234",
"0.4884341",
"0.48699778",
"0.48553252",
"0.48437163",
"0.4768274",
"0.47604394",
"0.47520465",
"0.4720464",
"0.47136366",
"0.47091195"
]
| 0.8099426 | 1 |
Restrict the node topologies where volumes can be dynamically provisioned. | def allowed_topologies(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverStorageClassAllowedTopologies']]:
return pulumi.get(self, "allowed_topologies") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topologies(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverStorageClassAllowedTopologies']]:\n return pulumi.get(self, \"allowed_topologies\")",
"def allowed_topology_access_create(user):\n return user.has_perm(\"vnswww.add_topology\")",
"def _get_allowable_node_list(si, vol_name=None):\n anl = []\n try:\n for hostname in si.keys():\n # Volumes can only be placed on nodes that are ok and are part of\n # the storage pool\n if si[hostname][\"node_status\"] != 0 or si[hostname][\"in_cluster\"] == False:\n continue\n if vol_name and (vol_name in si[hostname][\"volume_list\"]):\n continue\n anl.append(hostname)\n except Exception, e:\n return None, 'Error building allowable node list: %s' % str(e)\n else:\n return anl, None",
"def get_allowed_topologies(user):\n try:\n up = user.get_profile()\n except AttributeError:\n return db.Topology.objects.none()\n\n if user.has_perm(\"vnswww.topology_use_any\"):\n # We can view and use any templates\n topos = db.Topology.objects.filter()\n else:\n q_own = Q(owner=user)\n q_permitted = Q(allowed_users=user)\n q_org = Q(org=user.get_profile().org)\n q_public = Q(public=True)\n if user.has_perm(\"vnswww.topology_use_org\"):\n print \"Allowed all topos in own org\"\n # We can view and use any from the user's organization\n topos = db.Topology.objects.filter(q_permitted | q_org | q_own)\n else:\n print \"NOT allowed all topos in own org\"\n # We can view any from our own organization which are protected\n topos = db.Topology.objects.filter(q_permitted | q_own)\n\n return topos",
"def allowed_volumes(context, requested_volumes, size):\n project_id = context.project_id\n context = context.elevated()\n size = int(size)\n requested_gigabytes = requested_volumes * size\n used_volumes, used_gigabytes = db.volume_data_get_for_project(context,\n project_id)\n quota = get_project_quotas(context, project_id)\n allowed_volumes = _get_request_allotment(requested_volumes, used_volumes,\n quota['volumes'])\n allowed_gigabytes = _get_request_allotment(requested_gigabytes,\n used_gigabytes,\n quota['gigabytes'])\n allowed_volumes = min(allowed_volumes,\n int(allowed_gigabytes // size))\n return min(requested_volumes, allowed_volumes)",
"def volumes(self):",
"def dvs_volume(self):\n self.show_step(1)\n self.show_step(2)\n self.env.revert_snapshot(\"ready_with_5_slaves\")\n plugin.install_dvs_plugin(self.ssh_manager.admin_ip)\n\n self.show_step(3)\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=DEPLOYMENT_MODE,\n settings={\n \"net_provider\": 'neutron',\n \"net_segment_type\": NEUTRON_SEGMENT_TYPE\n }\n )\n self.show_step(4)\n plugin.enable_plugin(cluster_id, self.fuel_web)\n\n self.show_step(5)\n self.show_step(6)\n self.show_step(7)\n self.fuel_web.update_nodes(cluster_id,\n {'slave-01': ['controller'],\n 'slave-02': ['compute'],\n 'slave-03': ['cinder'],\n 'slave-04': ['cinder-vmware'],\n 'slave-05': ['compute-vmware']})\n\n self.show_step(8)\n self.show_step(9)\n logger.info('Configure VMware vCenter Settings.')\n target_node_2 = self.node_name('slave-05')\n self.fuel_web.vcenter_configure(cluster_id,\n target_node_2=target_node_2,\n multiclusters=True)\n\n self.show_step(10)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.fuel_web.run_ostf(cluster_id=cluster_id, test_sets=['smoke'])\n\n # Create connection to openstack\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n # Get default security group\n _s_groups = os_conn.neutron.list_security_groups()['security_groups']\n _srv_tenant = os_conn.get_tenant(SERVTEST_TENANT).id\n default_sg = [sg for sg in _s_groups\n if sg['tenant_id'] == _srv_tenant and\n sg['name'] == 'default'][0]\n\n self.show_step(11)\n network = os_conn.nova.networks.find(label=self.inter_net_name)\n openstack.create_instances(\n os_conn=os_conn,\n nics=[{'net-id': network.id}],\n vm_count=1,\n security_groups=[default_sg['name']])\n openstack.verify_instance_state(os_conn)\n\n self.show_step(12)\n volume_vcenter = openstack.create_volume(os_conn, 'vcenter-cinder')\n volume_nova = openstack.create_volume(os_conn, 'nova')\n instances = os_conn.nova.servers.list()\n _az = 'OS-EXT-AZ:availability_zone'\n instance_vcenter = [inst for inst in instances\n if inst.to_dict()[_az] == 'vcenter'][0]\n instance_nova = [inst for inst in instances\n if inst.to_dict()[_az] == 'nova'][0]\n\n self.show_step(13)\n os_conn.attach_volume(volume_vcenter, instance_vcenter)\n os_conn.attach_volume(volume_nova, instance_nova)\n\n self.show_step(14)\n assert_true(os_conn.cinder.volumes.get(volume_nova.id).status ==\n 'in-use')\n\n assert_true(os_conn.cinder.volumes.get(volume_vcenter.id).status ==\n 'in-use')",
"def allowed_topologytemplate_access_create(user):\n return user.has_perm(\"vnswww.add_topologytemplate\")",
"def volume(nodes, graph):\n ###TODO\n pass",
"def min_system_resources(node):\n\n min_sys_res = True\n\n # CPUs\n if \"layout\" in node[\"cpu\"]:\n total_cpus = len(node[\"cpu\"][\"layout\"])\n if total_cpus < 2:\n print(\n \"\\nThere is only {} CPU(s) available on this system. \"\n \"This is not enough to run VPP.\".format(total_cpus)\n )\n min_sys_res = False\n\n # System Memory\n if (\n \"free\" in node[\"hugepages\"]\n and \"memfree\" in node[\"hugepages\"]\n and \"size\" in node[\"hugepages\"]\n ):\n free = node[\"hugepages\"][\"free\"]\n memfree = float(node[\"hugepages\"][\"memfree\"].split(\" \")[0])\n hugesize = float(node[\"hugepages\"][\"size\"].split(\" \")[0])\n\n memhugepages = MIN_TOTAL_HUGE_PAGES * hugesize\n percentmemhugepages = (memhugepages / memfree) * 100\n if free is \"0\" and percentmemhugepages > MAX_PERCENT_FOR_HUGE_PAGES:\n print(\n \"\\nThe System has only {} of free memory. You will not \"\n \"be able to allocate enough Huge Pages for VPP.\".format(\n int(memfree)\n )\n )\n min_sys_res = False\n\n return min_sys_res",
"def test_volumes_get(self):\n pass",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumeArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def setupVolumeNodeViewLayout(self):\n layoutNodes = slicer.mrmlScene.GetNodesByClass('vtkMRMLLayoutNode')\n layoutNodes.SetReferenceCount(layoutNodes.GetReferenceCount()-1)\n layoutNodes.InitTraversal()\n layoutNode = layoutNodes.GetNextItemAsObject()\n layoutNode.SetViewArrangement(slicer.vtkMRMLLayoutNode.SlicerLayoutTwoOverTwoView)",
"def test_create_hyperflex_cluster_network_policy(self):\n pass",
"def deploy_ocp(self, log_cli_level='DEBUG'):\n super(AWSIPI, self).deploy_ocp(log_cli_level)\n if not self.ocs_operator_deployment:\n volume_size = int(\n config.ENV_DATA.get('device_size', defaults.DEVICE_SIZE)\n )\n self.add_volume(volume_size)",
"def is_sys(self):\n for lv in getattr(self, 'logical_volumes', []):\n if lv.is_sys():\n return True\n return False",
"def test_create_hyperflex_cluster_storage_policy(self):\n pass",
"def list_volumes(self):\n print '# Listing existing volumes'\n self.compute.list_volumes()",
"def dvs_vcenter_security(self):\n # constants\n wait_to_update_rules_on_dvs_ports = 30\n\n self.show_step(1)\n self.env.revert_snapshot(\"dvs_vcenter_systest_setup\")\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n tenant = os_conn.get_tenant(SERVTEST_TENANT)\n\n self.show_step(2)\n net_1 = os_conn.create_network(\n network_name=self.net_data[0].keys()[0],\n tenant_id=tenant.id)['network']\n\n subnet = os_conn.create_subnet(subnet_name=net_1['name'],\n network_id=net_1['id'],\n cidr=self.net_data[0]['net_1'],\n ip_version=4)\n\n logger.info(\"Check that network is created.\")\n assert_true(os_conn.get_network(net_1['name'])['id'] == net_1['id'])\n\n logger.info(\"Add net_1 to default router\")\n router = os_conn.get_router(os_conn.get_network(self.ext_net_name))\n os_conn.add_router_interface(router_id=router[\"id\"],\n subnet_id=subnet[\"id\"])\n\n self.show_step(3)\n openstack.create_instances(os_conn=os_conn,\n nics=[{'net-id': net_1['id']}],\n vm_count=1)\n openstack.verify_instance_state(os_conn)\n\n self.show_step(4)\n net_1 = os_conn.nova.networks.find(label=self.inter_net_name)\n openstack.create_instances(os_conn=os_conn,\n nics=[{'net-id': net_1.id}],\n vm_count=1)\n openstack.verify_instance_state(os_conn)\n\n # Remove default security group\n srv_list = os_conn.get_servers()\n for srv in srv_list:\n srv.remove_security_group(srv.security_groups[0]['name'])\n os_conn.goodbye_security()\n\n self.show_step(5)\n sg1 = os_conn.nova.security_groups.create('SG1', \"descr\")\n self.show_step(6)\n self.tcp[\"security_group_rule\"][\"security_group_id\"] = sg1.id\n os_conn.neutron.create_security_group_rule(self.tcp)\n\n self.show_step(7)\n sg2 = os_conn.nova.security_groups.create('SG2', \"descr\")\n self.show_step(8)\n self.icmp[\"security_group_rule\"][\"security_group_id\"] = sg2.id\n os_conn.neutron.create_security_group_rule(self.icmp)\n\n logger.info(\"Attach SG_1 and SG2 to instances\")\n for srv in srv_list:\n srv.add_security_group(sg1.id)\n srv.add_security_group(sg2.id)\n\n fip = openstack.create_and_assign_floating_ips(os_conn, srv_list)\n\n self.show_step(9)\n ip_pair = dict.fromkeys(fip)\n for key in ip_pair:\n ip_pair[key] = [value for value in fip if key != value]\n openstack.check_connection_vms(ip_pair)\n self.show_step(10)\n openstack.check_connection_vms(ip_pair, command='ssh')\n\n self.show_step(11)\n _sg_rules = os_conn.neutron.list_security_group_rules()[\n 'security_group_rules']\n sg_rules = [sg_rule for sg_rule in _sg_rules\n if sg_rule['security_group_id'] in [sg1.id, sg2.id]]\n for rule in sg_rules:\n os_conn.neutron.delete_security_group_rule(rule['id'])\n\n time.sleep(wait_to_update_rules_on_dvs_ports)\n\n self.show_step(12)\n for ip in fip:\n try:\n openstack.get_ssh_connection(\n ip, self.instance_creds[0], self.instance_creds[1])\n except Exception as e:\n logger.info('{}'.format(e))\n\n self.show_step(13)\n self.tcp[\"security_group_rule\"][\"security_group_id\"] = sg2.id\n os_conn.neutron.create_security_group_rule(self.tcp)\n\n self.tcp[\"security_group_rule\"][\"direction\"] = \"egress\"\n os_conn.neutron.create_security_group_rule(self.tcp)\n\n self.show_step(14)\n openstack.check_connection_vms(\n ip_pair, command='ssh', timeout=wait_to_update_rules_on_dvs_ports)\n\n self.show_step(15)\n openstack.check_connection_vms(ip_pair, result_of_command=1)\n\n self.show_step(16)\n 
self.icmp[\"security_group_rule\"][\"security_group_id\"] = sg1.id\n os_conn.neutron.create_security_group_rule(self.icmp)\n\n self.icmp[\"security_group_rule\"][\"direction\"] = \"egress\"\n os_conn.neutron.create_security_group_rule(self.icmp)\n\n self.show_step(17)\n openstack.check_connection_vms(\n ip_pair, timeout=wait_to_update_rules_on_dvs_ports)\n\n self.show_step(18)\n self.show_step(19)\n self.show_step(20)\n\n self.show_step(21)\n srv_list = os_conn.get_servers()\n for srv in srv_list:\n for sg in srv.security_groups:\n srv.remove_security_group(sg['name'])\n\n self.show_step(22)\n for srv in srv_list:\n srv.add_security_group('default')\n\n self.show_step(23)\n openstack.check_connection_vms(\n ip_pair, timeout=wait_to_update_rules_on_dvs_ports)\n\n self.show_step(24)\n openstack.check_connection_vms(ip_pair, command='ssh')",
"def test_get_node_partitions(self):\n pass",
"def deploy_group(group_name):\n group=get_entity_by_cond(Group,'group_name==\"%s\"'%group_name)\n nodes=session.query(Node).filter('group_name==\"%s\"'%group_name)\n\n machines_filename = os.path.join(\n spl.config.paths['headnode-config'],\n str(group.network_id),\n 'machines.txt',\n )\n\n machines_dirname = os.path.dirname(machines_filename)\n if not os.path.isdir(machines_dirname):\n os.mkdir(machines_dirname)\n\n with open(machines_filename, 'w') as f:\n for node in nodes:\n f.write((\"%s %s\\n\"%(node.mac_addr,node.manage_ip)))\n\n\n switches=[]\n ports='';\n for node in nodes:\n switches.append(node.port.switch_id)\n ports+=str(node.port.port_no)+','\n #Check all the nodes in the group are connected to the same switch\n switch_id=check_same_non_empty_list(switches)\n if switch_id==False:\n # TODO: raise an exception\n print \"error: ports not in same switch\"\n return\n\n switch=get_entity_by_cond(Switch,'switch_id==%d'%switch_id)\n\n import cisco_snmp\n switch_drivers = {'cisco_snmp.py':cisco_snmp}\n driver = switch_drivers[switch.script]\n\n\n\n\n print group.network_id\n driver.make_remove_vlans(str(group.network_id),True)\n print 'ports'+ports\n driver.edit_ports_on_vlan(ports,str(group.network_id),True)\n\n os.system(('../vm-vlan up %s %s' % (group.network_id,group.vm_name)))\n\n group.deployed = True",
"def test_create_hyperflex_node_config_policy(self):\n pass",
"def allowed_flex_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedFlexVolumePatchArgs']]]]:\n return pulumi.get(self, \"allowed_flex_volumes\")",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def creation_validation(**_):\n\n for property_key in constants.VOLUME_REQUIRED_PROPERTIES:\n utils.validate_node_property(property_key, ctx.node.properties)\n\n volume_object = _get_volumes_from_id(utils.get_resource_id())\n\n if ctx.node.properties['use_external_resource'] and not volume_object:\n raise NonRecoverableError(\n 'External resource, but the supplied '\n 'EBS volume does not exist in the account.')\n\n if not ctx.node.properties['use_external_resource'] and volume_object:\n raise NonRecoverableError(\n 'Not external resource, but the supplied '\n 'EBS volume exists in the account.')",
"def _get_pvds(self):\n pvds = []\n for path in self.paths():\n if path.reqstate == ReqState.enabled and path.provider.name not in pvds:\n pvds += [path.provider.name]\n return pvds"
]
| [
"0.5830229",
"0.5813511",
"0.58032465",
"0.56135035",
"0.54997087",
"0.5374481",
"0.5219399",
"0.51760525",
"0.5159506",
"0.5103449",
"0.49977416",
"0.4946259",
"0.49090728",
"0.4903071",
"0.48948416",
"0.48678818",
"0.48345947",
"0.48270988",
"0.48091453",
"0.4797037",
"0.47909364",
"0.47896647",
"0.4788123",
"0.4784509",
"0.4778314",
"0.47700468",
"0.47460952",
"0.47460952",
"0.47341287",
"0.4709187"
]
| 0.5891469 | 0 |
ControllerStatus is the status of Controller pods | def controller_status(self) -> Optional['outputs.CSIVXFlexOSStatusControllerStatus']:
return pulumi.get(self, "controller_status") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def controller_status(self) -> Optional['outputs.CSIUnityStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def status(self) -> ControllerStatus:\n return self._status",
"def controller_status(self) -> Optional['outputs.CSIPowerStoreStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def controller_status(self) -> Optional['outputs.CSIIsilonStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def controller_status(self) -> Optional['outputs.CSIPowerMaxStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def status_controller(cls, args, config):\n logging.debug(\"MOLNSController.status_controller(args={0})\".format(args))\n if len(args) > 0:\n try:\n controller_obj = cls._get_controllerobj(args, config)\n except MOLNSException:\n return {}\n if controller_obj is None: return {}\n # Check if any instances are assigned to this controller\n instance_list = config.get_controller_instances(controller_id=controller_obj.id)\n table_data = []\n if len(instance_list) > 0:\n for i in instance_list:\n # provider_name = config.get_object_by_id(i.provider_id, 'Provider').name\n try:\n p = config.get_object_by_id(i.provider_id, 'Provider')\n provider_name = p.name\n except DatastoreException as e:\n provider_name = 'ERROR: {0}'.format(e)\n controller_name = config.get_object_by_id(i.controller_id, 'Controller').name\n status = controller_obj.get_instance_status(i)\n table_data.append(\n [controller_name, status, 'controller', provider_name, i.provider_instance_identifier,\n i.ip_address])\n\n else:\n return {'msg': \"No instance running for this controller\"}\n # Check if any worker instances are assigned to this controller\n instance_list = config.get_worker_instances(controller_id=controller_obj.id)\n if len(instance_list) > 0:\n for i in instance_list:\n worker_name = config.get_object_by_id(i.worker_group_id, 'WorkerGroup').name\n worker_obj = cls._get_workerobj([worker_name], config)\n # provider_name = config.get_object_by_id(i.provider_id, 'Provider').name\n try:\n p = config.get_object_by_id(i.provider_id, 'Provider')\n provider_name = p.name\n except DatastoreException as e:\n provider_name = 'ERROR: {0}'.format(e)\n status = worker_obj.get_instance_status(i)\n table_data.append(\n [worker_name, status, 'worker', provider_name, i.provider_instance_identifier, i.ip_address])\n # table_print(['name','status','type','provider','instance id', 'IP address'],table_data)\n r = {'type': 'table', 'column_names': ['name', 'status', 'type', 'provider', 'instance id', 'IP address'],\n 'data': table_data}\n return r\n else:\n instance_list = config.get_all_instances()\n if len(instance_list) > 0:\n table_data = []\n for i in instance_list:\n provider_obj = config.get_object_by_id(i.provider_id, 'Provider')\n if provider_obj is None:\n continue\n provider_name = provider_obj.name\n controller_name = config.get_object_by_id(i.controller_id, 'Controller').name\n if i.worker_group_id is not None:\n worker_name = config.get_object_by_id(i.worker_group_id, 'WorkerGroup').name\n table_data.append([worker_name, 'worker', provider_name, i.provider_instance_identifier])\n else:\n table_data.append(\n [controller_name, 'controller', provider_name, i.provider_instance_identifier])\n\n r = {'type': 'table', 'column_names': ['name', 'type', 'provider', 'instance id'], 'data': table_data}\n r['msg'] = \"\\n\\tUse 'molns status NAME' to see current status of each instance.\"\n return r\n else:\n return {'msg': \"No instance found\"}",
"def getStatus(self, request, context):\n \n statusDrone = str(self.vehicle.system_status).rpartition(':')[2]\n\t \n return droneconnect_pb2.Status(status = statusDrone)",
"def status(self):\n return status_dict[self._get_property_(self.STATUS).upper()]",
"def getStatus():\n return json.dumps({'camera': Camera.status(), 'rover': rover.status()}), 200",
"def status(self):\n return STATUS[self.fields['status']]",
"def model_status():\n return juju.CLIENT.Client(request=\"FullStatus\")",
"def status(self):\n return self.get(self._names[\"status\"])",
"def get_status():\n # TODO tie this in with requests that can fetch the status of the pod from the cluster\n\n if request.method == \"GET\":\n \"\"\"\n request looks like:\n {\n \"workflow_name\": \"test-workflow\"\n }\n \"\"\"\n\n req = request.get_json(force=True)\n if workflow_exists(req['workflow_name']):\n # TODO fit into database\n # Get the pod by workflow and read the status\n # status = RUNNING_JOBS[req['workflow_name']].get_pod_status()\n response = {\n \"status\": 'Still running'\n }\n else:\n app.logger.error(\n f\"Received request asking the pod status in {req['workflow_name']} \"\n f\"but this workflow is not present in running jobs\"\n f\"record. Nothing to do.\")\n response = {\n \"status\": \"Not running\"\n }\n\n return jsonify(response)",
"def getStatus(self):\n return self.__status",
"async def _status():\n # TODO(Deepankar): should we add versions of executors?\n return {\n 'status_code': status.HTTP_200_OK,\n 'jina_version': jina_version\n }",
"def getStatus(self):\n return self._status",
"def initStatus(status):\n if status == 0 :\n print(\"Supported controller connected\")\n elif status < 0 :\n print(\"No supported controller detected\")\n else:\n print(\"Waiting for controller {}\".format(status) )",
"def status(self):\n return {\n 'hawkular_services': self._hawkular.status(),\n 'alerts': self.alert.status(),\n 'inventory': self.inventory.status(),\n 'metrics': self.metric.status()\n }",
"def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status",
"def _getCurrentComponentStatus(self):\n resOverall = self.sysAdminClient.getOverallStatus()\n if not resOverall['OK']:\n return resOverall\n currentStatus = {'Down': set(), 'Run': set(), 'All': set()}\n informationDict = resOverall['Value']\n for systemsDict in informationDict.values():\n for system, instancesDict in systemsDict.items():\n for instanceName, instanceInfoDict in instancesDict.items():\n identifier = '%s__%s' % (system, instanceName)\n runitStatus = instanceInfoDict.get('RunitStatus')\n if runitStatus in ('Run', 'Down'):\n currentStatus[runitStatus].add(identifier)\n\n currentStatus['All'] = currentStatus['Run'] | currentStatus['Down']\n return S_OK(currentStatus)",
"def status(self):\n return self._data['status']",
"def _get_status_obj(self):\n\n status = Status(self._config.dirout, name=self._config.name,\n hardware=self._config.hardware)\n return status",
"def get_status(self):\n return self._status",
"def status(self):\n\t\treturn self._status",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")"
]
| [
"0.7716838",
"0.76570404",
"0.7581352",
"0.74678415",
"0.7435039",
"0.6915596",
"0.6864449",
"0.59351426",
"0.59238404",
"0.5888972",
"0.584402",
"0.58390266",
"0.58311397",
"0.5826533",
"0.58211887",
"0.5818358",
"0.5816246",
"0.58006066",
"0.57969457",
"0.57903516",
"0.5789894",
"0.57749283",
"0.57571733",
"0.5755945",
"0.574518",
"0.57411844",
"0.57411844",
"0.57411844",
"0.57411844",
"0.57411844"
]
| 0.7894275 | 0 |
NodeStatus is the status of Controller pods | def node_status(self) -> Optional['outputs.CSIVXFlexOSStatusNodeStatus']:
return pulumi.get(self, "node_status") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def node_status(self) -> Optional['outputs.CSIUnityStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIIsilonStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_statuses(self) -> pulumi.Output[Sequence['outputs.NodeBalancerConfigNodeStatus']]:\n return pulumi.get(self, \"node_statuses\")",
"def node_status(self) -> Optional['outputs.CSIPowerStoreStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIPowerMaxStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def status(self):\n url = API_PATH[\"node_status\"].format(tuneUuid=self._parentTune.uuid())\n rsp_json = self._parse(self._get(url))\n\n for status_obj in rsp_json:\n if status_obj[\"nodeUuid\"] == self.uuid():\n return self._new_instance(NodeStatus, status_obj, node=self)\n return None",
"def node_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NodeBalancerConfigNodeStatusArgs']]]]:\n return pulumi.get(self, \"node_statuses\")",
"def getNodeStatus(self,status = 0):\n if status:\n self.node_status = status\n return self.node_status",
"def status(self) -> NodeStatus:\n return self._status",
"def controller_status(self) -> Optional['outputs.CSIVXFlexOSStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def test_get_node_status(self):\n pass",
"def controller_status(self) -> Optional['outputs.CSIIsilonStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def getNodeStatus(self,node):\n data = self.connect('get','nodes/%s/status' % (node),None)\n return data",
"def handle_status(self, request):\n \"\"\"\n @api {get} /status Get node status\n @apiName GetNodeStatus\n @apiGroup Node\n @apiVersion 1.1.0\n\n @apiSuccess {Boolean} execution_enabled Task execution is enabled on the node.\n @apiSuccess {Boolean} leader Node is the leader.\n @apiSuccess {String} name Node name.\n @apiSuccess {Boolean} scheduler_running The scheduler is running on the node.\n @apiSuccess {String} address Node IP address.\n @apiSuccess {String[]} pools Pools in which the node is registered.\n @apiSuccess {Object} running_processes Processes running on the host.\n @apiSuccess {Object} running_processes.process Process.\n @apiSuccess {String} running_processes.process.start_time Time the process started, ISO 8601 formatted.\n @apiSuccess {String} running_processes.process.task ID of the task.\n @apiSuccess {Boolean} cluster_joined Node has joined the cluster.\n @apiSuccess {Boolean} contending_for_lead Node is contending for lead.\n @apiSuccess {Boolean} pools_joined Node has joined its pools.\n\n @apiSuccessExample {json} Example response:\n {\n \"execution_enabled\": true,\n \"leader\": false,\n \"name\": \"node2\",\n \"scheduler_running\": false,\n \"address\": \"127.0.0.1:32002\",\n \"pools\": [\"pool1\", \"pool2\"],\n \"running_processes\": {\n \"b26e5cc2ef3f11e4817b0026b951c045\": {\n \"start_time\": \"2015-04-30T13:49:18.351494+00:00\",\n \"task\": \"508b4b72e44611e49e76c81f66cd0cca\"\n }\n },\n \"cluster_joined\": true,\n \"contending_for_lead\": true,\n \"pools_joined\": true\n }\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n status = {\n 'name': self.cluster.nodename,\n 'address': self.cluster.addr,\n 'pools': self.cluster.mypools,\n 'leader': self.cluster.is_leader,\n 'cluster_joined': self.cluster.cluster_joined,\n 'pools_joined': self.cluster.pools_joined,\n 'contending_for_lead': self.cluster.contending_for_lead,\n\n 'execution_enabled': self.manager.enabled,\n 'running_processes': dict([ (execid, { 'task': details['task'], 'start_time': details['start_time'].isoformat() }) for (execid, details) in self.manager.running_processes.items() ]),\n\n 'scheduler_running': self.cluster.scheduler.running\n }\n\n return HTTPReply(body = json.dumps(status), headers = headers)",
"def controller_status(self) -> Optional['outputs.CSIUnityStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def get_status():\n # TODO tie this in with requests that can fetch the status of the pod from the cluster\n\n if request.method == \"GET\":\n \"\"\"\n request looks like:\n {\n \"workflow_name\": \"test-workflow\"\n }\n \"\"\"\n\n req = request.get_json(force=True)\n if workflow_exists(req['workflow_name']):\n # TODO fit into database\n # Get the pod by workflow and read the status\n # status = RUNNING_JOBS[req['workflow_name']].get_pod_status()\n response = {\n \"status\": 'Still running'\n }\n else:\n app.logger.error(\n f\"Received request asking the pod status in {req['workflow_name']} \"\n f\"but this workflow is not present in running jobs\"\n f\"record. Nothing to do.\")\n response = {\n \"status\": \"Not running\"\n }\n\n return jsonify(response)",
"def test_get_node_status_batterystatus(self):\n pass",
"def controller_status(self) -> Optional['outputs.CSIPowerStoreStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def controller_status(self) -> Optional['outputs.CSIPowerMaxStatusControllerStatus']:\n return pulumi.get(self, \"controller_status\")",
"def status(self):\n if Daemon.status(self) != 0:\n return 1\n \n # Load decoy logger\n self.load_outputs(decoy=True)\n\n # Load node pool & print status\n try:\n self.pool = PLNodePool(self)\n sys.stdout.write(self.status_str())\n except PLNodePoolException:\n sys.stdout.write(\"No node found.\\n\")\n\n return 0",
"def status(self) -> ControllerStatus:\n return self._status",
"def status(self):\n if self.error is not None:\n return Node.STATUS_ERROR\n if not self.exists():\n return Node.STATUS_UNDEFINED\n state_code_map = {\n libvirt.VIR_DOMAIN_NOSTATE: Node.STATUS_UNDEFINED,\n libvirt.VIR_DOMAIN_RUNNING: Node.STATUS_UP,\n libvirt.VIR_DOMAIN_BLOCKED: Node.STATUS_UP,\n libvirt.VIR_DOMAIN_PAUSED: Node.STATUS_UP,\n libvirt.VIR_DOMAIN_SHUTDOWN: Node.STATUS_DOWN,\n libvirt.VIR_DOMAIN_SHUTOFF: Node.STATUS_DOWN,\n libvirt.VIR_DOMAIN_CRASHED: Node.STATUS_ERROR,\n libvirt.VIR_DOMAIN_PMSUSPENDED: Node.STATUS_DOWN,\n }\n try:\n dom = self._get_domain()\n return state_code_map[dom.info()[0]]\n except libvirt.libvirtError as err:\n err_code = err.get_error_code()\n if err_code == libvirt.VIR_ERR_NO_DOMAIN:\n # The domains for sandbox nodes are temporal, so there's\n # no real mapping of \"no domain found\" other than the\n # node should be considered not started.\n return Node.STATUS_DOWN\n else:\n return Node.STATUS_ERROR\n except Exception as err:\n self.LOG.error(err)\n return Node.STATUS_ERROR",
"def iteration(self, node_status=True):\n self.clean_initial_status(list(self.available_statuses.values()))\n actual_status = {node: nstatus for node, nstatus in future.utils.iteritems(self.status)}\n\n # can remove this step\n # only tells us that the activated nodes are active while the other nodes are inactive\n '''\n {'iteration': 0, 'status': {0: 0, 1: 1, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0, 11: 0, 12: 0, 13: 0, 14: 0, 15: 0, 16: 0, 17: 0, 18: 0, 19: 0, 20: 0, 21: 0, 22: 0, 23: 0, 24: 0, 25: 0, 26: 0, 27: 0, 28: 0, 29: 0, 30: 0, 31: 0, 32: 0, 33: 0, 34: 0, 35: 0, 36: 0, 37: 0, 38: 0, 39: 0, 40: 0, 41: 0, 42: 0, 43: 0, 44: 0, 45: 0, 46: 0, 47: 0, 48: 0, 49: 0, 50: 0, 51: 0, 52: 0, 53: 0, 54: 0, 55: 0, 56: 0, 57: 0, 58: 0, 59: 0, 60: 0, 61: 0, 62: 0, 63: 0, 64: 0, 65: 0, 66: 0, 67: 0, 68: 0, 69: 0, 70: 0, 71: 0, 72: 0, 73: 0, 74: 0, 75: 0, 76: 0, 77: 0, 78: 0, 79: 0, 80: 0, 81: 0, 82: 0, 83: 0, 84: 0, 85: 0, 86: 0, 87: 0, 88: 0, 89: 0, 90: 0, 91: 0, 92: 0, 93: 0, 94: 0, 95: 0, 96: 0, 97: 0, 98: 0, 99: 0, 100: 0, 101: 0, 102: 0, 103: 0, 104: 0, 105: 0, 106: 0, 107: 0, 108: 0, 109: 0, 110: 0, 111: 0, 112: 0, 113: 0, 114: 0, 115: 0, 116: 0, 117: 0, 118: 0, 119: 0, 120: 0, 121: 0, 122: 0, 123: 0, 124: 0, 125: 0, 126: 0, 127: 0, 128: 0, 129: 0, 130: 0, 131: 0, 132: 0, 133: 0, 134: 0, 135: 0, 136: 0, 137: 0, 138: 0, 139: 0, 140: 0, 141: 0, 142: 0, 143: 0, 144: 0, 145: 0, 146: 0, 147: 0, 148: 0, 149: 0}, \n 'node_count': {0: 149, 1: 1}, 'status_delta': {0: 0, 1: 0}}\n '''\n # if self.actual_iteration == 0:\n # self.actual_iteration += 1\n # delta, node_count, status_delta = self.status_delta(actual_status)\n # if node_status:\n # return {\"iteration\": 0, \"status\": actual_status.copy(),\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy()}\n # else:\n # return {\"iteration\": 0, \"status\": {},\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy()}\n\n count_attempts = 0\n # print(self.params['nodes']['attempt'])\n # can consider \"for u in activated_nodes:\"\n # saves time especially in a big graph\n for u in self.graph.nodes: # for each node\n if self.status[u] != 1: # only select nodes with status = 1 (infected/active)\n continue\n if self.params['nodes']['attempt'][u] != 0: # and attempt = 0 (no previous attempts)\n continue\n # print(\"go\")\n\n neighbors = list(self.graph.neighbors(u)) # neighbors and successors (in DiGraph) produce the same result\n # get neighbors of this infected/active node\n\n # Standard threshold\n if len(neighbors) > 0: \n for v in neighbors: # for each neighbor\n if actual_status[v] == 0: # if their status = 0 (susceptible/inactive)\n key = (u, v) # key = (active node, inactive node) or (infected node, susceptible node)\n\n # Individual specified thresholds\n if 'threshold' in self.params['edges']: # if edge has a threshold\n if key in self.params['edges']['threshold']: # if key (u, v) in params... but why would it be in here? oh cos edges if from node to node so tuple (u , v)\n threshold = self.params['edges']['threshold'][key] # replace key?\n elif (v, u) in self.params['edges']['threshold'] and not self.graph.directed: # direction affects this. v to u instead of u to v. 
yup this\n threshold = self.params['edges']['threshold'][(v, u)] # similarly put in key in (but opposite direction)\n # oh this is the actual threshold used below for the flip i think\n \n flip = np.random.random_sample() # random float in half-open interval [0.0, 1.0)\n if flip <= threshold: # if less than threshold \n actual_status[v] = 1 # neighbor becomes infected/active\n # actual_status[v] = 1 # probability activated is 1\n self.params['nodes']['attempt'][u] = 1\n count_attempts += 1\n\n \n delta, node_count, status_delta = self.status_delta(actual_status)\n self.status = actual_status\n # print(\"self status\")\n # print(self.status)\n self.actual_iteration += 1\n if count_attempts == 0:\n self.stop = True\n\n # this one remains but change to ensure the output fits to what we want\n # we only want the active set size at the end of the iteration\n # can get from the last 'node_count': {0: inactive_set_size, 1: active_set_size}\n # if node_status:\n # return {\"iteration\": self.actual_iteration - 1, \"status\": delta.copy(),\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy(), 'active_set_size': node_count.copy()[1]}\n # else:\n # return {\"iteration\": self.actual_iteration - 1, \"status\": {},\n # \"node_count\": node_count.copy(), \"status_delta\": status_delta.copy()}\n \n active_set_size = node_count.copy()[1]\n status = delta.copy()\n # print(status)\n \n return active_set_size, status",
"def compute_single_node_status(\n node_name_message_map: Dict[str, Node], node_name: str\n) -> \"StatusValue\":\n\n node = node_name_message_map[node_name]\n\n if (\n node.status != Status.STATUS_UNSPECIFIED\n ): # if the current node's status was already computed\n return node.status\n\n status_count_map: Dict[\"StatusValue\", int] = defaultdict(int)\n for child_name in node.child_names:\n status_count_map[\n compute_single_node_status(node_name_message_map, child_name)\n ] += 1\n\n try:\n for dependency in node.dependencies:\n status_count_map[\n compute_single_node_status(\n node_name_message_map, dependency.target_name\n )\n ] += 1\n except AttributeError:\n pass\n\n try:\n for sli in node.slis:\n status_count_map[compute_sli_status(sli)] += 1\n except AttributeError:\n pass\n\n node.status = compute_status_from_count_map(status_count_map)\n\n if (\n node.override_status != Status.STATUS_UNSPECIFIED\n ): # if the current node's status was manually overwritten\n # notice we place this at the end, since we still want to compute the node's status\n # to display in the dropdown menu (regardless of the override)\n return node.override_status\n\n return node.status",
"def getContainerStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/lxc/%s/status/current' % (node,vmid),None)\n return data",
"def handle_cluster_status(self, request):\n \"\"\"\n @api {get} /cluster/status Get cluster status\n @apiName GetClusterStatus\n @apiGroup Cluster\n @apiVersion 1.0.0\n\n @apiSuccess {Object} nodes Nodes in the cluster.\n @apiSuccess {Object} nodes.node Node.\n @apiSuccess {String[]} nodes.node.pools Pools in which the node is registered.\n @apiSuccess {String} nodes.node.address IP address of the node.\n @apiSuccess {String} leader Leader node.\n\n @apiSuccessExample {json} Example response:\n {\n \"nodes\": {\n \"node1\": {\n \"pools\": [\"pool1\", \"pool2\"],\n \"address\": \"127.0.0.1:32001\"\n },\n \"node2\": {\n \"pools\": [\"pool1\"],\n \"address\": \"127.0.0.1:32002\"\n },\n \"node3\": {\n \"pools\": [\"pool2\"],\n \"address\": \"127.0.0.1:32003\"\n },\n },\n \"leader\": \"node1\"\n }\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n status = {\n 'nodes': self.cluster.nodes,\n 'leader': self.cluster.leader\n }\n\n return HTTPReply(body = json.dumps(status), headers = headers)",
"def status(self):\n return {\n 'id': 'status',\n 'protocol_version': 'PV62',\n 'network': self.origin_node.network.name,\n 'td': self.origin_node.chain.head.header.difficulty,\n 'best_hash': self.origin_node.chain.head.header.hash,\n 'genesis_hash': self.origin_node.chain.genesis.header.hash,\n 'size': kB_to_MB(self._message_size['status'])\n }",
"def getClusterStatus(self):\n data = self.connect('get','cluster/status', None)\n return data",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def node_num_cpu(self) -> int:\n stdout, _, _ = RunKubectlCommand(\n ['get', 'nodes', '-o', 'jsonpath={.items[0].status.capacity.cpu}'])\n return int(stdout)"
]
| [
"0.73655444",
"0.7268281",
"0.7152991",
"0.7087971",
"0.70559794",
"0.70392525",
"0.69451696",
"0.69108874",
"0.68627334",
"0.67092526",
"0.6685717",
"0.655616",
"0.65551025",
"0.65426487",
"0.6439449",
"0.6357788",
"0.63130367",
"0.6275088",
"0.62040925",
"0.6166804",
"0.60834527",
"0.6042873",
"0.6003662",
"0.5985932",
"0.59771216",
"0.5968582",
"0.59540534",
"0.5934432",
"0.58662367",
"0.58567697"
]
| 0.75042415 | 0 |
Store browser cookies in a pickle file | def save_cookies_in_pickle(self):
with open(self.path, "wb") as file:
pickle.dump(self.browser.get_cookies(), file)
print(f'Cookies saved to {self.service}.pickle') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def save_cookies(self):\n\n with open(self.location_of_cookies, 'wb') as f:\n pickle.dump(self.get_cookies(), f)\n f.close()",
"def _save_cookies(requests_cookiejar, filename):\n with open(filename, 'wb') as handle:\n pickle.dump(requests_cookiejar, handle)",
"def save_cookies(requests_cookiejar, filename):\n with open(filename, 'wb') as f:\n pickle.dump(requests_cookiejar, f)",
"def __saveCookies(self):\n with open(self.cookies_file, 'wb') as cookie_file:\n LOGGER.debug(\"Pickling HTTP cookies to {0}\".format(self.cookies_file))\n pickle.dump(requests.utils.dict_from_cookiejar(session.cookies), cookie_file)",
"def saveCookies(session_cookies):\n with open(getCookieFile(), 'wb') as f:\n pickle.dump(session_cookies, f)\n f.close()",
"def save_cookies(self, filename):\n\t\tif self._session is None:\n\t\t\tself.start_new_session()\n\n\t\tcookies = self._session.cookies\n\n\t\t_log.debug('writing out cookies...')\n\n\t\twith open(filename, 'wb') as f:\n\t\t\tpickle.dump(cookies, f)",
"def _save_cookies(self, filename):\n if not os.path.isdir(os.path.dirname(filename)):\n return False\n with open(filename, 'w') as f:\n f.truncate()\n pickle.dump(self.session.cookies._cookies, f)",
"def save(self,cookie_jar):\n if not os.path.exists(self.path):\n os.makedirs(self.path)\n with open(self.file_path, \"wb\") as cookie_file:\n cookie_file.write(bytearray(pickle.dumps(cookie_jar)))",
"def dumper(driver, stringId, page, directory):\n\n _string = stringId + page + \"Cookies.pkl\"\n pickle.dump(driver.get_cookies(), open((directory + _string),\"wb\"))\n print('cookies dumped')",
"def save_cookies_lwp(cookiejar, filename):\n lwp_cookiejar = cookielib.LWPCookieJar()\n for c in cookiejar:\n argz = dict(vars(c).items())\n argz['rest'] = argz['_rest']\n del argz['_rest']\n c = cookielib.Cookie(**argz)\n lwp_cookiejar.set_cookie(c)\n lwp_cookiejar.save(filename, ignore_discard=True)",
"def __save(self):\n if not self.__loaded:\n return\n \n cookieSettings = QSettings(self.__cookiesFile, QSettings.IniFormat)\n \n cookieSettings.setValue(\"Exceptions/block\", self.__exceptionsBlock)\n cookieSettings.setValue(\"Exceptions/allow\", self.__exceptionsAllow)\n cookieSettings.setValue(\"Exceptions/allowForSession\",\n self.__exceptionsAllowForSession)\n \n Preferences.setWebBrowser(\"AcceptCookies\", self.__acceptCookies)\n Preferences.setWebBrowser(\"KeepCookiesUntil\", self.__keepCookies)\n Preferences.setWebBrowser(\"FilterTrackingCookies\",\n self.__filterTrackingCookies)",
"def save_cookie_string():\n cookie_string = dowins.PostsExtractor.get_csrf_and_cookie_string()[1]\n# cookie_string = \"headers = { 'Set-Cookie: '\" + cookie_string + \"'}\"\n with open('cookie_string.txt','w') as f:\n f.write(cookie_string)",
"def _load_cookies(filename):\n with open(filename, 'rb') as handle:\n return pickle.load(handle)",
"def _save_cookiejar(self):\n if hasattr(self._cookiejar, \"save\"):\n try:\n getattr(self._cookiejar, \"save\")()\n except (NotImplementedError, ValueError):\n pass\n self._last_cookiejar_save = time()",
"def load_cookies(filename):\n with open(filename, 'rb') as f:\n requests_cookiejar = pickle.load(f)\n return requests_cookiejar",
"def setCookieFile(self, cookie):\n if os.path.isfile(cookie):\n jc = jsoncookie.jsoncookie()\n jc.open(cookie)\n self.cookiejar = jc.cookiejar(self.server)\n jc.close()",
"def load_cookies(self, filename):\n\t\tif self._session is None:\n\t\t\tself.start_new_session()\n\n\t\twith open(filename, 'rb') as f:\n\t\t\tcookies = pickle.load(f)\n\n\t\tself._session.cookies.update(cookies)",
"def load(self):\n if not os.path.exists(self.file_path):\n return\n with open(self.file_path, \"rb\") as cookie_file:\n cookie_file = xbmcvfs.File(self.file_path, 'rb')\n cookie_jar = pickle.loads(cookie_file.readBytes())\n return cookie_jar",
"def save_prefs(self):\n prefs_file = open(expanduser(self.prefs_path), 'w')\n pickle.dump(self.prefs, prefs_file)",
"def get_cookies_firefox(domname):\n cookpath = os.path.expanduser(udata.srcs['firefox']) + '/cookies.sqlite'\n\n # copy DB to prevent disk I/O error on Windows\n cookcopy = cookpath+'.copy'\n shutil.copy(cookpath, cookcopy)\n\n sqx = sqlite3.connect('%s' % (cookcopy))\n cks = sqx.execute('select name,value from moz_cookies where host = \"%s\"' % (domname)).fetchall()\n cookies = {}\n for cn, cv in cks:\n cookies[cn] = cv\n os.remove(cookcopy)\n return cookies",
"def _save_credentials_if_changed(self):\n if list(self._cookiejar) != self._old_cookies:\n logger.debug(\"Saving credentials to file: %r\", str(self._cookiejar_filepath))\n dirpath = os.path.dirname(self._cookiejar_filepath)\n os.makedirs(dirpath, exist_ok=True)\n\n fd = os.open(self._cookiejar_filepath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)\n self._cookiejar.save(fd)",
"def cookiejar(name='session'):\n log = logging.getLogger('ipsv.common.cookiejar')\n spath = os.path.join(config().get('Paths', 'Data'), '{n}.txt'.format(n=name))\n cj = http.cookiejar.LWPCookieJar(spath)\n log.debug('Attempting to load session file: %s', spath)\n if os.path.exists(spath):\n try:\n cj.load()\n log.info('Successfully loaded a saved session / cookie file')\n except http.cookiejar.LoadError as e:\n log.warn('Session / cookie file exists, but could not be loaded', exc_info=e)\n\n return cj",
"def save_cookies(self, cookie_storage):\n\n def toPyCookieJar(QtCookieJar, PyCookieJar):\n for c in QtCookieJar.allCookies():\n PyCookieJar.set_cookie(toPyCookie(c))\n\n def toPyCookie(QtCookie):\n port = None\n port_specified = False\n secure = QtCookie.isSecure()\n name = str(QtCookie.name())\n value = str(QtCookie.value())\n v = str(QtCookie.path())\n path_specified = bool(v != \"\")\n path = v if path_specified else None\n v = str(QtCookie.domain())\n domain_specified = bool(v != \"\")\n domain = v\n domain_initial_dot = v.startswith('.') if domain_specified else None\n v = long(QtCookie.expirationDate().toTime_t())\n # Long type boundary on 32bit platfroms; avoid ValueError\n expires = 2147483647 if v > 2147483647 else v\n rest = {}\n discard = False\n return Cookie(0, name, value, port, port_specified, domain\n , domain_specified, domain_initial_dot, path, path_specified\n , secure, expires, discard, None, None, rest)\n\n if cookie_storage.__class__.__name__ == 'str':\n cj = LWPCookieJar(cookie_storage)\n toPyCookieJar(self.cookie_jar, cj)\n cj.save()\n elif cookie_storage.__class__.__name__.endswith('CookieJar'):\n toPyCookieJar(self.cookie_jar, cookie_storage)\n else:\n raise ValueError, 'unsupported cookie_storage type.'",
"def reload_cookies(self):\n\n if os.path.exists(self.location_of_cookies):\n with open(self.location_of_cookies, 'rb') as f:\n cookies = pickle.load(f)\n self.load_cookies(cookies, self.cookie_domain)\n \n f.close()",
"def __loadCookies(self):\n with open(self.cookies_file, 'rb') as cookie_file:\n LOGGER.debug(\"Unpickling HTTP cookies from file: {0}\".format(self.cookies_file))\n session.cookies = requests.utils.cookiejar_from_dict(pickle.load(cookie_file))",
"def saveCookie(self, resp):\n #save Cookie\n if resp.has_key('set-cookie'):\n self.updateHeaders('Cookie', resp['set-cookie'])\n print '--', 'Save cookie : ', resp['set-cookie']",
"def load_cookies_from_lwp(filename):\n lwp_cookiejar = cookielib.LWPCookieJar()\n lwp_cookiejar.load(filename, ignore_discard=True)\n return lwp_cookiejar",
"async def save(self, request, response) -> None:\n value = self.cipher.encrypt(request.session.dumps().encode())\n cookie = f'{self.cookie_name}={value.decode()}; SameSite=Lax'\n response.headers['Set-Cookie'] = cookie",
"def set_cookie( cookies, name, morsel, **kwargs ) :",
"def _load_cookies(self, filename):\n if not os.path.isfile(filename):\n return False\n\n with open(filename) as f:\n _cookies = pickle.load(f)\n if _cookies:\n jar = cookies.RequestsCookieJar()\n jar._cookies = _cookies\n self.session.cookies = jar\n else:\n return False"
]
| [
"0.8294968",
"0.82088995",
"0.81902015",
"0.79834753",
"0.79596627",
"0.7889409",
"0.7766356",
"0.775621",
"0.74142534",
"0.7398106",
"0.709393",
"0.69966495",
"0.6756405",
"0.67221403",
"0.66975796",
"0.66403407",
"0.6575654",
"0.6550851",
"0.6473795",
"0.6444341",
"0.6396243",
"0.6371194",
"0.63597894",
"0.63064903",
"0.62362355",
"0.61788523",
"0.61722356",
"0.60982466",
"0.60930777",
"0.6069543"
]
| 0.88248575 | 0 |
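A minimal round-trip sketch for the `save_cookies_in_pickle` record above, assuming a Selenium-style driver object and a hypothetical cookie-file path (the negatives in this record cover the loading direction, so both halves are shown together):

```python
import pickle

def save_cookies(browser, path):
    # Persist the driver's current session cookies to disk.
    with open(path, "wb") as file:
        pickle.dump(browser.get_cookies(), file)

def load_cookies(browser, path):
    # Restore previously saved cookies; the driver must already be on
    # the matching domain for add_cookie to accept them.
    with open(path, "rb") as file:
        for cookie in pickle.load(file):
            browser.add_cookie(cookie)
```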
read_file: reads a text file (UTF-8) and prints it to standard output | def read_file(filename=""):
with open(filename, encoding="utf-8") as n:
print(n.read(), end="") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def read_file(filename=\"\"):\n with open(filename, encoding='utf-8') as f:\n print(f.read(), end=\"\")",
"def read_file(filename=\"\"):\n with open(filename, 'r', encoding='utf-8') as fi:\n print(fi.read(), end=\"\")",
"def read_file(filename=\"\"):\n with open(filename, 'r', encoding='utf-8') as fl:\n print(fl.read(), end='')",
"def read_file(filename=\"\"):\n with open(filename, 'r', encoding='utf-8') as f:\n print(f.read(), end=\"\")",
"def read_file(filename=\"\"):\n with open(filename, encoding=\"UTF-8\") as f:\n for line in f:\n print(line, end='')",
"def read_file(file):\n f = open(file, 'r')\n print(f.read())",
"def read_file(filename=\"\"):\n with open(filename, 'r') as f:\n f_contents = f.read()\n print(f_contents, end='')",
"def read_file(file):\n f = open(file, \"r\", encoding=\"utf8\")\n return f.read()",
"def read_file(filename=\"\"):\n\n with open(filename, 'r', encoding='utf-8') as file:\n for line in file:\n print(line.rstrip())",
"def read_print_filename(filename):\n\ttry:\n\t\twith open(filename, encoding='utf 8') as f:\n\t\t\tcontents = f.read()\n\texcept FileNotFoundError:\n\t\tpass\n\telse:\n\t\tprint(contents)",
"def read_text_file(str_name_file: str):\n content: str = ''\n with open(str_name_file, mode=\"r\", encoding='utf-8') as file:\n print(\"file being read: \" + str_name_file + \"\\n\")\n content = file.read()\n return content",
"def open_and_read_file(file_path):\n text_data = open(file_path).read()\n # print text_data\n return text_data",
"def read_file(filename):\n with codecs.open(filename, 'r', 'utf8') as f:\n return f.read()",
"def read_file(filename):\n with codecs.open(filename, 'r', 'utf8') as f:\n return f.read()",
"def read_file(name):\n with open(name, 'r') as my_file:\n return my_file.read().encode('utf-8')",
"def read_file(filename=\"\"):\n\n with open(filename, 'r') as f:\n read_data = f.read()\n\n print('{:s}'.format(read_data), end='')\n\n f.closed",
"def read_file(filename):\n with codecs.open(os.path.join(here, filename), encoding='utf-8') as f:\n content = f.read()\n return content",
"def read_file(file_path, mode='r', encoding=\"utf-8\"):\n with codecs.open(file_path, mode, encoding=encoding) as fp:\n return fp.read().strip()",
"def read_file(file_path):\n try:\n input_file = open(file_path)\n text_content = input_file.read()\n input_file.close()\n return text_content\n except IOError:\n print (\"Can not read from file\")",
"def read_file(input_file):\n\n\ttext = open(input_file)\n\traw = text.read()\n#\tdecoded = raw.decode('utf8').encode('ascii', 'replace')\n\tdecoded = raw.decode('utf8')\n\n\t#moves this through the html cleaner\n\ttext = plaintext(decoded)\n\n\treturn text",
"def read_file(self, file: Path) -> str:\n with open(file) as f:\n return f.read()",
"def read_file(filename):\n open_kwargs = {}\n if sys.version_info.major == 3:\n open_kwargs = {'encoding': 'utf-8'}\n\n path = os.path.abspath(os.path.dirname(__file__))\n filepath = os.path.join(path, filename)\n with open(filepath, **open_kwargs) as filecontents:\n return filecontents.read()",
"def read_text_file(fpath, encoding, read_size=-1, force_unix_linebreaks=True):\n with codecs.open(fpath, encoding=encoding) as f:\n contents = f.read(read_size)\n\n if read_size > 0:\n contents = contents[:read_size]\n\n if force_unix_linebreaks:\n contents = linebreaks_win2unix(contents)\n\n return contents",
"def read_file(file_path: str) -> str:\n try:\n with open(file=file_path, mode='r', encoding=\"utf8\") as f:\n return f.read()\n\n except FileNotFoundError:\n raise FileNotFoundError(f'No text file was found at location {file_path}')",
"def read_from_file(file_name):\n with open(file_name, \"rb\") as text_file:\n return text_file.read()",
"def readfile(filename):\n with open(filename, encoding=\"utf-8\") as file:\n raw = file.read()\n return raw",
"def open_and_read_file(file_path):\n\n text_file = open(file_path)\n full_text = text_file.read()\n\n return full_text",
"def readFile(fileName):\n with open(fileName, 'r', encoding='utf-8') as f:\n text = f.read()\n return text",
"def read_file(file_name, enc=\"latin-1\"):\n f = open(file_name, \"r\", encoding=enc)\n content = \"\".join(f.readlines())\n f.close()\n return content",
"def read(file_name):\n with io.open(os.path.join(os.path.dirname(__file__), file_name),\n encoding='utf-8') as f:\n return f.read()"
]
| [
"0.8073081",
"0.80356634",
"0.8004845",
"0.8001571",
"0.7931309",
"0.77453053",
"0.74300694",
"0.74248487",
"0.7419696",
"0.72860175",
"0.7252469",
"0.7177055",
"0.7169224",
"0.7169224",
"0.7058749",
"0.70194954",
"0.7015463",
"0.6956451",
"0.6943433",
"0.687878",
"0.68684185",
"0.68566597",
"0.68398",
"0.6832768",
"0.68209314",
"0.68112165",
"0.6791534",
"0.6784688",
"0.67843604",
"0.6768503"
]
| 0.8194404 | 0 |
Print objects to stderr then exit | def printExit(*objects):
print(*objects, file=sys.stderr)
sys.exit(1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def finalize_error():\n print('')\n exit(-1)",
"def _error(self, *args, **kwargs):\n print(\"[{}]\".format(self.type), *args, file=sys.stderr, **kwargs)\n sys.exit(1)",
"def err(*objects, file=sys.stderr, flush=True, style=Fore.RED, **kwargs):\r\n with ScopedColoredStream(file, style, flush_on_exit=flush) as stream:\r\n stream.write(\"ERROR: \")\r\n print(*objects, file=stream, flush=False, **kwargs)",
"def print_std_error(self):\n print(self.std_error)\n sys.exit()",
"def error(msg):\n print(msg, file=sys.stderr)\n sys.exit()",
"def nostderr():\n save_stderr = sys.stderr\n sys.stderr = cStringIO.StringIO()\n yield\n sys.stderr = save_stderr",
"def eprint(errmsg):\n print(errmsg, file=STDERR)",
"def ErrorExit(msg):\r\n print >>sys.stderr, msg\r\n sys.exit(1)",
"def error(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)",
"def die(errmsg):\n eprint(errmsg)\n exit(1)",
"def __exit__(self, exc_type, exc_val, exc_tb):\n sys.stdout.flush()\n sys.stdout.close()\n sys.stdout = sys.__stdout__",
"def ErrorExit(msg):\n print >>sys.stderr, msg\n sys.exit(1)",
"def print_err(self, *lst):\n self.print2file(self.stderr, False, True, *lst)",
"def printerr(*args, **kwargs):\n console_print(sys.stderr, *args, **kwargs)",
"def errprint(*args):\n sys.stderr.write(' '.join(map(str,args)) + '\\n')",
"def stderr_print(*args, **kwargs):\n\n sys.stdout.flush()\n print(*args, **kwargs, file=sys.stderr)\n sys.stderr.flush()\n\n # else caller has to \"{}\\n\".format(...) and flush",
"def nostderr():\n savestderr = sys.stderr\n\n class Devnull(object):\n def write(self, _):\n pass\n\n def flush(self):\n pass\n\n sys.stderr = Devnull()\n try:\n yield\n finally:\n sys.stderr = savestderr",
"def _write_err_msg_and_quit(self, msg):\n sys.stderr.write(msg)\n sys.exit(1)",
"def _ErrorExit(message):\n print >>sys.stderr, message\n sys.exit(1)",
"def bell():\n\n if sys.stderr.isatty():\n sys.stderr.write('\\a')\n sys.stderr.flush()",
"def print_err(self, *args):\r\n strings = []\r\n for arg in args:\r\n strings.append(str(arg))\r\n self.stderr.write(\",\".join(strings))",
"def err_print(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)",
"def msg(_type, text, exit=0):\n sys.stderr.write(\"%s: %s\\n\" % (_type, text))\n sys.exit(exit)",
"def errorExit(msg):\n msgString = (\"Error: \" + msg + \"\\n\")\n sys.stderr.write(msgString)\n sys.exit()",
"def err(string, exitval):\n\tprint >> sys.stderr, string.rstrip()\n\tsys.exit(exitval)",
"def _err(self, *args):\n logger.error(*args)\n exit(1)",
"def _default_eprint_worker(*args, **kwargs):\r\n kwargs[\"file\"] = sys.stderr\r\n print(*args, **kwargs)",
"def eprint(*args, **kwargs):\n\tprint(*args, file=sys.stderr, **kwargs)",
"def printError(s):\r\n sys.stderr.write(\"ERROR: %s\\n\" % s)\r\n sys.exit(-1)",
"def print_err(*args, **kwargs):\n print(*args, file=stderr, **kwargs)"
]
| [
"0.69286865",
"0.67440766",
"0.66084737",
"0.65934783",
"0.65758634",
"0.6541604",
"0.6508118",
"0.6497011",
"0.6484108",
"0.64639866",
"0.64514565",
"0.6436417",
"0.6435237",
"0.64178294",
"0.6345792",
"0.62707883",
"0.6222666",
"0.61963606",
"0.61913276",
"0.6187603",
"0.61849415",
"0.6175526",
"0.61610115",
"0.61446816",
"0.61178094",
"0.6115744",
"0.6107918",
"0.6100302",
"0.6069373",
"0.60676676"
]
| 0.7785314 | 0 |
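A short usage sketch for the `printExit` helper above, with a hypothetical command-line argument check:

```python
import sys

def printExit(*objects):
    # Mirror of the record's helper: write to stderr, exit non-zero.
    print(*objects, file=sys.stderr)
    sys.exit(1)

if __name__ == "__main__":
    if len(sys.argv) < 2:
        printExit("usage:", sys.argv[0], "<input-file>")
```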
Plot computed solutions to recurrences | def plot_example(length):
rec_plot = []
sol_plot = []
sol = SOL_DICT[INDEX]
for num in range(2, length):
rec_plot.append([num, recur(num)])
sol_plot.append([num, sol(num)])
simpleplot.plot_lines("Recurrence solutions", 600, 600, "number", "value",
[rec_plot, sol_plot]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def plot_solution(self):\n\n plt.plot(self.x_values, self.analytical(self.x_values, self.C,self.D), label = \"Analytical\")\n plt.plot(self.x_values, self.numerical, label = \"Numerical\")\n plt.title(\"Numerical vs. Analytical Solution\")\n plt.xlabel(\"x\")\n plt.ylabel(\"u(x)\")\n plt.legend()\n plt.show()",
"def plot(self, solution: Matrix) -> None:\n plots.plot_matrices(\"Total Variation Denoising\", self.M, solution)",
"def quick_plot(solution):\n plt.suptitle('GNLSE solution')\n\n plt.subplot(1, 2, 1)\n plot_wavelength_vs_distance(solution)\n\n plt.subplot(1, 2, 2)\n plot_delay_vs_distance(solution)\n\n plt.show()",
"def visualization(obj_value):\n for n in range(3):\n plt.loglog(obj_value[n],\".\");\n\n plt.ylabel('objective values');\n plt.xlabel('iteration counter');\n plt.title('objective values for each pair against iterations');\n plt.legend();\n plt.show();",
"def plot_problem_solutions(problem, **kwargs):\n # You must use get_random_color to pick the random color.\n # Don't forget to pass kwargs to affected functions.\n colors_list = [np.array([1., 1., 1.])] # [1., 1., 1.] = white (background color of the figure)\n ax = kwargs.get('ax')\n if ax is None:\n fig = plt.figure()\n ax = fig.add_subplot(111)\n plot_problem(problem, ax, **kwargs)\n # complete the function below\n # place_holder(problem, colors_list)\n # ax2 = plot_problem(problem)\n # if kwargs.get(\"solution\", False):\n # for solution in kwargs.get(\"solution\"):\n # ax2 = plot_solution(solution, ax2, get_random_color(colors_list))\n for solution in problem.solutions_list:\n ax = plot_solution(solution, ax, get_random_color(colors_list, **kwargs), **kwargs)\n return ax",
"def plot_graph(self) -> None:",
"def make_plot(x,y):",
"def plot_result(numerical, exact):\n \n numx, numy = numerical\n exax, exay = exact\n plt.title('Comparison Numerical vs. Analytical Solution')\n plt.xlabel('Distance x (m)')\n plt.ylabel('Temperature $(^{o}C)$')\n plt.plot(numx, numy, 'ro', label='numerical')\n plt.plot(exax, exay, 'y--', label='analytical')\n plt.legend()\n plt.grid()",
"def plot_spectrumxichange(self):\n countgood = 0 ; countbad = 0\n for idata in self.datarg:\n if idata[-1, 0] == 1.: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'b') \n countgood += 1\n print countgood , 'good solution'\n else: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'r') \n print countbad, 'bad solution'\n countbad += 1\n print 'We found %g good solutions and %g tda startdistributions that broke down before xi = 1, we hope that\\'s what you expected' %(countgood,countbad)\n #Create custom artistsr[goodline,badline],['solution','breakdown']\n goodline = pl.Line2D((0,1),(0,0), color='b') \n badline = pl.Line2D((0,1),(0,0), color='r')\n self.layout(self.reader.depvar['depvar'] , r'energy spectrum (a.u.)' , tit = r'All tda start distributions $\\xi$' , legendhand = [goodline , badline] , legendlab = ['solution', 'breakdown'] )\n self.savefig('xispec')",
"def plot_solution(self, solution):\n # Make plot\n fig, axs = plt.subplots(1, 3, figsize=(13, 4))\n\n # SPDs\n self.plot_solution_spds(solution, ax=axs[0])\n # xy\n self.plot_solution_xy(solution, ax=axs[1])\n # Aopic\n self.plot_solution_aopic(solution, ax=axs[2])\n\n return fig",
"def plot_solutions(self, solutions_list, plot_w=False, savefig_filename=None, display=True):\n plt.figure(figsize=(13, 8))\n for s in solutions_list:\n U = s[0]\n method, T = s[1]\n plt.plot(self.x, U[:self.N], label=rf\"$U$ : {method}, $T = {T}$\")\n if plot_w:\n plt.plot(self.x, U[self.N:], label=rf\"$U_t$ : {method}, $T = {T}$\")\n plt.xlabel(r\"$x$\")\n plt.ylabel(r\"$U, U_t$\")\n plt.title(\"Plot of Various Models at Different Times\")\n plt.legend()\n plt.grid()\n if savefig_filename:\n plt.savefig(savefig_filename) if savefig_filename.endswith(\".png\") else plt.savefig(savefig_filename+\".jpg\")\n if display:\n plt.show()",
"def make_plot_solved(lx, ly, tour):\n make_plot_original(lx, ly)\n for i in range(1, len(tour)):\n p2 = tour[i]\n p1 = tour[i - 1]\n plt.plot([lx[p1], lx[p2]], [ly[p1], ly[p2]], 'k-')",
"def plot(guess_log, counter_log, x):\n correct_sqrt = np.sqrt(x)\n x_axis = np.linspace(-1, 1.1*len(guess_log), len(guess_log))\n y_axis = np.ones(len(guess_log))*correct_sqrt\n plt.plot(counter_log, guess_log, 'ok')\n plt.plot(x_axis, y_axis, '--r', linewidth=2.0, label='Correct Answer')\n plt.axis([-1, max(x_axis), -0.1*max(guess_log), 1.1*max(guess_log)])\n plt.xlabel('Iterations')\n plt.ylabel('Guess')\n plt.legend(loc = 'upper right')\n plt.show()\n return 0",
"def graph_results(loss, acc):\n N = len(loss)\n x = np.linspace(0, N, N)\n plt.subplot(1,2,1)\n plt.plot(x, loss)\n plt.subplot(1,2,2)\n plt.plot(x,acc)\n plt.show()",
"def graph_points():\n fig_name = 'lect2_num_solv'\n\n # given data\n x = np.array([0.0, 0.4, 0.6, 0.8])\n ra = np.array([0.01, 0.0080, 0.005, 0.002])\n design_eq = np.divide(2.0, ra)\n print(\"Generic example design equation points: {}\".format([\"{:0.1f}\".format(x) for x in design_eq]))\n\n # cubic spline\n x_new = np.linspace(0.0, 0.8, 101)\n # alternately, from interpolation\n y_interp = interpolate.interp1d(x, design_eq, kind='quadratic')\n make_fig(fig_name, x, design_eq, ls1='o', x2_array=x_new, y2_array=y_interp(x_new),\n x_label=r'conversion (X, unitless)', y_label=r'$\\displaystyle\\frac{F_{A0}}{-r_A} \\left(L\\right)$',\n x_lima=0.0, x_limb=0.8, y_lima=0.0, y_limb=1000,\n fig_width=4, color2='green',\n )",
"def evaluate(self, plot):",
"def draw_simple_pp(taus, solver_vals, solvers, filename):\n fig = plt.figure(figsize=(10, 10))\n ax = fig.add_subplot(111)\n for n, solver in enumerate(solvers):\n ax.step(taus, solver_vals[n, :], label=solver)\n ax.set_xscale('log')\n plt.legend(loc=4)\n plt.xlim(1, taus.max())\n ax.set_xlabel('Performance Ratio')\n ax.set_ylabel('Fraction of problems')\n plt.title('cost in size')\n plt.savefig(filename, dpi=300)\n plt.plot()",
"def plot(self):\n pass",
"def plotComparison(x, nt, nx, c, phi, phiExact, methodName):\n \n plt.figure()\n plt.plot(x, phiExact)\n\n plt.plot(x, phi)\n plt.ylim([-0.2, 1.4])\n plt.title(str(methodName)+\" scheme\\nExact vs Numerical solution \"\\\n \"nt=\"+str(nt)+\", nx=\"+str(nx)+\"\\n\"\n \"Courant number: \"+str(c))\n plt.show()",
"def plot_coefs(results):\n coefs_noisy = pd.concat([\n arr_to_df(results['obj_noisy'], n_arr, 'obj'),\n vec_to_df(results['dist_obj'], n_arr, 'obj'),\n arr_to_df(results['pos_noisy'], n_arr, 'pos'),\n vec_to_df(results['dist_pos'], n_arr, 'pos'),\n arr_to_df(results['neg_noisy'], n_arr, 'neg'),\n vec_to_df(results['dist_neg'], n_arr, 'neg')\n ])\n\n xlim = (min(n_arr), max(n_arr))\n ylim = (-1.1, 1.1)\n\n g = sns.FacetGrid(coefs_noisy, row = 'id', col = 'component', xlim = xlim,\n ylim = ylim)\n g.map(sns.pointplot, 'n', 'value', order = n_arr)\n g.set_xticklabels(rotation = 45)\n\n for i, val in enumerate(results['obj_true']):\n ax = g.axes[0, i]\n ax.hlines(val, *ax.get_xlim())\n for i, val in enumerate(results['pos_true']):\n ax = g.axes[1, i]\n ax.hlines(0, *ax.get_xlim(), linestyle = '--', color = 'red')\n ax.hlines(val, *ax.get_xlim())\n for i, val in enumerate(results['neg_true']):\n ax = g.axes[2, i]\n ax.hlines(0, *ax.get_xlim(), linestyle = '--', color = 'red')\n ax.hlines(val, *ax.get_xlim())",
"def plot_iter(V, Pi, params):\n n_rows = params['n_rows']\n n_cols = params['n_cols'] \n occ_grid = params['occ_grid']\n R = params['R']\n\n goal = params['goal']\n sink = params['sink']\n\n actions = ['left','right','up','down']\n\n fig1 = plt.figure(1, clear=True)\n for row in range(n_rows):\n for col in range(n_cols):\n if occ_grid[row, col] == 1:\n plt.text(col, n_rows - 1 - row, '0.0', color='k', ha='center', va='center')\n elif np.any(np.logical_and(row==sink[:, 0], col==sink[:, 1])):\n plt.text(col, n_rows - 1 - row, \"{:.3f}\".format(R[row, col]), \n color='r', ha='center', va='center')\n elif np.all([row, col]==goal):\n plt.text(col, n_rows - 1 - row, \"{:.3f}\".format(R[row, col]), \n color='g', ha='center', va='center')\n else:\n plt.text(col, n_rows - 1 - row, \"{:.3f}\".format(V[row, col]), \n color='b', ha='center', va='center')\n plt.axis([-1, n_cols, -1, n_rows])\n plt.axis('off')\n\n\n fig2 = plt.figure(2, clear=True)\n for row in range(n_rows):\n for col in range(n_cols):\n if not Pi[row, col] == -1:\n plt.text(col, n_rows - 1 - row, actions[Pi[row, col]], \n color='k', ha='center', va='center')\n elif np.all([row, col]==goal):\n plt.text(col, n_rows - 1 - row, \"{:.3f}\".format(R[row, col]), \n color='g', ha='center', va='center')\n elif np.any(np.logical_and(row==sink[:, 0], col==sink[:, 1])):\n plt.text(col, n_rows - 1 - row, \"{:.3f}\".format(R[row, col]), \n color='r', ha='center', va='center')\n plt.axis([-1, n_cols, -1, n_rows])\n plt.axis('off')\n\n fig1.canvas.draw()\n fig1.canvas.flush_events()\n fig2.canvas.draw()\n fig2.canvas.flush_events()",
"def show_trace_2d(f, results): #@save\n set_figsize()\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n x1, x2 = torch.meshgrid(torch.arange(-5.5, 1.0, 0.1),torch.arange(-3.0, 1.0, 0.1))\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')",
"def visualize_data(dqn_rewards, ddqn_rewards):\n \n fig, ax = plt.subplots()\n x_values = list(range(1, dqn_rewards.size + 1))\n ax.plot(x_values, dqn_rewards, label='dqn rewards')\n ax.plot(x_values, ddqn_rewards, label='ddqn rewards')\n plt.xlabel('episodes')\n plt.title('Cumulative Reward per Game')\n plt.legend()\n plt.show()",
"def show_trace_2d(f, results):\n plt.close()\n # draw input points\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n # get the field of figure\n x1, x2 = np.meshgrid(np.arange(-5.5, 1.0, 0.1), np.arange(-3.0, 1.0, 0.1))\n # draw the contour of function using x1,x2 as step\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')\n plt.ylabel('x2')\n plt.show()",
"def plot(self, fname=None):\n x = np.linspace(self.bounds[0], self.bounds[-1], 200)\n y = [self.evaluate(xi) for xi in x]\n plt.figure()\n plt.plot(x, y, label='Class func')\n plt.plot(self.bounds, self.gis, 'o', label='Algorithm')\n plt.grid(color='0.7')\n plt.xlabel('Dependent Variable')\n plt.ylabel('PP Transformed Class Value')\n if fname:\n plt.savefig(fname)\n else:\n plt.show()",
"def prob4():\n theta = np.linspace(0, 2*np.pi, 200)\n x, y, r, th = sy.symbols(\"x, y, r, th\")\n expr = 1 - ((x**2 + y**2)**sy.Rational(7,2) + 18*x**5*y \\\n - 60*x**3*y**3 + 18*x*y**5)/(x**2 + y**2)**3\n expr = expr.subs({x:r*sy.cos(th), y:r*sy.sin(th)})\n expr = sy.simplify(expr)\n solutions = sy.solve(expr, r)\n r_f = sy.lambdify(th, solutions[0], \"numpy\")\n plt.ion()\n plt.plot(r_f(theta)*np.cos(theta), r_f(theta)*np.sin(theta))\n plt.show()",
"def simplexPlot(results):\n _, hole1Values, logProbs = getStatistics(results.items())\n import matplotlib.pyplot as plt\n plt.plot(hole1Values, logProbs)\n plt.show()",
"def plot_solution(self, Mx = 50, My = 50):\n U, xv, yv = self.num_solution_Mx_My(Mx = Mx, My = My)\n plot3d_sol(U, xv, yv, Uan = self.analytic_solution)",
"def _plot(self, step, rewards, losses):\n plt.figure(figsize=(20, 5))\n plt.subplot(131)\n plt.title('Total Episode Reward')\n plt.plot(rewards)\n plt.subplot(132)\n plt.title('MSE Loss')\n plt.plot(losses)\n plt.show()",
"def plot(self):\n\t\tself.plotOfLoopVoltage()"
]
| [
"0.6978192",
"0.6843992",
"0.67658085",
"0.6505199",
"0.6289391",
"0.6274354",
"0.627349",
"0.627176",
"0.62675595",
"0.62425923",
"0.6217016",
"0.62123513",
"0.6169168",
"0.6159171",
"0.6141866",
"0.61326224",
"0.6091893",
"0.6089854",
"0.6078055",
"0.6043386",
"0.6042587",
"0.6015542",
"0.60107535",
"0.600993",
"0.6000436",
"0.5975631",
"0.59504026",
"0.59479266",
"0.59380054",
"0.58918154"
]
| 0.73072284 | 0 |
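The `plot_example` record above depends on module-level names that are not shown (`SOL_DICT`, `INDEX`, `recur`, and the `simpleplot` module, presumably CodeSkulptor's). A self-contained sketch of the same comparison for one assumed recurrence, T(n) = T(n-1) + n versus its closed form n(n+1)/2, using matplotlib in place of simpleplot:

```python
import matplotlib.pyplot as plt

def recur(num):
    # Assumed recurrence: T(1) = 1, T(n) = T(n-1) + n.
    if num <= 1:
        return 1
    return recur(num - 1) + num

def closed_form(num):
    # Closed-form solution of the recurrence above.
    return num * (num + 1) // 2

def plot_example(length):
    nums = range(2, length)
    plt.plot(nums, [recur(n) for n in nums], label="recurrence")
    plt.plot(nums, [closed_form(n) for n in nums], "--", label="closed form")
    plt.xlabel("number")
    plt.ylabel("value")
    plt.legend()
    plt.show()

plot_example(50)
```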
Map text definitions to formatted display strings. | def formatter(text):
repl_map = {
"degC": "$^o$C",
"K": "$^o$C",
"month-1": "month$^{{-1}}$",
"day-1": "day$^{{-1}}$",
"d-1": "day$^{{-1}}$",
"decade-1": "decade$^{{-1}}$",
"year-1": "year$^{{-1}}$",
"rcp85": "RCP8.5",
"rcp45": "RCP4.5",
"rcp26": "RCP2.6",
"RCP85": "RCP8.5",
"RCP45": "RCP4.5",
"RCP26": "RCP2.6",
"cmip5-85": "RCP8.5",
"cmip5-60": "RCP6.0",
"cmip5-45": "RCP4.5",
"cmip5-26": "RCP2.6",
"ssp585": "SSP5-8.5",
"ssp245": "SSP2-4.5",
"ssp126": "SSP1-2.6",
"SSP585": "SSP5-8.5",
"SSP245": "SSP2-4.5",
"SSP126": "SSP1-2.6",
"cmip6-85": "SSP5-8.5",
"cmip6-70": "SSP3-7.0",
"cmip6-60": "SSP4-6.0",
"cmip6-34": "SSP4-3.4",
"cmip6-45": "SSP2-4.5",
"cmip6-26": "SSP1-2.6",
"cmip6-19": "SSP1-1.9",
"1": "%",
"era5": "ERA5",
"gpcc025x025_v8": "GPCC",
"cru": "CRU",
"jra55": "JRA55",
"HIGHRESMIP": "HighResMIP",
" ": "",
}
for key, val in repl_map.items():
if key in text:
text = text.replace(key, val)
break
return text | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def syntax_text():",
"def complete_opt_format(self, text, *_):\n return [t + \" \" for t in FORMATTERS if t.startswith(text)]",
"def formatted(self) -> str:\r\n ...",
"def get_text_format(self) -> constants.TextFormatStr:\n return constants.TEXT_FORMAT.inverse[self.textFormat()]",
"def format_text(self):\n\n return \"{}{}{}\".format(self.get_text(),\n Message.format_performers(self.get_performers()),\n Message.format_keywords(self.get_keywords())).strip()",
"def format(self) -> str:",
"def _translate_fmts(self):\n fmt_info = []\n fmt_append = fmt_info.append\n \n isvalid = self._is_valid_fmt\n typlist = self._typlist\n isstrvar = self._isstrvar\n default_fmts = self._default_fmts\n \n for i, fmt in enumerate(self._fmtlist):\n fmt = fmt.strip()\n \n iscalendar = (fmt[1] == 't' or fmt[1:3] == '-t')\n \n if iscalendar or not isvalid(fmt):\n if isstrvar(i):\n wid = min(typlist[i], 10)\n fmt_append(('s', \"{{:>{}s}}\".format(wid), wid))\n continue\n else:\n fmt = default_fmts[typlist[i]]\n \n last_char = fmt[-1]\n if last_char == 's': # string\n m = STR_FMT_RE.match(fmt)\n align, _, wid = m.group(1), m.group(2), m.group(3)\n new_align = (\"<\" if align == \"-\" \n else \"^\" if align == \"~\" else \">\")\n new = \"\".join((\"{:\", new_align, wid, \"s}\"))\n fmt_append(('s', new, int(wid)))\n elif last_char == 'H' or last_char == 'L': # binary\n fmt_append((last_char, fmt, int(fmt[1:-1])))\n elif last_char == 'x': # hexadecimal\n fmt_append(('x', fmt, 21))\n elif last_char in {'f', 'g', 'e', 'c'}: # numeric\n m = NUM_FMT_RE.match(fmt)\n align, _, wid, delim, prec, type, com = (m.group(1), m.group(2), \n m.group(3), m.group(4),\n m.group(5), m.group(6),\n m.group(7))\n aln = \"<\" if align == \"-\" else \">\"\n sep = \",\" if com is not None else \"\"\n if type == \"g\" and int(prec) == 0:\n new = \"\".join((\"{:\", aln, wid, sep, type, \"}\"))\n else:\n new = \"\".join((\"{:\", aln, wid, sep, \".\", prec, type, \"}\"))\n fmt_append((type, new, int(wid), delim, com))\n \n return fmt_info",
"def prepareExplainerText(amount, ranges):\n text = \"\\n\"\n for currKey in amount:\n text += f\"{currKey}: {ranges[currKey]} | {amount[currKey]}\\n\"\n text += \"\\n\\n\"\n return text",
"def prescription(self):\n prescription = \"\\n{0:>10}\\t{1:>10}\\t{2:>10}\\t{3:>10}\\n\".format(\"R\",\"Material\",\"d\",\"diameter\")\n for surface in self.lensSurfaces():\n prescription += \"{0:>10.2f}\\t{1:>10}\\t{2:>10.2f}\\t{3:>10.2f}\\n\".format(surface.R, str(surface.mat), surface.spacing, surface.diameter)\n return prescription",
"def reformat(ctx):\n pass",
"def __makeFormatString(self):\n self.__formatString = \"\"\n for f in self.__columns:\n self.__formatString += \"%(\"+ f + \")-\" + str(self.__widths[f]) + \\\n \"s \"",
"def initFormat(self):\n self.formatList = self.splitText(self.format)",
"def _format_output(**values):\r\n return WEATHER_TEXT.format(**values)",
"def format_string_1(file_num, float_num1, int_num, float_num2):\n\n text = \"file_{:0>3d} :{:9.2f}, {:.2e}, {:.3g}\"\n result = text.format(file_num, float_num1, int_num, float_num2)\n print(result)\n return result",
"def get_replacement():\n run_linter_throw(\"path/to/file\",\n \"{s}\\n{m} Text{e}\",\n style,\n whitelist=[\"headerblock/desc_space\"])",
"def handleFormatText(paragraphContent):\n # We tokenize and remove the stop word\n words = tokenizeWord(paragraphContent) \n \n stemWords = []\n # We loop on each word.\n for word in words:\n stemWord = STEMMER.stem(word)\n \n # Selection on a part of string.\n stemWord = re.sub(\"[*\\'\\.+:,\\`:/]\", '', stemWord)\n if stemWord.isdigit() or len(stemWord) < 2:\n continue\n \n stemWords.append(stemWord)\n my_r_string = stemWords.pop(0)\n for word in stemWords:\n my_r_string += \" \"+str(word)\n return my_r_string",
"def _define_formats(self, workbook):\n self.format_title_main_center = workbook.add_format({\n 'bold': True,\n 'align': 'left',\n 'font_size': 14,\n 'border': True,\n 'font_name':'Arial',\n 'align': 'Center',\n 'bg_color': '#D8D7D7',\n })\n self.format_title = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'text_wrap': True\n })\n self.format_title_noborder = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': False,\n 'font_name':'Arial'\n })\n self.format_title_noborder_bold = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'bold': True,\n 'border': False,\n 'font_name':'Arial'\n })\n self.format_title_center = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'align': 'Center',\n 'font_name':'Arial'\n })\n self.format_title_bold = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'bold': True,\n })\n self.format_title_center_bold = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'align': 'Center',\n 'bold': True,\n })\n self.format_title_number = workbook.add_format({\n 'align': 'right',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'num_format': '#,##0.00',\n })\n self.format_title_number_bold = workbook.add_format({\n 'align': 'right',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'num_format': '#,##0.00',\n 'bold': True,\n 'bg_color': '#D8D7D7',\n })\n \n self.format_header = workbook.add_format({\n 'bold': True,\n 'border': True,\n 'font_name':'Arial',\n 'font_size': 12,\n 'align': 'Center',\n 'bg_color': '#D8D7D7', \n })\n\n self.merge_format = workbook.add_format({\n 'bold': 1,\n 'border': 1,\n 'align': 'center',\n 'valign': 'vcenter',\n })",
"def TEXT(number, format_type):\n raise NotImplementedError()",
"def __repr__(self) -> str:\r\n\r\n saida = \"Format: \"\r\n x = self.getformat()\r\n for _ in range(len(x)):\r\n saida = f\"{saida}{x[_]}\"\r\n if _ < len(x)-1:\r\n saida += \", \"\r\n saida += \"\\n\"\r\n return saida",
"def get_formatted_text(self, n_cols):",
"def reformat():\n toolkit.reformat()",
"def setFormattedStrings(object, event):\n entry = interfaces.IBiblatexEntry(object) # assert a biblatex entry object\n generator = interfaces.IFormattedEntryGenerator(entry)\n writer = interfaces.IWriteFormatted(entry)\n config = zope.component.queryUtility(\n interfaces.IBiblatexConfiguration, \n context = object)\n if config:\n languages = config.languages\n styles = config.styles\n else:\n languages = styles = (None,)\n for language in languages:\n for style in styles:\n #raise Exception(u\"language: %s, style: %s\" % (language, style))\n generator.setUp(language = language, style = style)\n generator.generate()\n writer.setBibliographicEntry(generator.getBibliographicEntry(), language, style)\n writer.setCitation(generator.getCitation(), language, style)\n writer.setCitationAgain(generator.getCitationAgain(), language, style)\n generator.tearDown()\n del generator\n del writer",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)",
"def __init__(self, name, attrs={}):\n TextFormat.__init__(self, name, attrs)"
]
| [
"0.65333766",
"0.62248075",
"0.60538745",
"0.5975071",
"0.594579",
"0.5892548",
"0.5857848",
"0.58462155",
"0.58382225",
"0.5814157",
"0.58111644",
"0.5772846",
"0.5740308",
"0.5735877",
"0.5732846",
"0.5682013",
"0.56642157",
"0.56596196",
"0.56564844",
"0.56554836",
"0.5654559",
"0.56041265",
"0.5598805",
"0.5598805",
"0.5598805",
"0.5598805",
"0.5598805",
"0.5598805",
"0.5598805",
"0.5598805"
]
| 0.6782625 | 0 |
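One detail of the `formatter` record above worth noting: the loop breaks after the first key that matches, so at most one substitution is applied per call. A trimmed-down sketch with an assumed three-entry replacement map shows that behaviour:

```python
def formatter(text, repl_map=None):
    # Assumed subset of the record's replacement table; the full map
    # behaves the same way because of the break after the first hit.
    repl_map = repl_map or {"degC": "$^o$C", "day-1": "day$^{{-1}}$", "rcp85": "RCP8.5"}
    for key, val in repl_map.items():
        if key in text:
            text = text.replace(key, val)
            break
    return text

print(formatter("pr change, rcp85"))  # pr change, RCP8.5
print(formatter("degC day-1"))        # $^o$C day-1  (only the first match is replaced)
```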
Sorts the longitudes of the cubes from 0/360 degrees to -180/180. | def regrid_longitude_coord(self, cube):
# make a list with the 'longitude' coord in the form: 0/180/-180/0
neg_lons = ((cube.coord("longitude").points + 180) % 360) - 180
# interpolates the cube data to the new 'longitude' dimensions
cube = cube.interpolate([("longitude", neg_lons)],
iris.analysis.Linear())
sorted_cube = self.sorted_dim(cube)
return sorted_cube | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _sort_cubelist(self, cubelist):\n sorted_cubelist = []\n realization_num = 1\n cubelist = cubelist.merge(unique=False)\n for cube in cubelist:\n # If time is a scalar coordinate, promote it to a dimension \n # coordinate, this is because all cubes must have the same number \n # of dimensions to be compared.\n if len(cube.coord(self.time_coord).points) == 1:\n cube = iris.util.new_axis(cube, scalar_coord=self.time_coord)\n \n # Chop cubes into individual realizations for relabelling.\n member_slices = get_coordinate_slice_dimensions(\n cube, [self.realization,self.forecast_ref_time],\n ignore_missing_coords=True)\n for member_slice in cube.slices(member_slices):\n \n if self.realization in [coord.name() \n for coord in member_slice.coords()]:\n member_slice.coord(\n self.realization).points = [realization_num]\n else:\n realization_coord = iris.coords.AuxCoord([realization_num],\n self.realization)\n member_slice.add_aux_coord(realization_coord)\n \n member_slice.cell_methods = None\n sorted_cubelist.append(member_slice)\n realization_num += 1\n \n sorted_cubelist = iris.cube.CubeList(sorted_cubelist)\n # Mask missing time steps so merging can be done.\n sorted_cubelist = pad_coords(sorted_cubelist, self.time_coord)\n cube = sorted_cubelist.merge_cube()\n # Check x-y coordinates match the specified range.\n cube = self._area_inst.check_cube_area_bounds(cube, self.xy_coords, \n self.area_bounds)\n cube = self.extract_area_bounds(cubes=cube)\n \n if cube.coord_dims(cube.coord(self.realization)) == \\\n cube.coord_dims(cube.coord(self.forecast_ref_time)):\n # Re order realizations in initialisation date order.\n ordered_inits = sorted(cube.coord('forecast_reference_time').points)\n ordered_mems = range(1, len(cube.coord('realization').points)+1)\n ordered_cubes = []\n for member_slice in cube.slices(member_slices):\n mem_index = ordered_inits.index(\n member_slice.coord(self.forecast_ref_time).points[0])\n member_slice.coord('realization').points = ordered_mems[mem_index]\n del ordered_inits[mem_index]\n del ordered_mems[mem_index]\n ordered_cubes.append(member_slice)\n cube = iris.cube.CubeList(ordered_cubes).merge_cube()\n \n return cube",
"def _sort_data(self, cubelist):\n sorted_cubelist = []\n for dates in self.dates:\n year_cubelist = self.extract_dates(dates, cubelist)\n for cube in year_cubelist.merge():\n # Check x-y coordinates match the specified range.\n cube = self._area_inst.check_cube_area_bounds(cube, \n self.xy_coords, \n self.area_bounds)\n cube = self.extract_area_bounds(cubes=cube)\n sorted_cubelist.append(cube)\n return iris.cube.CubeList(sorted_cubelist)",
"def sort_clockwise(coordinates):\n center = tuple(map(op.truediv, reduce(lambda x_, y_: map(op.add, x_, y_), coordinates), [len(coordinates)] * 2))\n coordinates = sorted(coordinates, key=lambda coord: (-135 - np.degrees(\n np.arctan2(*tuple(map(op.sub, center, coord))[::-1]))) % 360)\n return coordinates",
"def _sort_data(self, cubelist):\n sorted_cubelist = []\n for dates in self.dates:\n year_cubelist = self.extract_dates(dates, cubelist)\n sorted_cubelist.append(self._sort_cubelist(year_cubelist))\n return iris.cube.CubeList(sorted_cubelist)",
"def _sort_compounds(self):\n self.sorted_molecules = sorted(self.values(), key=operator.attrgetter('criterion'))",
"def correct_lon(ds):\n ds = ds.copy()\n x = ds['x'].data\n ds['x'].data = np.where(x < 0 , 360 + x, x)\n\n lon = ds['lon'].data\n ds['lon'].data = np.where(lon < 0 , 360 + lon, lon)\n \n ds = ds.sortby('x')\n return ds",
"def volume_sort(self):\n self.jobs_sorted = sorted(\n self.jobs,\n key=lambda job: (job['height'], job['width'] * job['height']),\n # key=lambda job: job['width'] * job['height'],\n reverse=True)",
"def _sort_locations(self,locations):\n i = np.lexsort(np.transpose(locations*np.array((1,-1))))\n return locations[i]",
"def sort_clockwise(a):\n\n # get centroids, shape=(1,2=(cx,cy))\n center = a.mean(axis=0).reshape((1, 2))\n\n sorted_inds = np.argsort(np.arctan2(a[:, 1]-center[:, 1], a[:, 0]-center[:, 0]))\n\n return np.take(a, sorted_inds, axis=0)",
"def correct_east(longitudes):\n longitudes[longitudes > 0] -= 360.\n return longitudes",
"def _sort_measurements(self):\n if self._unsorted:\n sorted_ndxs = np.argsort(self._angles)\n self._distances = self._distances[sorted_ndxs]\n self._angles = self._angles[sorted_ndxs]\n self._intensities = self._intensities[sorted_ndxs]\n self._error_codes = self._error_codes[sorted_ndxs]\n self._unsorted = False",
"def rotate_z(self,rad):\n DetElement.rotate_z(self,rad) #python2\n #super().rotate_z(rad) #python3\n for tube in self._list_of_tubes:\n self.__update_tube_pos_after_rotation(tube)\n tube.rotate_z(rad)",
"def sort_album(self):\n self.sort('album')",
"def lon360to180(lon):\n\tlon = np.asanyarray(lon)\n\treturn ((lon + 180.) % 360.) - 180.",
"def test_sort_angles(self):\n\n nb_points = 5\n points = np.array([[1, 2], [1, 1], [2, 1], [3, 7], [7, 2]]) # example of points\n\n sorted_points = convex_hull.sort_angle(points) # sorted points \n right_sorted_points = np.array([[2, 1], [7, 2], [3, 7], [1, 2], [1, 1]])\n\n self.assertTrue((sorted_points == right_sorted_points).all())",
"def correct_west(longitudes):\n longitudes[longitudes < 0] += 360.\n return longitudes",
"def _getlats(self):\n lats = 90. - np.degrees(self.zeros)\n return lats",
"def get_stops_sorted( latitude, longitude ):\n\treturnvalue = []\n\tstops_file = open( 'google_transit/stops.txt' )\n\tstops_iter = DictReader( stops_file )\n\tfor stop in stops_iter:\n\t\tdistance = angular_distance( latitude, longitude, \n\t\t\t\t\t\t\t float( stop[ 'stop_lat' ] ), float( stop[ 'stop_lon' ]))\n\t\tstop[ 'distance' ] = distance * MI\n\t\treturnvalue.append(( distance, stop ))\n\tstops_file.close( )\n\treturnvalue.sort( )\n\treturn [ y for x,y in returnvalue ]",
"def wrapTo180(lon):\r\n q = (lon < -180) | (180 < lon)\r\n lon[q] = wrapTo360(lon[q] + 180) - 180\r\n return lon",
"def fix_metadata(self, cubes):\n cube = self.get_cube_from_list(cubes)\n lat_coord = cube.coord('latitude', dimensions=(1, ))\n lon_coord = cube.coord('longitude', dimensions=(2, ))\n lat_coord.standard_name = None\n lat_coord.long_name = 'grid_latitude'\n lat_coord.var_name = 'i'\n lat_coord.units = '1'\n lon_coord.standard_name = None\n lon_coord.long_name = 'grid_longitude'\n lon_coord.var_name = 'j'\n lon_coord.units = '1'\n lon_coord.circular = False\n return cubes",
"def fix_metadata(self, cubes):\n cube = self.get_cube_from_list(cubes)\n lat_coord = cube.coord('latitude', dimensions=(1, ))\n lon_coord = cube.coord('longitude', dimensions=(2, ))\n lat_coord.standard_name = None\n lat_coord.long_name = 'grid_latitude'\n lat_coord.var_name = 'i'\n lat_coord.units = '1'\n lon_coord.standard_name = None\n lon_coord.long_name = 'grid_longitude'\n lon_coord.var_name = 'j'\n lon_coord.units = '1'\n lon_coord.circular = False\n return cubes",
"def _sort_tags_left_to_right(self, detections, id=0):\n BLOCK_IN_CLAW_DIST = 0.22 # meters\n sorted_detections = []\n\n for detection in detections:\n if (detection.id == id and\n detection.pose.pose.position.z > BLOCK_IN_CLAW_DIST):\n sorted_detections.append(detection)\n\n return sorted(sorted_detections, key=lambda x: x.pose.pose.position.x)",
"def sort_by_angle(x, y, var):\n\n # Get the angle wrt the mean of the cloud of points\n x0, y0 = x.mean(), y.mean()\n angle = np.arctan2(y - y0, x - x0)\n\n # Sort based on this angle\n idx = angle.argsort()\n idx = np.append(idx, idx[0])\n\n return x[idx], y[idx], var[idx]",
"def array_sort():\n to_concat = []\n for centroid_rgb, cluster in itertools.izip(centroids_rgb, self.clusters):\n # no need to revisit ratio\n new_idxed_arr = tf.concat(1,[tf.slice(cluster, [0,0], [-1,2]),\n tf.tile(tf.expand_dims(\n tf.constant(centroid_rgb), 0),\n multiples=[len(cluster.eval()), 1])])\n to_concat.append(new_idxed_arr)\n\n concated = tf.concat(0, to_concat)\n sorted_arr = np.array(sorted(concated.eval().tolist()), dtype=np.uint8)[:, 2:]\n\n new_img = Image.fromarray(sorted_arr.reshape([self.m, self.n, self.chann]))\n if save:\n new_img.save(outfile, format=format_)\n os.popen(\"open '{}'\".format(outfile))\n else:\n new_img.show()",
"def sort_cutpoly_by_angle(self, polydata, cells_list, num_cells, numPoints):\n # 1: get numpy array of points from vtk polydata object\n points = polydata.GetPoints()\n pts = np.zeros((numPoints, 3), dtype=float)\n\n index = 0\n for cell in cells_list:\n for id in cell:\n pts[index] = np.asarray(points.GetPoint(id))\n index += 1\n\n # 2: convert them to 2d points and obtain the R rotation matrix\n pts_2d = project_onto_xy_plane(pts)\n\n # 3: compute center (average of all points)\n center_pt = np.mean(pts_2d, axis=0)\n\n # 4: find top points by pointdata label\n\n # 4: compute angles from center to average cell pts:nt vertical_dir = vert_pt_2d - center_pt # get vertical direction vector (from center pt to tp1)\n signed_angles = np.zeros((numPoints,), dtype=float)\n\n for i in range(numPoints):\n current_vec = pts_2d[i] - center_pt\n signed_angles[i] = compute_angle_between(vertical_dir, current_vec)\n self.pers_var = 1\n if self.pers_var: # ctrl-w for exit window key\n plt.figure()\n plt.scatter(center_pt[0], center_pt[1], color='r', s=2)\n plt.scatter(pts_2d[:,0], pts_2d[:,1], color='b', s=0.5)\n plt.scatter(vert_pt_2d[0], vert_pt_2d[1], color='g', s=10)\n plt.scatter(pts_2d[i][0], pts_2d[i][1], color='k', s=7)\n plt.xlabel('angle = ' + str(signed_angles[i]))\n figManager = plt.get_current_fig_manager()\n figManager.window.showMaximized()\n plt.show()\n\n # 5: sort angles (no matter ascending or descending)\n sorted_idxs = np.argsort(signed_angles)\n\n # 6: sorted points\n sorted_pts = pts[sorted_idxs]\n\n return sorted_pts, sorted_idxs",
"def sort_by_polar_angle(points):\n\n\t# Call polar_angle function to calculate polar angle\n\t# of points with respect to P0\n\n\tp = polar_angle(points)\n\tpolar_angle_arr = np.asarray(p)\n\n\n\tvals1, idx_start1, count1 = np.unique(polar_angle_arr, return_counts=True,\n\t return_index=True)\n\n\tidx_sorted_pang = np.argsort(polar_angle_arr)\n\n\tsorted_polar_angle_arr = polar_angle_arr[idx_sorted_pang] \n\tvals, idx_start, count = np.unique(sorted_polar_angle_arr, return_counts=True,\n\t return_index=True)\n\n\n\tres = np.split(idx_sorted_pang, idx_start[1:])\n\n\t#filter them with respect to their size, keeping only items occurring more than once\n\tfinal_points =[]\n\tfor each in res:\n\t\t# print(\"len(each)\",len(each))\n\t\tif len(each) > 1:\n\t\t\ti = each.tolist()\n\t\t\tcheck_points = []\n\t\t\tfor j in i:\n\t\t\t\tcheck_points.append(points[j])\n\t\t\tcheck_points_arr = np.asarray(check_points)\n\t\t\t\n\t\t\tmax_far_idx = np.argmax(euclidean_distance(check_points,P0))\n\t\t\t\n\n\n\t\t\tfinal_points.append(check_points[max_far_idx])\n\t\telif len(each) == 1:\n\t\t\t\n\t\t\tfinal_points.append(points[each.tolist()[0]])\n\n\n\n\treturn final_points",
"def sort_objects_from_viewworld(self, viewworld):\n opaque_objects = []\n transparent_objects = []\n centers = []\n for guid in self.objects:\n obj = self.objects[guid]\n if isinstance(obj, BufferObject):\n if obj.opacity * self.opacity < 1 and obj.bounding_box_center is not None:\n transparent_objects.append(obj)\n centers.append(transform_points_numpy([obj.bounding_box_center], obj.matrix)[0])\n else:\n opaque_objects.append(obj)\n if transparent_objects:\n centers = transform_points_numpy(centers, viewworld)\n transparent_objects = sorted(zip(transparent_objects, centers), key=lambda pair: pair[1][2])\n transparent_objects, _ = zip(*transparent_objects)\n return opaque_objects + list(transparent_objects)",
"def _sort(self):\n self.population.sort()\n self.population.reverse()",
"def get_internal_angles(self):\n\n angles = []\n\n for elx, elz in zip(self.grid['x'], self.grid['z']):\n el_angles = []\n xy = np.vstack((elx, elz))\n for i in range(0, elx.size):\n i1 = (i - 1) % elx.size\n i2 = (i + 1) % elx.size\n\n a = (xy[:, i] - xy[:, i1])\n b = (xy[:, i2] - xy[:, i])\n # note that nodes are ordered counter-clockwise!\n angle = np.pi - np.arctan2(\n a[0] * b[1] - a[1] * b[0],\n a[0] * b[0] + a[1] * b[1]\n )\n el_angles.append(angle * 180 / np.pi)\n angles.append(el_angles)\n return np.array(angles)",
"def sort_hull_points_by_angle(self, points, hull_points):\n x = y = 0\n for point in points:\n x += point[0]\n y += point[1]\n points_length = len(points)\n center = Point(x / points_length, y / points_length)\n\n hull_points.sort(key=lambda hull_point: (atan2(hull_point[1] - center[1], hull_point[0] - center[0])))\n return hull_points"
]
| [
"0.6514328",
"0.6512158",
"0.6155754",
"0.61285055",
"0.58211863",
"0.5708495",
"0.5647426",
"0.5640268",
"0.5631797",
"0.56041026",
"0.5532551",
"0.5342842",
"0.5309956",
"0.530769",
"0.5285943",
"0.5281094",
"0.5253457",
"0.5238142",
"0.5233403",
"0.5211333",
"0.5211333",
"0.5173459",
"0.5166999",
"0.5102319",
"0.5088991",
"0.5072216",
"0.5064602",
"0.5053596",
"0.50247276",
"0.50203353"
]
| 0.6862481 | 0 |
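The longitude wrap inside `regrid_longitude_coord` above does not depend on iris; a minimal numpy sketch of the same 0/360 to -180/180 conversion, with an argsort standing in for the record's `sorted_dim` helper:

```python
import numpy as np

def wrap_longitudes(lons):
    # Map longitudes from the 0..360 convention to -180..180 and
    # return the order needed to keep the coordinate monotonic.
    wrapped = ((np.asarray(lons) + 180) % 360) - 180
    order = np.argsort(wrapped)
    return wrapped[order], order

wrapped, order = wrap_longitudes([0.0, 90.0, 180.0, 270.0, 359.0])
print(wrapped)  # [-180.  -90.   -1.    0.   90.]
print(order)    # [2 3 4 0 1] -> indices to reorder data along the longitude axis
```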
Create a provenance record describing the hotspot fields plots. | def get_hotspot_provenance(self, suptitle, scenario, ancestor_files):
caption = (f"{suptitle}. Calculated for seasons "
f"{self.seasons[0].upper()}, "
f"{self.seasons[1].upper()} and {self.seasons[2].upper()} "
f"in the future periods {self.cfg['future_periods'][0]} "
f"and {self.cfg['future_periods'][1]} "
f"for CMIP5 {self.formatter(f'cmip5-{scenario}')} "
f"and CMIP6 {self.formatter(f'cmip6-{scenario}')}")
record = {
'caption': caption,
'statistics': ['anomaly', 'diff'],
'domains': ['reg'],
'plot_types': ['map'],
'authors': [
'cos_josep',
],
'references': [
'cos22esd',
],
'ancestors': ancestor_files,
}
return record | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _hotspot_fields_plot_save(self, against_region, variable, scenario,\n ancestor_files):\n suptitle = (f\"{self.cfg['region_name']} {variable.upper()} \"\n f\"change against mean {against_region} future \"\n f\"climatology. Baseline period: \"\n f\"{self.cfg['baseline_period'][0]}-\"\n f\"{self.cfg['baseline_period'][1]}\")\n plt.suptitle(suptitle, fontsize=13)\n\n basename = f\"{variable}_{scenario}\"\n provenance_record = self.get_hotspot_provenance(\n suptitle, scenario, ancestor_files)\n save_figure(basename, provenance_record, self.cfg)",
"def _write_xy_provenance(cfg, cubes, plot_path, title, *attrs):\n cubes = cubes.copy()\n if isinstance(cubes, iris.cube.Cube):\n cubes = iris.cube.CubeList([cubes])\n ancestors = []\n for attr in attrs:\n ancestors.extend(attr['filename'].split('|'))\n netcdf_path = mlr.get_new_path(cfg, plot_path)\n io.iris_save(cubes, netcdf_path)\n long_name = ' and '.join([cube.long_name for cube in cubes])\n caption = f\"Line plot of {long_name}\"\n if title:\n caption += f\" for {title}.\"\n else:\n caption += '.'\n record = {\n 'ancestors': ancestors,\n 'authors': ['schlund_manuel'],\n 'caption': caption,\n 'plot_types': ['line'],\n 'references': ['schlund20jgr'],\n }\n with ProvenanceLogger(cfg) as provenance_logger:\n provenance_logger.log(netcdf_path, record)\n provenance_logger.log(plot_path, record)",
"def _write_map_provenance(cfg, cube, plot_path, title, *attrs):\n cube = cube.copy()\n ancestors = []\n for attr in attrs:\n ancestors.extend(attr['filename'].split('|'))\n netcdf_path = mlr.get_new_path(cfg, plot_path)\n io.iris_save(cube, netcdf_path)\n record = {\n 'ancestors': ancestors,\n 'authors': ['schlund_manuel'],\n 'caption': f\"Geographical distribution of {cube.long_name} for \"\n f\"{title}.\",\n 'plot_types': ['geo'],\n 'references': ['schlund20jgr'],\n }\n with ProvenanceLogger(cfg) as provenance_logger:\n provenance_logger.log(netcdf_path, record)\n provenance_logger.log(plot_path, record)",
"def addDemographics(self):\n p = self.p\n demographics_data = {\n 'dob': p.dob,\n 'gender': p.gender,\n 'email': p.email,\n 'fname': p.fname,\n 'lname': p.lname,\n 'hphone': p.home,\n 'cphone': p.cell,\n 'country': p.country,\n 'city': p.city,\n 'pcode': p.pcode,\n 'region': p.region,\n 'street': p.street,\n }\n self.demographics_doc = DEMOGRAPHICS.sub(demographics_data).done()",
"def _dataset_fields(geno):\n return {'title': geno['title'], 'notes': geno.get('notes', '')}",
"def get_provenance_record(caption, ancestor_filenames):\n record = {\n 'caption': caption,\n 'statistics': ['mean'],\n 'domains': ['global'],\n 'plot_type': 'metrics',\n 'authors': [\n 'rumbold_heather',\n 'sellar_alistair',\n ],\n 'references': [\n 'esacci-soilmoisture',\n 'dorigo17rse',\n 'gruber19essd',\n ],\n \"ancestors\": ancestor_filenames,\n }\n\n return record",
"def new_police_report(self):\n\n d = {'category':'',\n 'original_text':'',\n 'line_num':0,\n 'address':'',\n 'map_scale':mapscale.UNKNOWN,\n 'date_month':0,\n 'date_day':0,\n 'date_year':0,\n 'lat':'',\n 'long':''}\n\n return d",
"def create_reference(frame, predictions, metadata):\n keypoints = get_keypoints(predictions, metadata)\n analysis_dict = calculate_analysis_dict(keypoints)\n vis_frame = draw_keypoints(frame, keypoints)\n return vis_frame, analysis_dict",
"def my_record_vars(context, data):\r\n # Check how many long and short positions we have.\r\n longs = shorts = 0\r\n for position in context.portfolio.positions.itervalues():\r\n if position.amount > 0:\r\n longs += 1\r\n if position.amount < 0:\r\n shorts += 1\r\n # Record and plot the leverage of our portfolio over time as well as the\r\n # number of long and short positions. Even in minute mode, only the end-of-day\r\n # leverage is plotted.\r\n record(leverage = context.account.leverage, long_count=longs, short_count=shorts)\r\n \"\"\"\r\n END OF PLOTTING VARIABLES \r\n \"\"\"",
"def hotspot_fields_plot(self, results_dict, tas_bound=None, pr_bound=None):\n sorted_keys = [(f\"{period}_{season}_{variable}\"\n f\"_{project}_{results_dict['scenario']}\")\n for variable in self.variables\n for period in self.cfg[\"future_periods\"]\n for project in self.projects for season in self.seasons]\n sorted_keys = [\n sorted_keys[:len(sorted_keys) // 2],\n sorted_keys[len(sorted_keys) // 2:]\n ]\n ancestor_files_var = [[\n ancestor_file for ancestor_file in results_dict[\"ancestors\"]\n if f\"/{var}_\" in ancestor_file\n ] for var in self.variables]\n for ancestor_files, keys, variable in zip(ancestor_files_var,\n sorted_keys, self.variables):\n fig = plt.figure(figsize=(14.4, 3.4),\n constrained_layout=True,\n dpi=300)\n plt.gcf().subplots_adjust()\n # bound colorbar to abs(max) value on the map\n style = self.cb_bounds(variable, results_dict, keys,\n [tas_bound, pr_bound])\n # plot each panel\n fill, frame = self._hotspot_fields_plot_panels(\n results_dict, fig, keys, style)\n # plot figtexts\n self._hotspot_fields_plot_figtexts(results_dict['scenario'], frame)\n # plot line\n self._hotspot_fields_plot_line(fig, frame)\n # plot colorbar\n cbar = plt.colorbar(fill,\n plt.gcf().add_axes([0.25, 0.125, 0.5, 0.04]),\n orientation=\"horizontal\",\n extend=\"both\")\n if variable == \"pr\":\n cbar.set_label(\"%\")\n against_region = (\n f\"{self.cfg['region'][2]}$^o$ N-\"\n f\"{self.cfg['region'][3]}$^o$ N latitudinal belt\")\n else:\n cbar.set_label(\n self.formatter(str(results_dict[keys[-1]].units)))\n against_region = \"global\"\n\n # plot title and save\n self._hotspot_fields_plot_save(against_region, variable,\n results_dict['scenario'],\n ancestor_files)",
"def generate_plots(ulog, px4_ulog, db_data, vehicle_data, link_to_3d_page,\n link_to_pid_analysis_page):\n\n plots = []\n data = ulog.data_list\n\n # COMPATIBILITY support for old logs\n if any(elem.name in ('vehicle_air_data', 'vehicle_magnetometer') for elem in data):\n baro_alt_meter_topic = 'vehicle_air_data'\n magnetometer_ga_topic = 'vehicle_magnetometer'\n else: # old\n baro_alt_meter_topic = 'sensor_combined'\n magnetometer_ga_topic = 'sensor_combined'\n manual_control_sp_controls = ['roll', 'pitch', 'yaw', 'throttle']\n manual_control_sp_throttle_range = '[-1, 1]'\n vehicle_gps_position_altitude = None\n for topic in data:\n if topic.name == 'system_power':\n # COMPATIBILITY: rename fields to new format\n if 'voltage5V_v' in topic.data: # old (prior to PX4/Firmware:213aa93)\n topic.data['voltage5v_v'] = topic.data.pop('voltage5V_v')\n if 'voltage3V3_v' in topic.data: # old (prior to PX4/Firmware:213aa93)\n topic.data['sensors3v3[0]'] = topic.data.pop('voltage3V3_v')\n if 'voltage3v3_v' in topic.data:\n topic.data['sensors3v3[0]'] = topic.data.pop('voltage3v3_v')\n elif topic.name == 'tecs_status':\n if 'airspeed_sp' in topic.data: # old (prior to PX4-Autopilot/pull/16585)\n topic.data['true_airspeed_sp'] = topic.data.pop('airspeed_sp')\n elif topic.name == 'manual_control_setpoint':\n if 'throttle' not in topic.data: # old (prior to PX4-Autopilot/pull/15949)\n manual_control_sp_controls = ['y', 'x', 'r', 'z']\n manual_control_sp_throttle_range = '[0, 1]'\n elif topic.name == 'vehicle_gps_position':\n if ulog.msg_info_dict.get('ver_data_format', 0) >= 2:\n vehicle_gps_position_altitude = topic.data['altitude_msl_m']\n else: # COMPATIBILITY\n vehicle_gps_position_altitude = topic.data['alt'] * 0.001\n\n if any(elem.name == 'vehicle_angular_velocity' for elem in data):\n rate_estimated_topic_name = 'vehicle_angular_velocity'\n rate_groundtruth_topic_name = 'vehicle_angular_velocity_groundtruth'\n rate_field_names = ['xyz[0]', 'xyz[1]', 'xyz[2]']\n else: # old\n rate_estimated_topic_name = 'vehicle_attitude'\n rate_groundtruth_topic_name = 'vehicle_attitude_groundtruth'\n rate_field_names = ['rollspeed', 'pitchspeed', 'yawspeed']\n if any(elem.name == 'manual_control_switches' for elem in data):\n manual_control_switches_topic = 'manual_control_switches'\n else: # old\n manual_control_switches_topic = 'manual_control_setpoint'\n dynamic_control_alloc = any(elem.name in ('actuator_motors', 'actuator_servos')\n for elem in data)\n actuator_controls_0 = ActuatorControls(ulog, dynamic_control_alloc, 0)\n actuator_controls_1 = ActuatorControls(ulog, dynamic_control_alloc, 1)\n\n # initialize flight mode changes\n flight_mode_changes = get_flight_mode_changes(ulog)\n\n # VTOL state changes & vehicle type\n vtol_states = None\n is_vtol = False\n is_vtol_tailsitter = False\n try:\n cur_dataset = ulog.get_dataset('vehicle_status')\n if np.amax(cur_dataset.data['is_vtol']) == 1:\n is_vtol = True\n # check if is tailsitter\n is_vtol_tailsitter = np.amax(cur_dataset.data['is_vtol_tailsitter']) == 1\n # find mode after transitions (states: 1=transition, 2=FW, 3=MC)\n if 'vehicle_type' in cur_dataset.data:\n vehicle_type_field = 'vehicle_type'\n vtol_state_mapping = {2: 2, 1: 3}\n vehicle_type = cur_dataset.data['vehicle_type']\n in_transition_mode = cur_dataset.data['in_transition_mode']\n vtol_states = []\n for i in range(len(vehicle_type)):\n # a VTOL can change state also w/o in_transition_mode set\n # (e.g. 
in Manual mode)\n if i == 0 or in_transition_mode[i-1] != in_transition_mode[i] or \\\n vehicle_type[i-1] != vehicle_type[i]:\n vtol_states.append((cur_dataset.data['timestamp'][i],\n in_transition_mode[i]))\n\n else: # COMPATIBILITY: old logs (https://github.com/PX4/Firmware/pull/11918)\n vtol_states = cur_dataset.list_value_changes('in_transition_mode')\n vehicle_type_field = 'is_rotary_wing'\n vtol_state_mapping = {0: 2, 1: 3}\n for i in range(len(vtol_states)):\n if vtol_states[i][1] == 0:\n t = vtol_states[i][0]\n idx = np.argmax(cur_dataset.data['timestamp'] >= t) + 1\n vtol_states[i] = (t, vtol_state_mapping[\n cur_dataset.data[vehicle_type_field][idx]])\n vtol_states.append((ulog.last_timestamp, -1))\n except (KeyError, IndexError) as error:\n vtol_states = None\n\n\n\n # Heading\n curdoc().template_variables['title_html'] = get_heading_html(\n ulog, px4_ulog, db_data, link_to_3d_page,\n additional_links=[(\"Open PID Analysis\", link_to_pid_analysis_page)])\n\n # info text on top (logging duration, max speed, ...)\n curdoc().template_variables['info_table_html'] = \\\n get_info_table_html(ulog, px4_ulog, db_data, vehicle_data, vtol_states)\n\n curdoc().template_variables['error_labels_html'] = get_error_labels_html()\n\n hardfault_html = get_hardfault_html(ulog)\n if hardfault_html is not None:\n curdoc().template_variables['hardfault_html'] = hardfault_html\n\n corrupt_log_html = get_corrupt_log_html(ulog)\n if corrupt_log_html:\n curdoc().template_variables['corrupt_log_html'] = corrupt_log_html\n\n # Position plot\n data_plot = DataPlot2D(data, plot_config, 'vehicle_local_position',\n x_axis_label='[m]', y_axis_label='[m]', plot_height='large')\n data_plot.add_graph('y', 'x', colors2[0], 'Estimated',\n check_if_all_zero=True)\n if not data_plot.had_error: # vehicle_local_position is required\n data_plot.change_dataset('vehicle_local_position_setpoint')\n data_plot.add_graph('y', 'x', colors2[1], 'Setpoint')\n # groundtruth (SITL only)\n data_plot.change_dataset('vehicle_local_position_groundtruth')\n data_plot.add_graph('y', 'x', color_gray, 'Groundtruth')\n # GPS + position setpoints\n plot_map(ulog, plot_config, map_type='plain', setpoints=True,\n bokeh_plot=data_plot.bokeh_plot)\n if data_plot.finalize() is not None:\n plots.append(data_plot.bokeh_plot)\n\n if any(elem.name == 'vehicle_gps_position' for elem in ulog.data_list):\n # Leaflet Map\n try:\n pos_datas, flight_modes = ulog_to_polyline(ulog, flight_mode_changes)\n curdoc().template_variables['pos_datas'] = pos_datas\n curdoc().template_variables['pos_flight_modes'] = flight_modes\n except:\n pass\n curdoc().template_variables['has_position_data'] = True\n\n # initialize parameter changes\n changed_params = None\n if not 'replay' in ulog.msg_info_dict: # replay can have many param changes\n if len(ulog.changed_parameters) > 0:\n changed_params = ulog.changed_parameters\n plots.append(None) # save space for the param change button\n\n ### Add all data plots ###\n\n x_range_offset = (ulog.last_timestamp - ulog.start_timestamp) * 0.05\n x_range = Range1d(ulog.start_timestamp - x_range_offset, ulog.last_timestamp + x_range_offset)\n\n # Altitude estimate\n data_plot = DataPlot(data, plot_config, 'vehicle_gps_position',\n y_axis_label='[m]', title='Altitude Estimate',\n changed_params=changed_params, x_range=x_range)\n data_plot.add_graph([lambda data: ('alt', vehicle_gps_position_altitude)],\n colors8[0:1], ['GPS Altitude (MSL)'])\n data_plot.change_dataset(baro_alt_meter_topic)\n data_plot.add_graph(['baro_alt_meter'], 
colors8[1:2], ['Barometer Altitude'])\n data_plot.change_dataset('vehicle_global_position')\n data_plot.add_graph(['alt'], colors8[2:3], ['Fused Altitude Estimation'])\n data_plot.change_dataset('position_setpoint_triplet')\n data_plot.add_circle(['current.alt'], [plot_config['mission_setpoint_color']],\n ['Altitude Setpoint'])\n data_plot.change_dataset(actuator_controls_0.thrust_sp_topic)\n if actuator_controls_0.thrust_z_neg is not None:\n data_plot.add_graph([lambda data: ('thrust', actuator_controls_0.thrust_z_neg*100)],\n colors8[6:7], ['Thrust [0, 100]'])\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # VTOL tailistter orientation conversion, if relevant\n if is_vtol_tailsitter:\n [tailsitter_attitude, tailsitter_rates] = tailsitter_orientation(ulog, vtol_states)\n\n # Roll/Pitch/Yaw angle & angular rate\n for index, axis in enumerate(['roll', 'pitch', 'yaw']):\n # angle\n axis_name = axis.capitalize()\n data_plot = DataPlot(data, plot_config, 'vehicle_attitude',\n y_axis_label='[deg]', title=axis_name+' Angle',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n if is_vtol_tailsitter:\n if tailsitter_attitude[axis] is not None:\n data_plot.add_graph([lambda data: (axis+'_q',\n np.rad2deg(tailsitter_attitude[axis]))],\n colors3[0:1], [axis_name+' Estimated'], mark_nan=True)\n else:\n data_plot.add_graph([lambda data: (axis, np.rad2deg(data[axis]))],\n colors3[0:1], [axis_name+' Estimated'], mark_nan=True)\n\n data_plot.change_dataset('vehicle_attitude_setpoint')\n data_plot.add_graph([lambda data: (axis+'_d', np.rad2deg(data[axis+'_d']))],\n colors3[1:2], [axis_name+' Setpoint'],\n use_step_lines=True)\n if axis == 'yaw':\n data_plot.add_graph(\n [lambda data: ('yaw_sp_move_rate', np.rad2deg(data['yaw_sp_move_rate']))],\n colors3[2:3], [axis_name+' FF Setpoint [deg/s]'],\n use_step_lines=True)\n data_plot.change_dataset('vehicle_attitude_groundtruth')\n data_plot.add_graph([lambda data: (axis, np.rad2deg(data[axis]))],\n [color_gray], [axis_name+' Groundtruth'])\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # rate\n data_plot = DataPlot(data, plot_config, rate_estimated_topic_name,\n y_axis_label='[deg/s]', title=axis_name+' Angular Rate',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n if is_vtol_tailsitter:\n if tailsitter_rates[axis] is not None:\n data_plot.add_graph([lambda data: (axis+'_q',\n np.rad2deg(tailsitter_rates[axis]))],\n colors3[0:1], [axis_name+' Rate Estimated'], mark_nan=True)\n else:\n data_plot.add_graph([lambda data: (axis+'speed',\n np.rad2deg(data[rate_field_names[index]]))],\n colors3[0:1], [axis_name+' Rate Estimated'], mark_nan=True)\n data_plot.change_dataset('vehicle_rates_setpoint')\n data_plot.add_graph([lambda data: (axis, np.rad2deg(data[axis]))],\n colors3[1:2], [axis_name+' Rate Setpoint'],\n mark_nan=True, use_step_lines=True)\n axis_letter = axis[0].upper()\n rate_int_limit = '(*100)'\n # this param is MC/VTOL only (it will not exist on FW)\n rate_int_limit_param = 'MC_' + axis_letter + 'R_INT_LIM'\n if rate_int_limit_param in ulog.initial_parameters:\n rate_int_limit = '[-{0:.0f}, {0:.0f}]'.format(\n ulog.initial_parameters[rate_int_limit_param]*100)\n data_plot.change_dataset('rate_ctrl_status')\n data_plot.add_graph([lambda data: (axis, data[axis+'speed_integ']*100)],\n colors3[2:3], [axis_name+' 
Rate Integral '+rate_int_limit])\n data_plot.change_dataset(rate_groundtruth_topic_name)\n data_plot.add_graph([lambda data: (axis+'speed',\n np.rad2deg(data[rate_field_names[index]]))],\n [color_gray], [axis_name+' Rate Groundtruth'])\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n\n # Local position\n for axis in ['x', 'y', 'z']:\n data_plot = DataPlot(data, plot_config, 'vehicle_local_position',\n y_axis_label='[m]', title='Local Position '+axis.upper(),\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph([axis], colors2[0:1], [axis.upper()+' Estimated'], mark_nan=True)\n data_plot.change_dataset('vehicle_local_position_setpoint')\n data_plot.add_graph([axis], colors2[1:2], [axis.upper()+' Setpoint'],\n use_step_lines=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n\n # Velocity\n data_plot = DataPlot(data, plot_config, 'vehicle_local_position',\n y_axis_label='[m/s]', title='Velocity',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['vx', 'vy', 'vz'], colors8[0:3], ['X', 'Y', 'Z'])\n data_plot.change_dataset('vehicle_local_position_setpoint')\n data_plot.add_graph(['vx', 'vy', 'vz'], [colors8[5], colors8[4], colors8[6]],\n ['X Setpoint', 'Y Setpoint', 'Z Setpoint'], use_step_lines=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # Visual Odometry (only if topic found)\n if any(elem.name == 'vehicle_visual_odometry' for elem in data):\n # Vision position\n data_plot = DataPlot(data, plot_config, 'vehicle_visual_odometry',\n y_axis_label='[m]', title='Visual Odometry Position',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['x', 'y', 'z'], colors3, ['X', 'Y', 'Z'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n data_plot.change_dataset('vehicle_local_position_groundtruth')\n data_plot.add_graph(['x', 'y', 'z'], colors8[2:5],\n ['Groundtruth X', 'Groundtruth Y', 'Groundtruth Z'])\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # Vision velocity\n data_plot = DataPlot(data, plot_config, 'vehicle_visual_odometry',\n y_axis_label='[m]', title='Visual Odometry Velocity',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['vx', 'vy', 'vz'], colors3, ['X', 'Y', 'Z'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n data_plot.change_dataset('vehicle_local_position_groundtruth')\n data_plot.add_graph(['vx', 'vy', 'vz'], colors8[2:5],\n ['Groundtruth VX', 'Groundtruth VY', 'Groundtruth VZ'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # Vision attitude\n data_plot = DataPlot(data, plot_config, 'vehicle_visual_odometry',\n y_axis_label='[deg]', title='Visual Odometry Attitude',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph([lambda data: ('roll', np.rad2deg(data['roll'])),\n lambda data: ('pitch', np.rad2deg(data['pitch'])),\n lambda data: ('yaw', np.rad2deg(data['yaw']))],\n colors3, ['Roll', 'Pitch', 'Yaw'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n 
data_plot.change_dataset('vehicle_attitude_groundtruth')\n data_plot.add_graph([lambda data: ('roll', np.rad2deg(data['roll'])),\n lambda data: ('pitch', np.rad2deg(data['pitch'])),\n lambda data: ('yaw', np.rad2deg(data['yaw']))],\n colors8[2:5],\n ['Roll Groundtruth', 'Pitch Groundtruth', 'Yaw Groundtruth'])\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # Vision attitude rate\n data_plot = DataPlot(data, plot_config, 'vehicle_visual_odometry',\n y_axis_label='[deg]', title='Visual Odometry Attitude Rate',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph([lambda data: ('rollspeed', np.rad2deg(data['rollspeed'])),\n lambda data: ('pitchspeed', np.rad2deg(data['pitchspeed'])),\n lambda data: ('yawspeed', np.rad2deg(data['yawspeed']))],\n colors3, ['Roll Rate', 'Pitch Rate', 'Yaw Rate'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n data_plot.change_dataset(rate_groundtruth_topic_name)\n data_plot.add_graph([lambda data: ('rollspeed', np.rad2deg(data[rate_field_names[0]])),\n lambda data: ('pitchspeed', np.rad2deg(data[rate_field_names[1]])),\n lambda data: ('yawspeed', np.rad2deg(data[rate_field_names[2]]))],\n colors8[2:5],\n ['Roll Rate Groundtruth', 'Pitch Rate Groundtruth',\n 'Yaw Rate Groundtruth'])\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # Vision latency\n data_plot = DataPlot(data, plot_config, 'vehicle_visual_odometry',\n y_axis_label='[ms]', title='Visual Odometry Latency',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(\n [lambda data: ('latency', 1e-3*(data['timestamp'] - data['timestamp_sample']))],\n colors3, ['VIO Latency'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # Airspeed vs Ground speed: but only if there's valid airspeed data or a VTOL\n try:\n if is_vtol or ulog.get_dataset('airspeed') is not None:\n data_plot = DataPlot(data, plot_config, 'vehicle_global_position',\n y_axis_label='[m/s]', title='Airspeed',\n plot_height='small',\n changed_params=changed_params, x_range=x_range)\n data_plot.add_graph([lambda data: ('groundspeed_estimated',\n np.sqrt(data['vel_n']**2 + data['vel_e']**2))],\n colors8[0:1], ['Ground Speed Estimated'])\n if any(elem.name == 'airspeed_validated' for elem in data):\n airspeed_validated = ulog.get_dataset('airspeed_validated')\n data_plot.change_dataset('airspeed_validated')\n if np.amax(airspeed_validated.data['airspeed_sensor_measurement_valid']) == 1:\n data_plot.add_graph(['true_airspeed_m_s'], colors8[1:2],\n ['True Airspeed'])\n else:\n data_plot.add_graph(['true_ground_minus_wind_m_s'], colors8[1:2],\n ['True Airspeed (estimated)'])\n else:\n data_plot.change_dataset('airspeed')\n data_plot.add_graph(['indicated_airspeed_m_s'], colors8[1:2],\n ['Indicated Airspeed'])\n data_plot.change_dataset('vehicle_gps_position')\n data_plot.add_graph(['vel_m_s'], colors8[2:3], ['Ground Speed (from GPS)'])\n data_plot.change_dataset('tecs_status')\n data_plot.add_graph(['true_airspeed_sp'], colors8[3:4], ['True Airspeed Setpoint'])\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n except (KeyError, IndexError) as error:\n pass\n\n # TECS (fixed-wing or VTOLs)\n data_plot = DataPlot(data, plot_config, 'tecs_status', y_start=0, title='TECS',\n 
y_axis_label='[m/s]', plot_height='small',\n changed_params=changed_params, x_range=x_range)\n data_plot.add_graph(['height_rate', 'height_rate_setpoint'],\n colors2, ['Height Rate', 'Height Rate Setpoint'],\n mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # manual control inputs\n # prefer the manual_control_setpoint topic. Old logs do not contain it\n if any(elem.name == 'manual_control_setpoint' for elem in data):\n data_plot = DataPlot(data, plot_config, 'manual_control_setpoint',\n title='Manual Control Inputs (Radio or Joystick)',\n plot_height='small', y_range=Range1d(-1.1, 1.1),\n changed_params=changed_params, x_range=x_range)\n data_plot.add_graph(manual_control_sp_controls + ['aux1', 'aux2'], colors8[0:6],\n ['Y / Roll', 'X / Pitch', 'Yaw',\n 'Throttle ' + manual_control_sp_throttle_range, 'Aux1', 'Aux2'])\n data_plot.change_dataset(manual_control_switches_topic)\n data_plot.add_graph([lambda data: ('mode_slot', data['mode_slot']/6),\n lambda data: ('kill_switch', data['kill_switch'] == 1)],\n colors8[6:8], ['Flight Mode', 'Kill Switch'])\n # TODO: add RTL switch and others? Look at params which functions are mapped?\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n else: # it's an old log (COMPATIBILITY)\n data_plot = DataPlot(data, plot_config, 'rc_channels',\n title='Raw Radio Control Inputs',\n plot_height='small', y_range=Range1d(-1.1, 1.1),\n changed_params=changed_params, x_range=x_range)\n num_rc_channels = 8\n if data_plot.dataset:\n max_channels = np.amax(data_plot.dataset.data['channel_count'])\n if max_channels < num_rc_channels: num_rc_channels = max_channels\n legends = []\n for i in range(num_rc_channels):\n channel_names = px4_ulog.get_configured_rc_input_names(i)\n if channel_names is None:\n legends.append('Channel '+str(i))\n else:\n legends.append('Channel '+str(i)+' ('+', '.join(channel_names)+')')\n data_plot.add_graph(['channels['+str(i)+']' for i in range(num_rc_channels)],\n colors8[0:num_rc_channels], legends, mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # actuator controls 0\n data_plot = DataPlot(data, plot_config, actuator_controls_0.torque_sp_topic,\n y_start=0, title='Actuator Controls',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(actuator_controls_0.torque_axes_field_names,\n colors8[0:3], ['Roll', 'Pitch', 'Yaw'], mark_nan=True)\n data_plot.change_dataset(actuator_controls_0.thrust_sp_topic)\n if actuator_controls_0.thrust_z_neg is not None:\n data_plot.add_graph([lambda data: ('thrust', actuator_controls_0.thrust_z_neg)],\n colors8[3:4], ['Thrust (up)'], mark_nan=True)\n if actuator_controls_0.thrust_x is not None:\n data_plot.add_graph([lambda data: ('thrust', actuator_controls_0.thrust_x)],\n colors8[4:5], ['Thrust (forward)'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # actuator controls (Main) FFT (for filter & output noise analysis)\n data_plot = DataPlotFFT(data, plot_config, actuator_controls_0.torque_sp_topic,\n title='Actuator Controls FFT', y_range = Range1d(0, 0.01))\n data_plot.add_graph(actuator_controls_0.torque_axes_field_names,\n colors3, ['Roll', 
'Pitch', 'Yaw'])\n if not data_plot.had_error:\n if 'MC_DTERM_CUTOFF' in ulog.initial_parameters: # COMPATIBILITY\n data_plot.mark_frequency(\n ulog.initial_parameters['MC_DTERM_CUTOFF'],\n 'MC_DTERM_CUTOFF')\n if 'IMU_DGYRO_CUTOFF' in ulog.initial_parameters:\n data_plot.mark_frequency(\n ulog.initial_parameters['IMU_DGYRO_CUTOFF'],\n 'IMU_DGYRO_CUTOFF')\n if 'IMU_GYRO_CUTOFF' in ulog.initial_parameters:\n data_plot.mark_frequency(\n ulog.initial_parameters['IMU_GYRO_CUTOFF'],\n 'IMU_GYRO_CUTOFF', 20)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # angular_velocity FFT (for filter & output noise analysis)\n data_plot = DataPlotFFT(data, plot_config, 'vehicle_angular_velocity',\n title='Angular Velocity FFT', y_range = Range1d(0, 0.01))\n data_plot.add_graph(['xyz[0]', 'xyz[1]', 'xyz[2]'],\n colors3, ['Rollspeed', 'Pitchspeed', 'Yawspeed'])\n if not data_plot.had_error:\n if 'IMU_GYRO_CUTOFF' in ulog.initial_parameters:\n data_plot.mark_frequency(\n ulog.initial_parameters['IMU_GYRO_CUTOFF'],\n 'IMU_GYRO_CUTOFF', 20)\n if 'IMU_GYRO_NF_FREQ' in ulog.initial_parameters:\n if ulog.initial_parameters['IMU_GYRO_NF_FREQ'] > 0:\n data_plot.mark_frequency(\n ulog.initial_parameters['IMU_GYRO_NF_FREQ'],\n 'IMU_GYRO_NF_FREQ', 70)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # angular_acceleration FFT (for filter & output noise analysis)\n data_plot = DataPlotFFT(data, plot_config, 'vehicle_angular_acceleration',\n title='Angular Acceleration FFT')\n data_plot.add_graph(['xyz[0]', 'xyz[1]', 'xyz[2]'],\n colors3, ['Roll accel', 'Pitch accel', 'Yaw accel'])\n if not data_plot.had_error:\n if 'IMU_DGYRO_CUTOFF' in ulog.initial_parameters:\n data_plot.mark_frequency(\n ulog.initial_parameters['IMU_DGYRO_CUTOFF'],\n 'IMU_DGYRO_CUTOFF')\n if 'IMU_GYRO_NF_FREQ' in ulog.initial_parameters:\n if ulog.initial_parameters['IMU_GYRO_NF_FREQ'] > 0:\n data_plot.mark_frequency(\n ulog.initial_parameters['IMU_GYRO_NF_FREQ'],\n 'IMU_GYRO_NF_FREQ', 70)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # actuator controls 1 (torque + thrust)\n # (only present on VTOL, Fixed-wing config)\n data_plot = DataPlot(data, plot_config, actuator_controls_1.torque_sp_topic,\n y_start=0, title='Actuator Controls 1 (VTOL in Fixed-Wing mode)',\n plot_height='small', changed_params=changed_params, topic_instance=1,\n x_range=x_range)\n data_plot.add_graph(actuator_controls_1.torque_axes_field_names,\n colors8[0:3], ['Roll', 'Pitch', 'Yaw'], mark_nan=True)\n data_plot.change_dataset(actuator_controls_1.thrust_sp_topic,\n actuator_controls_1.topic_instance)\n if actuator_controls_1.thrust_x is not None:\n data_plot.add_graph([lambda data: ('thrust', actuator_controls_1.thrust_x)],\n colors8[3:4], ['Thrust (forward)'], mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n if dynamic_control_alloc:\n\n # actuator motors, actuator servos\n actuator_output_plots = [(\"actuator_motors\", \"Motor\"), (\"actuator_servos\", \"Servo\")]\n for topic_name, plot_name in actuator_output_plots:\n\n data_plot = DataPlot(data, plot_config, topic_name,\n y_range=Range1d(-1, 1), title=plot_name+' Outputs',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n num_actuator_outputs = 12\n if data_plot.dataset:\n for i in range(num_actuator_outputs):\n try:\n output_data = data_plot.dataset.data['control['+str(i)+']']\n except KeyError:\n num_actuator_outputs = i\n 
break\n\n if np.isnan(output_data).all():\n num_actuator_outputs = i\n break\n\n if num_actuator_outputs > 0:\n data_plot.add_graph(['control['+str(i)+']'\n for i in range(num_actuator_outputs)],\n [colors8[i % 8] for i in range(num_actuator_outputs)],\n [plot_name+' '+str(i+1)\n for i in range(num_actuator_outputs)])\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n else:\n\n actuator_output_plots = [(0, \"Actuator Outputs (Main)\"), (1, \"Actuator Outputs (AUX)\"),\n (2, \"Actuator Outputs (EXTRA)\")]\n for topic_instance, plot_name in actuator_output_plots:\n\n data_plot = DataPlot(data, plot_config, 'actuator_outputs',\n y_start=0, title=plot_name, plot_height='small',\n changed_params=changed_params, topic_instance=topic_instance,\n x_range=x_range)\n num_actuator_outputs = 16\n # only plot if at least one of the outputs is not constant\n all_constant = True\n if data_plot.dataset:\n max_outputs = np.amax(data_plot.dataset.data['noutputs'])\n if max_outputs < num_actuator_outputs: num_actuator_outputs = max_outputs\n\n for i in range(num_actuator_outputs):\n output_data = data_plot.dataset.data['output['+str(i)+']']\n if not np.all(output_data == output_data[0]):\n all_constant = False\n\n if not all_constant:\n data_plot.add_graph(['output['+str(i)+']' for i in range(num_actuator_outputs)],\n [colors8[i % 8] for i in range(num_actuator_outputs)],\n ['Output '+str(i) for i in range(num_actuator_outputs)],\n mark_nan=True)\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # raw acceleration\n data_plot = DataPlot(data, plot_config, 'sensor_combined',\n y_axis_label='[m/s^2]', title='Raw Acceleration',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['accelerometer_m_s2[0]', 'accelerometer_m_s2[1]',\n 'accelerometer_m_s2[2]'], colors3, ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # Vibration Metrics\n data_plot = DataPlot(data, plot_config, 'vehicle_imu_status',\n title='Vibration Metrics',\n plot_height='small', changed_params=changed_params,\n x_range=x_range, y_start=0, topic_instance=0)\n data_plot.add_graph(['accel_vibration_metric'], colors8[0:1],\n ['Accel 0 Vibration Level [m/s^2]'])\n\n data_plot.change_dataset('vehicle_imu_status', 1)\n data_plot.add_graph(['accel_vibration_metric'], colors8[1:2],\n ['Accel 1 Vibration Level [m/s^2]'])\n\n data_plot.change_dataset('vehicle_imu_status', 2)\n data_plot.add_graph(['accel_vibration_metric'], colors8[2:3],\n ['Accel 2 Vibration Level [m/s^2]'])\n\n data_plot.change_dataset('vehicle_imu_status', 3)\n data_plot.add_graph(['accel_vibration_metric'], colors8[3:4],\n ['Accel 3 Vibration Level [rad/s]'])\n\n data_plot.add_horizontal_background_boxes(\n ['green', 'orange', 'red'], [4.905, 9.81])\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # Acceleration Spectrogram\n data_plot = DataPlotSpec(data, plot_config, 'sensor_combined',\n y_axis_label='[Hz]', title='Acceleration Power Spectral Density',\n plot_height='small', x_range=x_range)\n data_plot.add_graph(['accelerometer_m_s2[0]', 'accelerometer_m_s2[1]', 'accelerometer_m_s2[2]'],\n ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # Filtered Gyro (angular velocity) Spectrogram\n data_plot = DataPlotSpec(data, plot_config, 'vehicle_angular_velocity',\n 
y_axis_label='[Hz]', title='Angular velocity Power Spectral Density',\n plot_height='small', x_range=x_range)\n data_plot.add_graph(['xyz[0]', 'xyz[1]', 'xyz[2]'],\n ['rollspeed', 'pitchspeed', 'yawspeed'])\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # Filtered angular acceleration Spectrogram\n data_plot = DataPlotSpec(data, plot_config, 'vehicle_angular_acceleration',\n y_axis_label='[Hz]',\n title='Angular acceleration Power Spectral Density',\n plot_height='small', x_range=x_range)\n data_plot.add_graph(['xyz[0]', 'xyz[1]', 'xyz[2]'],\n ['roll accel', 'pitch accel', 'yaw accel'])\n\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # raw angular speed\n data_plot = DataPlot(data, plot_config, 'sensor_combined',\n y_axis_label='[deg/s]', title='Raw Angular Speed (Gyroscope)',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph([\n lambda data: ('gyro_rad[0]', np.rad2deg(data['gyro_rad[0]'])),\n lambda data: ('gyro_rad[1]', np.rad2deg(data['gyro_rad[1]'])),\n lambda data: ('gyro_rad[2]', np.rad2deg(data['gyro_rad[2]']))],\n colors3, ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # FIFO accel\n for instance in range(3):\n if add_virtual_fifo_topic_data(ulog, 'sensor_accel_fifo', instance):\n # Raw data\n data_plot = DataPlot(data, plot_config, 'sensor_accel_fifo_virtual',\n y_axis_label='[m/s^2]',\n title=f'Raw Acceleration (FIFO, IMU{instance})',\n plot_height='small', changed_params=changed_params,\n x_range=x_range, topic_instance=instance)\n data_plot.add_graph(['x', 'y', 'z'], colors3, ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # power spectral density\n data_plot = DataPlotSpec(data, plot_config, 'sensor_accel_fifo_virtual',\n y_axis_label='[Hz]',\n title=(f'Acceleration Power Spectral Density'\n f'(FIFO, IMU{instance})'),\n plot_height='normal', x_range=x_range, topic_instance=instance)\n data_plot.add_graph(['x', 'y', 'z'], ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # sampling regularity\n data_plot = DataPlot(data, plot_config, 'sensor_accel_fifo', y_range=Range1d(0, 25e3),\n y_axis_label='[us]',\n title=f'Sampling Regularity of Sensor Data (FIFO, IMU{instance})',\n plot_height='small',\n changed_params=changed_params,\n x_range=x_range, topic_instance=instance)\n sensor_accel_fifo = ulog.get_dataset('sensor_accel_fifo').data\n sampling_diff = np.diff(sensor_accel_fifo['timestamp'])\n min_sampling_diff = np.amin(sampling_diff)\n plot_dropouts(data_plot.bokeh_plot, ulog.dropouts, min_sampling_diff)\n data_plot.add_graph([lambda data: ('timediff', np.append(sampling_diff, 0))],\n [colors3[2]], ['delta t (between 2 logged samples)'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # FIFO gyro\n for instance in range(3):\n if add_virtual_fifo_topic_data(ulog, 'sensor_gyro_fifo', instance):\n # Raw data\n data_plot = DataPlot(data, plot_config, 'sensor_gyro_fifo_virtual',\n y_axis_label='[deg/s]', title=f'Raw Gyro (FIFO, IMU{instance})',\n plot_height='small', changed_params=changed_params,\n x_range=x_range, topic_instance=instance)\n data_plot.add_graph(['x', 'y', 'z'], colors3, ['X', 'Y', 'Z'])\n data_plot.add_graph([\n lambda data: ('x', np.rad2deg(data['x'])),\n lambda data: ('y', np.rad2deg(data['y'])),\n lambda data: ('z', np.rad2deg(data['z']))],\n colors3, ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n # power 
spectral density\n data_plot = DataPlotSpec(data, plot_config, 'sensor_gyro_fifo_virtual',\n y_axis_label='[Hz]',\n title=f'Gyro Power Spectral Density (FIFO, IMU{instance})',\n plot_height='normal', x_range=x_range, topic_instance=instance)\n data_plot.add_graph(['x', 'y', 'z'], ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # magnetic field strength\n data_plot = DataPlot(data, plot_config, magnetometer_ga_topic,\n y_axis_label='[gauss]', title='Raw Magnetic Field Strength',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['magnetometer_ga[0]', 'magnetometer_ga[1]',\n 'magnetometer_ga[2]'], colors3,\n ['X', 'Y', 'Z'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # distance sensor\n data_plot = DataPlot(data, plot_config, 'distance_sensor',\n y_start=0, y_axis_label='[m]', title='Distance Sensor',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['current_distance', 'variance'], colors3[0:2],\n ['Distance', 'Variance'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n\n # gps uncertainty\n # the accuracy values can be really large if there is no fix, so we limit the\n # y axis range to some sane values\n data_plot = DataPlot(data, plot_config, 'vehicle_gps_position',\n title='GPS Uncertainty', y_range=Range1d(0, 40),\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['eph', 'epv', 'satellites_used', 'fix_type'], colors8[::2],\n ['Horizontal position accuracy [m]', 'Vertical position accuracy [m]',\n 'Num Satellites used', 'GPS Fix'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # gps noise & jamming\n data_plot = DataPlot(data, plot_config, 'vehicle_gps_position',\n y_start=0, title='GPS Noise & Jamming',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['noise_per_ms', 'jamming_indicator'], colors3[0:2],\n ['Noise per ms', 'Jamming Indicator'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # thrust and magnetic field\n data_plot = DataPlot(data, plot_config, magnetometer_ga_topic,\n y_start=0, title='Thrust and Magnetic Field', plot_height='small',\n changed_params=changed_params, x_range=x_range)\n data_plot.add_graph(\n [lambda data: ('len_mag', np.sqrt(data['magnetometer_ga[0]']**2 +\n data['magnetometer_ga[1]']**2 +\n data['magnetometer_ga[2]']**2))],\n colors3[0:1], ['Norm of Magnetic Field'])\n data_plot.change_dataset(actuator_controls_0.thrust_sp_topic)\n if actuator_controls_0.thrust is not None:\n data_plot.add_graph([lambda data: ('thrust', actuator_controls_0.thrust)],\n colors3[1:2], ['Thrust'])\n if is_vtol and not dynamic_control_alloc:\n data_plot.change_dataset(actuator_controls_1.thrust_sp_topic)\n if actuator_controls_1.thrust_x is not None:\n data_plot.add_graph([lambda data: ('thrust', actuator_controls_1.thrust_x)],\n colors3[2:3], ['Thrust (Fixed-wing'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n\n # power\n data_plot = DataPlot(data, plot_config, 'battery_status',\n y_start=0, title='Power',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['voltage_v', 'voltage_filtered_v',\n 'current_a', lambda data: ('discharged_mah', data['discharged_mah']/100),\n lambda data: ('remaining', data['remaining']*10)],\n colors8[::2]+colors8[1:2],\n ['Battery Voltage [V]', 'Battery Voltage filtered 
[V]',\n 'Battery Current [A]', 'Discharged Amount [mAh / 100]',\n 'Battery remaining [0=empty, 10=full]'])\n data_plot.change_dataset('system_power')\n if data_plot.dataset:\n if 'voltage5v_v' in data_plot.dataset.data and \\\n np.amax(data_plot.dataset.data['voltage5v_v']) > 0.0001:\n data_plot.add_graph(['voltage5v_v'], colors8[7:8], ['5 V'])\n if 'sensors3v3[0]' in data_plot.dataset.data and \\\n np.amax(data_plot.dataset.data['sensors3v3[0]']) > 0.0001:\n data_plot.add_graph(['sensors3v3[0]'], colors8[5:6], ['3.3 V'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n #Temperature\n data_plot = DataPlot(data, plot_config, 'sensor_baro',\n y_start=0, y_axis_label='[C]', title='Temperature',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['temperature'], colors8[0:1],\n ['Baro temperature'])\n data_plot.change_dataset('sensor_accel')\n data_plot.add_graph(['temperature'], colors8[2:3],\n ['Accel temperature'])\n data_plot.change_dataset('airspeed')\n data_plot.add_graph(['air_temperature_celsius'], colors8[4:5],\n ['Airspeed temperature'])\n data_plot.change_dataset('battery_status')\n data_plot.add_graph(['temperature'], colors8[6:7],\n ['Battery temperature'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # estimator flags\n try:\n data_plot = DataPlot(data, plot_config, 'estimator_status',\n y_start=0, title='Estimator Flags',\n plot_height='small', changed_params=changed_params,\n x_range=x_range)\n estimator_status = ulog.get_dataset('estimator_status').data\n plot_data = []\n plot_labels = []\n input_data = [\n ('Health Flags (vel, pos, hgt)', estimator_status['health_flags']),\n ('Timeout Flags (vel, pos, hgt)', estimator_status['timeout_flags']),\n ('Velocity Check Bit', (estimator_status['innovation_check_flags'])&0x1),\n ('Horizontal Position Check Bit', (estimator_status['innovation_check_flags']>>1)&1),\n ('Vertical Position Check Bit', (estimator_status['innovation_check_flags']>>2)&1),\n ('Mag X, Y, Z Check Bits', (estimator_status['innovation_check_flags']>>3)&0x7),\n ('Yaw Check Bit', (estimator_status['innovation_check_flags']>>6)&1),\n ('Airspeed Check Bit', (estimator_status['innovation_check_flags']>>7)&1),\n ('Synthetic Sideslip Check Bit', (estimator_status['innovation_check_flags']>>8)&1),\n ('Height to Ground Check Bit', (estimator_status['innovation_check_flags']>>9)&1),\n ('Optical Flow X, Y Check Bits', (estimator_status['innovation_check_flags']>>10)&0x3),\n ]\n # filter: show only the flags that have non-zero samples\n for cur_label, cur_data in input_data:\n if np.amax(cur_data) > 0.1:\n data_label = 'flags_'+str(len(plot_data)) # just some unique string\n plot_data.append(lambda d, data=cur_data, label=data_label: (label, data))\n plot_labels.append(cur_label)\n if len(plot_data) >= 8: # cannot add more than that\n break\n\n if len(plot_data) == 0:\n # add the plot even in the absence of any problem, so that the user\n # can validate that (otherwise it's ambiguous: it could be that the\n # estimator_status topic is not logged)\n plot_data = [lambda d: ('flags', input_data[0][1])]\n plot_labels = [input_data[0][0]]\n data_plot.add_graph(plot_data, colors8[0:len(plot_data)], plot_labels)\n if data_plot.finalize() is not None: plots.append(data_plot)\n except (KeyError, IndexError) as error:\n print('Error in estimator plot: '+str(error))\n\n\n # Failsafe flags\n try:\n data_plot = DataPlot(data, plot_config, 'vehicle_status',\n y_start=0, title='Failsafe Flags',\n 
plot_height='normal', changed_params=changed_params,\n x_range=x_range)\n data_plot.add_graph(['failsafe', 'failsafe_and_user_took_over'], [colors8[0], colors8[1]],\n ['In Failsafe', 'User Took Over'])\n num_graphs = 2\n skip_if_always_set = ['auto_mission_missing', 'offboard_control_signal_lost']\n\n data_plot.change_dataset('failsafe_flags')\n if data_plot.dataset is not None:\n failsafe_flags = data_plot.dataset.data\n for failsafe_field in failsafe_flags:\n if failsafe_field == 'timestamp' or failsafe_field.startswith('mode_req_'):\n continue\n cur_data = failsafe_flags[failsafe_field]\n # filter: show only the flags that are set at some point\n if np.amax(cur_data) >= 1:\n if failsafe_field in skip_if_always_set and np.amin(cur_data) >= 1:\n continue\n data_plot.add_graph([failsafe_field], [colors8[num_graphs % 8]],\n [failsafe_field.replace('_', ' ')])\n num_graphs += 1\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n if data_plot.finalize() is not None: plots.append(data_plot)\n except (KeyError, IndexError) as error:\n print('Error in failsafe plot: '+str(error))\n\n\n # cpu load\n data_plot = DataPlot(data, plot_config, 'cpuload',\n title='CPU & RAM', plot_height='small', y_range=Range1d(0, 1),\n changed_params=changed_params, x_range=x_range)\n data_plot.add_graph(['ram_usage', 'load'], [colors3[1], colors3[2]],\n ['RAM Usage', 'CPU Load'])\n data_plot.add_span('load', line_color=colors3[2])\n data_plot.add_span('ram_usage', line_color=colors3[1])\n plot_flight_modes_background(data_plot, flight_mode_changes, vtol_states)\n if data_plot.finalize() is not None: plots.append(data_plot)\n\n\n # sampling: time difference\n try:\n data_plot = DataPlot(data, plot_config, 'sensor_combined', y_range=Range1d(0, 25e3),\n y_axis_label='[us]',\n title='Sampling Regularity of Sensor Data', plot_height='small',\n changed_params=changed_params, x_range=x_range)\n sensor_combined = ulog.get_dataset('sensor_combined').data\n sampling_diff = np.diff(sensor_combined['timestamp'])\n min_sampling_diff = np.amin(sampling_diff)\n\n plot_dropouts(data_plot.bokeh_plot, ulog.dropouts, min_sampling_diff)\n\n data_plot.add_graph([lambda data: ('timediff', np.append(sampling_diff, 0))],\n [colors3[2]], ['delta t (between 2 logged samples)'])\n data_plot.change_dataset('estimator_status')\n data_plot.add_graph([lambda data: ('time_slip', data['time_slip']*1e6)],\n [colors3[1]], ['Estimator time slip (cumulative)'])\n if data_plot.finalize() is not None: plots.append(data_plot)\n except:\n pass\n\n\n\n # exchange all DataPlot's with the bokeh_plot and handle parameter changes\n\n param_changes_button = Button(label=\"Hide Parameter Changes\", width=170)\n param_change_labels = []\n # FIXME: this should be a CustomJS callback, not on the server. 
However this\n # did not work for me.\n def param_changes_button_clicked():\n \"\"\" callback to show/hide parameter changes \"\"\"\n for label in param_change_labels:\n if label.visible:\n param_changes_button.label = 'Show Parameter Changes'\n label.visible = False\n label.text_alpha = 0 # label.visible does not work, so we use this instead\n else:\n param_changes_button.label = 'Hide Parameter Changes'\n label.visible = True\n label.text_alpha = 1\n param_changes_button.on_click(param_changes_button_clicked)\n\n\n jinja_plot_data = []\n for i in range(len(plots)):\n if plots[i] is None:\n plots[i] = column(param_changes_button, width=int(plot_width * 0.99))\n if isinstance(plots[i], DataPlot):\n if plots[i].param_change_label is not None:\n param_change_labels.append(plots[i].param_change_label)\n\n plot_title = plots[i].title\n plots[i] = plots[i].bokeh_plot\n\n fragment = 'Nav-'+plot_title.replace(' ', '-') \\\n .replace('&', '_').replace('(', '').replace(')', '')\n jinja_plot_data.append({\n 'model_id': plots[i].ref['id'],\n 'fragment': fragment,\n 'title': plot_title\n })\n\n\n # changed parameters\n plots.append(get_changed_parameters(ulog, plot_width))\n\n\n\n # information about which messages are contained in the log\n# TODO: need to load all topics for this (-> log loading will take longer)\n# but if we load all topics and the log contains some (external) topics\n# with buggy timestamps, it will affect the plotting.\n# data_list_sorted = sorted(ulog.data_list, key=lambda d: d.name + str(d.multi_id))\n# table_text = []\n# for d in data_list_sorted:\n# message_size = sum([ULog.get_field_size(f.type_str) for f in d.field_data])\n# num_data_points = len(d.data['timestamp'])\n# table_text.append((d.name, str(d.multi_id), str(message_size), str(num_data_points),\n# str(message_size * num_data_points)))\n# topics_info = '<table><tr><th>Name</th><th>Topic instance</th><th>Message Size</th>' \\\n# '<th>Number of data points</th><th>Total bytes</th></tr>' + ''.join(\n# ['<tr><td>'+'</td><td>'.join(list(x))+'</td></tr>' for x in table_text]) + '</table>'\n# topics_div = Div(text=topics_info, width=int(plot_width*0.9))\n# plots.append(column(topics_div, width=int(plot_width*0.9)))\n\n\n # log messages\n plots.append(get_logged_messages(ulog, plot_width))\n\n\n # console messages, perf & top output\n top_data = ''\n perf_data = ''\n console_messages = ''\n if 'boot_console_output' in ulog.msg_info_multiple_dict:\n console_output = ulog.msg_info_multiple_dict['boot_console_output'][0]\n console_output = escape(''.join(console_output))\n console_messages = '<p><pre>'+console_output+'</pre></p>'\n\n for state in ['pre', 'post']:\n if 'perf_top_'+state+'flight' in ulog.msg_info_multiple_dict:\n current_top_data = ulog.msg_info_multiple_dict['perf_top_'+state+'flight'][0]\n flight_data = escape('\\n'.join(current_top_data))\n top_data += '<p>'+state.capitalize()+' Flight:<br/><pre>'+flight_data+'</pre></p>'\n if 'perf_counter_'+state+'flight' in ulog.msg_info_multiple_dict:\n current_perf_data = ulog.msg_info_multiple_dict['perf_counter_'+state+'flight'][0]\n flight_data = escape('\\n'.join(current_perf_data))\n perf_data += '<p>'+state.capitalize()+' Flight:<br/><pre>'+flight_data+'</pre></p>'\n if 'perf_top_watchdog' in ulog.msg_info_multiple_dict:\n current_top_data = ulog.msg_info_multiple_dict['perf_top_watchdog'][0]\n flight_data = escape('\\n'.join(current_top_data))\n top_data += '<p>Watchdog:<br/><pre>'+flight_data+'</pre></p>'\n\n additional_data_html = ''\n if len(console_messages) > 
0:\n additional_data_html += '<h5>Console Output</h5>'+console_messages\n if len(top_data) > 0:\n additional_data_html += '<h5>Processes</h5>'+top_data\n if len(perf_data) > 0:\n additional_data_html += '<h5>Performance Counters</h5>'+perf_data\n if len(additional_data_html) > 0:\n # hide by default & use a button to expand\n additional_data_html = '''\n<button id=\"show-additional-data-btn\" class=\"btn btn-secondary\" data-toggle=\"collapse\" style=\"min-width:0;\"\n data-target=\"#show-additional-data\">Show additional Data</button>\n<div id=\"show-additional-data\" class=\"collapse\">\n{:}\n</div>\n'''.format(additional_data_html)\n curdoc().template_variables['additional_info'] = additional_data_html\n\n\n curdoc().template_variables['plots'] = jinja_plot_data\n\n return plots",
"def create(self):\n self.parent.copyCurrentWinState(self.pltw)\n # add a new vector\n vname = self.pltw.curvelist[self.cpos].name + 'BL'\n (nvec, npt) = np.shape(self.pltw.blklst[self.blkno])\n if self.pltw.pasteVector(self.data[2], self.blkno, vname):\n xname = self.pltw.getVnam(self.blkno, self.xpos)\n xvinfo = vectInfo(self.blkno, self.xpos, xname)\n yvinfo = vectInfo(self.blkno, nvec, vname)\n self.pltw.curvelist.append(curveInfo(vname, xvinfo, yvinfo))\n self.pltw.updatePlot()\n self.pltw.dirty = True\n self.pltw.activecurv = self.cpos\n self.parent.updateUI()\n self.hide()",
"def test_construct_and_write_metadata(tmp_path):\n\n prov = Provenance()\n prov.start_activity(\"test\")\n prov.finish_activity()\n prov_activity = prov.finished_activities[0]\n\n reference = meta.Reference(\n contact=meta.Contact(\n name=\"Somebody\", email=\"[email protected]\", organization=\"CTA Consortium\"\n ),\n product=meta.Product(\n description=\"An Amazing Product\",\n creation_time=\"2020-10-11 15:23:31\",\n data_category=\"S\",\n data_level=\"DL1\",\n data_association=\"Subarray\",\n data_model_name=\"Unofficial DL1\",\n data_model_version=\"1.0\",\n data_model_url=\"http://google.com\",\n format=\"hdf5\",\n ),\n process=meta.Process(_type=\"Simulation\", subtype=\"Prod3b\", _id=423442,),\n activity=meta.Activity.from_provenance(prov_activity.provenance),\n instrument=meta.Instrument(\n site=\"CTA-North\",\n class_=\"Array\",\n type_=\"Layout H1B\",\n version=\"1.0\",\n id_=\"threshold\",\n ),\n )\n\n ref_dict = reference.to_dict()\n assert ref_dict[\"CTA PRODUCT FORMAT\"] == \"hdf5\"\n\n import uuid # pylint: disable=import-outside-toplevel\n\n assert str(uuid.UUID(ref_dict[\"CTA PRODUCT ID\"])) == ref_dict[\"CTA PRODUCT ID\"]\n\n # check that we can write this to the header of a typical table file in multiple\n # formats:\n from astropy.table import Table # pylint: disable=import-outside-toplevel\n\n table = Table(dict(x=[1, 2, 3], y=[15.2, 15.2, 14.5]))\n table.meta = ref_dict\n for file_name in [tmp_path / \"test.fits\", tmp_path / \"test.ecsv\"]:\n table.write(file_name)\n\n # write to pytables file\n\n import tables # pylint: disable=import-outside-toplevel\n\n with tables.open_file(tmp_path / \"test.h5\", mode=\"w\") as h5file:\n meta.write_to_hdf5(ref_dict, h5file)",
"def generate_metadata(self) -> None:\n if self.output_anchor_metadata is None:\n output_record_info = self.engine.create_record_info()\n else:\n output_record_info = self.output_anchor_metadata\n\n if self.output_anchor.record_info is None:\n output_record_info.add_field(\"optional_value\", Sdk.FieldType.float)\n self.output_anchor_metadata = output_record_info\n\n self.output_anchor.record_info = self.output_anchor_metadata\n self.push_all_metadata()",
"def vpd(request):\n form = DiseaseForm(request.POST)\n disease = 'Chickenpox'\n if form.is_valid():\n disease = str(form.cleaned_data['disease'])\n\n all_data = pd.DataFrame.from_records(VaxIncidenceRate.objects.all().\n values())\n\n plot_data = vpdplot.VpdPlot(df=all_data, color='black', disease=disease)\n\n ptitle = 'Incidence Rate of %s per 100,000 Population'%disease\n hover = HoverTool(tooltips=[\n (\"Year\", \"$x{int}\"),\n (\"Incidence Rate:\", \"$y\"),\n ])\n\n plt = figure(plot_width=800, plot_height=400, tools=[hover], title=ptitle)\n\n plt.line(plot_data.x_values, plot_data.y_values, color=plot_data.color)\n plt.xaxis.axis_label = \"Year\"\n plt.yaxis.axis_label = \"Incidence Rate per 100,000\"\n plt.add_tools(BoxZoomTool())\n plt.add_tools(ResetTool())\n\n script, div = components(plt)\n\n data = VaxHistory.objects.all()\n\n return render(request, 'vaxcharts/vpd.html',\n {'script' : script, 'div' : div, 'form': form, 'data':data})",
"def addProvenance(self, provenance_on=True):\n self.kwargs['additionalInfo'] = provenance_on",
"def _ks_prepare_odoo_product_tag_data(self, record):\n data = {\n \"name\": record.ks_name,\n \"slug\": record.ks_slug or '',\n \"description\": record.ks_description or ''\n }\n return data",
"def _record(self):\r\n self._plot()\r\n self._csvWriter()\r\n self._logger()",
"def create_vuln_report():",
"def _annotate(self, generation: int):\n # Get pareto front\n pareto_front_scores = np.array(\n [individual.fitness.values for individual in self._population.individuals\n if individual.fitness.rank == 0]\n )\n\n # Calculate hypervolume\n self._evolution['hypervolume'][generation + 1] = hypervolume(pareto_front=pareto_front_scores)\n\n # Get number of solutions on the Pareto front\n self._evolution['num_solutions_front'][generation + 1] = len(pareto_front_scores)\n\n # Get best performance achieved for each objective\n self._evolution['best_values'][generation + 1] = np.max(pareto_front_scores, axis=0)",
"def create(line='', cell=None):\n from plottools import create, dataobj\n ip = get_ipython()\n if not cell:\n cell = line\n line = ''\n args = ip.ev('dict({})'.format(line))\n objs = (eval('dataobj({})'.format(line),\n ip.user_global_ns, dict(dataobj=dataobj))\n for line in cell.splitlines())\n create(*objs, **args)",
"def create_report():\n global inspection_report\n operator_name = request.form['inspectorName']\n inspection_date = request.form['datepicker']\n city = request.form['city']\n street = request.form['street']\n pipe_id = request.form['pipe_id']\n manhole_id = request.form['manhole_id']\n dimensions = request.form['sizes']\n shape = request.form['shapes']\n material = request.form['materials']\n\n inspection_report = InspectionReport(\n operator_name, inspection_date, city, street, pipe_id, manhole_id, dimensions, shape, material)\n # print(inspection_report.toJSON())\n return render_camera_view()",
"def predataShape(self):\n self._predatashape['name']=self._name\n self._predatashape['location'] = self._location\n self._predatashape['origin'] = self._origin\n self._predatashape['width'] = self._width\n self._predatashape['height'] = self._height\n return self._predatashape",
"def _create_report_record(self, trade, common_object, reset_period,\n nominal, provision, short_end_rate, forward_rate):\n pass",
"def plot_precision_figure(self):\n\n data_analysis = DatabaseData(dataframe=self.plot_data)\n prop_data, energy_data, M, C, pred_energy, pred_property = \\\n data_analysis.create_precision_bokeh_compat(self.prop_data, self.energy_data, properties=self.properties)\n p = figure(plot_height=400, plot_width=400,tools=\"pan,wheel_zoom,box_zoom,reset,previewsave\",\\\n x_axis_type=\"log\", y_axis_type=\"log\", x_axis_label='Energy Convergence (meV/atom)', title='Slope M is {0}'.format(str(M)) )\n p.line(pred_energy, pred_property, color='red')\n p.circle(self.energy_data, self.prop_data, color='blue',size=5, line_alpha=0)\n #p.multi_line(xs_err, ys_err, color='black')\n if self.properties == 'B':\n p.yaxis.axis_label = 'Bulk Modulus B (%)'\n elif self.properties == 'dB':\n p.yaxis.axis_label = 'Bulk Modulus Pressure Derivative (%)'\n elif self.properties == 'Multiple':\n p.yaxis.axis_label = \"V0, B, B' (%)\"\n elif self.properties == 'V0':\n p.yaxis.axis_label = 'Volume (%)'\n\n return p",
"def add_expl_data(i_exp, expl_id, prj):\n exp_remarks = tbl_exploration['mem_Remarks'][i_exp] \\\n if not pd.isna(tbl_exploration['mem_Remarks'][i_exp]) \\\n else None\n exp_ewt = tbl_exploration['dbl_DepthToWaterStatic'][i_exp] * 0.00328084 \\\n if not pd.isna(\n tbl_exploration['dbl_DepthToWaterStatic'][i_exp]) else None\n exp_predom = tbl_exploration[\n 'txt_USCSCodePredominant'][i_exp] if not pd.isna(\n tbl_exploration['txt_USCSCodePredominant'][i_exp]) else 'NA'\n exp_predom = uscs_predom[exp_predom]\n exp_type = tbl_exploration['txt_KeyExplorationType'][i_exp]\n exp_elev = tbl_exploration['dbl_GroundElevation'][i_exp] * 0.00328084 \\\n if not pd.isna(\n tbl_exploration['dbl_GroundElevation'][i_exp]) else None\n exploration = Borings(\n name=expl_id,\n project=prj,\n remarks=exp_remarks,\n ewt=exp_ewt,\n predom_soil=exp_predom,\n type=exp_type,\n elevation=exp_elev)\n\n return exploration",
"def generate(self):\n t = (self.context.identifier, RDF.type, META.Provenance)\n if t not in self.context.graph:\n self.context.graph.add(t)\n for name, value in self.data.items():\n pat = (self.context.identifier, META[name], None)\n if pat in self.context.graph:\n self.context.graph.remove(pat)\n self.context.graph.add((pat[0], META[name], Literal(value)))",
"def buildPage(self):\n args = {}\n args['valueCol'] = 'value'\n args['textCol'] = 'size'\n args['y'] = 'index'\n args['x'] = 'number'\n args['orientation'] = 'h'\n args['title'] = ''\n args['x_title'] = ''\n args['y_title'] = ''\n args['height'] = 900\n args['width'] = 900\n\n self.add_basic_layout()\n layout = hpstats.quick_numbers_panel()\n dfs = hpstats.get_db_stats_data()\n plots = []\n plots.append(hpstats.plot_store_size_components(dfs, title='DB Store Size', args=args))\n plots.append(hpstats.plot_node_rel_per_label(dfs, focus='nodes', title='Nodes per Label', args=args))\n plots.append(hpstats.plot_node_rel_per_label(dfs, focus='relationships', title='Relationships per Type', args=args))\n self.extend_layout(layout)\n self.extend_layout(plots)",
"def get_sqlite_plot_hpr(db_file_path: str, plot_title: str):\n # Get an SQLite connection\n conn = sqlite3.connect(db_file_path)\n\n # Query and get a dataframe result\n df_hpr = pd.read_sql_query(\"SELECT dateTime, heading, pitch, roll from ensembles; \", conn)\n\n # Close SQLite connection\n conn.close()\n\n # Create line plots\n line_heading = go.Scatter(x=df_hpr['dateTime'], y=df_hpr['heading'], mode='lines', name='Heading')\n line_pitch = go.Scatter(x=df_hpr['dateTime'], y=df_hpr['pitch'], mode='lines', name='Pitch')\n line_roll = go.Scatter(x=df_hpr['dateTime'], y=df_hpr['roll'], mode='lines', name='Roll')\n\n # Create the figure\n fig_hpr = go.Figure()\n fig_hpr.add_trace(line_heading)\n fig_hpr.add_trace(line_pitch)\n fig_hpr.add_trace(line_roll)\n\n # Set the plot titles\n fig_hpr.update_layout(\n title=plot_title,\n xaxis_title=\"DateTime\",\n yaxis_title=\"Degrees\"\n )\n\n return fig_hpr, df_hpr",
"def export_build_info(promexp: PrometheusExporter, version: str) -> None:\n\n promexp.register(\n name=\"promqtt_build_info\",\n datatype=MetricTypeEnum.GAUGE,\n helpstr=\"Version info\",\n )\n\n promexp.set(name=\"promqtt_build_info\", value=1, labels={\"version\": version})"
]
| [
"0.5756424",
"0.5504841",
"0.54990816",
"0.54645437",
"0.540593",
"0.535219",
"0.52645105",
"0.51810455",
"0.5171368",
"0.5142758",
"0.4949729",
"0.4925848",
"0.48983437",
"0.48819402",
"0.48776308",
"0.48431304",
"0.47729012",
"0.47432384",
"0.4723387",
"0.47224906",
"0.47210598",
"0.47138998",
"0.47104093",
"0.47059685",
"0.47035366",
"0.47003126",
"0.468977",
"0.4687884",
"0.46837986",
"0.46767753"
]
| 0.58652025 | 0 |
Create a provenance record with the rolling mean diagnostic data. | def get_rolling_mean_provenance(self, suptitle, ancestor_files):
suptitle = suptitle.replace('\n', '')
caption = (f"{suptitle}. For CMIP5 ("
f"{self.formatter(f'cmip5-{self.scenarios[0]}')}, "
f"{self.formatter(f'cmip5-{self.scenarios[1]}')} and "
f"{self.formatter(f'cmip5-{self.scenarios[2]}')}) "
f"and CMIP6 "
f"({self.formatter(f'cmip6-{self.scenarios[0]}')}, "
f"{self.formatter(f'cmip6-{self.scenarios[1]}')} and "
f"{self.formatter(f'cmip6-{self.scenarios[2]}')})")
record = {
'caption': caption,
'statistics': ['anomaly', "other"],
'domains': ['reg', 'global'],
'plot_types': ['scatter', 'line', 'times'],
'authors': [
'cos_josep',
],
'references': [
'cos22esd',
],
'ancestors': ancestor_files,
}
return record | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_rolling_mean(self, rm):\n self.data['rolling_mean'] = rm",
"def new_archive_record(self, event):\n end_ts = event.record['dateTime']\n start_ts = end_ts - event.record['interval'] * 60\n\n for topic in self.subscriber.subscribed_topics: # topics might not be cached.. therefore use subscribed?\n self.logger.debug(\"Service record prior to update is: %s %s\"\n % (weeutil.weeutil.timestamp_to_string(event.record['dateTime']),\n to_sorted_string(event.record)))\n target_data = self.subscriber.get_accumulated_data(topic, start_ts, end_ts, event.record['usUnits'])\n event.record.update(target_data)\n self.logger.debug(\"Service record after update is: %s %s\"\n % (weeutil.weeutil.timestamp_to_string(event.record['dateTime']),\n to_sorted_string(event.record)))",
"def mean_STD(self,counter):\n \n \n pass",
"def _create_report_record(self, trade, common_object, reset_period,\n nominal, provision, short_end_rate, forward_rate):\n pass",
"def take_one_averaged(self):\n self.na.set_center_frequency(6.160574e9)\n self.na.set_span(10e6)\n self.na.set_power(-5, 1)\n self.na.set_ifbw(1e3)\n\n self.na.set_query_timeout(40e3)\n set_format = self.na.set_format('polar')\n print \"set_format returned: \", set_format\n self.na.set_trigger_source(\"manual\")\n self.na.set_averages(10)\n self.na.set_trigger_average_mode()\n\n self.na.clear_averages(channel=1)\n self.na.trigger_single(channel=1)\n fpts, xs, ys = self.na.read_data()\n #\n plt.figure()\n plt.plot(fpts, xs)\n plt.plot(fpts, ys)\n plt.show()",
"def _create_summaries(self):\n self._loss_summary = tf.summary.scalar('loss', self._loss)\n self._perplexity_summary = tf.summary.scalar('average_perplexity_per_sentence', self._average_perplexity)",
"def test_mean(log_prob_coo):\n\n def _add_offsets_to_truth(truth: np.ndarray, offset_dict: Dict[int, int]):\n return truth + np.array([offset_dict.get(m, 0) for m in range(len(truth))])\n\n offset_dict = log_prob_coo['offsets']\n\n # the input\n print(log_prob_coo)\n print('input log probs')\n dense = log_prob_sparse_to_dense(log_prob_coo['coo'])\n print(dense)\n\n # with this shape converter, we get one row, where each value is one m\n converter = IndexConverter(total_n_cells=1,\n total_n_genes=log_prob_coo['coo'].shape[0])\n\n # set up and estimate\n estimator = Mean(index_converter=converter)\n noise_csr = estimator.estimate_noise(noise_log_prob_coo=log_prob_coo['coo'],\n noise_offsets=offset_dict)\n\n # output\n print('dense noise count estimate, per m')\n out_per_m = np.array(noise_csr.todense()).squeeze()\n print(out_per_m)\n\n # truth\n brute_force = np.matmul(np.arange(dense.shape[1]), np.exp(dense).transpose())\n brute_force = _add_offsets_to_truth(truth=brute_force, offset_dict=offset_dict)\n print('truth')\n print(brute_force)\n\n # test\n np.testing.assert_allclose(out_per_m, brute_force)",
"def get_provenance_record(caption, ancestor_filenames):\n record = {\n 'caption': caption,\n 'statistics': ['mean'],\n 'domains': ['global'],\n 'plot_type': 'metrics',\n 'authors': [\n 'rumbold_heather',\n 'sellar_alistair',\n ],\n 'references': [\n 'esacci-soilmoisture',\n 'dorigo17rse',\n 'gruber19essd',\n ],\n \"ancestors\": ancestor_filenames,\n }\n\n return record",
"def new_archive_record(self, event):\n # If the record was software generated, then any corrections have\n # already been applied in the LOOP packet.\n if event.origin != 'software':\n for obs_type in self.corrections:\n try:\n event.record[obs_type] = eval(self.corrections[obs_type], None, event.record)\n except (TypeError, NameError):\n pass\n except ValueError, e:\n syslog.syslog(syslog.LOG_ERR, \"engine: StdCalibration archive error %s\" % e)",
"def new_archive_record(self, event):\n print \"REC: \", weeutil.weeutil.timestamp_to_string(event.record['dateTime']), StdPrint.sort(event.record)",
"def mean_baseline(self):\n train_mean = np.mean(self.data.loc[self.train_index, self.target_name])\n rmse = np.sqrt(\n np.mean(np.square(self.data.loc[self.test_index, self.target_name] - train_mean)))\n print 'mean baseline RMSE: {}'.format(rmse)",
"def new_archive_record(self, event):\n \n # Reset the alarm counter\n self.alarm_count = 0",
"def modelmean(self, model_params, this_data, this_suff_stat):\n pass",
"def store_overall_means(src_file: H5File) -> None:\n perp_sum = 0\n par_sum = 0\n ref_sum = 0\n counts = 0\n for path in rawnav.pump_group_paths(src_file):\n perp_path = path + '/perp'\n par_path = path + '/par'\n ref_path = path + '/ref'\n perp_sum += src_file[perp_path].attrs['mean']\n par_sum += src_file[par_path].attrs['mean']\n ref_sum += src_file[ref_path].attrs['mean']\n counts += 1\n src_file.attrs['perp_mean'] = perp_sum / counts\n src_file.attrs['par_mean'] = par_sum / counts\n src_file.attrs['ref_mean'] = ref_sum / counts\n return",
"def getLastAverage(self):\n lastAve=dict()\n lastAve['identifier']=self.lastWaveIdentifier\n lastAve['averageCalculated']=self.lastAverageCalculated \n lastAve['lastAverageArray']=self.lastAverageArray\n return lastAve",
"def create_data_record(self, data_dict):\n source_dict = deepcopy(data_dict)\n assert not self.is_conflicting_keys(data_dict,\n self.default_values), \"Conflicting keys between default_values and extra_values\"\n source_dict.update(self.default_values)\n return {\n '_index': self.get_full_index(),\n '_type': 'python_log',\n '_source': source_dict\n }",
"def publish_load_average(self, load_avg):\n\n event = ThriftEvent()\n event.payloadData.append(int(round(time.time() * 1000)))\n event.payloadData.append(Config.cluster_id)\n event.payloadData.append(Config.cluster_instance_id)\n event.payloadData.append(Config.network_partition_id)\n event.payloadData.append(Config.member_id)\n event.payloadData.append(Config.partition_id)\n event.payloadData.append(constants.LOAD_AVERAGE)\n event.payloadData.append(float(load_avg))\n # event.payloadData.append(str(load_avg))\n\n HealthStatisticsPublisher.log.debug(\"Publishing cep event: [stream] %r [payload_data] %r [version] %r\"\n % (\n self.stream_definition.name,\n event.payloadData,\n self.stream_definition.version))\n\n self.publisher.publish(event)",
"def mean(self, mean):\n\n self._mean = mean",
"def store_noise_means(src_file: H5File) -> None:\n perp_sum = 0\n par_sum = 0\n ref_sum = 0\n counts = 0\n for path in rawnav.pump_group_paths(src_file):\n perp_path = path + '/perp'\n par_path = path + '/par'\n ref_path = path + '/ref'\n perp_sum += src_file[perp_path].attrs['noise']\n par_sum += src_file[par_path].attrs['noise']\n ref_sum += src_file[ref_path].attrs['noise']\n counts += 1\n src_file.attrs['perp_noise_mean'] = perp_sum / counts\n src_file.attrs['par_noise_mean'] = par_sum / counts\n src_file.attrs['ref_noise_mean'] = ref_sum / counts\n return",
"def new_archive_record(self, event):\n dbmanager = self.engine.db_binder.get_manager(self.data_binding)\n dbmanager.addRecord(event.record)",
"def new_entry_update(cls, summary):\n totaltimes = [x.totaltime for x in summary.entries]\n total = sum(totaltimes, timedelta())\n average = total / len(totaltimes)\n summary.total_time = total\n summary.daily_average = average",
"def test_sum_and_average(pawprint_default_tracker_db_with_table):\n\n tracker = pawprint_default_tracker_db_with_table\n\n metadata = str('{\"val\": 1}').replace(\"'\", '\"')\n\n # Add a bunch of events\n query = (\n \"\"\"\n INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),\n ('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),\n ('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),\n ('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')\n \"\"\"\n ).format(table=tracker.table, metadata=metadata)\n\n pd.io.sql.execute(query, tracker.db)\n\n x_sum_daily_all = tracker.sum(\"metadata__val\")\n x_sum_daily = tracker.sum(\"metadata__val\", event=\"logged_in\")\n\n x_avg_daily_all = tracker.average(\"metadata__val\", event=\"logged_in\")\n x_avg_daily = tracker.average(\"metadata__val\", event=\"logged_in\")\n\n assert len(x_sum_daily) == 7\n\n assert np.all(x_sum_daily_all[\"sum\"].values == [3, 2, 1, 1, 1, 2, 3])\n assert np.all(x_sum_daily[\"sum\"].values == [3, 2, 1, 1, 1, 2, 2])\n\n assert np.all(x_avg_daily_all[\"avg\"].values == [1, 1, 1, 1, 1, 1, 1])\n assert np.all(x_avg_daily[\"avg\"] == x_avg_daily_all[\"avg\"])",
"def make_DBLog(subject, event, badge, detail=''):\n app = create_app()\n with app.app_context():\n DBLog.new(subject=subject, scope=\"nox\", badge=badge, message=event, ip='-', user='-', detail=detail)",
"def sample(self, horizon, policy, record_fname=None):\n video_record = record_fname is not None\n recorder = None if not video_record else VideoRecorder(self.env, record_fname)\n\n times, rewards = [], []\n O, A, reward_sum, done = [self.env.reset()], [], 0, False\n\n policy.reset()\n for t in range(horizon):\n if video_record:\n recorder.capture_frame()\n start = time.time()\n A.append(policy.act(O[t], t))\n times.append(time.time() - start)\n\n obs, reward, done, info = self.env.step(A[t])\n\n O.append(obs)\n reward_sum += reward\n rewards.append(reward)\n if done:\n break\n\n if video_record:\n recorder.capture_frame()\n recorder.close()\n\n print(\"Average action selection time: \", np.mean(times))\n print(\"Rollout length: \", len(A))\n\n return {\n \"obs\": np.array(O),\n \"ac\": np.array(A),\n \"reward_sum\": reward_sum,\n \"rewards\": np.array(rewards),\n }",
"def print_mean_loss(self):\n print(f'Moyenne {self.list_name} : {np.mean(np.array(self.min_list[:,0]))}')",
"def accumulate_record(self, params, sample_state, record):\n return self._numerator.accumulate_record(params, sample_state, record)",
"def publish(event: dict):\n return kinesis.put_record(\n StreamName=DATA_STREAM,\n Data=json.dumps(event).encode('utf-8'),\n PartitionKey=randomize_arn(INVENTORY_ARN)\n )",
"def new_archive_record(self, event):\n now = int(time.time() + 0.5)\n delta = now - event.record['dateTime']\n if delta > event.record['interval'] * 60:\n logdbg(\"Skipping record: time difference %s too big\" % delta)\n return\n if self.last_ts is not None:\n self.save_data(self.get_data(now, self.last_ts))\n self.last_ts = now\n #-- TBD: make this tunable on/off via variable\n #-- if self.max_age is not None:\n #-- self.prune_data(now - self.max_age)",
"def summary_info_events(filename):\n # filename = self.out_filename('events')\n print('Reading {}'.format(filename))\n table = Table.read(str(filename), hdu='EVENTS')\n data = dict()\n\n # Copy over header info to the summary table\n data['RA_PNT'] = np.float32(table.meta['RA_PNT'])\n data['DEC_PNT'] = np.float32(table.meta['DEC_PNT'])\n # data['GLON_PNT'] = np.float32(table.meta['GLON_PNT'])\n # data['GLAT_PNT'] = np.float32(table.meta['GLAT_PNT'])\n data['ALT_PNT'] = np.float32(table.meta['ALT_PNT'])\n data['AZ_PNT'] = np.float32(table.meta['AZ_PNT'])\n #data['ZEN_PNT'] = np.float32(90. - table.meta['ALT_PNT'])\n data['ZEN_PNT'] = np.float32(90. - table['ALT'].mean())\n data['ONTIME'] = np.float32(table.meta['ONTIME'])\n data['LIVETIME'] = np.float32(table.meta['LIVETIME'])\n data['DEADC'] = np.float32(table.meta['DEADC'])\n\n MJDREFI = table.meta['MJDREFI']\n MJDREFF = table.meta['MJDREFF']\n MJDREF = MJDREFI + MJDREFF\n\n TSTART_MET = table.meta['TSTART'] / 3600. / 24.\n TSTOP_MET = table.meta['TSTOP'] / 3600. / 24.\n\n start_time = Time(MJDREF + TSTART_MET, scale='tt', format='mjd')\n stop_time = Time(MJDREF + TSTOP_MET, scale='tt', format='mjd')\n\n data['TSTART'] = np.float32(start_time.utc.mjd)\n data['TSTOP'] = np.float32(stop_time.utc.mjd)\n data['TSTART_STR'] = str(start_time.utc.iso[:-4])\n data['TSTOP_STR'] = str(stop_time.utc.iso[:-4])\n\n data['N_TELS'] = table.meta['N_TELS']\n data['TELLIST'] = table.meta['TELLIST']\n try:\n data['OBJECT'] = table.meta['OBJECT']\n except KeyError:\n data['OBJECT'] = \"\"\n data['RA_OBJ'] = np.float32(table.meta['RA_OBJ'])\n data['DEC_OBJ'] = np.float32(table.meta['DEC_OBJ'])\n\n # data['OBS_MODE'] = table.meta['OBS_MODE']\n\n try:\n data['MUONEFF'] = np.float32(table.meta['MUONEFF'])\n except KeyError:\n data['MUONEFF'] = np.float32(-1)\n\n # Calculate some summary statistics for important event columns\n data['EVENT_COUNT'] = len(table)\n data['EVENT_TIME_MIN'] = table['TIME'].min()\n data['EVENT_TIME_MAX'] = table['TIME'].max()\n data['EVENT_ENERGY_MEDIAN'] = np.float32(np.median(table['ENERGY']))\n data['EVENT_RA_MEDIAN'] = np.float32(np.median(table['RA']))\n data['EVENT_DEC_MEDIAN'] = np.float32(np.median(table['DEC']))\n\n return data",
"def gen_record_item(record: Tuple[MeasureInput, MeasureResult]):\n return {\"latency\": np.mean([v.value for v in record[1].costs])}"
]
| [
"0.5613848",
"0.50898737",
"0.5073275",
"0.5069689",
"0.5012415",
"0.50077665",
"0.499012",
"0.49855378",
"0.49821198",
"0.49237096",
"0.4893636",
"0.47850022",
"0.47435543",
"0.47421452",
"0.47396365",
"0.47319072",
"0.47303843",
"0.47267595",
"0.47252092",
"0.46979374",
"0.4687377",
"0.46802807",
"0.46542257",
"0.46529672",
"0.46156535",
"0.46050274",
"0.4601418",
"0.45856997",
"0.45835164",
"0.458036"
]
| 0.66347843 | 0 |
validate Prophet parameters. This method validates some key parameters including growth rate and custom_seasonalities. | def validate_params(self) -> None:
# cap must be given when using logistic growth
if (self.growth == "logistic") and (self.cap is False):
msg = "Capacity must be provided for logistic growth"
logging.error(msg)
raise ValueError(msg)
# If custom_seasonalities passed, ensure they contain the required keys.
reqd_seasonality_keys = ["name", "period", "fourier_order"]
if not all(
req_key in seasonality
for req_key in reqd_seasonality_keys
for seasonality in self.custom_seasonalities
):
msg = f"Custom seasonality dicts must contain the following keys:\n{reqd_seasonality_keys}"
logging.error(msg)
raise ValueError(msg)
# If extra_regressors passed, ensure they contain the required keys.
all_regressor_keys = {"name", "prior_scale", "mode"}
for regressor in self.extra_regressors:
if not isinstance(regressor, dict):
msg = f"Elements in `extra_regressor` should be a dictionary but receives {type(regressor)}."
_error_msg(msg)
if "name" not in regressor:
msg = "Extra regressor dicts must contain the following keys: 'name'."
_error_msg(msg)
if not set(regressor.keys()).issubset(all_regressor_keys):
msg = f"Elements in `extra_regressor` should only contain keys in {all_regressor_keys} but receives {regressor.keys()}."
_error_msg(msg)
self._reqd_regressor_names = [
regressor["name"] for regressor in self.extra_regressors
]
# check floor and cap
if (self.cap is not False) and ("cap" not in self._reqd_cap_floor_names):
self._reqd_cap_floor_names.append("cap")
if self.floor is not False and ("floor" not in self._reqd_cap_floor_names):
self._reqd_cap_floor_names.append("floor") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _data_params_validation(self) -> None:\n extra_regressor_names = set(self.params._reqd_regressor_names)\n # univariate case\n if self.data.is_univariate():\n if len(extra_regressor_names) != 0:\n msg = (\n f\"Missing data for extra regressors: {self.params._reqd_regressor_names}! \"\n \"Please include the missing regressors in `data`.\"\n )\n raise ValueError(msg)\n # multivariate case\n else:\n value_cols = set(self.data.value.columns)\n if \"y\" not in value_cols:\n msg = \"`data` should contain a column called `y` representing the responsive value.\"\n raise ValueError(msg)\n if not extra_regressor_names.issubset(value_cols):\n msg = f\"`data` should contain all columns listed in {extra_regressor_names}.\"\n raise ValueError(msg)\n # validate cap\n if (self.params.cap is True) and (\"cap\" not in self.data.value.columns):\n msg = \"`data` should contain a column called `cap` representing the cap when `cap = True`.\"\n _error_msg(msg)\n # validate floor\n if (self.params.floor is True) and (\"floor\" not in self.data.value.columns):\n msg = \"`data` should contain a column called `floor` representing the floor when `floor = True`.\"\n _error_msg(msg)",
"def _validate_params(self):\n raise NotImplementedError('Must be implemented in subclasses.')",
"def validate_parameters(self):\n\n # env and fixed_env\n self._validate_envs()\n # checking optional data and scripts\n self._validate_download_data()\n self.data_path = self.params[\"data\"][\"location\"]\n self._validate_scripts()\n # checking optional data_ref (if not data_ref provided, path is the same as data path)\n if \"data_ref\" in self.params:\n self._validate_download_data(data_nm=\"data_ref\")\n# self.data_ref_path = self.params[\"data_ref\"][\"location\"]\n# else:\n# self.data_ref_path = self.data_path\n # checking analysis\n self._validate_analysis()\n # checking tests\n self._validate_tests()\n\n self.params.setdefault(\"post_build\", None)\n # if copy in post_build part that I'm changing the build_context\n if self.params[\"post_build\"] and \"copy\" in self.params[\"post_build\"]:\n self.build_context = self.workflow_path\n else:\n self.build_context = self.working_dir\n\n self.params.setdefault(\"plots\", [])\n if self.params[\"plots\"]:\n if not isinstance(self.params[\"plots\"], (list, tuple)):\n raise SpecificationError(\n \"Value of key 'plots' must be a list or a tuple\"\n )\n else:\n if any(not isinstance(j, dict) for j in self.params[\"plots\"]):\n raise SpecificationError(\n \"Every item in 'plots' must be a dictionary.\"\n )",
"def validate_parameters(self):\n\n flag = True\n warnings = \"\"\n # Check radius\n r = self.parameters.get('r', 0)\n if type(r) not in [int, float]:\n flag = False\n warnings += \"Radius r must be a float value\\n\"\n else:\n if r <= 0:\n flag = False\n warnings += \"Radius r must be higher than 0\\n\"\n # Check if is full penetrating\n op = self.parameters.get('full', False)\n\n if not op:\n # Check observation well length\n if 'd' in self.parameters and 'l' in self.parameters:\n d = self.parameters.get('d', -1)\n l = self.parameters.get('l', -1)\n if type(l) not in [int, float]:\n flag = False\n warnings += \"Depth of well bottom must be a float value\\n\"\n else:\n if l < 0:\n flag = False\n warnings += \"Depth l must be higher than 0\\n\"\n if type(d) not in [int, float]:\n flag = False\n warnings += \"Depth of well screen must be a float value\\n\"\n else:\n if d < 0 or d > l:\n flag = False\n warnings += \"Depth d must be in range 0 <= d <= l\\n\"\n # Check piezometer depth\n elif 'z' in self.parameters:\n z = self.parameters.get('z', -1)\n if type(z) not in [int, float]:\n flag = False\n warnings += \"Depth of piezometer must be a float value\\n\"\n else:\n if z < 0:\n flag = False\n warnings += \"Depth z must be higher than 0\\n\"\n else:\n flag = False\n warnings += \"Well don't contain well depth attributes\\n\"\n return(flag, warnings) # End Function",
"def _validate(self):\n for p in self.parameters:\n #Check for missing required parameters:\n if p.is_required and not(p.is_set):\n raise ValueError(\"Parameter %s is not set.\" \\\n % p.names[-1])\n #Also repeat the parameter validation here, just in case?",
"def _validate_parameters(self, epochs, log_interval):\n\n if not epochs > 0:\n msg = (\n \"The number of training epochs = {} should be strictly\"\n \" positive.\"\n )\n self.logger.error(msg.format(epochs))\n raise ValueError(msg.format(epochs))\n\n if not log_interval > 0:\n msg = (\n \"The number of batches to wait before printting the\"\n \" training status should be strictly positive, but got {}\"\n \" instead.\"\n )\n self.logger.error(msg.format(log_interval))\n raise ValueError(msg.format(log_interval))\n\n if not 0 < self.shrinkage_rate <= 1:\n msg = (\n \"The shrinkage rate should be in the range (0, 1], but got\"\n \" {} instead.\"\n )\n self.logger.error(msg.format(self.shrinkage_rate))\n raise ValueError(msg.format(self.shrinkage_rate))",
"def validate(self):\n if self.params.get(\"format\"):\n if self.params[\"format\"] not in formats:\n raise ValueError(f\"format must be one of {formats}: {self.dt}\")\n for p in self.required:\n if not self.params.get(p):\n raise ValueError(f\"{p} missing: {self.dt}\")",
"def validate_input_params(self):\n if isinstance(self.parameters, dict):\n # Setup the mandatory params for snowflake load\n mandatory_keys = ('load_type', 'hive_database', 'hive_table', 'sfSchema', 'sfTable', 'sfGrantee_roles')\n if not all(key in self.parameters for key in mandatory_keys):\n logging.info(\"Mandatory keys for GenieSnowflakeOperator(parameters): %s\\n\" % format(mandatory_keys))\n logging.error(\"Mandatory key(s) NOT exists in GenieSnowflakeOperator(parameters): %s\\n\" % format(self.parameters))\n raise Exception(\"Job failed\")\n\n # Setting up pre,post and grants scripts for snowflake\n self.sfPresteps_sql = self.parameters.get('sfPresteps_sql', self.sfPresteps_sql)\n self.sfPoststeps_sql = self.parameters.get('sfPoststeps_sql', self.sfPoststeps_sql)\n self.sfPostgrants_sql = self.parameters.get('sfPostgrants_sql', self.sfPostgrants_sql)\n else:\n logging.error(\"Input is NOT a dictionary: %s\\n\" % format(self.parameters))\n raise Exception(\"Job failed\")",
"def _validate(self):\n self.params['report date'] = None\n if any(self.params.values()):\n s = self.params['start']\n e = self.params['end']\n cond1 = s is None\n cond2 = e is None\n \n if cond1 and not cond2:\n self.params['report date'] = e\n if not cond1 and cond2:\n self.params['report date'] = s\n if not cond1 and not cond2:\n if s == e:\n self.params['report date'] = s\n else:\n if s > e:\n self.params['start'] = e\n self.params['end'] = s\n else:\n self.params['report date'] = MAX_DATE",
"def _validate_params(title, start, end, description, show_me_as):\n if start and end:\n start_date = datetime.datetime.strptime(start, '%Y-%m-%d')\n end_date = datetime.datetime.strptime(end, '%Y-%m-%d')\n if start_date > end_date:\n raise ValueError(\"Start date cannot be after end date}\")\n\n if title and not isinstance(title, str):\n raise TypeError(\"title must be a string\")\n\n if description and not isinstance(description, str):\n raise TypeError(\"description must be a string\")\n\n if show_me_as and transparency_d.get(show_me_as) is None:\n raise ValueError(f\"Invalid value ({show_me_as}) for show_me_as. \"\n f\"Accepted values are: {list(transparency_d.keys())}\")",
"def _validate_parameters(self):\n errors = []\n for key in self.PARAMETERS.keys():\n if key not in self.request_obj.data_params:\n errors.append(key)\n\n if errors:\n raise DataParsingError('Following data items are missing: {}'.format(', '.join(errors)))\n\n for key, params in self.PARAMETERS.items():\n params[0].validate_type(key, self.request_obj.data_params.get(key), params[1])",
"def _validate_hyperparameters(self):\n\n if (self.reg_gamma < 0) or (self.reg_gamma > 1):\n raise ValueError(\"reg_gamma must be >= 0 and <1, got %s.\" % self.reg_gamma)\n \n if self.xmin > self.xmax:\n raise ValueError(\"xmin must be <= xmax, got %s and %s.\" % (self.xmin, self.xmax))",
"def _validate(self):\n if not isinstance(self.parameter_schema, dict):\n raise TypeError(\"parameter_schema must be a dictionary\")\n # TODO: Settle on an input file schema and validation library\n if 'num_simulations' not in self.parameter_schema.keys():\n raise AttributeError(\"Parameter schema is missing the required 'num_simulations' key\")\n elif not isinstance(self.parameter_schema['num_simulations'], int):\n raise TypeError(\"Parameter schema 'num_simulations' must be an integer.\")\n self._create_parameter_names()\n for name in self._parameter_names:\n parameter_keys = self.parameter_schema[name].keys()\n parameter_definition = self.parameter_schema[name]\n if 'distribution' not in parameter_keys:\n raise AttributeError(f\"Parameter '{name}' does not contain the required 'distribution' key\")\n elif not isinstance(parameter_definition['distribution'], str) or \\\n not parameter_definition['distribution'].isidentifier():\n raise TypeError(f\"Parameter '{name}' distribution '{parameter_definition['distribution']}' is not a \" \\\n \"valid Python identifier\")\n else:\n for key in parameter_keys:\n if not isinstance(key, str) or not key.isidentifier():\n raise TypeError(f\"Parameter '{name}' keyword argument '{key}' is not a valid \" \\\n \"Python identifier\")\n # TODO: Raise an execption if the current parameter distributions don't match the previous_parameter_study\n self.parameter_distributions = self._generate_parameter_distributions()",
"def validate_parameters(self):\n\n flag = True\n warnings = \"\"\n # Check radius\n r = self.parameters.get('rw', 0)\n if type(r) not in [int, float]:\n flag = False\n warnings += \"Well radius rw must be a float value\\n\"\n else:\n if r <= 0:\n flag = False\n warnings += \"Well radius rw must be higher than 0\\n\"\n # Check if is full penetrating\n op = self.parameters.get('full', False)\n\n if not op:\n # Check observation well length\n if 'd' in self.parameters and 'l' in self.parameters:\n d = self.parameters.get('d', -1)\n l = self.parameters.get('l', -1)\n if type(l) not in [int, float]:\n flag = False\n warnings += \"Depth of well bottom must be a float value\\n\"\n else:\n if l < 0:\n flag = False\n warnings += \"Depth l must be higher than 0\\n\"\n if type(d) not in [int, float]:\n flag = False\n warnings += \"Depth of well screen must be a float value\\n\"\n else:\n if d < 0 or d > l:\n flag = False\n warnings += \"Depth d must be in range 0 <= d <= l\\n\"\n return(flag, warnings) # End Function",
"def validate_parameters(hyperparams):\n try:\n # Check Hyperparameter Type\n if not isinstance(hyperparams, dict):\n raise ValueError('Provided hyperparameter is not valid.')\n\n # Global Hyperparameter Check\n if 'global' in hyperparams:\n params = hyperparams['global']\n else:\n raise ValueError('Global parameters have not been defined.')\n\n if 'learning_rate' in params:\n if params['learning_rate'] < 0:\n raise ValueError('learning_rate={} must be strictly '\n 'positive'.format(params['learning_rate']))\n else:\n raise ValueError('learning_rate has not been defined.')\n\n if 'loss' in params:\n if params['loss'] not in VALID_LOSSES:\n raise ValueError('Loss {} is currently not supported.'\n 'Accpted losses: {}'.format(params['loss'],\n ', '.join(VALID_LOSSES)))\n else:\n raise ValueError('loss has not been defined.')\n\n if 'num_classes' in params:\n # Validate Class Parameter Types\n if type(params['num_classes']) is not int:\n raise ValueError('Provided classes value\\'s type is not valid, '\n 'should be an int value >= 2 for classification.')\n\n # Validate Classification Case\n if params['loss'] != 'least_squares':\n if params['num_classes'] < 0:\n raise ValueError('Provided class value must be >= 2 for '\n 'classification.')\n\n if params['loss'] == 'binary_crossentropy' and \\\n params['num_classes'] != 2:\n raise ValueError('Binary class models must have class of 2.')\n elif params['loss'] == 'categorical_crossentropy' and \\\n params['num_classes'] <= 2:\n raise ValueError('Multiclass models must have class > 2.')\n elif params['loss'] == 'auto':\n if params['num_classes'] < 2:\n raise ValueError('Class value must be >= 2.')\n else:\n logging.warning(\n 'Obtaining class labels based on local dataset. '\n 'This may cause failures during aggregation '\n 'when parties have distinctive class labels.')\n else:\n # Handle Classes Not Defined Case\n if params['loss'] != 'least_squares':\n raise ValueError('Classes has not been defined. Should provide '\n 'a value >= 2 for classification models.')\n\n if 'max_bins' in params:\n if not (2 <= params['max_bins'] and params['max_bins'] <= 255):\n raise ValueError('max_bins={} should be no smaller than 2 '\n 'and no larger than 255.'.format(params['max_bins']))\n\n if 'max_iter' in params:\n if params['max_iter'] < 1:\n raise ValueError('max_iter={} must not be smaller '\n 'than 1.'.format(params['max_iter']))\n else:\n raise ValueError('max_iter has not been defined.')\n\n if 'max_depth' in params:\n if params['max_depth'] is not None and params['max_depth'] <= 1:\n raise ValueError('max_depth={} must be strictly greater'\n 'than 1.'.format(params['max_leaf_nodes']))\n\n if 'max_leaf_nodes' in params:\n if params['max_leaf_nodes'] is not None and params['max_leaf_nodes'] <= 1:\n raise ValueError('max_leaf_nodes={} must be strictly greater'\n 'than 1.'.format(params['max_leaf_nodes']))\n\n if 'min_samples_leaf' in params:\n if params['min_samples_leaf'] is not None and params['min_samples_leaf'] < 0:\n raise ValueError('min_sample_leaf={} must not be smaller '\n 'than 0'.format(params['min_samples_leaf']))\n\n except Exception as ex:\n logger.exception(str(ex))\n raise HyperparamsException('Defined global hyperparameters malformed.')",
"def check_params(self):\r\n \r\n # TODO: More cases?\r\n\r\n if self.N <= 0:\r\n print('Bad Parameter: N')\r\n \r\n if self.Ha_tally <= 0 or self.Ha_tally > self.N:\r\n print('Bad Parameter: Reported winner tally')\r\n \r\n if len(self.round_sched) < 1 or not self.check_inc_sched(self.round_sched):\r\n print('Bad Parameter: Round Schedule')\r\n\r\n if self.alpha <= 0 or self.alpha >= .5:\r\n print('Bad Parameter: Alpha')",
"def validate_parameters(self):\n #################### metrics_params/metrics ####################\n if (self.metrics is not None) and (\"metrics\" in self.metrics_params.keys()):\n raise ValueError(\n \"`metrics` may be provided as a kwarg, or as a `metrics_params` key, but NOT BOTH. Received: \"\n + f\"\\n `metrics`={self.metrics}\\n `metrics_params`={self.metrics_params}\"\n )\n else:\n _metrics_alias = \"metrics\"\n if self.metrics is None:\n try:\n self.metrics = self.metrics_params[\"metrics\"]\n except KeyError:\n self.metrics = self.metrics_params[\"metrics_map\"]\n _metrics_alias = \"metrics_map\"\n self.metrics = format_metrics(self.metrics)\n self.metrics_params = {**{_metrics_alias: self.metrics}, **self.metrics_params}",
"def validate_params(self, params: Scenario) -> bool:\n valid = True\n # Make sure all needed parameters were provided\n valid = valid and \"R\" in params\n valid = valid and \"L\" in params\n\n # Make sure all parameters are physically valid\n valid = valid and params[\"R\"] > 0\n valid = valid and params[\"L\"] > 0\n\n return valid",
"def _validate(self):\n if not isinstance(self.parameter_schema, dict):\n raise TypeError(\"parameter_schema must be a dictionary\")\n try:\n self._parameter_names = self.parameter_schema['parameter_names']\n except KeyError:\n raise KeyError('parameter_schema must contain the key: parameter_names')\n if 'parameter_samples' not in self.parameter_schema:\n raise KeyError('parameter_schema must contain the key: parameter_samples')\n # Always convert to numpy array for shape check and _generate()\n else:\n self.parameter_schema['parameter_samples'] = numpy.array(self.parameter_schema['parameter_samples'],\n dtype=object)\n if len(self._parameter_names) != self.parameter_schema['parameter_samples'].shape[1]:\n raise ValueError(\"The parameter samples must be an array of shape MxN, \"\n \"where N is the number of parameters.\")\n return",
"def validate(env):\n exit_code = ErrorCode.NO_ERROR\n if not env.function_name:\n print('Mandatory parameter (function_name) is missing')\n PARSER.print_help()\n exit_code = ErrorCode.MANDATORY_PARAM_MISSING\n if not MetricsUtil.validate_date(env.start_datetime, env.end_datetime):\n PARSER.print_help()\n exit_code = ErrorCode.WRONG_DATE\n if exit_code != ErrorCode.NO_ERROR:\n MetricsUtil.bail_out(ErrorCode.MANDATORY_PARAM_MISSING)\n \n return True",
"def check_param(self):\n if scipy.ndim(self.param['initial_heading'].shape) > 1:\n raise(ValueError, 'initial_heading must have ndim=1')\n\n equal_shape_list = ['x_start_position','y_start_position','flight_speed','release_time']\n for item in equal_shape_list:\n if self.param[item].shape != self.param['initial_heading'].shape:\n raise(ValueError, '{0}.shape must equal initial_heading.shape'.format(item))",
"def _validate_params(self):\n assert set(self.required_params) - set(self._params) == set()\n for par, val in self.optional_params.items():\n if par not in self._params:\n self._params[par] = val",
"def validate_inputs(self, input_dict):\n required_keys = {\n 'start_delay_hours',\n 'mission_time_hours',\n 'critical_wind_speed_m_per_s',\n 'wind_height_of_interest_m',\n 'wind_shear_exponent',\n 'weather_window'\n }\n found_keys = set(input_dict.keys())\n if len(required_keys - found_keys) > 0:\n err_msg = '{}: did not find all required keys in inputs dictionary. Missing keys are {}'\n raise ValueError(err_msg.format(type(self).__name__, required_keys - found_keys))",
"def _validate_query_parameters(self):\n check_years(self._years)\n check_geo_hierarchy(self.for_geo, self.in_geo)\n check_geo_estimates(self.estimate, self.for_geo)\n return True",
"def validate_params(self) -> None:\n if isinstance(self.hamiltonian, PauliSumOp) and isinstance(\n self.hamiltonian.coeff, ParameterExpression\n ):\n raise ValueError(\"A global parametrized coefficient for PauliSumOp is not allowed.\")",
"def attribute_validation(cls, values: dict) -> dict:\n if not (total := values.get('total')):\n raise ValueError(\"Total attribute is required.\")\n \n if not (quantity := values.get('quantity')):\n raise ValueError(\"Quantity attribute is required.\")\n \n if not (symbol := values.get('symbol')):\n raise ValueError(\"Symbol attribute is required.\")\n\n filter = symbol.filters.market_lot_size_filter\n # if ONE :=1 and not filter.min_qty <= total <= filter.max_qty:\n # raise ValueError(\"The quantity is not in valid range.\")\n\n if filter.step_size and not is_valid_significant_digits(\n total,\n symbol.qty_decimal_precision\n ):\n raise ValueError(\"The quantity precision is not valid.\")\n\n return values",
"def _ValidateParameters(event_type, parameters, properties='properties'):\n # TODO(b/142421197): Validate nested objects and convert non-string types\n _CheckUnknownParameters(\n event_type,\n [p.name for p in getattr(event_type.crd, properties)],\n parameters.keys())\n _CheckMissingRequiredParameters(\n event_type,\n [p.name for p in getattr(event_type.crd, properties) if p.required],\n parameters.keys())",
"def _check_params(self):\n if self.k_initial <= 0 :\n raise ValueError('Initial K should be 1 or more.')",
"def _validate_parameters(self):\n super()._validate_parameters()\n\n #################### callbacks ####################\n self.callbacks = check_callback(self.callbacks)",
"def evaluate_parameters(rule_parameters):\n # print parameters if in DEBUG mode\n if DEBUG:\n print(\"eval_param ma:\",rule_parameters['MinimumAge'])\n\n if rule_parameters:\n # check int parameter\n minimum_age=rule_parameters['MinimumAge']\n if DEBUG:\n print(\"Checking int (Line 233)\")\n print(type(minimum_age))\n try:\n minimum_age=int(minimum_age)\n # a key over 1 year old kinda defaults the purpose of this rule so give and error\n # a key 0 days old or a negative number is not appropriate as well\n if minimum_age < 1 or minimum_age > 365:\n raise ValueError('MinimumAge must be between 1 and 365')\n except:\n raise ValueError('MinimumAge parameter needs to be an integer (no quotes).')\n else:\n print(\"rule_parameters is False\")\n\n valid_rule_parameters = rule_parameters\n\n if DEBUG:\n print(\"Exiting evaluate_parameters\")\n \n return valid_rule_parameters"
]
| [
"0.6698862",
"0.6689876",
"0.6623971",
"0.65948725",
"0.65588653",
"0.6514061",
"0.6488414",
"0.64832795",
"0.6465186",
"0.64392763",
"0.64350533",
"0.6429519",
"0.6407982",
"0.63650566",
"0.6298115",
"0.62283796",
"0.62046695",
"0.61903995",
"0.61821324",
"0.6179447",
"0.6130024",
"0.61021274",
"0.6095948",
"0.60893214",
"0.6069539",
"0.60397226",
"0.6038047",
"0.6038016",
"0.6034618",
"0.5974998"
]
| 0.7009893 | 0 |
Validate whether `data` contains specified regressors or not. | def _data_params_validation(self) -> None:
extra_regressor_names = set(self.params._reqd_regressor_names)
# univariate case
if self.data.is_univariate():
if len(extra_regressor_names) != 0:
msg = (
f"Missing data for extra regressors: {self.params._reqd_regressor_names}! "
"Please include the missing regressors in `data`."
)
raise ValueError(msg)
# multivariate case
else:
value_cols = set(self.data.value.columns)
if "y" not in value_cols:
msg = "`data` should contain a column called `y` representing the responsive value."
raise ValueError(msg)
if not extra_regressor_names.issubset(value_cols):
msg = f"`data` should contain all columns listed in {extra_regressor_names}."
raise ValueError(msg)
# validate cap
if (self.params.cap is True) and ("cap" not in self.data.value.columns):
msg = "`data` should contain a column called `cap` representing the cap when `cap = True`."
_error_msg(msg)
# validate floor
if (self.params.floor is True) and ("floor" not in self.data.value.columns):
msg = "`data` should contain a column called `floor` representing the floor when `floor = True`."
_error_msg(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fitRegressor(self, data):\r\n if data.SETS == 2:\r\n self.regressor.fit(data.trainX, data.trainy)",
"def validate(self, data):\n return any(imap(lambda validator: validate_common(validator, data), self.validators))",
"def check_regressor(self):\n\n # Sklearn and Mlxtend stacking regressors, as well as \n # LightGBM, XGBoost, and CatBoost regressor \n # do not adhere to the convention.\n try:\n super().check_regressor\n except:\n print(f'{_MODEL_DICT[self.regressor_choice]} does not adhere to sklearn conventions.')",
"def check_regressor(self):\n\n return sklearn.utils.estimator_checks.check_estimator(self._regressor)",
"def validate(data):\n if 'value' not in data or \\\n 'category' not in data or \\\n 'classification' not in data or \\\n 'account' not in data:\n raise Exception('Missing required field.')\n classifications = ['Personal', 'Essential', 'Savings', 'Income']\n if data['classification'] not in classifications:\n raise Exception('Invalid classification.')",
"def term_restrictions(data):\n\n term = [\"1st\", \"2nd\", \"3rd\", \"1ST\", \"2ND\", \"3RD\"]\n if data not in term:\n return False\n return True",
"def form_restrictions(data):\n\n form = [\"1\", \"2\", \"3\", \"4\"]\n if data not in form:\n return False\n return True",
"def _check_data_valid(self):\n\n is_valid = (sum(~np.isnan(self.data).flatten()) > 0 and self.data.flatten().sum() != 0)\n if not is_valid:\n raise FITSException(f\"No data in {self.survey}\")",
"def _isvalid(self, data):\n if data is None:\n return False\n elif isinstance(data, (list,tuple)):\n if len(data) <= 0:\n return False\n else:\n return True\n elif isinstance(data, (np.ndarray)):\n if data.size <= 0:\n return False\n else:\n return True\n elif not data:\n return False\n else:\n return True",
"def is_regressor(estimator):\n return getattr(estimator, \"_estimator_type\", None) == \"regressor\"",
"def validate_matrix(self, data, **kwargs):\n validate_matrix(data.get(\"params\"))",
"def validate_data(self):\n for pattern in self.patterns:\n if pattern == \"\":\n self.patterns.remove(\"\")\n\n if not self.patterns:\n print(\"WARNING! Missing pattern or empty string!\")\n sys.exit()",
"def validator(data):\n\n request_validator = cerberus.Validator(SCHEMA)\n if request_validator.validate(data):\n return True\n else:\n return request_validator.errors",
"def _check_input(self, X):\n symbols = np.concatenate(X)\n if len(symbols) == 1: # not enough data\n raise ValueError(\"expected at least 1 observation \"\n \"but none found.\")\n elif (symbols < 0).any(): # contains negative integers\n raise ValueError(\"expected non-negative features \"\n \"for each observation.\")\n elif X.shape[1] > 1: # contains to many features\n raise ValueError(\"expected only 1 feature but got {0} \"\n \"for each observation.\".format(X.shape[1]))\n else:\n return True",
"def __validate_node_data(self, data):\n\n # skipping check of 'grapheap_node_id' optimisation key\n if all(key in data for key in self.optimisation_keys[1:]):\n return True\n\n else:\n missing_keys = [\n x for x in self.optimisation_keys[1:] if x not in data]\n raise ValueError(\"Grapheap Error: \" + str(missing_keys) +\n \" optimisation keys missing in data\")",
"def validate(cls, data, errors):",
"def is_sklearn_regressor(obj):\n return is_sklearn_estimator(obj) and sklearn_scitype(obj) == \"regressor\"",
"def is_valid(self, data_model: DataModel) -> bool:\n if data_model is None:\n return True\n\n return all(c.is_valid(data_model) for c in self.constraints)",
"def is_valid(self, data_model: DataModel) -> bool:",
"def valid(data, requirement):\n try:\n validate(data, requirement)\n except ValidationError:\n return False\n return True",
"def check_regularizer(self) -> None:\n if \"REGULARIZER\" not in self.config:\n return None\n\n regularizer_names = get_class_names_in_files(\n \"src\" + os.path.sep + \"regularizers.py\"\n )\n\n # Check config regularizer exists\n assert self.config[\"REGULARIZER\"] in regularizer_names\n\n # Run regularizer config check\n params: Dict[str, Any] = self.config[\"REGULARIZER_PARAMS\"]\n\n if self.config[\"REGULARIZER\"] == \"BnWeight\":\n assert \"coeff\" in params\n assert params[\"coeff\"] > 0.0\n assert isinstance(params[\"coeff\"], float)",
"def validate(self, data):\n if data.has_key('site'):\n if FieldSightXF.objects.filter(\n xf__id=data['xf'], is_staged=False, is_scheduled=True, site=data['site']).exists():\n raise serializers.ValidationError(\"Form Already Exists, Duplicate Forms Not Allowded\")\n elif data.has_key('project'):\n if FieldSightXF.objects.filter(\n xf__id=data['xf'], is_staged=False, is_scheduled=True, project=data['project']).exists():\n raise serializers.ValidationError(\"Form Already Exists, Duplicate Forms Not Allowded\")\n return data",
"def check(self, data):\r\n if isinstance(data, Iterable):\r\n data = \"\".join([str(x) for x in data])\r\n try:\r\n data = str(data)\r\n except UnicodeDecodeError:\r\n return False\r\n if not data:\r\n return False\r\n return bool(self.__regexp.match(data))",
"def is_valid(self, dataset):\n pass",
"def test_no_data(self):\n self.assertRaises(NoDataError, lambda: GroupLinearRegression([], []))",
"def _run_extra_validators(self, data):\n errors = defaultdict(list)\n for validator in self.get_extra_validators():\n validator.set_instance(self.instance)\n try:\n validator(data)\n except ValidationError as exc:\n for field, field_errors in exc.detail.items():\n errors[field] += field_errors\n return errors",
"def is_valid(self, data_model: DataModel) -> bool:\n return all(constraint.is_valid(data_model) for constraint in self.constraints)",
"def is_valid(self, data_model: DataModel) -> bool:\n return all(constraint.is_valid(data_model) for constraint in self.constraints)",
"def valid_user_data(user_data):\n return 'account_ids' in user_data and 'monthly_expenses' in user_data",
"def validate_list(validators, data):\n if type(data) is not list:\n return False\n n_validators = len(validators)\n if n_validators == 0:\n return len(data) == 0\n elif n_validators == 1:\n validator = validators[0]\n return all(imap(lambda item: validate_common(validator, item), data))\n elif n_validators > 1:\n raise NotImplementedError(\"You cannot specify more than one validator for list at the moment.\")"
]
| [
"0.66148984",
"0.6360852",
"0.63539565",
"0.6292314",
"0.58856297",
"0.5856178",
"0.5741423",
"0.57359177",
"0.5584346",
"0.5562927",
"0.5553923",
"0.55270314",
"0.552547",
"0.5523597",
"0.551913",
"0.55061245",
"0.5425437",
"0.5414315",
"0.5379005",
"0.5355849",
"0.5340424",
"0.53103375",
"0.53100955",
"0.5295068",
"0.5268151",
"0.5259975",
"0.5249722",
"0.5249722",
"0.52343285",
"0.5230511"
]
| 0.67881787 | 0 |
get default parameter search space for Prophet model | def get_parameter_search_space() -> List[Dict[str, object]]:
return get_default_prophet_parameter_search_space() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def default_params():\n params = {}\n params['dataset'] = 'adult'\n params['engines'] = ['MD','RDA']\n params['iters'] = 10000\n params['epsilon'] = 1.0\n params['delta'] = 0.0\n params['bounded'] = True\n params['frequency'] = 1\n params['seed'] = 0\n params['save'] = None\n params['load'] = None\n params['plot'] = None\n\n return params",
"def getDefaultParameterValues(self):\r\n dct = {}\r\n self.initializeRoadRunnerModel()\r\n self.roadrunnerModel.reset()\r\n for parameterName in self.parametersToFit:\r\n dct[parameterName] = self.roadrunnerModel.model[parameterName]\r\n return dct",
"def default_parameters():\n prm = Parameters('windkessel_model')\n\n prm.add('total_volume', float())\n\n prm.add('venous_compliance', float())\n prm.add('arterial_compliance', float())\n\n prm.add('venous_resistance', float())\n prm.add('arterial_resistance', float())\n prm.add('peripheral_resistance', float())\n\n prm.add('venous_resting_volume', float())\n prm.add('arterial_resting_volume', float())\n\n return prm",
"def getDefaultParams():\n defpar = [\n # coordinate system\n ['crd_sys', \"'sph'\", 'Coordinate system'],\n ['nx', '[60, 40, 30]', 'Number of grid points in the first dimension'],\n ['xbound', '[0.1*au, 30.*au, 110.*au, 250.*au]', 'Number of radial grid points'],\n ['ny', '[10,30, 30, 10]',\n 'Number of grid points in the second dimension'],\n ['ybound', '[0.1, pi/6., pi/2., 5.*pi/6., 3.04]',\n 'Number of radial grid points'],\n ['nz', '[361]', 'Number of grid points in the third dimension'],\n ['zbound', '[0., 2.0*pi]', 'Number of radial grid points'],\n # star related\n ['tstar', '[3900.0]', 'Temperature of star'],\n ['mstar', '[1.0*ms]', 'Mass of the star(s)'],\n ['rstar', '[2.5*rs]', 'Radius of star'],\n # gas density \n ['Rin', '[0.1*au, 80*au]', 'inner bounding edge'],\n ['Rin_w', '[0, 1*au]', 'gaussian taper before inner edge'], \n ['Rout', '[30*au, 120*au]', 'outer bounding edge'],\n ['Rout_w', '[1*au, 1*au]', 'gaussian taper after outer edge'], \n ['sigp', '[-1.0, -1.5]', 'power-law surface density'],\n ['sig0', '[1e2, 1e1]', 'surface density at Rin in g/cm^2'], \n ['ring_r', '[50*au]', 'location of gaussian ring'], \n ['ring_win', '[5*au]', 'width of gaussian ring in inner radius'],\n ['ring_wout', '[5*au]', 'width of gaussian ring in outer radius'], \n ['ring_a', '[1e2]', 'surface density at center of ring in g/cm^2]'], \n ['cutgdens', '1e-30', 'cut for density'], \n ['Rt', '100*au', 'radius for scale height'], \n ['Ht', '10*au', 'scale height'], \n ['qheight', '1.25', 'height power-law'], \n # gas species\n ['gasspec_mol_name', \"['12co']\", 'name of molecule'],\n ['gasspec_mol_abun', '[5e-5]', 'mass abundance '],\n ['gasspec_mol_dbase_type', \"['leiden']\", ''],\n ['gasspec_mol_freezeout_dfact', '[1e-3]',\n 'Factor by which the molecular abundance should be decreased in the freeze-out zone'],\n ['mol_freeze_Ht', '[24*au]', 'Height at Rt, with index=qheight, for freeze out to happen'],\n ['mol_freeze_del_hfrac', '0.2', 'Gaussian taper for freeze-out. del H = h * hfrac'],\n ['mol_snowR', '[20*au]', 'Radius when freeze out begins to happen'],\n # dust density\n # flat power-law parts\n ['dRin', '[0.1*au, 80*au]', 'inner bounding edge'],\n ['dRin_w', '[0, 1*au]', 'gaussian taper before inner edge'], \n ['dRout', '[30*au, 120*au]', 'outer bounding edge'],\n ['dRout_w', '[1*au, 1*au]', 'gaussian taper after outer edge'], \n ['dsigp', '[-1.0, -1.5]', 'power-law surface density'],\n ['dsig0', '[1e2, 1e1]', 'surface density at Rin'],\n # Lynden-Bell parts\n ['dLB_Rin', '[0.1*au]', 'inner bounding radius'], \n ['dLB_Rsig', '[30*au]', 'charcteristic radius'],\n ['dLB_sigp', '[-1.0]', 'power-law exponent. 
Careful, the sign is different from the usual function by a negative sign for consistency with flat power-law'], \n ['dLB_sig0', '[1e2]', 'surface density'], \n # ring parts\n ['dring_r', '[50*au]', 'location of gaussian ring'],\n ['dring_win', '[5*au]', 'width of gaussian ring in inner radius'],\n ['dring_wout', '[5*au]', 'width of gaussian ring in outer radius'], \n ['dring_a', '[1e2]', 'surface density at center of ring in g/cm^2]'],\n ['cutddens', '1e-30', 'cut for dust density'],\n ['dRt', '[100*au]', 'radius for scale height for each grain size'], \n ['dHt', '[10*au]', 'scale height for each grain size'], \n ['dqheight', '[1.25]', 'scale height power-law for dust'], \n # temperature\n ['T0mid', '50', 'mid plane temperature at Rt'],\n ['T0atm', '50', 'atmosphere temperature at Rt'],\n ['zqratio', '3', 'factor of Ht of where temperature transition occurs'],\n ['qmid', '-0.5', 'midplane temperature exponent'],\n ['qatm', '-0.5', 'atmosphere temperature exponent'],\n ['hdel', '2', 'temperature transition exponent '],\n ['cuttemp', '10', 'temperature cut'], \n # alignment\n ['altype', \"'toroidal'\", 'alignment type']\n ]\n\n return defpar",
"def _get_fitted_params(self):\n return {}",
"def parameters(self):\n return self._default_params",
"def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:\n cs = ConfigSpace.ConfigurationSpace('sklearn.naive_bayes.BernoulliNB', seed)\n\n # the smoothing parameter is a non-negative float\n # I will limit it to 1000 and put it on a logarithmic scale. (SF)\n # Please adjust that, if you know a proper range, this is just a guess.\n alpha = ConfigSpace.UniformFloatHyperparameter(\n name='alpha', lower=1e-2, upper=100, default_value=1, log=True)\n fit_prior = ConfigSpace.CategoricalHyperparameter(\n name='fit_prior', choices=[True, False], default_value=True)\n\n hyperparameters = [alpha, fit_prior]\n\n return ConfigSpaceWrapper(cs, hyperparameters, None)",
"def _default_params(self) -> Dict[str, Any]:\n normal_params = {\n \"temperature\": self.temperature,\n \"max_tokens\": self.max_tokens,\n \"top_p\": self.top_p,\n \"frequency_penalty\": self.frequency_penalty,\n \"presence_penalty\": self.presence_penalty,\n \"n\": self.n,\n # \"best_of\": self.best_of,\n \"request_timeout\": self.request_timeout,\n \"logit_bias\": self.logit_bias,\n }\n return {**normal_params, **self.model_kwargs}",
"def get_default_hparams():\n hparams_map = base_model.get_default_hparams().values()\n hparams_map.update({\n 'conditional': True,\n 'dec_rnn_size': [512], # Decoder RNN: number of units per layer.\n 'dec_rnn_attn_len': 0, # Decoder RNN: length of attention vector.\n 'enc_rnn_size': [256], # Encoder RNN: number of units per layer per dir.\n 'dropout_keep_prob': 1.0, # Probability all dropout keep.\n 'sampling_schedule': 'constant', # constant, exponential, inverse_sigmoid\n 'sampling_rate': 0.0, # Interpretation is based on `sampling_schedule`.\n })\n return tf.contrib.training.HParams(**hparams_map)",
"def get_default_params() -> Dict:\n default_params = {\n \"n_estimators\": {\n \"default_value\": 100,\n \"description\": \"Number of gradient boosted trees. \"\n \"Equivalent to number of boosting rounds.\",\n \"type\": \"int\"\n },\n \"max_depth\": {\n \"default_value\": 6,\n \"description\": \"Maximum tree depth for base learners.\",\n \"type\": \"int\"\n },\n \"learning_rate\": {\n \"default_value\": 0.3,\n \"description\": \"Boosting learning rate (xgb's 'eta')\",\n \"type\": \"float\"\n },\n \"verbosity\": {\n \"default_value\": 1,\n \"description\": \"The degree of verbosity. Valid values are 0 (silent) - 3 (debug).\",\n \"type\": [0, 1, 2, 3]\n },\n \"booster\": {\n \"default_value\": \"gbtree\",\n \"description\": \"Specify which booster to use: gbtree, gblinear or dart.\",\n \"type\": ['gbtree', 'gblinear', 'dart']\n },\n \"tree_method\": {\n \"default_value\": \"auto\",\n \"description\":\n '''\n Specify which tree method to use. Default to auto. If this parameter\n is set to default, XGBoost will choose the most conservative option\n available. It's recommended to study this option from parameters\n document.\n ''',\n \"type\": [\"auto\", \"exact\", \"approx\", \"hist\", \"gpu_hist\"]\n },\n \"n_jobs\": {\n \"default_value\": 1,\n \"description\": '''\n Number of parallel threads used to run xgboost. When used with other Scikit-Learn\n algorithms like grid search, you may choose which algorithm to parallelize and\n balance the threads. Creating thread contention will significantly slow dowm both\n algorithms.\n ''',\n \"type\": \"int\"\n },\n \"gamma\": {\n \"default_value\": 0.0,\n \"description\": \"Minimum loss reduction required to make a further \"\n \"partition on a leaf node of the tree.\",\n \"type\": \"float\"\n },\n \"min_child_weight\": {\n \"default_value\": 1.0,\n \"description\": \"Minimum loss reduction required to make a further \"\n \"partition on a leaf node of the tree.\",\n \"type\": \"float\"\n },\n \"max_delta_step\": {\n \"default_value\": 0.0,\n \"description\": \"Maximum delta step we allow each tree's weight estimation to be.\",\n \"type\": \"float\"\n },\n \"subsample\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of the training instance.\",\n \"type\": \"float\"\n },\n \"colsample_bytree\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of columns when constructing each tree.\",\n \"type\": \"float\"\n },\n \"colsample_bylevel\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of columns for each level.\",\n \"type\": \"float\"\n },\n \"colsample_bynode\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of columns for each split.\",\n \"type\": \"float\"\n },\n \"reg_alpha\": {\n \"default_value\": 0.0,\n \"description\": \"L1 regularization term on weights\",\n \"type\": \"float\"\n },\n \"reg_lambda\": {\n \"default_value\": 0.0,\n \"description\": \"L2 regularization term on weights\",\n \"type\": \"float\"\n },\n \"scale_pos_weight\": {\n \"default_value\": 1.0,\n \"description\": \"Balancing of positive and negative weights.\",\n \"type\": \"float\"\n },\n \"random_state\": {\n \"default_value\": 0,\n \"description\": \"Random number seed.\",\n \"type\": \"int\"\n },\n \"base_score\": {\n \"default_value\": 0.5,\n \"description\": \"The initial prediction score of all instances, global bias.\",\n \"type\": \"float\"\n },\n # \"missing\": {\n # \"default_value\": None,\n # \"description\": \"Value in the data which needs to be present as a missing value.\",\n # \"type\": 
\"float\"\n # },\n \"num_parallel_tree\": {\n \"default_value\": 1,\n \"description\": \"Used for boosting random forest.\",\n \"type\": \"int\"\n },\n # \"monotone_constraints\": {\n # \"default_value\": \"(0,0)\",\n # \"description\": \" Constraint of variable monotonicity. \"\n # \"See tutorial for more information.\",\n # \"type\": \"str\"\n # },\n # \"interaction_constraints\": {\n # \"default_value\": None,\n # \"description\": '''\n # Constraints for interaction representing permitted interactions. The\n # constraints must be specified in the form of a nest list, e.g. [[0, 1],\n # [2, 3, 4]], where each inner list is a group of indices of features\n # that are allowed to interact with each other. See tutorial for more\n # information\n # ''',\n # \"type\": \"str\"\n # },\n \"importance_type\": {\n \"default_value\": \"gain\",\n \"description\": '''\n The feature importance type for the feature_importances. property:\n either \"gain\", \"weight\", \"cover\", \"total_gain\" or \"total_cover\".\n ''',\n \"type\": [\"gain\", \"weight\", \"cover\", \"total_gain\", \"total_cover\"]\n }\n }\n\n return default_params",
"def get_hyperparams(self):",
"def doParametersOfInterest(self):\n ''' ref : physicsmodel -> rvf\n self.modelBuilder.out.var(\"MH\").setRange(float(self.mHRange[0]),float(self.mHRange[1]))\n self.modelBuilder.out.var(\"MH\").setConstant(False)\n '''\n\n self.modelBuilder.doVar(\"mu[0,0,1000]\") ##mu is what we want to return (in string) name[starting_value,min,max] \n self.modelBuilder.doVar(\"Fvbf[0,0,1]\") ##mu is what we want to return (in string) name[starting_value,min,max] \n self.modelBuilder.doSet(\"POI\",\"mu,Fvbf\")\n self.modelBuilder.doVar(\"\")\n self.modelBuilder.factory_('expr::ggH_s_func(\"(@0-sqrt(@0))*(1.-@1)\", mu,Fvbf)')\n self.modelBuilder.factory_( 'expr::ggH_b_func(\"(1-sqrt(@0))*(1.-@1)\", mu,Fvbf)')\n self.modelBuilder.factory_( 'expr::ggH_sbi_func(\"sqrt(@0)*(1.-@1)\", mu,Fvbf)')\n\n self.modelBuilder.factory_('expr::vbfH_s_func(\"(@0-sqrt(@0))*(@1)\", mu,Fvbf)')\n self.modelBuilder.factory_( 'expr::vbfH_b_func(\"(1-sqrt(@0))*(@1)\", mu,Fvbf)')\n self.modelBuilder.factory_( 'expr::vbfH_sbi_func(\"sqrt(@0)*(@1)\", mu,Fvbf)')",
"def doParametersOfInterest(self):\n self.modelBuilder.doVar(\"mu[1,0,100]\") ##mu is what we want to return (in string) name[starting_value,min,max] \n self.modelBuilder.doSet(\"POI\",\"mu\")\n self.modelBuilder.factory_('expr::ggH_s_func(\"@0-sqrt(@0)\", mu)')\n self.modelBuilder.factory_( 'expr::ggH_b_func(\"1-sqrt(@0)\", mu)')\n self.modelBuilder.factory_( 'expr::ggH_sbi_func(\"sqrt(@0)\", mu)')",
"def get_default_model_params(self):\n\n model_params = {\n 'dropout_rate': 0.3,\n 'hidden_layer_size': 160,\n 'learning_rate': 0.01,\n 'minibatch_size': 64,\n 'max_gradient_norm': 0.01,\n 'num_heads': 1,\n 'stack_size': 1\n }\n\n return model_params",
"def get_base_parameters(cls):\n return {\n \"cutoff\": None,\n \"method\": None\n }",
"def get_space(self, desc):\n model_desc = PipeStepConfig.model.model_desc\n model = ModelZoo().get_model(dict(type='BackboneDeformation', desc=model_desc))\n search_space = model.search_space\n times = random.randint(3, 5)\n params = [dict(key=\"network.props.doublechannel\", type=\"BINARY_CODE\", range=[len(search_space), times]),\n dict(key=\"network.props.downsample\", type=\"BINARY_CODE\", range=[len(search_space), times])]\n params.append(dict(key='network.deformation', type=\"CATEGORY\", range=['BackboneDeformation']))\n logging.info(\"Backbone Search Space: {}\".format(params))\n return {\"hyperparameters\": params}",
"def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"Rdy[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rbk[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rqcd_emu[1,0.0,10.0]\");\n self.modelBuilder.doSet(\"POI\",\"Rbk,Rdy,Rqcd_emu\")",
"def params():\n return utils.Params('../experiments/base-model/params.json')",
"def doParametersOfInterest(self):\n self.modelBuilder.doVar(\"mu[0,0,1000]\") ##mu is what we want to return (in string) name[starting_value,min,max] \n self.modelBuilder.doSet(\"POI\",\"mu\")\n self.modelBuilder.factory_('expr::vbfH_s_func(\"@0-sqrt(@0)\", mu)')\n self.modelBuilder.factory_( 'expr::vbfH_b_func(\"1-sqrt(@0)\", mu)')\n self.modelBuilder.factory_( 'expr::vbfH_sbi_func(\"sqrt(@0)\", mu)')",
"def default_parameters():\n return BackendNSParameters()",
"def _default_parameters(cls) -> Options:\n params = super()._default_parameters()\n params.main_axes = None\n params.i_means = None\n params.q_means = None\n params.scales = None\n\n return params",
"def default_hparams():\n return {\n \"activation_fn\": \"tensorflow.identity\",\n \"name\": \"reparameterized_stochastic_connector\"\n }",
"def _get_current_hyperparameters(self):",
"def get_model_parameter_bounds():\n minf = float(\"-inf\")\n inf = float(\"inf\")\n params = dict(mu=(minf,inf), rho=(0.0 ,inf))\n return params",
"def default_parameters():\n prm = Parameters('lvad_model')\n\n prm.add('lvad_volume', 66.0)\n\n prm.add('alpha_slope', 0.0091)\n prm.add('alpha_intercept', 1.4)\n\n prm.add('beta_slope', -0.19)\n prm.add('beta_intercept', -1.9)\n\n prm.add('frequency', float())\n\n return prm",
"def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"Rdy[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rqcd[1,0.0,10.0]\");\n self.modelBuilder.doSet(\"POI\",\"Rdy,Rqcd\")",
"def parameters_default(cls):\n return cls._Parameters.__new__.__defaults__",
"def _default_params(self) -> dict[str, Any]:\n return {\n \"max_tokens\": self.max_tokens,\n \"temperature\": self.temperature,\n \"top_p\": self.top_p,\n \"logprobs\": self.logprobs,\n \"echo\": self.echo,\n \"stop_sequences\": self.stop_sequences,\n \"repeat_penalty\": self.repeat_penalty,\n \"top_k\": self.top_k,\n \"n_threads\": self.n_threads,\n \"n_ctx\": self.n_ctx,\n \"n_gpu_layers\": self.n_gpu_layers,\n \"n_gqa\": self.n_gqa if self.n_gqa else None,\n \"n_parts\": self.n_parts,\n \"seed\": self.seed,\n \"f16_kv\": self.f16_kv,\n \"logits_all\": self.logits_all,\n \"vocab_only\": self.vocab_only,\n \"use_mlock\": self.use_mlock,\n \"n_batch\": self.n_batch,\n \"last_n_tokens_size\": self.last_n_tokens_size,\n \"streaming\": self.streaming,\n }",
"def _default_parameters():\n\n return {\n 'opt': 'adadelta',\n 'activation_function': 'softmax',\n 'lr': 0.0001,\n 'decay': 1e-6,\n 'loss': 'categorical_crossentropy',\n 'batch_size': 32,\n 'nb_epoch': 20,\n 'shuffle': True,\n 'momentum': 0.9,\n 'nesterov': True,\n 'rho': 0.95,\n 'epsilon': 1e-08,\n 'beta_1': 0.9,\n 'beta_2': 0.999,\n 'horizontal_flip': False,\n 'im_size': 240,#256,\n 'dense_layer': 1024,\n 'nb_classes': 10,\n 'nb_channels': 3,\n 'dropout': 0.5,\n 'metrics': ['accuracy'],\n 'volume': None,\n 'input_size': 25,\n 'temporal': False,\n 'input_dim': 512,\n 'nb_frames': 60,\n 'stride': 16,\n 'nb_hidden':512,\n 'lstm': False\n\n }",
"def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"eAfb[0.6,-0.75,0.75]\");\n self.modelBuilder.doVar(\"eA0[0.05, -1.0, 1.0]\");\n self.modelBuilder.doVar(\"rAfb[1.0,-5.0, 5.0]\");\n self.modelBuilder.doVar(\"rA0[1.0, -5.0, 5.0]\");\n self.modelBuilder.doSet(\"POI\",\"rAfb,rA0\")\n self.modelBuilder.factory_('expr::mAfb(\"@0*@1\",eAfb,rAfb)')\n self.modelBuilder.factory_('expr::mA0(\"(@0*@1)\",eA0,rA0)')\n\n \n self.modelBuilder.factory_('expr::eAlph(\"2.0*@0/(2.0-@0)\",eA0)')\n self.modelBuilder.factory_('expr::eNorm(\"3.0/4.0/(2.0+@0)\",eAlph)')\n self.modelBuilder.factory_('expr::eRAlph(\"@0*@1\",eAlph,eNorm)')\n self.modelBuilder.factory_('expr::eRpl(\"(@0+@1)\",eNorm,eAfb)')\n self.modelBuilder.factory_('expr::eRmn(\"(@0-@1)\",eNorm,eAfb)')\n\n self.modelBuilder.factory_('expr::mAlph(\"2.0*@0/(2.0-@0)\",mA0)')\n self.modelBuilder.factory_('expr::mNorm(\"3.0/4.0/(2.0+@0)\",mAlph)')\n self.modelBuilder.factory_('expr::mRAlph(\"@0*@1\",mAlph,mNorm)')\n self.modelBuilder.factory_('expr::mRpl(\"(@0+@1)\",mNorm,mAfb)')\n self.modelBuilder.factory_('expr::mRmn(\"(@0-@1)\",mNorm,mAfb)')"
]
| [
"0.65847033",
"0.6535262",
"0.63484514",
"0.6261159",
"0.615016",
"0.6135261",
"0.60658514",
"0.6031016",
"0.59677446",
"0.5958387",
"0.5953047",
"0.58978546",
"0.5886156",
"0.58836716",
"0.58489406",
"0.58475643",
"0.5846227",
"0.58361953",
"0.58296055",
"0.5819666",
"0.5811491",
"0.5810172",
"0.58045393",
"0.580236",
"0.57939684",
"0.5787391",
"0.577873",
"0.57614034",
"0.5750419",
"0.5748858"
]
| 0.80838275 | 0 |
Sample draws of the future trend values. Vectorized version of sample_predictive_trend(). | def _sample_predictive_trend_vectorized(
prophet_model: Prophet, df: pd.DataFrame, n_samples: int, iteration: int = 0
) -> np.ndarray:
if prophet_model.growth == "linear":
return sample_linear_predictive_trend_vectorize(
prophet_model, df, n_samples, iteration
)
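    # trend parameters (changepoint deltas, offset m, base rate k) for this iteration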
deltas = prophet_model.params["delta"][iteration]
m0 = prophet_model.params["m"][iteration]
k = prophet_model.params["k"][iteration]
if prophet_model.growth == "logistic":
expected = prophet_model.piecewise_logistic(
df["t"].values,
df["cap_scaled"].values,
deltas,
k,
m0,
prophet_model.changepoints_t,
)
elif prophet_model.growth == "flat":
expected = prophet_model.flat_trend(df["t"].values, m0)
else:
raise NotImplementedError
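    # add sampled changepoint uncertainty to the expected trend, then undo Prophet's internal scaling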
uncertainty = _sample_trend_uncertainty(prophet_model, n_samples, df, iteration)
return (
np.tile(expected, (n_samples, 1)) + uncertainty
) * prophet_model.y_scale + np.tile(df["floor"].values, (n_samples, 1)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sample_linear_predictive_trend_vectorize(\n prophet_model: Prophet, df: pd.DataFrame, sample_size: int, iteration: int\n) -> np.ndarray:\n k = prophet_model.params[\"k\"][iteration]\n m = prophet_model.params[\"m\"][iteration]\n deltas = prophet_model.params[\"delta\"][iteration]\n changepoints_t = prophet_model.changepoints_t\n changepoint_ts = np.row_stack([changepoints_t] * sample_size)\n\n deltas = np.row_stack([deltas] * sample_size)\n\n t = np.array(df[\"t\"])\n T = t.max()\n\n # vectorize possion sample\n S = len(changepoints_t)\n if (\n S * (T - 1) > 0\n ): # ensuring parameter of Poission distribution is valid; otherwise no need to generate samples\n possion_sample = np.random.poisson(S * (T - 1), sample_size)\n max_possion_num = possion_sample.max()\n else:\n max_possion_num = 0\n\n if max_possion_num > 0:\n\n # sample change points\n changepoint_ts_new = 1 + np.random.rand(sample_size, max_possion_num) * (T - 1)\n changepoint_ts_new.sort(axis=1)\n\n # create mask for deltas -> to mute some deltas based on number of change points\n mask = np.random.uniform(\n 0, max_possion_num, max_possion_num * sample_size\n ).reshape(sample_size, -1)\n mask = mask < possion_sample[:, None]\n\n # Sample deltas\n lambda_ = np.mean(np.abs(deltas)) + 1e-8\n deltas_new = np.random.laplace(\n 0, lambda_, max_possion_num * sample_size\n ).reshape(sample_size, -1)\n # mute some deltas based on mask\n deltas_new = deltas_new * mask\n\n # Prepend the times and deltas from the history\n changepoint_ts = np.column_stack((changepoint_ts, changepoint_ts_new))\n deltas = np.column_stack((deltas, deltas_new))\n\n trend = _piecewise_linear_vectorize(t, deltas, k, m, changepoint_ts)\n\n return trend * prophet_model.y_scale + df[\"floor\"].values",
"def _sample_trend_uncertainty(\n prophet_model: Prophet,\n n_samples: int,\n df: pd.DataFrame,\n iteration: int = 0,\n) -> np.ndarray:\n\n # when there is only historical data\n # given that df is sorted by time, it's last item has the largest date.\n if df[\"t\"].iloc[-1] <= 1:\n # there is no trend uncertainty in historic trends\n uncertainties = np.zeros((n_samples, len(df)))\n else:\n\n future_df = df.loc[df[\"t\"] > 1]\n n_length = len(future_df)\n hist_len = len(df) - n_length\n # handle 1 length futures by using history\n if n_length > 1:\n single_diff = np.diff(future_df[\"t\"]).mean()\n else:\n single_diff = np.diff(prophet_model.history[\"t\"]).mean()\n change_likelihood = len(prophet_model.changepoints_t) * single_diff\n deltas = prophet_model.params[\"delta\"][iteration]\n m0 = prophet_model.params[\"m\"][iteration]\n k = prophet_model.params[\"k\"][iteration]\n mean_delta = np.mean(np.abs(deltas)) + 1e-8\n if prophet_model.growth == \"linear\":\n mat = _make_trend_shift_matrix(\n mean_delta, change_likelihood, n_length, n_samples=n_samples\n )\n uncertainties = mat.cumsum(axis=1).cumsum(\n axis=1\n ) # from slope changes to actual values\n uncertainties *= single_diff # scaled by the actual meaning of the slope\n elif prophet_model.growth == \"logistic\":\n mat = _make_trend_shift_matrix(\n mean_delta, change_likelihood, n_length, n_samples=n_samples\n )\n uncertainties = _logistic_uncertainty(\n prophet_model=prophet_model,\n mat=mat,\n deltas=deltas,\n k=k,\n m=m0,\n cap=future_df[\"cap_scaled\"].values,\n t_time=future_df[\"t\"].values,\n n_length=n_length,\n single_diff=single_diff,\n )\n elif prophet_model.growth == \"flat\":\n # no trend uncertainty when there is no growth\n uncertainties = np.zeros((n_samples, n_length))\n else:\n raise NotImplementedError\n # historical part\n if hist_len > 0:\n past_uncertainty = np.zeros((n_samples, hist_len))\n uncertainties = np.concatenate([past_uncertainty, uncertainties], axis=1)\n return uncertainties",
"def get_trend_pred(united_samples, look_back):\n\n features = united_samples[:, :1].astype(str)\n labels = united_samples[:, -1:]\n\n # move all dates a day behind\n delta = -1\n generator = (change_date(date[0], delta_days=delta) for date in features)\n new_dates = np.fromiter(generator, features.dtype)\n\n # selecting samples after April 2020 when the COVID-19 became global\n BOOL_COND_ARRAY = [(int(date[0:4]) >= 2020 and int(date[5:7]) >= 4)\n for date in new_dates]\n new_dates = new_dates[BOOL_COND_ARRAY]\n new_dates = new_dates.reshape(-1, 1)\n\n labels = labels[BOOL_COND_ARRAY]\n\n # converting date to numerical source:\n generator = (dt.toordinal(dt.strptime(date[0], DATE_FORMAT)) for date in\n new_dates)\n numerical_dates = np.fromiter(generator, features.dtype)\n numerical_dates = numerical_dates.reshape(-1, 1).astype(float)\n\n # change degree of polynomial features\n poly_features = PolynomialFeatures(degree=4)\n features_transformed = poly_features.fit_transform(numerical_dates)\n\n # model\n linreg_model = LinearRegression()\n linreg_model.fit(features_transformed, labels)\n\n # trend\n trend_labels = linreg_model.predict(\n poly_features.fit_transform(numerical_dates))\n\n ahead = trend_labels[-1:, 0]\n behind = trend_labels[-look_back-1: -look_back, 0]\n\n if ahead > behind:\n return 'upward'\n elif ahead < behind:\n return 'downward'\n else:\n return 'not_changed'",
"def predict_trend(self, features, data_tier):\n prediction = self.clf_trend[data_tier].predict(features)\n return prediction[0]",
"def benchmark_predict(self):\n\n # Get time series index for split train/test set\n start_index_test = int(len(self.y) * 2 / 3)\n\n # Initialize result array with the length of test set = length set - length training set\n log_return_predict_benchmark = np.empty(int(len(self.y)) - start_index_test)\n\n # Loop through time series\n # python range is equivalent to [start_index_test, len(self.y))\n for i in range(start_index_test, int(len(self.y))):\n # Predict r_t with r_t-1\n # Calculate mean of rolling sum (=cummulative log returns)\n log_return_predict_benchmark[i - start_index_test] = np.mean(\n rolling_sum(self.y[:i-1], self.forecast_horizon))\n self.log_return_predict_benchmark = log_return_predict_benchmark\n return log_return_predict_benchmark",
"def sample_trajectories(self):\n minibatch = []\n for i in range(self.num_avg_gradient):\n trajectory = self.replay_buffer[random.randint(0, len(self.replay_buffer) - 1)]\n trajectory = trajectory[random.randint(0, len(trajectory) - 1):]\n minibatch.append(trajectory)\n return minibatch",
"def evaluate_random_forest(y_test, y_pred):",
"def tpr(y_true, y_pred):\n return recall(y_true, y_pred)",
"def detrend_and_decimate_new(trace,f_sample, params):\n\n logging.info(\"detrending\")\n \n f_new = int(params.f_new)\n print(f_sample,f_new)\n f_sample2= (int(f_sample)//1000)*1000\n print(f_sample2,f_new)\n leng =len(trace)\n\n up = int(f_new/np.gcd(f_sample2,f_new))\n down = int(f_sample2*up/f_new)\n print(up,down)\n factor=down/up\n logging.info(f\"up = {up}, down = {down}\")\n\n # up = int(100_000//f_sample)\n # down = int(100_000//f_new)\n\n\n trace_sub = resample_poly(trace,up,down,padtype='edge')\n dt=1/f_new\n times_sub = np.linspace(0.0,leng/f_sample,len(trace_sub))\n\n ord_filt_len = 2*(int(params.ord_len_ms*f_new/1000)//2)+1\n trace_sub2_ord = order_filter(trace_sub, np.ones(ord_filt_len), ord_filt_len//10) # 10 percentile filter\n\n down_temp = int(f_new//params.f_ord_decimate) \n print(f\"down_temp = {down_temp}\")\n trace_sub2_ord = decimate(trace_sub2_ord, down_temp, ftype='fir')\n trace_sub2_ord = medfilt(trace_sub2_ord) #median filter after decimation\n trace_sub2_ord = resample_poly(trace_sub2_ord, down_temp, 1,padtype='edge')\n\n savgol_len1 = 2*(int(25*f_new/1000)//2)+1\n\n # trace_sub2_ord = savgol_filter(trace_sub2_ord, savgol_len1, 3, mode='interp')\n\n #added to fix length errors, URGH\n last_ind=min(len(trace_sub),len(trace_sub2_ord))\n \n trace_zerod = trace_sub[:last_ind]-trace_sub2_ord[:last_ind]\n \n times_sub = times_sub[:last_ind]\n\n\n MAD = stats.median_absolute_deviation(trace_zerod)\n\n\n\n if params.post_savgol: # False\n savgol_len2 = 2*(int(params.savgol_len_ms*f_new/1000)//2)+1\n trace_zerod = savgol_filter(trace_zerod, savgol_len2, 3, mode='interp') # params.savgol_len=7\n \n trace_zerod = trace_zerod - np.quantile(trace_zerod, params.subs_quantile) # params.subs_quantile=0.25\n logging.info(\"finished detrending\")\n \n # times[]\n\n return trace_zerod, times_sub, MAD , factor",
"def sample_model_vectorized(\n prophet_model: Prophet,\n df: pd.DataFrame,\n seasonal_features: pd.DataFrame,\n iteration: int,\n s_a: np.ndarray,\n s_m: np.ndarray,\n n_samples: int,\n) -> Dict[str, np.ndarray]:\n # Get the seasonality and regressor components, which are deterministic per iteration\n beta = prophet_model.params[\"beta\"][iteration]\n Xb_a = (\n np.matmul(seasonal_features.values, beta * s_a.values) * prophet_model.y_scale\n )\n Xb_m = np.matmul(seasonal_features.values, beta * s_m.values)\n # Get the future trend, which is stochastic per iteration\n trends = _sample_predictive_trend_vectorized(\n prophet_model, df, n_samples, iteration\n )\n\n sigma = prophet_model.params[\"sigma_obs\"][iteration]\n noise_terms = np.random.normal(0, sigma, trends.shape) * prophet_model.y_scale\n\n return {\"yhat\": trends * (1 + Xb_m) + Xb_a + noise_terms, \"trend\": trends}",
"def train_test_dataset(self, train_rate=0.8):\n point_date = int(len(self.y) * train_rate)\n y_to_train = self.y[:point_date]\n y_to_val = self.y[point_date:]\n predict_date = len(self.y) - len(y_to_train) # the number of data points for the test set\n date_val = self.y.index[point_date]\n return y_to_train, y_to_val, predict_date, date_val",
"def predict(self, X_new):\n trees = self.all_trees\n num_observations = X_new.shape[0]\n pred = np.zeros((len(trees), num_observations))\n np.random.randint(len(trees))\n for draw, trees_to_sum in enumerate(trees):\n new_Y = np.zeros(num_observations)\n for tree in trees_to_sum:\n new_Y += [tree.predict_out_of_sample(x) for x in X_new]\n pred[draw] = new_Y\n return pred",
"def _sample_synthetic(self, X):\n n_samples = X.shape[0]\n self.y = np.concatenate((np.ones(n_samples), np.zeros(n_samples)))\n \n random_state = _forest.check_random_state(self.random_state) \n\n X_synth = np.asarray([np.apply_along_axis(random_state.choice, 0, X) for _ in range(n_samples)])\n self.X = np.concatenate((X, X_synth))\n\n return self.X, self.y",
"def ts_rewards_sample(self):\n exp_rewards_list = [np.random.beta(a, b) for a, b in zip(self.alpha_params, self.beta_params)]\n return np.array(exp_rewards_list)",
"def recall_np(y_true: numpy.array, y_pred: numpy.array, epsilon: float = EPSILON) -> float:\n true_positives = numpy.sum(y_true * y_pred)\n possible_positives = numpy.sum(y_true)\n return true_positives / (possible_positives + epsilon)",
"def do_smooth(d, WT, sample_rate):\n d_smooth = np.zeros(len(d))\n Wt = int(np.ceil(sample_rate*WT))\n for i in range(len(d)-Wt):\n d_smooth[i] = np.mean(d[i: i+Wt])\n d_smooth[0:Wt+100] = np.nan # +100 removes \"edge effects\" at start of f4\n return(d_smooth)",
"def captureNewDataPoint(self):\n return 2.0*rand()-1.0",
"def _trend_changes(self, input_df=None, value_column=None):\n import numpy as np\n from scipy import stats\n from statsmodels.tsa.stattools import acf\n\n min_float = 1e-10\n window_length = self.tc_window_length\n sig_level = self.sig_level\n\n series = input_df[value_column].tolist()\n timestamps = input_df.index.tolist()\n\n if not window_length:\n window_length = self._detect_window_size(series=series)\n\n # Creating a crude estimation of the required window size\n nwindows = int(window_length * 1.5)\n\n current_mid_point = window_length * nwindows\n past_trend_change = 0\n global_trend_changes = []\n local_trend_changes = {}\n past_p_value = -1\n\n # If the remaining part of the time series is less than the (window_length * nwindows) we terminate the while loop\n current_reminder = len(series) if (current_mid_point + (window_length * nwindows)) < len(series) else 0\n while current_reminder >= window_length:\n\n # Creating the left and the right window for slope detection\n l_window = series[current_mid_point - (window_length * nwindows): current_mid_point]\n r_window = series[current_mid_point: current_mid_point + (window_length * nwindows)]\n l_window_length = len(l_window)\n r_window_length = len(r_window)\n N = l_window_length + r_window_length\n\n # Finding the effective degrees of freedom\n auto_corr = acf(l_window + r_window, nlags=N)\n auto_corr[np.isnan(auto_corr)] = 1\n eff_df = 0\n for i in range(1, N):\n eff_df = eff_df + (((N - i) / float(N)) * auto_corr[i])\n eff_df = max(1, int(np.rint(1 / ((1 / float(N)) + ((2 / float(N)) * eff_df)))) - 4)\n\n # Creating the left and right indices for running the regression\n l_x, r_x = np.arange(l_window_length), np.arange(r_window_length)\n\n # Linear regression on the left and the right window\n l_slope, l_intercept, l_r_value, l_p_value, l_std_err = stats.linregress(l_x, l_window)\n r_slope, r_intercept, r_r_value, r_p_value, r_std_err = stats.linregress(r_x, r_window)\n\n # t-test for slope shift\n l_window_hat = (l_slope * l_x) + l_intercept\n r_window_hat = (r_slope * r_x) + r_intercept\n\n l_sse = np.sum((l_window - l_window_hat) ** 2)\n r_sse = np.sum((r_window - r_window_hat) ** 2)\n\n l_const = np.sum((np.arange(1, l_window_length + 1) - ((l_window_length + 1) / 2.0)) ** 2)\n r_const = np.sum((np.arange(1, r_window_length + 1) - ((r_window_length + 1) / 2.0)) ** 2)\n\n prop_const = (l_const * r_const) / (l_const + r_const)\n\n total_sse = l_sse + r_sse\n\n std_err = max(np.sqrt(total_sse / (prop_const * (l_window_length + r_window_length - 4))), min_float)\n\n t_stat = abs(l_slope - r_slope) / std_err\n\n p_value = (1 - stats.t.cdf(t_stat, df=eff_df)) * 2\n\n if p_value < sig_level:\n # Check if the same shift detected multiple times\n if p_value == past_p_value:\n local_trend_changes.pop(past_trend_change)\n if current_reminder - window_length < window_length:\n if len(local_trend_changes) > 2:\n # _local_minima function is called to detec the optimal trend change(s) among a group of local\n # trend changes\n current_trend_change = self._local_minima(input_dict=local_trend_changes,\n window_length=window_length)\n for key in current_trend_change:\n global_trend_changes.append(timestamps[key].__str__())\n else:\n # Handling the trend changes at the tail of the time series\n local_trend_changes[current_mid_point] = p_value\n past_trend_change = current_mid_point\n past_p_value = p_value\n else:\n # Handling the trend changes at the tail of the time series\n if (current_mid_point - past_trend_change) <= window_length and 
len(local_trend_changes) > 2:\n current_trend_change = self._local_minima(input_dict=local_trend_changes, window_length=window_length)\n for key in current_trend_change:\n global_trend_changes.append(timestamps[key].__str__())\n local_trend_changes = {}\n\n current_mid_point = current_mid_point + window_length\n current_reminder = len(series) - current_mid_point\n\n return global_trend_changes",
"def extract_trend_component(insample_data):\n x = np.arange(len(insample_data))\n a, b = np.polyfit(x, insample_data, 1)\n return a, b",
"def test_up_using_trendline(self):\n assert setup.setup_component(\n self.opp,\n \"binary_sensor\",\n {\n \"binary_sensor\": {\n \"platform\": \"trend\",\n \"sensors\": {\n \"test_trend_sensor\": {\n \"entity_id\": \"sensor.test_state\",\n \"sample_duration\": 10000,\n \"min_gradient\": 1,\n \"max_samples\": 25,\n }\n },\n }\n },\n )\n self.opp.block_till_done()\n\n now = dt_util.utcnow()\n for val in [10, 0, 20, 30]:\n with patch(\"openpeerpower.util.dt.utcnow\", return_value=now):\n self.opp.states.set(\"sensor.test_state\", val)\n self.opp.block_till_done()\n now += timedelta(seconds=2)\n\n state = self.opp.states.get(\"binary_sensor.test_trend_sensor\")\n assert state.state == \"on\"\n\n # have to change state value, otherwise sample will lost\n for val in [0, 30, 1, 0]:\n with patch(\"openpeerpower.util.dt.utcnow\", return_value=now):\n self.opp.states.set(\"sensor.test_state\", val)\n self.opp.block_till_done()\n now += timedelta(seconds=2)\n\n state = self.opp.states.get(\"binary_sensor.test_trend_sensor\")\n assert state.state == \"off\"",
"def define_trends(data, x):\n from sklearn.linear_model import LinearRegression\n \n model = LinearRegression().fit(x, data)\n trend = model.predict(x)\n \n return trend, model",
"def test_detrend_transform(self):\n snv = SNV(q=50, robust=False, detrend=True)\n X_t = snv.fit_transform(self.X)\n self.assertTrue(np.allclose(X_t, 0.0))",
"def test_model_avg_3_weeks_bt(df_y_train, df_y_predict, df_y_ground_truth_scaled, predict_period, n_samples_week, target):\n\n # Use average of last 3 weeks (for same time stamps) as predicted values\n df_hist = pd.concat([df_y_train, df_y_ground_truth_scaled], 0)\n df_hist_1week = df_hist.shift(n_samples_week)\n df_hist_2week = df_hist.shift(2*n_samples_week)\n df_hist_3week = df_hist.shift(3*n_samples_week)\n df_hist_all = pd.concat([df_hist_1week, df_hist_2week, df_hist_3week], 1)\n df_hist_all = df_hist_all[df_hist_all.index.isin(df_y_predict.index)]\n \n if target == \"count\":\n # Average\n prediction = df_hist_all.mean(axis = 1)\n elif target == \"level\":\n # Majority class\n prediction = df_hist_all.mode(axis = 1).iloc[:, 0]\n \n return prediction",
"def detrend(self, polyorder=1, break_tolerance=10):\n lc = self.copy()\n half = lc.time.shape[0] // 2\n if half % 2 == 0:\n # add 1 if even\n half += 1\n return lc.flatten(\n window_length=half,\n polyorder=polyorder,\n break_tolerance=break_tolerance,\n )",
"def detrend(x):\n\n t = x['t']\n f = x['f']\n t0 = np.mean(x['t'])\n time_since_transit = t - t0\n\n # select out just the continuum points\n continuum = x['continuum']==1\n\n pfit = np.polyfit(\n time_since_transit[continuum], f[continuum], poly_degree\n )\n\n fldt = f.copy()\n fldt -= np.polyval(pfit,time_since_transit)\n return fldt",
"def _forecast_train(stan_fit, p):\n \n train_n = stan_fit.data['N']\n t = np.arange(train_n) / train_n\n train_changepoints = stan_fit.data['s']\n \n A = (t[:, None] > train_changepoints) * 1. # t x s\n \n X = fourier_series(t, p=p, fourier_order=stan_fit.data['n_fourier']) \n \n samples = stan_fit.extract(permuted=True)\n \n samples_n = len(samples['lp__'])\n \n y_hat = np.zeros(shape=(samples_n, len(t)))\n mu_trend = np.zeros(shape=(samples_n, len(t)))\n mu_s = np.zeros(shape=(samples_n, len(t)))\n \n for iteration in range(samples_n):\n \n k = samples['k'][iteration]\n m = samples['m'][iteration]\n deltas = samples['delta'][iteration]\n beta = samples['beta'][iteration]\n sigma = samples['sigma'][iteration]\n \n gamma = -train_changepoints * deltas;\n mu_trend[iteration,:] = k + np.matmul(A, deltas) * t + (m + np.matmul(A, gamma))\n mu_s[iteration,:] = np.matmul(X, beta)\n \n mu = mu_trend[iteration] + mu_s[iteration]\n \n y_hat[iteration,:] = np.random.normal(mu, sigma)\n \n return y_hat, mu_trend, mu_s, samples, samples_n, train_n, train_changepoints",
"def predict(self):\n # format data\n df = self.normalize(self.daily)\n x = df.index.astype(np.int64).values.reshape(-1, 1)\n y = self.normalize(df[['Adj Close']]).values\n\n # format time\n one_day_time = 86400000000000\n x_tomorrow = x[-1] + one_day_time\n x_incl_tomorrow = np.append(x, [x_tomorrow], axis=0)\n dates = pd.to_datetime(x_incl_tomorrow.reshape(-1))\n\n # average the predictions\n lin_reg = self.linear_regression(x, y, x_tomorrow, x_incl_tomorrow, dates)\n knn = self.knn(x, y, x_tomorrow, x_incl_tomorrow, dates)\n tomorrow_norm = [(lin_reg + knn) / 2]\n today_norm = [df['Adj Close'][-1]]\n tomorrow = round((tomorrow_norm[0] * self.daily['Adj Close'][0]), 2)\n today = self.daily['Adj Close'][-1]\n percent_gain = round((((tomorrow / today) - 1) * 100), 2)\n percent_gain_int = abs(int(round(percent_gain, 0)))\n\n if percent_gain > 0:\n self.debug += '\\nExpected price gain: {} %, buys + {}, predicted close is {}'.format(percent_gain, percent_gain_int, tomorrow)\n self.buys += percent_gain_int\n else:\n self.debug += '\\nExpected price gain: {} %, sells + {}, predicted close is {}'.format(percent_gain, percent_gain_int, tomorrow)\n self.sells += percent_gain_int\n\n # plots dotted line connecting stock today with tomorrow's prediction\n predicting_line = np.append(today_norm, tomorrow_norm, axis=0)\n\n if self.will_plot:\n self.ax.plot(dates[-2:], predicting_line, color='cyan', dashes=([1, 1, 1, 1]))\n self.ax.plot(pd.to_datetime(x_tomorrow), tomorrow_norm, marker='o', markersize=3, color=\"cyan\")",
"def gaussianise_series(self, train_x):\n\n n_batches = train_x.shape[0]\n\n for batch in range(n_batches):\n train_x[batch, :, :] = gaussianise(train_x[batch, :, :], target_sigma=1.0)\n\n return train_x",
"def time_per_part():\r\n return random.normalvariate(PT_MEAN, PT_SIGMA)",
"def train_test_samples(df):\n\n from math import floor\n\n shuffled_df = df.reindex(np.random.permutation(df.index))\n\n seventy_five_percent = int(floor(len(shuffled_df) * 0.75))\n train_df = shuffled_df.iloc[:seventy_five_percent, ]\n test_df = shuffled_df.iloc[seventy_five_percent:, ]\n\n return train_df, test_df"
]
| [
"0.64506996",
"0.6297814",
"0.61076504",
"0.57060647",
"0.56256115",
"0.5473225",
"0.547147",
"0.5432517",
"0.5382349",
"0.53632545",
"0.53604054",
"0.5356977",
"0.5291475",
"0.52841",
"0.52544636",
"0.5239208",
"0.52355576",
"0.5212369",
"0.5200511",
"0.5188102",
"0.51754284",
"0.5174917",
"0.51740366",
"0.5153568",
"0.5152645",
"0.514895",
"0.5148776",
"0.5144823",
"0.5134155",
"0.51242614"
]
| 0.74734485 | 0 |
Creates a matrix of random trend shifts based on historical likelihood and size of shifts. Can be used for either linear or logistic trend shifts. Each row represents a different sample of a possible future, and each column is a time step into the future. | def _make_trend_shift_matrix(
mean_delta: float, likelihood: float, future_length: float, n_samples: int
) -> np.ndarray:
# create a bool matrix of where these trend shifts should go
bool_slope_change = np.random.uniform(size=(n_samples, future_length)) < likelihood
shift_values = np.random.laplace(0, mean_delta, size=bool_slope_change.shape)
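    # keep a Laplace-distributed shift only where a changepoint was sampled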
mat = shift_values * bool_slope_change
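    # spread each shift over two consecutive steps (half each) to smooth its onset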
n_mat = np.hstack([np.zeros((len(mat), 1)), mat])[:, :-1]
mat = (n_mat + mat) / 2
return mat | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def gen_all_arms_reward_shifts(self):\n reward_shifts = (\n torch.randn(self._num_arms_all) * self.sigma_shift + self.mu_shift\n )\n return reward_shifts",
"def generate_shift(self, k=1, static=False):\n if static:\n shift = np.zeros((k, 2))\n else:\n shift = np.array(self.environment_size)/2 * np.random.rand(k, 2) - np.array(self.environment_size)/4\n return shift",
"def generate_shift(self, k=1, static=False):\n if static:\n shift = np.zeros((k, 2))\n else:\n shift = np.array(self.environment_size)/2 * np.random.rand(k, 2) - np.array(self.environment_size)/4\n return shift",
"def generate_schedule(top_length, top_depth, left_length, left_depth):\n # The process of calculating the schedule starts from the leftmost\n # topmost element which is active from 0..top_depth timesteps.\n out = np.zeros((left_length, top_length, top_depth), dtype=\"i\")\n out[0][0] = np.arange(top_depth)\n\n # Fill the first col: Every column runs one \"step\" behind the column on\n # its left.\n for col in range(1, top_length):\n out[0][col] = out[0][col - 1] + 1\n\n # Fill the remaining rows. Similarly, all rows run one \"step\" behind the\n # row on their top.\n for row in range(1, left_length):\n out[row][0] = out[row - 1][0] + 1\n for col in range(1, top_length):\n out[row][col] = out[row][col - 1] + 1\n\n return out",
"def forecast(days):\n transition = np.array([[.7, .6], [.3, .4]])\n state = 0\n record = []\n for day in xrange(days):\n state = np.random.binomial(1, transition[1, state])\n record.append(state)\n return record",
"def timeshift(self, shift='random'):\n\n if shift == 'random':\n one_month = pd.Timedelta('30 days').value\n two_years = pd.Timedelta('730 days').value\n random_timedelta = - pd.Timedelta(random.uniform(one_month, two_years)).round('min')\n self.timeshift(random_timedelta)\n\n if not self.data.index.empty:\n if isinstance(shift, pd.Timestamp):\n timedeltas = self.data.index - self.data.index[0]\n self.data.index = shift.round('min') + timedeltas\n if isinstance(shift, pd.Timedelta):\n self.data.index += shift.round('min')\n self.data['date'] = self.data.index.map(lambda timestamp: timestamp.date())\n self.data['time'] = self.data.index.map(lambda timestamp: timestamp.time())\n else:\n if isinstance(shift, pd.Timestamp):\n timedeltas = self.data['timestamp'] - self.data['timestamp'].min()\n self.data['timestamp'] = shift.round('min') + timedeltas\n if isinstance(shift, pd.Timedelta):\n self.data['timestamp'] += shift.round('min')\n self.data['date'] = self.data['timestamp'].map(lambda timestamp: timestamp.date())\n self.data['time'] = self.data['timestamp'].map(lambda timestamp: timestamp.time())",
"def resampz(x, m_type, shift=1):\n sx = np.array(x.shape)\n\n if m_type == 0 or m_type == 1:\n y = np.zeros((sx[0] + np.abs(shift * (sx[1] - 1)), sx[1]))\n\n if m_type == 0:\n shift1 = np.arange(0, sx[1]) * (- shift)\n else:\n shift1 = np.arange(0, sx[1]) * shift\n\n if shift1[-1] < 0:\n shift1 = shift1 - shift1[-1]\n\n for n in range(sx[1]):\n y[shift1[n] + np.arange(0, sx[0]), n] = x[:, n]\n\n # Remove extra rows\n start = 0\n finish = y.shape[0]\n\n while np.linalg.norm(y[start, :], 2) == 0:\n start += 1\n\n while np.linalg.norm(y[finish-1, :], 2) == 0:\n finish -= 1\n\n y = y[start:finish, :]\n\n elif m_type == 2 or m_type == 3:\n y = np.zeros((sx[0], sx[1] + np.abs(shift * (sx[0] - 1))))\n\n if m_type == 2:\n shift2 = np.arange(0, sx[0]) * (- shift)\n else:\n shift2 = np.arange(0, sx[0]) * shift\n\n if shift2[-1] < 0:\n shift2 = shift2 - shift2[-1]\n\n for m in range(sx[0]):\n y[m, shift2[m] + np.arange(0, sx[1])] = x[m, :]\n\n # Remove extra rows\n start = 0\n finish = y.shape[1]\n\n while np.linalg.norm(y[:, start], 2) == 0:\n start += 1\n\n while np.linalg.norm(y[:, finish-1], 2) == 0:\n finish -= 1\n\n y = y[:, start:finish]\n\n else:\n print('Error: type not valid.')\n y = 0\n\n return y",
"def random_time_shift(_spec, Tshift=50):\n\n n_frames = _spec.shape[0]\n\n # deltat drawn from a uniform distribution from 1 to parameter Tshift\n # minimum shift is one frame (avoids crash)\n deltat = int(np.random.uniform(low=1.0, high=Tshift))\n\n # allocate\n _spec_out = np.zeros_like(_spec)\n\n # delay shift\n # end\n _spec_out[deltat:, :] = _spec[:n_frames-deltat, :]\n # begin\n _spec_out[:deltat, :] = _spec[-deltat:, :]\n return _spec_out",
"def test_past_horizon(cell_cls):\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq',\n shape=(3, 5, 6),\n initializer=tf.truncated_normal_initializer(),\n dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n\n def apply_regular(sequence):\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = sequence + pos_enc\n for _ in range(3):\n expected = transformer_layer(expected, num_heads=2, hidden=24)\n return expected\n expected = tf.concat([apply_regular(in_seq[:, :-1]),\n apply_regular(in_seq[:, 1:])[:, -1:]], axis=1)\n sess.run(tf.global_variables_initializer())\n\n actual, expected = sess.run((actual, expected))\n\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)",
"def get_time_oriented_matrix(self, sliding_window_size):\n nbr_timestamps = self.hyperparameters.time_series_length\n\n if sliding_window_size < 3 or sliding_window_size > nbr_timestamps:\n print('Unsuitable sliding window size configured:', sliding_window_size)\n print('Using complete linkage instead.\\n')\n matrix = np.ones(shape=(nbr_timestamps, nbr_timestamps), dtype=np.float)\n np.fill_diagonal(matrix, val=0)\n return matrix\n\n assert sliding_window_size % 2 != 0, 'Sliding window size should be an odd number.'\n sliding_window_half = sliding_window_size // 2\n matrix = np.zeros(shape=(nbr_timestamps, nbr_timestamps), dtype=np.float)\n\n # use each timestamp i as a central point and set floor(sliding_window_size/2) many\n # timestamps left and right of it to 1, but not i itself\n for i in range(nbr_timestamps):\n lower = max(0, i - sliding_window_half)\n upper = min(nbr_timestamps, i + sliding_window_half + 1)\n\n index_interval = [j for j in range(lower, upper) if i != j]\n matrix[i, index_interval] = 1\n\n return matrix",
"def _get_game_history(board, T=8):\n board_copy = board.copy()\n history = np.zeros((8, 8, 14 * T))\n\n for i in range(T):\n try:\n board_copy.pop()\n except IndexError:\n break\n history[:, :, i * 14: (i + 1) * 14] =\\\n _get_current_game_state(board_copy)\n\n return history",
"def fixed_time_trajectories(self, ll=1, distributions=None, discrete=False, noise=0):\n\n self.time_uniform = np.linspace(0, self.nsteps, self.nsteps * self.padding)\n\n for t in tqdm.tqdm(range(self.ntraj)):\n\n if distributions is not None:\n\n if self.dwell_distribution == 'exponential':\n self.lamb = np.random.choice(distributions[0])\n elif self.dwell_distribution == 'power':\n self.alpha = np.random.choice(distributions[0])\n\n self.hop_sigma = np.random.choice(distributions[1])\n self.H = np.random.choice(distributions[2])\n #self.H = np.mean(distributions[2])\n\n time = [0]\n total_time = 0 # saves a lot of time\n\n while total_time < self.nsteps:\n\n # hop at random time intervals according to one of the following PDFs\n if self.dwell_distribution == 'exponential':\n time.append(sampling.random_exponential_dwell(self.lamb))\n elif self.dwell_distribution == 'power':\n if self.alpha == 1:\n time.append(1)\n else:\n time.append(sampling.random_power_law_dwell(1 + self.alpha, ll=ll, discrete=discrete)[0])\n else:\n sys.exit('Please enter a valid dwell time probability distribution')\n total_time += time[-1]\n\n time = np.cumsum(time)\n\n if self.hop_distribution in ['gaussian', 'Gaussian']:\n\n z = np.cumsum(np.random.normal(loc=0, scale=self.hop_sigma, size=len(time)))\n z -= z[0] # untested\n\n elif self.hop_distribution in ['fbm', 'fractional', 'fraction_brownian_motion']:\n z = fbm.FBM(len(time), self.H, method=\"daviesharte\").fbm()[:-1] # automatically inserts zero at beginning of array\n z /= ((1.0 / len(time)) ** self.H) # reversing a normalization done in the fbm code\n z *= self.hop_sigma\n self.steps.append(z[1:] - z[:-1]) # for autocorrelation calculation\n\n else:\n sys.exit('Please enter a valid hop distance probability distribution')\n\n # for visualizing hops\n # trajectory_hops = np.zeros([2 * len(time) - 1, 2])\n #\n # trajectory_hops[1::2, 0] = time[1:]\n # trajectory_hops[2::2, 0] = time[1:]\n #\n # trajectory_hops[::2, 1] = z\n # trajectory_hops[1:-1:2, 1] = z[:-1]\n # trajectory_hops[-1, 1] = z[-1]\n # plt.plot(trajectory_hops[:, 0], trajectory_hops[:, 1])\n # plt.show()\n # exit()\n\n # make uniform time intervals with the same interval for each simulated trajectory\n self.z_interpolated[t, :] = z[np.digitize(self.time_uniform, time, right=False) - 1]\n\n #plt.hist(np.random.normal(loc=0, scale=noise, size=len(self.time_uniform)))\n\n if noise > 0:\n self.z_interpolated += np.random.normal(loc=0, scale=noise, size=len(self.time_uniform))\n\n self.time_uniform *= self.dt\n # plt.plot(trajectory_hops[:, 0]*self.dt, trajectory_hops[:, 1])\n # plt.plot(self.time_uniform, self.z_interpolated[-1, :])\n # plt.show()\n # exit()",
"def _shift(self, s):\n start_pos = self._relative_head_pos()\n l = 1 + 2 * self.shift_length\n shift = int(s * l - 0.000000001) - int(l / 2)\n for s in range(abs(shift)):\n if shift > 0:\n if self.head_pos == len(self.memory) - 1 and len(self.memory) < self.max_memory:\n self.memory = np.concatenate((self.memory, np.zeros((1, self.memory_unit_size))), 0)\n self.head_pos += 1\n else:\n self.head_pos = (self.head_pos + 1) % self.max_memory\n else:\n if self.head_pos == 0 and len(self.memory) < self.max_memory:\n self.memory = np.concatenate((np.zeros((1, self.memory_unit_size)), self.memory), 0)\n self.left_expands += 1\n else:\n self.head_pos = (self.head_pos - 1) % self.max_memory\n if self.history is not None:\n self.history[\"loc\"][-1].append((start_pos, 0.1))\n return np.sign(shift)",
"def make_first_level_design_matrix(\n frame_times,\n events=None,\n hrf_model=\"glover\",\n drift_model=\"cosine\",\n high_pass=0.01,\n drift_order=1,\n fir_delays=[0],\n add_regs=None,\n add_reg_names=None,\n min_onset=-24,\n oversampling=50,\n):\n # check arguments\n # check that additional regressor specification is correct\n n_add_regs = 0\n if add_regs is not None:\n if isinstance(add_regs, pd.DataFrame):\n add_regs_ = add_regs.values\n add_reg_names = add_regs.columns.tolist()\n else:\n add_regs_ = np.atleast_2d(add_regs)\n n_add_regs = add_regs_.shape[1]\n assert add_regs_.shape[0] == np.size(frame_times), ValueError(\n \"Incorrect specification of additional regressors: \"\n f\"length of regressors provided: {add_regs_.shape[0]}, number of \"\n f\"time-frames: {np.size(frame_times)}.\"\n )\n\n # check that additional regressor names are well specified\n if add_reg_names is None:\n add_reg_names = [f\"reg{int(k)}\" for k in range(n_add_regs)]\n elif len(add_reg_names) != n_add_regs:\n raise ValueError(\n \"Incorrect number of additional regressor names was provided\"\n f\"({len(add_reg_names)} provided, {n_add_regs} expected.\"\n )\n\n # computation of the matrix\n names = []\n matrix = None\n\n # step 1: events-related regressors\n if events is not None:\n # create the condition-related regressors\n if isinstance(hrf_model, str):\n hrf_model = hrf_model.lower()\n matrix, names = _convolve_regressors(\n events, hrf_model, frame_times, fir_delays, min_onset, oversampling\n )\n\n # step 2: additional regressors\n if add_regs is not None:\n # add user-supplied regressors and corresponding names\n matrix = (\n np.hstack((matrix, add_regs)) if matrix is not None else add_regs\n )\n names += add_reg_names\n\n # step 3: drifts\n drift, dnames = _make_drift(\n drift_model, frame_times, drift_order, high_pass\n )\n\n matrix = np.hstack((matrix, drift)) if matrix is not None else drift\n\n names += dnames\n # check column names are all unique\n if len(np.unique(names)) != len(names):\n raise ValueError(\"Design matrix columns do not have unique names\")\n\n # step 4: Force the design matrix to be full rank at working precision\n matrix, _ = full_rank(matrix)\n\n design_matrix = pd.DataFrame(matrix, columns=names, index=frame_times)\n return design_matrix",
"def pitch_shift_spectrogram(spectrogram):\n nb_cols = spectrogram.shape[0]\n max_shifts = nb_cols // 20 # around 5% shift\n nb_shifts = np.random.randint(-max_shifts, max_shifts)\n\n return np.roll(spectrogram, nb_shifts, axis=0)",
"def init_shiftind(self, n_t):\n i = np.arange(n_t * n_t)\n i2 = np.arange(n_t).repeat(n_t)\n ik = np.arange(n_t).repeat(n_t)\n ii = np.arange(n_t)[np.newaxis].repeat(n_t, 0).flatten()\n\n si = ik * n_t + (ik + ii) % n_t\n self.shiftinds_fwd = np.roll(si.reshape((n_t, n_t)), int((n_t - 1) / 2), 1)[:, ::-1].flatten()\n\n si = ik * n_t + (ii - ik) % n_t\n self.shiftinds_back = np.roll(np.arange(n_t * n_t).reshape((n_t, n_t))[:, ::-1], -int((n_t - 1) / 2), 1).flatten()[si]\n\n self.shiftinds = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int)\n self.shiftinds_neg = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int)\n self.shiftinds_pos = ((-n_t + i - i2) % n_t + i2 * n_t).astype(int)\n # self.shiftinds = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int).reshape((n_t, n_t)).transpose().flatten()\n # self.shiftinds_neg = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int).reshape((n_t, n_t)).transpose().flatten()\n # self.shiftinds_pos = ((-n_t + i - i2) % n_t + i2 * n_t).astype(int).reshape((n_t, n_t)).transpose().flatten()",
"def fixed_steps_trajectories(self, noise=0, nt=1, ll=0.1, limit=None):\n\n print('Generating Trajectories...')\n for i in tqdm.tqdm(range(self.ntraj)):\n\n if self.hop_distribution == 'gaussian' or self.hop_distribution == 'Gaussian':\n z_position = np.cumsum(\n np.random.normal(loc=0, scale=self.hop_sigma, size=self.nsteps)) # accumulate gaussian steps\n else:\n sys.exit('Please enter a valid hop distance probability distribution')\n\n self.trajectories[i, :, 1] = z_position - z_position[0] # make initial z equal to 0\n\n # hop at random time intervals according to one of the following PDFs\n if self.dwell_distribution == 'exponential':\n time = sampling.random_exponential_dwell(self.lamb, size=self.nsteps)\n elif self.dwell_distribution == 'power':\n time = sampling.random_power_law_dwell(1 + self.alpha, size=self.nsteps, ll=ll, limit=limit)\n else:\n sys.exit('Please enter a valid dwell time probability distribution')\n\n time = np.cumsum(time) # accumulate dwell times\n time -= time[0]\n\n self.trajectories[i, :, 0] = time\n\n # Add to array with all corners of hop distribution for visualization purposes\n self.trajectory_hops[i, 1::2, 0] = time[1:]\n self.trajectory_hops[i, 2::2, 0] = time[1:]\n\n self.trajectory_hops[i, ::2, 1] = self.trajectories[i, :, 1]\n self.trajectory_hops[i, 1:-1:2, 1] = self.trajectories[i, :-1, 1]\n self.trajectory_hops[i, -1, 1] = self.trajectories[i, -1, 1]\n\n print('Interpolating Trajectories...')\n # make uniform time intervals with the same interval for each simulated trajectory\n max_time = np.min(self.trajectories[:, -1, 0])\n self.time_uniform = np.linspace(0, max_time, self.nsteps*10)\n\n if nt > 1:\n # self.pbar = tqdm.tqdm(total=self.ntraj)\n pool = Pool(nt)\n for i, t in enumerate(pool.map(self.interpolate_trajectories, range(self.ntraj))):\n self.z_interpolated[i, :] = t\n else:\n for t in tqdm.tqdm(range(self.ntraj)):\n self.z_interpolated[t, :] = self.trajectories[t, np.digitize(self.time_uniform,\n self.trajectories[t, :, 0], right=False) - 1, 1]\n #self.z_interpolated[t, :] = self.interpolate_trajectories(t, noise=noise)",
"def uniform_redshift(self):\n z_map = self['z_map']\n z_dist = np.linspace(min(z_map), max(z_map), len(z_map))\n\n new_z_map = np.zeros(len(z_map))\n\n new_table = self.copy()\n\n for i in range(len(new_z_map)):\n probs = np.random.sample(len(z_map))\n probs /= np.sum(probs)\n new_z_map[i] = np.random.choice(z_dist, p=probs)\n\n # Create random cdfs\n new_table['z_map'] = np.random.rand(len(z_map)) * np.max(z_map)\n cdf_x, cdf_y = compute_cdf_percentiles(\n self['fit_stack'].data,\n cdf_sigmas=np.linspace(-5, 5, 51))\n new_table['cdf_z'] = cdf_x\n\n return new_table",
"def make_temporally_misaligned(x, n_per_spike=1, min_shift='auto',\n max_shift='auto'):\n n_spikes, waveform_length, n_neigh = x.shape\n n_out = int(n_spikes * n_per_spike)\n\n if max_shift == 'auto':\n max_shift = int(0.5 * waveform_length)\n\n if min_shift == 'auto':\n min_shift = int(0.1 * waveform_length)\n\n x_temporally = np.zeros((n_out, waveform_length, n_neigh))\n\n logger.debug('Making spikes with max_shift: %i, output shape: %s',\n max_shift, x_temporally.shape)\n\n for j in range(n_out):\n\n shift = random.choice([-1, 1]) * random.randint(min_shift, max_shift)\n\n idx = np.random.choice(x.shape[0], 1, replace=True)[0]\n spike = x[idx]\n\n if shift > 0:\n x_temporally[j, :(x_temporally.shape[1]-shift)] += spike[shift:]\n\n elif shift < 0:\n x_temporally[\n j, (-shift):] += spike[:(x_temporally.shape[1]+shift)]\n else:\n x_temporally[j] += spike\n\n return x_temporally",
"def generate_timeseries(F=F, H=H, stop=2000, x0=np.array([-0.72, -0.64]),\n R_v=np.eye(2)*0, R_n=np.eye(2)*0.001):\n dim = 2 # Number of dimensions for the system\n U, Y = [], []\n\n x = x0\n for k in range(stop):\n U.append(u(k, dim))\n x = F(x, U[-1]) + np.random.multivariate_normal(np.zeros(dim), R_v)\n Y.append(H(x) + np.random.multivariate_normal(np.zeros(dim), R_n))\n\n return U, Y, R_v, R_n",
"def adjustFrame(frame, shifts):\n if min(shifts)<0:\n botShifts = [colShift-min(shifts) for colShift in shifts]\n else:\n botShifts = [colShift for colShift in shifts]\n topShifts = [max(botShifts)-shift for shift in botShifts]\n newFrame=np.empty([frame.shape[1],frame.shape[0]+max(botShifts)])\n for i, col in enumerate(frame.T):\n newCol = np.concatenate((np.zeros(topShifts[i]),col,np.zeros(botShifts[i])))\n newFrame[i]=newCol\n newFrame=newFrame.T\n \n return newFrame",
"def adjustFrame(frame, shifts):\n if min(shifts)<0:\n botShifts = [colShift-min(shifts) for colShift in shifts]\n else:\n botShifts = [colShift for colShift in shifts]\n topShifts = [max(botShifts)-shift for shift in botShifts]\n newFrame=np.empty([frame.shape[1],frame.shape[0]+max(botShifts)])\n for i, col in enumerate(frame.T):\n newCol = np.concatenate((np.zeros(topShifts[i]),col,np.zeros(botShifts[i])))\n newFrame[i]=newCol\n newFrame=newFrame.T\n \n return newFrame",
"def populate_a_matrix_per_schedule(self):\n self.matrixes = []\n for i in range(self.num_schedules):\n m = np.zeros((2048, 20))\n self.matrixes.append(m)\n for i, each_matrix in enumerate(self.matrixes):\n # lets look at elements of schedule 1\n for j in range(self.schedule_array_train_naive[i][0], self.schedule_array_train_naive[i][1] + 1):\n binary_embedding = self.total_binary_embeddings[j]\n index = self.pass_in_embedding_out_state_ID(binary_embedding)\n # action taken at this instance\n action = self.Y_train_naive[j]\n each_matrix[index][action] += 1\n total_sum = each_matrix.sum()\n self.matrixes[i] = np.divide(each_matrix, total_sum)\n\n print('n matrices have been generated')",
"def generate_action_sequences(num_sequences, len_horizon, env):\n action_sequences = np.zeros((num_sequences, len_horizon, env.action_space.shape[0]))\n for s in range(num_sequences):\n for h in range(len_horizon):\n action_sequences[s,h] = env.action_space.sample() # random action\n\n return action_sequences",
"def create_time_slices(weeks, lookback, horizon, gap,\n step_size, holdout_window, num_steps):\n\n n = len(weeks)\n min_week = min(weeks)\n holdout_gap = horizon + gap - 1 # gap between train and holdout set\n holdout_size = horizon + holdout_window - 1\n step_space = (num_steps - 1) * step_size\n\n training_window = n - lookback - holdout_gap - holdout_size - step_space\n\n if training_window <= 0:\n err_msg = \"negative window size using specified parameters\"\n logging.error(err_msg)\n raise Exception(err_msg)\n\n def create_time_slice(step=0):\n base = min_week + lookback + step\n time_slice = (\n [base + x for x in range(training_window)],\n [base + x + holdout_gap + training_window\n for x in range(holdout_window)]\n )\n return time_slice\n\n output = [create_time_slice(x*step_size) for x in range(0, num_steps)]\n\n return output",
"def generate_time_series(length, M):\n #standard normal values\n X = np.random.normal(0,1,[length,M])\n return pd.DataFrame(X)",
"def hurst(data):\n\tn = 6\n\tdata = pd.Series(data).pct_change()[1:]\n\tars = list()\n\tlag = list()\n\tfor i in range(n):\n\t\tm = 2 ** i\n\t\tsize = np.size(data) // m\n\t\tlag.append(size)\n\t\tpanel = {}\n\t\tfor j in range(m):\n\t\t\tpanel[str(j)] = data[j * size:(j + 1) * size].values\n\n\t\tpanel = pd.DataFrame(panel)\n\t\tmean = panel.mean()\n\t\tdeviation = (panel - mean).cumsum()\n\t\tmaxi = deviation.max()\n\t\tmini = deviation.min()\n\t\tsigma = panel.std()\n\t\trs = maxi - mini\n\t\trs = rs / sigma\n\t\tars.append(rs.mean())\n\n\tlag = np.log10(lag)\n\tars = np.log10(ars)\n\thurst_exponent = np.polyfit(lag, ars, 1)\n\tresult = hurst_exponent[0]\n\treturn result",
"def get_params(shift):\r\n hshift, vshift = np.random.uniform(-shift, shift, size=2)\r\n\r\n return hshift, vshift",
"def random_transitions(self) -> np.ndarray:\n transitions = np.zeros((self.n_states, self.n_states))\n for i, row in enumerate(transitions):\n size = min(2, self.n_states - i)\n row[i:(i + size)] = self.random_state.dirichlet(np.ones(size))\n return transitions",
"def generate_dataframe(forecast, observed):\n dataframe = pd.DataFrame(columns=COLUMNS, dtype=str)\n\n # Write cumulative forecasts.\n forecast_date_str = FORECAST_DATE.strftime(\"%Y-%m-%d\")\n for cum_week in sorted(forecast.keys()):\n target_end_date = FIRST_WEEK + ((cum_week - 1) * datetime.timedelta(7)) \n target_end_date_str = target_end_date.strftime(\"%Y-%m-%d\")\n # Terminate the loop after 8 weeks of forecasts.\n if cum_week >= 8:\n break\n \n # Skip forecasts before the forecast date.\n if target_end_date <= FORECAST_DATE:\n continue\n\n # Write a row for \"weeks ahead\" if forecast end day is a Saturday.\n if target_end_date >= FIRST_WEEK and target_end_date.weekday() == 5:\n target = str(cum_week) + \" wk ahead cum death\"\n for state_id in forecast[cum_week].keys():\n for quantile in forecast[cum_week][state_id].keys():\n val = observed[(FORECAST_DATE - datetime.timedelta(1)).strftime(\"%Y-%m-%d\")][state_id]\n for i in range(1, cum_week + 1):\n val += forecast[i][state_id][quantile]\n if quantile == \"point\":\n dataframe = dataframe.append(\n generate_new_row(\n forecast_date=forecast_date_str,\n target=target,\n target_end_date=target_end_date_str,\n location=str(state_id),\n type=\"point\",\n quantile=\"NA\",\n value=val\n ), ignore_index=True)\n else:\n dataframe = dataframe.append(\n generate_new_row(\n forecast_date=forecast_date_str,\n target=target,\n target_end_date=target_end_date_str,\n location=str(state_id),\n type=\"quantile\",\n quantile=quantile,\n value=val\n ), ignore_index=True)\n \n # Write incident forecasts.\n forecast_date_str = FORECAST_DATE.strftime(\"%Y-%m-%d\")\n for cum_week in sorted(forecast.keys()):\n target_end_date = FIRST_WEEK + ((cum_week - 1) * datetime.timedelta(7)) \n target_end_date_str = target_end_date.strftime(\"%Y-%m-%d\")\n # Terminate the loop after 8 weeks of forecasts.\n if cum_week >= 8:\n break\n \n # Skip forecasts before the forecast date.\n if target_end_date <= FORECAST_DATE:\n continue\n\n if target_end_date >= FIRST_WEEK and target_end_date.weekday() == 5:\n target = str(cum_week) + \" wk ahead inc death\"\n for state_id in forecast[cum_week].keys():\n for quantile in forecast[cum_week][state_id].keys():\n if quantile == \"point\":\n dataframe = dataframe.append(\n generate_new_row(\n forecast_date=forecast_date_str,\n target=target,\n target_end_date=target_end_date_str,\n location=str(state_id),\n type=\"point\",\n quantile=\"NA\",\n value=forecast[cum_week][state_id][quantile]\n ), ignore_index=True)\n else:\n dataframe = dataframe.append(\n generate_new_row(\n forecast_date=forecast_date_str,\n target=target,\n target_end_date=target_end_date_str,\n location=str(state_id),\n type=\"quantile\",\n quantile=quantile,\n value=forecast[cum_week][state_id][quantile]\n ), ignore_index=True)\n \n return dataframe"
]
| [
"0.58462596",
"0.5509655",
"0.5509655",
"0.53215075",
"0.5302834",
"0.52888364",
"0.5211271",
"0.5162743",
"0.5153816",
"0.5103179",
"0.509162",
"0.5089095",
"0.5083639",
"0.504301",
"0.49893412",
"0.49773443",
"0.49760026",
"0.4973572",
"0.4966216",
"0.49432498",
"0.492389",
"0.492389",
"0.49141333",
"0.49095756",
"0.49003053",
"0.48742148",
"0.4856454",
"0.48467943",
"0.4846192",
"0.48413816"
]
| 0.7993488 | 0 |
Vectorizes prophet's logistic uncertainty by creating a matrix of future possible trends. | def _logistic_uncertainty(
prophet_model: Prophet,
mat: np.ndarray,
deltas: np.ndarray,
k: float,
m: float,
cap: np.ndarray,
t_time: np.ndarray,
n_length: int,
single_diff: float,
) -> np.ndarray:
def _ffill(arr: np.ndarray) -> np.ndarray:
mask = arr == 0
idx = np.where(~mask, np.arange(mask.shape[1]), 0)
np.maximum.accumulate(idx, axis=1, out=idx)
return arr[np.arange(idx.shape[0])[:, None], idx]
# for logistic growth we need to evaluate the trend all the way from the start of the train item
historical_mat, historical_time = _make_historical_mat_time(
deltas, prophet_model.changepoints_t, len(mat), single_diff
)
mat = np.concatenate([historical_mat, mat], axis=1)
full_t_time = np.concatenate([historical_time, t_time])
# apply logistic growth logic on the slope changes
k_cum = np.concatenate(
(np.ones((mat.shape[0], 1)) * k, np.where(mat, np.cumsum(mat, axis=1) + k, 0)),
axis=1,
)
k_cum_b = _ffill(k_cum)
gammas = np.zeros_like(mat)
for i in range(mat.shape[1]):
x = full_t_time[i] - m - np.sum(gammas[:, :i], axis=1)
ks = 1 - k_cum_b[:, i] / k_cum_b[:, i + 1]
gammas[:, i] = x * ks
# the data before the -n_length is the historical values, which are not needed, so cut the last n_length
k_t = (mat.cumsum(axis=1) + k)[:, -n_length:]
m_t = (gammas.cumsum(axis=1) + m)[:, -n_length:]
sample_trends = cap / (1 + np.exp(-k_t * (t_time - m_t)))
# remove the mean because we only need width of the uncertainty centered around 0
# we will add the width to the main forecast - yhat (which is the mean) - later
return sample_trends - sample_trends.mean(axis=0) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _sample_trend_uncertainty(\n prophet_model: Prophet,\n n_samples: int,\n df: pd.DataFrame,\n iteration: int = 0,\n) -> np.ndarray:\n\n # when there is only historical data\n # given that df is sorted by time, it's last item has the largest date.\n if df[\"t\"].iloc[-1] <= 1:\n # there is no trend uncertainty in historic trends\n uncertainties = np.zeros((n_samples, len(df)))\n else:\n\n future_df = df.loc[df[\"t\"] > 1]\n n_length = len(future_df)\n hist_len = len(df) - n_length\n # handle 1 length futures by using history\n if n_length > 1:\n single_diff = np.diff(future_df[\"t\"]).mean()\n else:\n single_diff = np.diff(prophet_model.history[\"t\"]).mean()\n change_likelihood = len(prophet_model.changepoints_t) * single_diff\n deltas = prophet_model.params[\"delta\"][iteration]\n m0 = prophet_model.params[\"m\"][iteration]\n k = prophet_model.params[\"k\"][iteration]\n mean_delta = np.mean(np.abs(deltas)) + 1e-8\n if prophet_model.growth == \"linear\":\n mat = _make_trend_shift_matrix(\n mean_delta, change_likelihood, n_length, n_samples=n_samples\n )\n uncertainties = mat.cumsum(axis=1).cumsum(\n axis=1\n ) # from slope changes to actual values\n uncertainties *= single_diff # scaled by the actual meaning of the slope\n elif prophet_model.growth == \"logistic\":\n mat = _make_trend_shift_matrix(\n mean_delta, change_likelihood, n_length, n_samples=n_samples\n )\n uncertainties = _logistic_uncertainty(\n prophet_model=prophet_model,\n mat=mat,\n deltas=deltas,\n k=k,\n m=m0,\n cap=future_df[\"cap_scaled\"].values,\n t_time=future_df[\"t\"].values,\n n_length=n_length,\n single_diff=single_diff,\n )\n elif prophet_model.growth == \"flat\":\n # no trend uncertainty when there is no growth\n uncertainties = np.zeros((n_samples, n_length))\n else:\n raise NotImplementedError\n # historical part\n if hist_len > 0:\n past_uncertainty = np.zeros((n_samples, hist_len))\n uncertainties = np.concatenate([past_uncertainty, uncertainties], axis=1)\n return uncertainties",
"def _sample_predictive_trend_vectorized(\n prophet_model: Prophet, df: pd.DataFrame, n_samples: int, iteration: int = 0\n) -> np.ndarray:\n\n if prophet_model.growth == \"linear\":\n return sample_linear_predictive_trend_vectorize(\n prophet_model, df, n_samples, iteration\n )\n\n deltas = prophet_model.params[\"delta\"][iteration]\n m0 = prophet_model.params[\"m\"][iteration]\n k = prophet_model.params[\"k\"][iteration]\n if prophet_model.growth == \"logistic\":\n expected = prophet_model.piecewise_logistic(\n df[\"t\"].values,\n df[\"cap_scaled\"].values,\n deltas,\n k,\n m0,\n prophet_model.changepoints_t,\n )\n elif prophet_model.growth == \"flat\":\n expected = prophet_model.flat_trend(df[\"t\"].values, m0)\n else:\n raise NotImplementedError\n\n uncertainty = _sample_trend_uncertainty(prophet_model, n_samples, df, iteration)\n return (\n np.tile(expected, (n_samples, 1)) + uncertainty\n ) * prophet_model.y_scale + np.tile(df[\"floor\"].values, (n_samples, 1))",
"def estimate_emission(train):\n emission = [[0 for i in xrange(len(WORDS))] for j in xrange(len(TAGS))]\n for sent in train:\n new_sent = add_start_and_stop(sent)\n for word, tag in new_sent:\n emission[TAG2INDEX[tag]][WORDS2INDEX[word]] += 1\n # normalize so each row has sum of 1\n for row in emission:\n row_sum = sum(row)\n for i in xrange(len(row)):\n row[i] = safe_log(float(row[i]) / row_sum)\n return emission",
"def predict_uncertainty(\n prophet_model: Prophet, df: pd.DataFrame, vectorized: bool\n) -> pd.DataFrame:\n sim_values = sample_posterior_predictive(prophet_model, df, vectorized)\n\n lower_p = 100 * (1.0 - prophet_model.interval_width) / 2\n upper_p = 100 * (1.0 + prophet_model.interval_width) / 2\n\n series = {}\n\n for key in [\"yhat\", \"trend\"]:\n series[\"{}_lower\".format(key)] = prophet_model.percentile(\n sim_values[key], lower_p, axis=0\n )\n series[\"{}_upper\".format(key)] = prophet_model.percentile(\n sim_values[key], upper_p, axis=0\n )\n\n return pd.DataFrame(series)",
"def ytrue(t):\n return np.array([np.exp(lam*t)])",
"def _make_trend_shift_matrix(\n mean_delta: float, likelihood: float, future_length: float, n_samples: int\n) -> np.ndarray:\n # create a bool matrix of where these trend shifts should go\n bool_slope_change = np.random.uniform(size=(n_samples, future_length)) < likelihood\n shift_values = np.random.laplace(0, mean_delta, size=bool_slope_change.shape)\n mat = shift_values * bool_slope_change\n n_mat = np.hstack([np.zeros((len(mat), 1)), mat])[:, :-1]\n mat = (n_mat + mat) / 2\n return mat",
"def _calculate_log_det(self, var):\n log_det = []\n\n for k in range(self.n_components):\n evals, evecs = tf.linalg.eig(var[0, k])\n\n log_det.append(tf.reduce_sum(tf.math.log(tf.math.real(evals))))\n log_det = tf.convert_to_tensor(log_det)\n return tf.expand_dims(log_det, -1)",
"def sample_linear_predictive_trend_vectorize(\n prophet_model: Prophet, df: pd.DataFrame, sample_size: int, iteration: int\n) -> np.ndarray:\n k = prophet_model.params[\"k\"][iteration]\n m = prophet_model.params[\"m\"][iteration]\n deltas = prophet_model.params[\"delta\"][iteration]\n changepoints_t = prophet_model.changepoints_t\n changepoint_ts = np.row_stack([changepoints_t] * sample_size)\n\n deltas = np.row_stack([deltas] * sample_size)\n\n t = np.array(df[\"t\"])\n T = t.max()\n\n # vectorize possion sample\n S = len(changepoints_t)\n if (\n S * (T - 1) > 0\n ): # ensuring parameter of Poission distribution is valid; otherwise no need to generate samples\n possion_sample = np.random.poisson(S * (T - 1), sample_size)\n max_possion_num = possion_sample.max()\n else:\n max_possion_num = 0\n\n if max_possion_num > 0:\n\n # sample change points\n changepoint_ts_new = 1 + np.random.rand(sample_size, max_possion_num) * (T - 1)\n changepoint_ts_new.sort(axis=1)\n\n # create mask for deltas -> to mute some deltas based on number of change points\n mask = np.random.uniform(\n 0, max_possion_num, max_possion_num * sample_size\n ).reshape(sample_size, -1)\n mask = mask < possion_sample[:, None]\n\n # Sample deltas\n lambda_ = np.mean(np.abs(deltas)) + 1e-8\n deltas_new = np.random.laplace(\n 0, lambda_, max_possion_num * sample_size\n ).reshape(sample_size, -1)\n # mute some deltas based on mask\n deltas_new = deltas_new * mask\n\n # Prepend the times and deltas from the history\n changepoint_ts = np.column_stack((changepoint_ts, changepoint_ts_new))\n deltas = np.column_stack((deltas, deltas_new))\n\n trend = _piecewise_linear_vectorize(t, deltas, k, m, changepoint_ts)\n\n return trend * prophet_model.y_scale + df[\"floor\"].values",
"def multivariat_ml(x):\n\tnumel, dim=x.shape\n\tmu=x.mean(axis=0)\n\tcov=np.zeros((dim,dim))\n\tfor dt in x:\n\t\tcov+=1/numel*np.outer((dt-mu),(dt-mu))\n\treturn mu, cov",
"def _y_hat(df, coefficients):\n log = np.log\n c = coefficients\n log_y_ijs = df.exportness + \\\n c['mindist'] * df.mindist + \\\n c['x_j'] * log(df.x_j) + \\\n c['f_is'] * log(df.f_is) + c['v_is'] * log(df.v_is)\n y_ijs = np.exp(log_y_ijs)\n return y_ijs.fillna(0)",
"def compute_log(tx, index_log, mean=[], std=[]):\n tx_new = np.log10(3+abs(tx[:,index_log]))\n return standardize(tx_new,mean,std)",
"def create_matrix(data, discrete, prop, cutoff, nfeatures):\n y = np.zeros(len(data))\n \n count = 0 \n for i in range (len(data)):\n if data[i][nfeatures+prop]>cutoff:\n y[i]=1\n count += 1\n else:\n y[i]=0\n \n if discrete==False:\n y[i]=data[i][nfeatures+prop]\n \n x = data[:, 0:nfeatures]\n \n \n print (\"Number of good designs \"+str(count)+\" out of total \"+str(len(y)))\n return x, y",
"def _forecast_train(stan_fit, p):\n \n train_n = stan_fit.data['N']\n t = np.arange(train_n) / train_n\n train_changepoints = stan_fit.data['s']\n \n A = (t[:, None] > train_changepoints) * 1. # t x s\n \n X = fourier_series(t, p=p, fourier_order=stan_fit.data['n_fourier']) \n \n samples = stan_fit.extract(permuted=True)\n \n samples_n = len(samples['lp__'])\n \n y_hat = np.zeros(shape=(samples_n, len(t)))\n mu_trend = np.zeros(shape=(samples_n, len(t)))\n mu_s = np.zeros(shape=(samples_n, len(t)))\n \n for iteration in range(samples_n):\n \n k = samples['k'][iteration]\n m = samples['m'][iteration]\n deltas = samples['delta'][iteration]\n beta = samples['beta'][iteration]\n sigma = samples['sigma'][iteration]\n \n gamma = -train_changepoints * deltas;\n mu_trend[iteration,:] = k + np.matmul(A, deltas) * t + (m + np.matmul(A, gamma))\n mu_s[iteration,:] = np.matmul(X, beta)\n \n mu = mu_trend[iteration] + mu_s[iteration]\n \n y_hat[iteration,:] = np.random.normal(mu, sigma)\n \n return y_hat, mu_trend, mu_s, samples, samples_n, train_n, train_changepoints",
"def each_evidence(y_, f, fh, v, s, vh, N, D):\n epsilon = 1e-5\n alpha = 1.0\n beta = 1.0\n lam = alpha / beta\n tmp = (vh @ (f @ np.ascontiguousarray(y_)))\n for _ in range(11):\n # should converge after at most 10 steps\n # typically converge after two or three steps\n gamma = (s / (s + lam)).sum()\n # A = v @ np.diag(alpha + beta * s) @ v.transpose() # no need to compute A\n # A_inv = v @ np.diag(1.0 / (alpha + beta * s)) @ v.transpose() # no need to compute A_inv\n m = v @ (tmp * beta / (alpha + beta * s))\n alpha_de = (m * m).sum()\n alpha = gamma / (alpha_de + epsilon)\n beta_de = ((y_ - fh @ m) ** 2).sum()\n beta = (N - gamma) / (beta_de + epsilon)\n new_lam = alpha / beta\n if np.abs(new_lam - lam) / lam < 0.01:\n break\n lam = new_lam\n evidence = D / 2.0 * np.log(alpha) \\\n + N / 2.0 * np.log(beta) \\\n - 0.5 * np.sum(np.log(alpha + beta * s)) \\\n - beta / 2.0 * (beta_de + epsilon) \\\n - alpha / 2.0 * (alpha_de + epsilon) \\\n - N / 2.0 * np.log(2 * np.pi)\n return evidence / N, alpha, beta, m",
"def log2FC_data(data):\n log2FC_df = pd.DataFrame()\n for i in range(0,len(data.columns),10):\n i = i\n data_subset = data[data.columns[i:i+10]]\n log_data = data_subset.apply(np.log2)\n \n new_df = pd.DataFrame()\n for j in range(len(log_data.columns)):\n tmp_col = log_data.iloc[:, j].name\n tmp_df = log_data.iloc[:,0] - log_data.iloc[:,j]\n new_df[tmp_col] = tmp_df\n \n log2FC_df = log2FC_df.append(new_df.T)\n log2FC_df = log2FC_df.T\n return log2FC_df",
"def log_predictive_density(self, y_test, mu_star, var_star, Y_metadata=None):\n assert y_test.shape==mu_star.shape\n assert y_test.shape==var_star.shape\n assert y_test.shape[1] == 1\n\n flat_y_test = y_test.flatten()\n flat_mu_star = mu_star.flatten()\n flat_var_star = var_star.flatten()\n\n if Y_metadata is not None:\n #Need to zip individual elements of Y_metadata aswell\n Y_metadata_flat = {}\n if Y_metadata is not None:\n for key, val in Y_metadata.items():\n Y_metadata_flat[key] = np.atleast_1d(val).reshape(-1,1)\n\n zipped_values = []\n\n for i in range(y_test.shape[0]):\n y_m = {}\n for key, val in Y_metadata_flat.items():\n if np.isscalar(val) or val.shape[0] == 1:\n y_m[key] = val\n else:\n #Won't broadcast yet\n y_m[key] = val[i]\n zipped_values.append((flat_y_test[i], flat_mu_star[i], flat_var_star[i], y_m))\n else:\n #Otherwise just pass along None's\n zipped_values = zip(flat_y_test, flat_mu_star, flat_var_star, [None]*y_test.shape[0])\n\n def integral_generator(yi, mi, vi, yi_m):\n \"\"\"Generate a function which can be integrated\n to give p(Y*|Y) = int p(Y*|f*)p(f*|Y) df*\"\"\"\n def f(fi_star):\n #exponent = np.exp(-(1./(2*vi))*np.square(mi-fi_star))\n #from GPy.util.misc import safe_exp\n #exponent = safe_exp(exponent)\n #res = safe_exp(self.logpdf(fi_star, yi, yi_m))*exponent\n\n #More stable in the log space\n res = np.exp(self.logpdf(fi_star, yi, yi_m)\n - 0.5*np.log(2*np.pi*vi)\n - 0.5*np.square(fi_star-mi)/vi)\n if not np.isfinite(res):\n import ipdb; ipdb.set_trace() # XXX BREAKPOINT\n return res\n\n return f\n\n p_ystar, _ = zip(*[quad(integral_generator(yi, mi, vi, yi_m), -np.inf, np.inf)\n for yi, mi, vi, yi_m in zipped_values])\n p_ystar = np.array(p_ystar).reshape(*y_test.shape)\n return np.log(p_ystar)",
"def mfom_eer_normalized_np(y_true, y_pred):\n s = y_true.shape\n y_true = np.reshape(y_true, (-1, s[-1]))\n y_pred = np.reshape(y_pred, (-1, s[-1]))\n y_neg = 1 - y_true\n # number of positive samples per each class\n P = np.sum(y_true, axis=0)\n # number of negative samples per each class\n N = np.sum(y_neg, axis=0)\n # smooth false negative and false positive\n fn = y_pred * y_true\n fp = (1. - y_pred) * y_neg\n fnr = np.log(np.sum(fn, axis=0) + 1.) - np.log(P + 1.)\n fpr = np.log(np.sum(fp, axis=0) + 1.) - np.log(N + 1.)\n fnr = np.exp(fnr)\n fpr = np.exp(fpr)\n smooth_eer = fpr + .5 * np.abs(fnr - fpr) # dim = number of classes\n return np.mean(smooth_eer)",
"def logits(self):\n return np.array([m['actor'] for m in self.model_outs], dtype=np.float32)",
"def compute_empirical_mutual_info_nats(var1_values, var2_values):\n\n # -------------------------------------------------------------------------\n # YOUR CODE HERE\n #\n\n empirical_mutual_info_nats = 0.0\n \n var1_distribution = compute_empirical_distribution(var1_values)\n var2_distribution = compute_empirical_distribution(var2_values)\n joint_distribution = compute_empirical_distribution(list(zip(var1_values,var2_values)))\n \n empirical_mutual_info_nats = 0\n for var1 in var1_distribution:\n for var2 in var2_distribution:\n empirical_mutual_info_nats += joint_distribution[(var1, var2)] \\\n * np.log(joint_distribution[(var1,var2)]/(var1_distribution[var1]*var2_distribution[var2]))\n \n #\n # END OF YOUR CODE\n # -------------------------------------------------------------------------\n\n return empirical_mutual_info_nats",
"def _compute_log_moment(self, num_examples=5040):\n q = tf.cast(num_examples, tf.float64) * 1.0 / self._total_examples\n mu_1, sigma_1 = 0, 4 # mean and standard deviation\n s_1 = np.random.normal(mu_1, sigma_1, 1000)\n mu_2, sigma_2 = 1, 4 # mean and standard deviation\n s_2 = np.random.normal(mu_2, sigma_2, 1000)\n s = (1-q)*s_1 + q*s_2\n moment_1 =[0]*len(self._log_moments)\n moment_2 = [0]*len(self._log_moments)\n log_moment = [0] * len(self._log_moments)\n for i in range(len(self._log_moments)):\n for j in range(len(s_1)):\n moment_1[i] += ((s_1[j]/s[j])**self._moment_orders[i])/len(s_1)\n moment_2[i] += ((s[j] / s_1[j]) ** self._moment_orders[i]) / len(s_1)\n for i in range(len(self._log_moments)):\n log_moment[i] = math.log(abs(max(moment_1[i],moment_2[i])))\n return log_moment",
"def LogLikeEducation(DFData,SATTuition,grad_horizon,sectors,beta,\n ability,flows_penalized,unskilled_var,grade_params_by_quality,normReps,simReps,\n LaborGradeRange,final_size,dropout_payouts, STEM_payouts_by_quality,\n nonSTEM_payouts_by_quality,gamma_p, unskilled_meanvar, norm_quantiles,\n skilled_wage_coeffs,unskilled_wage_coeffs, skilled_wage_covar,LaborGradeInt,\n choose,year_four_intercept,year_four_flow_penalized, ed_switching_costs,\n univ_type_shifters,grad_payoff,return_array=False):\n\n year_four_exp = 0\n year_four_quadratic = 0\n year_four_year_1 = 0\n\n\n flowUnskilled=flows_penalized[sectors]\n flowSTEM=flows_penalized[sectors+1]\n flownonSTEM=flows_penalized[sectors+2]\n flow_educ=np.array([flowSTEM,flownonSTEM,flowUnskilled],dtype=np.float64)\n ed_Emax=np.zeros((len(SATTuition),6,81,2),dtype=np.float64)\n STEM1=np.zeros(len(SATTuition),dtype=np.float64)\n nonSTEM1=np.zeros(len(SATTuition),dtype=np.float64)\n\n flows_by_univ_type = [np.zeros(3,dtype=np.float64) for x in range(4)]\n flows_by_univ_type[0][0]=flow_educ[0]\n flows_by_univ_type[0][1]=flow_educ[1]\n flows_by_univ_type[0][2]=flow_educ[2]\n for x in range(1,4):\n flows_by_univ_type[x][0]=flow_educ[0]+univ_type_shifters[2*x-2]\n flows_by_univ_type[x][1]=flow_educ[1]+univ_type_shifters[2*x-1]\n flows_by_univ_type[x][2]=flows_penalized[sectors]\n\n for idx,x in enumerate(SATTuition):\n # differentiate by quality\n if x[5] == 1:\n STEM_payouts = STEM_payouts_by_quality[1]\n nonSTEM_payouts = nonSTEM_payouts_by_quality[1]\n grade_params = grade_params_by_quality[1]\n else:\n STEM_payouts = STEM_payouts_by_quality[0]\n nonSTEM_payouts = nonSTEM_payouts_by_quality[0]\n grade_params = grade_params_by_quality[0]\n\n tuition = x[6]\n flow_educ_univ_type=flows_by_univ_type[x[7]]\n\n Ed=EmaxEducationJIT(dropout_payouts,STEM_payouts,nonSTEM_payouts,\n grade_params,gamma_p,beta,flow_educ_univ_type,\n np.array(([tuition,tuition,tuition,tuition]),dtype=np.float64),\n np.array((x[2],x[3],x[4]),dtype=np.float64),\n np.array((ability[0],ability[1]),dtype=np.float64),\n unskilled_meanvar, norm_quantiles, year_four_intercept,\n year_four_flow_penalized, ed_switching_costs, grad_payoff)\n Ed.solve()\n ed_Emax[idx]=Ed.EmaxEducationValues\n STEM1[idx]=Ed.STEM_cond_val_first\n nonSTEM1[idx]=Ed.nonSTEM_cond_val_first\n del Ed\n\n # this part is hard coded for the number of sectors\n skilled_experience=create_skilled_experience(np.array(DFData.skilled1),\n np.array(DFData.skilled2),np.array(DFData.skilled3),\n np.array(DFData.hp))\n\n\n wage_shock=calculate_wage_shock(np.array(DFData.outcome),\n np.array(DFData.col_type),np.array(DFData.numeric_choice),\n np.array(DFData.numeric_state),skilled_wage_coeffs,\n unskilled_wage_coeffs,skilled_experience,\n np.array(DFData.unskilled),np.array(DFData.dSTEM),np.array(DFData.tGPA),\n np.array(DFData.quality),np.array(DFData.tdropout),year_four_intercept,\n year_four_exp,year_four_quadratic,year_four_year_1)\n\n\n (meanterm,covar,skilled_shocks,hp_wage_shocks)=(\n MVNposterior(skilled_wage_covar,4))\n skilled_shocks_list=[x for x in skilled_shocks]\n skilled_wage_shocks=tuple(skilled_shocks_list)\n\n unskilledWageShocks=(np.transpose(scipy.stats.norm.ppf(\n (np.array(range(simReps))+1)/(simReps+1))*(unskilled_var[0][0])**0.5))\n firstUnskilledDraws=np.exp(unskilled_wage_coeffs[0]+unskilledWageShocks)\n\n year_four_first_draws=np.exp(year_four_intercept+unskilledWageShocks)\n\n num_grades=20\n grade_quantiles=scipy.stats.norm.ppf(\n np.array(range(1,num_grades))/num_grades)\n\n 
out=calculate_likelihood_education(grad_horizon,sectors,\n np.array(DFData.time),\n np.array(DFData.numeric_choice),np.array(DFData.numeric_state),\n np.array(DFData.cumulativeGPA),np.array(DFData.tdropout),ability,\n np.array(DFData.SAT_M),\n np.array(DFData.SAT_V),np.array(DFData.hs_GPA),np.array(DFData.tuition),\n STEM_payouts_by_quality, nonSTEM_payouts_by_quality, grade_quantiles,\n dropout_payouts, wage_shock,\n skilled_wage_shocks,hp_wage_shocks,skilled_experience,\n np.array(DFData.unskilled),np.array(DFData.outcome),\n np.array(DFData.dSTEM),np.array(DFData.tGPA),meanterm,\n ed_Emax,np.array(DFData.ed_emax_mapping),\n flowUnskilled,flow_educ,skilled_wage_covar,\n gamma_p,beta,skilled_wage_coeffs,unskilled_wage_coeffs,\n unskilled_var,choose,unskilledWageShocks,grade_params_by_quality,STEM1,\n nonSTEM1,firstUnskilledDraws,LaborGradeInt,np.array(DFData.quality),\n year_four_first_draws,year_four_flow_penalized,ed_switching_costs,\n np.array(DFData.lastchoice),univ_type_shifters,\n np.array(DFData.univ_type_num),grad_payoff)\n\n del ed_Emax\n if return_array:\n return out\n return np.sum(out)",
"def compute_logits(self):\n # [num test images, 1, embedding size].\n test_embeddings = tf.expand_dims(self.test_embeddings, 1)\n\n # [1, num_clases, embedding_size].\n prototypes = tf.expand_dims(self.prototypes, 0)\n\n # Squared euclidean distances between each test embedding / prototype pair.\n distances = tf.reduce_sum(tf.square(test_embeddings - prototypes), 2)\n self.test_logits = -distances\n return self.test_logits",
"def logistic_derivative(errors):\n return [log_deriv(error) for error in errors]",
"def calculate_uncertainty(logits):\n assert logits.shape[1] == 1\n gt_class_logits = logits.clone()\n return -(torch.abs(gt_class_logits))",
"def generate_varp(coef_tensor,cov_matrix,length,initial):\n \n # We first want to make sure the coefficient matrix we've received is \n # square in its first two dimensions.\n try:\n assert coef_tensor.shape[0]==coef_tensor.shape[1]\n except AssertionError:\n print 'Coefficient tensor is not square in first two dimensions.'\n \n try:\n p = coef_tensor.shape[2]\n except IndexError:\n p = 1\n \n r = coef_tensor.shape[0]\n \n y = np.empty([length,r])\n \n # We'll sample all our errors at once. Innovations should be of dimension length x r\n innovations = np.random.multivariate_normal(np.zeros(r),cov_matrix,size = length)\n \n for t in range(length):\n \n # If the timestep is less than the VAR(p) order, then there won't be enough\n # previous data to build a lagged data set\n if t < p:\n y[t,:] = initial\n else:\n # We snap off a block of recent values\n # with shape [r,p].\n recent_y_matrix = y[t-p:t,:]\n \n # Since the time index runs like low:high implies past:recent,\n # we need to invert the time axis because the coefficient tensor\n # has a time index running like low:high implies recent:past,\n # i.e. p=1 comes first\n reversed_recent = np.flipud(recent_y_matrix)\n \n # Then, we use einsum to perform a tensor contraction \n # and then we add the innovations.\n y[t,:] = np.einsum('ikj,jk->i',coef_tensor,reversed_recent) + innovations[t,:]\n \n return y",
"def compute_logits(self):\n # [num train labels, num classes] where each row is a one-hot-encoded label.\n one_hot_train_labels = tf.one_hot(self.data.train_labels, self.way)\n\n # Undocumented in the paper, but *very important*: *only* the support set\n # embeddings is L2-normalized, which means that the distance is not exactly\n # a cosine distance. For comparison we also allow for the actual cosine\n # distance to be computed, which is controlled with the\n # `exact_cosine_distance` instance attribute.\n train_embeddings = tf.nn.l2_normalize(\n self.train_embeddings, 1, epsilon=1e-3)\n test_embeddings = self.test_embeddings\n if self.exact_cosine_distance:\n test_embeddings = tf.nn.l2_normalize(test_embeddings, 1, epsilon=1e-3)\n # [num_test_images, num_train_images]\n similarities = tf.matmul(\n test_embeddings, train_embeddings, transpose_b=True)\n attention = tf.nn.softmax(similarities)\n\n # [num_test_images, way]\n probs = tf.matmul(attention, one_hot_train_labels)\n self.test_logits = tf.log(probs)\n return self.test_logits",
"def log(self): # just use base?\n return Factor().__build( VarSet(self.v) , np.log(self.t) )",
"def generate_ml_matrix(full_df, pred_ticker, feat_days):\n\n feature_tickers = [i for i in full_df.ticker.unique() if i != pred_ticker]\n dfml = full_df[full_df.ticker == pred_ticker].drop('ticker', axis=1)\n dfml.rename({'percent_change_feat': f'{pred_ticker}_percent_change_{feat_days}'}, axis=1, inplace=True)\n dfml.rename({'percent_change_pred': f'{pred_ticker}_percent_change_pred'}, axis=1, inplace=True)\n for ticker in feature_tickers:\n help_df = full_df[full_df.ticker == ticker][['past_date', 'current_date', 'prediction_date', 'percent_change_feat']]\n help_df.rename({'percent_change_feat': f'{ticker}_percent_change_{feat_days}'}, axis=1, inplace=True)\n dfml = pd.merge(dfml, help_df,\n left_on=['past_date', 'current_date', 'prediction_date'],\n right_on=['past_date', 'current_date', 'prediction_date'],\n how='left')\n return dfml",
"def construct_df(t,y):\n\n df = np.zeros((3,3))\n\n df[0][0] = 77.27*(1.0 - y(1) -2.*8.375e-6*y(0))\n df[0][1] = 77.27*(1.0 -y(0) )\n df[0][2] = 0.0;\n df[1][0] = -1.0/77.27;\n df[1][1] = (-1.0/77.27)*(1.0+y(0))\n df[1][2] = 1.0/77.27\n df[2][0] = 0.161\n df[2][1] = 0.0\n df[2][2] = -0.161\n\n return df",
"def generate_daily_matrix(full_df, feat_days):\n pred_ticker = full_df.ticker.unique()[0]\n feature_tickers = [i for i in full_df.ticker.unique() if i != pred_ticker]\n dfml = full_df[full_df.ticker == pred_ticker].drop('ticker', axis=1)\n dfml.rename({'percent_change_pred': f'{pred_ticker}_percent_change_{feat_days}'}, axis=1, inplace=True)\n for ticker in feature_tickers:\n help_df = full_df[full_df.ticker == ticker][['past_date', 'current_date', 'prediction_date', 'percent_change_pred']]\n help_df.rename({'percent_change_pred': f'{ticker}_percent_change_{feat_days}'}, axis=1, inplace=True)\n dfml = pd.merge(dfml, help_df,\n left_on=['past_date', 'current_date', 'prediction_date'],\n right_on=['past_date', 'current_date', 'prediction_date'],\n how='left')\n return dfml.drop('percent_change_feat', axis=1)"
]
| [
"0.62239593",
"0.555138",
"0.55282885",
"0.54154485",
"0.54047865",
"0.5336276",
"0.5314676",
"0.5234991",
"0.520957",
"0.5207892",
"0.51892084",
"0.51597226",
"0.5141467",
"0.5124765",
"0.5116228",
"0.5116116",
"0.50987893",
"0.509756",
"0.5079248",
"0.50788826",
"0.50776434",
"0.50758857",
"0.50626105",
"0.5060002",
"0.5058874",
"0.50584763",
"0.50569046",
"0.5055658",
"0.50525063",
"0.50465024"
]
| 0.6462167 | 0 |
This is the 1st version of the cookies checker In this function, we check the website cookies | def cookies_check_v1():
cookies_check = driver.find_element_by_xpath("//button[@id='didomi-notice-agree-button']")
cookies_check.click() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cookies_check_v2():\n cookies_check = driver.find_element_by_xpath(\"//button[@class='jad_cmp_paywall_button jad_cmp_paywall_button-cookies jad_cmp_paywall_cookies didomi-components-button didomi-button didomi-dismiss-button didomi-components-button--color didomi-button-highlight highlight-button']\")\n cookies_check.click()",
"def check_cookies(self):\r\n try:\r\n driver = self.driver\r\n my_file = open(\"CookiesFb.pkl\")\r\n return my_file, driver\r\n\r\n except IOError:\r\n print(\"Cookies does not exist. Will create cookies anyway. Wait for a second~\")\r\n #Buka file credential\r\n self.login(accountfacebook.EMAIL, accountfacebook.PWD)\r\n print(\"File cookies created. Next file run will pass login page\")",
"def test_78_cookies_warning(self):\r\n # As Anonymous\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be shown\"\r\n assert dom.find(id='cookies_warning') is not None, err_msg\r\n\r\n # As user\r\n self.signin(email=Fixtures.email_addr2, password=Fixtures.password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be shown\"\r\n assert dom.find(id='cookies_warning') is not None, err_msg\r\n self.signout()\r\n\r\n # As admin\r\n self.signin(email=Fixtures.root_addr, password=Fixtures.root_password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be shown\"\r\n assert dom.find(id='cookies_warning') is not None, err_msg\r\n self.signout()",
"def test_79_cookies_warning2(self):\r\n # As Anonymous\r\n self.app.set_cookie(\"localhost\", \"PyBossa_accept_cookies\", \"Yes\")\r\n res = self.app.get('/', follow_redirects=True, headers={})\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be hidden\"\r\n assert dom.find(id='cookies_warning') is None, err_msg\r\n\r\n # As user\r\n self.signin(email=Fixtures.email_addr2, password=Fixtures.password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be hidden\"\r\n assert dom.find(id='cookies_warning') is None, err_msg\r\n self.signout()\r\n\r\n # As admin\r\n self.signin(email=Fixtures.root_addr, password=Fixtures.root_password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be hidden\"\r\n assert dom.find(id='cookies_warning') is None, err_msg\r\n self.signout()",
"def init_base_cookie(self):\n url = 'https://ceq.nkust.edu.tw/'\n try:\n res = self.main_session.get(url=url)\n if res.status_code == 200:\n soup = BeautifulSoup(res.text, 'html.parser')\n\n self.csrf_key = soup.find(\n 'input', {'name': '__RequestVerificationToken'}).get('value')\n if self.csrf_key != \"\":\n return True\n except:\n return False\n return False",
"def check_xsrf_cookie(self):\n pass",
"def verify_cookies(self, device):\n self.assertTrue(device.cookies is not None)",
"def okcookies(request):\n if request.session.get('okcookies', False) or request.GET.get('cookie-agree', False):\n request.session['okcookies'] = True\n return {'OK_COOKIES': True}\n else:\n return {'OK_COOKIES': False}",
"def allocine_connect(url):\n #go to allocine page\n driver.get(url)\n #sleep until the page load\n sleep(10)\n #click on cookies button\n print(\"cookies checking\")\n cookies_check_v2()\n sleep(1)\n driver.get(url)",
"def session_relevance():\r\n\r\n # instantiation a session object in the first step\r\n session = requests.Session()\r\n\r\n # interface 1\r\n response = session.get(base_url + '/cookies/set/user/tom')\r\n print(response.text)\r\n\r\n # get the cookie in the same domain\r\n response = session.get(base_url + '/cookies')\r\n print(response.cookies)\r\n print(response.text)\r\n # In fact this is the rule you must obey in the test website\r\n # response = session.get(base_url + '/cookies/set/user/george')\r\n # print(response.status_code)\r\n # print(response.text)\r",
"def _analyze_cookie_security(self, request, response, cookie_obj,\n cookie_header_value):\n self._secure_over_http(request, response, cookie_obj,\n cookie_header_value)\n self._not_secure_over_https(request, response, cookie_obj,\n cookie_header_value)\n\n fingerprinted = self._match_cookie_fingerprint(request, response,\n cookie_obj)\n self._http_only(request, response, cookie_obj,\n cookie_header_value, fingerprinted)",
"def parse_cookies( headers ):",
"def extract_cookie_info():\n # setup cookie jar\n cj = cookielib.CookieJar()\n login_data = urllib.urlencode({ID_USERNAME: USERNAME,ID_PASSWORD: PASSWORD})\n # create url opener\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))\n resp = opener.open(LOGIN_URL, login_data)\n # send login info\n for cookie in cj:\n print \"----First time cookie: %s --> %s\" % (cookie.name,cookie.value)\n print \"Headers: %s\" % resp.headers\n # now access without any login info\n resp = opener.open(NORMAL_URL)\n for cookie in cj:\n print \"++++Second time cookie: %s --> %s\" % (cookie.name,cookie.value)\n print \"Headers: %s\" % resp.headers",
"def __cookieFilter(self, request):\n if not self.__loaded:\n self.__load()\n \n if self.__acceptCookies == self.AcceptNever:\n res = self.__isOnDomainList(self.__exceptionsAllow,\n request.origin.host())\n if not res:\n return False\n \n if self.__acceptCookies == self.AcceptAlways:\n res = self.__isOnDomainList(self.__exceptionsBlock,\n request.origin.host())\n if res:\n return False\n \n if (\n self.__acceptCookies == self.AcceptOnlyFromSitesNavigatedTo and\n request.thirdParty\n ):\n return False\n \n return True",
"def verify(ctx, config):\n logger = config['logger']\n px_cookie = ctx['_px']\n try:\n if not px_cookie:\n logger.debug('No risk cookie on the request')\n ctx['s2s_call_reason'] = 'no_cookie'\n return False\n\n decrypted_cookie = decrypt_cookie(config['cookie_key'], px_cookie)\n\n if not decrypted_cookie:\n logger.error('Cookie decryption failed')\n ctx['px_orig_cookie'] = px_cookie\n ctx['s2s_call_reason'] = 'cookie_decryption_failed'\n return False\n\n decoded_cookie = json.loads(decrypted_cookie)\n try:\n decoded_cookie['s'], decoded_cookie['s']['b'], decoded_cookie['u'], decoded_cookie['t'], decoded_cookie['v']\n except:\n logger.error('Cookie decryption failed')\n ctx['px_orig_cookie'] = px_cookie\n ctx['s2s_call_reason'] = 'cookie_decryption_failed'\n return False\n\n ctx['risk_score'] = decoded_cookie['s']['b']\n ctx['uuid'] = decoded_cookie.get('u', '')\n ctx['vid'] = decoded_cookie.get('v', '')\n ctx['decoded_cookie'] = decoded_cookie\n\n if decoded_cookie['s']['b'] >= config['blocking_score']:\n ctx['block_reason'] = 'cookie_high_score'\n logger.debug('Cookie with high score: ' + str(ctx['risk_score']))\n return True\n\n if is_cookie_expired(decoded_cookie):\n ctx['s2s_call_reason'] = 'cookie_expired'\n logger.debug('Cookie expired')\n return False\n\n if not is_cookie_valid(decoded_cookie, config['cookie_key'], ctx):\n logger.debug('Cookie validation failed')\n ctx['s2s_call_reason'] = 'cookie_validation_failed'\n return False\n\n logger.debug('Cookie validation passed with good score: ' + str(ctx['risk_score']))\n return True\n except:\n logger.debug('Cookie validation failed')\n ctx['s2s_call_reason'] = 'cookie_validation_failed'\n return False",
"def cookie_cutter():\n sweets = open('cookies.txt', 'r').read().splitlines()\n cookie = {}\n for i in sweets:\n x = i.split()\n l = list(x[4])\n if l[0] in '1234567890':\n x[4], x[5] = x[5], x[4]\n # domain = x[0]\n # secure = x[1]\n name = x[4]\n try:\n value = x[6]\n except IndexError:\n value = x[5]\n # path = x[2]\n cookie[name] = value\n return cookie",
"def grep(self, request, response):\n # do this check every time\n self._ssl_cookie_via_http(request, response)\n\n #\n # Analyze the response headers and find cookies\n #\n headers = response.get_headers()\n\n for header_name in headers:\n if header_name.lower() in COOKIE_HEADERS:\n\n cookie_header_value = headers[header_name].strip()\n cookie_object = self._parse_cookie(request, response,\n cookie_header_value)\n\n if cookie_object is not None:\n self._collect_cookies(request, response,\n cookie_object,\n cookie_header_value)\n\n # Find if the cookie introduces any vulnerability,\n # or discloses information\n self._analyze_cookie_security(request, response,\n cookie_object,\n cookie_header_value)",
"def _verify_session_cookies(self):\n if not self.session.cookies:\n return False\n for cookie_name in LOGIN_COOKIES:\n if cookie_name not in list(self.session.cookies.keys()):\n LOG.error('The cookie \"{}\" do not exist, it is not possible to check the expiration',\n cookie_name)\n return False\n for cookie in self.session.cookies.jar:\n if cookie.name != cookie_name:\n continue\n if cookie.expires <= int(time.time()):\n LOG.info('Login is expired')\n return False\n return True",
"def getCookie(key):",
"def accept_cookies(driver):\r\n try:\r\n frame = driver.find_element_by_xpath('//*[@id=\"cnsw\"]/iframe') #Accept cookies button is element in <iframe>\r\n driver.switch_to.frame(frame) #Switch to locating elements in iframe\r\n accept_cookies = driver.find_element_by_xpath('//*[@id=\"introAgreeButton\"]') #Accept cookies button\r\n accept_cookies.click()\r\n except NoSuchElementException:\r\n accept_cookies = driver.find_element_by_xpath('//*[@id=\"zV9nZe\"]') #Sometimes accept cookies button has this id\r\n accept_cookies.click() #Click to accept cookies\r\n except NoSuchElementException:\r\n driver.quit()\r\n return",
"def get_cookies(domname):\n if 'firefox' in udata.srcs:\n cout = get_cookies_firefox(domname)\n elif 'chrome' in udata.srcs:\n cout = get_cookies_chrome(domname)\n else:\n print(\"Error: No cookie source defined. Define either `srcs.firefox` or `srcs.chrome`.\")\n cout = None\n return cout",
"async def check_session():\n\n if not Path(config.cookies_file).is_file():\n return False\n\n cookies = None\n with open(config.cookies_file, 'r') as f:\n cookies = json.loads(f.read())\n\n as_client = httpx.AsyncClient(cookies=cookies)\n\n req = await as_client.get(\"https://www.linkedin.com/in/me\", allow_redirects=False)\n if req.status_code == 200:\n return as_client\n else:\n await as_client.aclose()\n return False",
"def cookie(date, page_offset, payload):\r\n \r\n print(\"cookie\")\r\n \r\n with session() as c: #Create a cookie session to login to the protected page \r\n c.post('https://data.terapeak.com/verify/', payload)\r\n url = \"http://data.terapeak.com/?id=0&search=1&view=item_browse&query=iphone+5s&date=2015-02-1&date_range=1&buyer_country_id=1&condition=rollup_3&type%5Bfixed%5D=1&from_start_price=100&to_start_price=800&from_end_price=100&to_end_price=800&seller_country_id=1&txn_site_id=0&numPages=12&siteID=0&offset={25}\"\r\n date_setup(date, page_offset, url,c)",
"def get_cookies_firefox(domname):\n cookpath = os.path.expanduser(udata.srcs['firefox']) + '/cookies.sqlite'\n\n # copy DB to prevent disk I/O error on Windows\n cookcopy = cookpath+'.copy'\n shutil.copy(cookpath, cookcopy)\n\n sqx = sqlite3.connect('%s' % (cookcopy))\n cks = sqx.execute('select name,value from moz_cookies where host = \"%s\"' % (domname)).fetchall()\n cookies = {}\n for cn, cv in cks:\n cookies[cn] = cv\n os.remove(cookcopy)\n return cookies",
"def test_59_help_policy(self):\r\n url = \"/help/cookies-policy\"\r\n res = self.app.get(url, follow_redirects=True)\r\n err_msg = \"There should be a TOS page\"\r\n assert \"uses cookies\" in res.data, err_msg",
"def read_secure_cookie(self, name):\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)",
"def read_secure_cookie(self, name):\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)",
"def extract_cookie_info():\n\t# setup cookie jar\n\tcj = cookielib.CookieJar()\n\tlogin_data = urllib.urlencode({ID_USERNAME: USERNAME, \n\t\tID_PASSWORD: PASSWORD})\n\t# create url opener\n\topener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))\n\tresp = opener.open(LOGIN_URL, login_data)\n\n\t# Send login info\n\tfor cookie in cj:\n\t\tprint \"----first time cookie: %s --> %s\" % (cookie.name, cookie.value)\n\tprint \"Headers: %s\" % resp.headers",
"def read_secure_cookie(self, name):\n\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)",
"def _get_cookie(self, name, domain):\n for cookie in self._cookiejar:\n if cookie.name == name and cookie.domain == domain:\n if cookie.is_expired():\n break\n return cookie"
]
| [
"0.70112216",
"0.68745637",
"0.6659873",
"0.66192377",
"0.6598411",
"0.6596154",
"0.65820396",
"0.6524812",
"0.64531296",
"0.63488746",
"0.6323791",
"0.63105494",
"0.6305254",
"0.62533003",
"0.6236098",
"0.62262046",
"0.6187676",
"0.61697054",
"0.6099342",
"0.6093783",
"0.6065732",
"0.6056168",
"0.60517484",
"0.6022465",
"0.60110474",
"0.5987011",
"0.5987011",
"0.59833604",
"0.59769285",
"0.596868"
]
| 0.7016504 | 0 |
This is the 2nd version of the cookies checker In this function, we check the website cookies | def cookies_check_v2():
cookies_check = driver.find_element_by_xpath("//button[@class='jad_cmp_paywall_button jad_cmp_paywall_button-cookies jad_cmp_paywall_cookies didomi-components-button didomi-button didomi-dismiss-button didomi-components-button--color didomi-button-highlight highlight-button']")
cookies_check.click() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cookies_check_v1():\n cookies_check = driver.find_element_by_xpath(\"//button[@id='didomi-notice-agree-button']\")\n cookies_check.click()",
"def check_cookies(self):\r\n try:\r\n driver = self.driver\r\n my_file = open(\"CookiesFb.pkl\")\r\n return my_file, driver\r\n\r\n except IOError:\r\n print(\"Cookies does not exist. Will create cookies anyway. Wait for a second~\")\r\n #Buka file credential\r\n self.login(accountfacebook.EMAIL, accountfacebook.PWD)\r\n print(\"File cookies created. Next file run will pass login page\")",
"def test_78_cookies_warning(self):\r\n # As Anonymous\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be shown\"\r\n assert dom.find(id='cookies_warning') is not None, err_msg\r\n\r\n # As user\r\n self.signin(email=Fixtures.email_addr2, password=Fixtures.password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be shown\"\r\n assert dom.find(id='cookies_warning') is not None, err_msg\r\n self.signout()\r\n\r\n # As admin\r\n self.signin(email=Fixtures.root_addr, password=Fixtures.root_password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be shown\"\r\n assert dom.find(id='cookies_warning') is not None, err_msg\r\n self.signout()",
"def test_79_cookies_warning2(self):\r\n # As Anonymous\r\n self.app.set_cookie(\"localhost\", \"PyBossa_accept_cookies\", \"Yes\")\r\n res = self.app.get('/', follow_redirects=True, headers={})\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be hidden\"\r\n assert dom.find(id='cookies_warning') is None, err_msg\r\n\r\n # As user\r\n self.signin(email=Fixtures.email_addr2, password=Fixtures.password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be hidden\"\r\n assert dom.find(id='cookies_warning') is None, err_msg\r\n self.signout()\r\n\r\n # As admin\r\n self.signin(email=Fixtures.root_addr, password=Fixtures.root_password)\r\n res = self.app.get('/', follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"If cookies are not accepted, cookies banner should be hidden\"\r\n assert dom.find(id='cookies_warning') is None, err_msg\r\n self.signout()",
"def verify_cookies(self, device):\n self.assertTrue(device.cookies is not None)",
"def check_xsrf_cookie(self):\n pass",
"def okcookies(request):\n if request.session.get('okcookies', False) or request.GET.get('cookie-agree', False):\n request.session['okcookies'] = True\n return {'OK_COOKIES': True}\n else:\n return {'OK_COOKIES': False}",
"def init_base_cookie(self):\n url = 'https://ceq.nkust.edu.tw/'\n try:\n res = self.main_session.get(url=url)\n if res.status_code == 200:\n soup = BeautifulSoup(res.text, 'html.parser')\n\n self.csrf_key = soup.find(\n 'input', {'name': '__RequestVerificationToken'}).get('value')\n if self.csrf_key != \"\":\n return True\n except:\n return False\n return False",
"def allocine_connect(url):\n #go to allocine page\n driver.get(url)\n #sleep until the page load\n sleep(10)\n #click on cookies button\n print(\"cookies checking\")\n cookies_check_v2()\n sleep(1)\n driver.get(url)",
"def parse_cookies( headers ):",
"def extract_cookie_info():\n # setup cookie jar\n cj = cookielib.CookieJar()\n login_data = urllib.urlencode({ID_USERNAME: USERNAME,ID_PASSWORD: PASSWORD})\n # create url opener\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))\n resp = opener.open(LOGIN_URL, login_data)\n # send login info\n for cookie in cj:\n print \"----First time cookie: %s --> %s\" % (cookie.name,cookie.value)\n print \"Headers: %s\" % resp.headers\n # now access without any login info\n resp = opener.open(NORMAL_URL)\n for cookie in cj:\n print \"++++Second time cookie: %s --> %s\" % (cookie.name,cookie.value)\n print \"Headers: %s\" % resp.headers",
"def _analyze_cookie_security(self, request, response, cookie_obj,\n cookie_header_value):\n self._secure_over_http(request, response, cookie_obj,\n cookie_header_value)\n self._not_secure_over_https(request, response, cookie_obj,\n cookie_header_value)\n\n fingerprinted = self._match_cookie_fingerprint(request, response,\n cookie_obj)\n self._http_only(request, response, cookie_obj,\n cookie_header_value, fingerprinted)",
"def session_relevance():\r\n\r\n # instantiation a session object in the first step\r\n session = requests.Session()\r\n\r\n # interface 1\r\n response = session.get(base_url + '/cookies/set/user/tom')\r\n print(response.text)\r\n\r\n # get the cookie in the same domain\r\n response = session.get(base_url + '/cookies')\r\n print(response.cookies)\r\n print(response.text)\r\n # In fact this is the rule you must obey in the test website\r\n # response = session.get(base_url + '/cookies/set/user/george')\r\n # print(response.status_code)\r\n # print(response.text)\r",
"def cookie_cutter():\n sweets = open('cookies.txt', 'r').read().splitlines()\n cookie = {}\n for i in sweets:\n x = i.split()\n l = list(x[4])\n if l[0] in '1234567890':\n x[4], x[5] = x[5], x[4]\n # domain = x[0]\n # secure = x[1]\n name = x[4]\n try:\n value = x[6]\n except IndexError:\n value = x[5]\n # path = x[2]\n cookie[name] = value\n return cookie",
"def verify(ctx, config):\n logger = config['logger']\n px_cookie = ctx['_px']\n try:\n if not px_cookie:\n logger.debug('No risk cookie on the request')\n ctx['s2s_call_reason'] = 'no_cookie'\n return False\n\n decrypted_cookie = decrypt_cookie(config['cookie_key'], px_cookie)\n\n if not decrypted_cookie:\n logger.error('Cookie decryption failed')\n ctx['px_orig_cookie'] = px_cookie\n ctx['s2s_call_reason'] = 'cookie_decryption_failed'\n return False\n\n decoded_cookie = json.loads(decrypted_cookie)\n try:\n decoded_cookie['s'], decoded_cookie['s']['b'], decoded_cookie['u'], decoded_cookie['t'], decoded_cookie['v']\n except:\n logger.error('Cookie decryption failed')\n ctx['px_orig_cookie'] = px_cookie\n ctx['s2s_call_reason'] = 'cookie_decryption_failed'\n return False\n\n ctx['risk_score'] = decoded_cookie['s']['b']\n ctx['uuid'] = decoded_cookie.get('u', '')\n ctx['vid'] = decoded_cookie.get('v', '')\n ctx['decoded_cookie'] = decoded_cookie\n\n if decoded_cookie['s']['b'] >= config['blocking_score']:\n ctx['block_reason'] = 'cookie_high_score'\n logger.debug('Cookie with high score: ' + str(ctx['risk_score']))\n return True\n\n if is_cookie_expired(decoded_cookie):\n ctx['s2s_call_reason'] = 'cookie_expired'\n logger.debug('Cookie expired')\n return False\n\n if not is_cookie_valid(decoded_cookie, config['cookie_key'], ctx):\n logger.debug('Cookie validation failed')\n ctx['s2s_call_reason'] = 'cookie_validation_failed'\n return False\n\n logger.debug('Cookie validation passed with good score: ' + str(ctx['risk_score']))\n return True\n except:\n logger.debug('Cookie validation failed')\n ctx['s2s_call_reason'] = 'cookie_validation_failed'\n return False",
"def grep(self, request, response):\n # do this check every time\n self._ssl_cookie_via_http(request, response)\n\n #\n # Analyze the response headers and find cookies\n #\n headers = response.get_headers()\n\n for header_name in headers:\n if header_name.lower() in COOKIE_HEADERS:\n\n cookie_header_value = headers[header_name].strip()\n cookie_object = self._parse_cookie(request, response,\n cookie_header_value)\n\n if cookie_object is not None:\n self._collect_cookies(request, response,\n cookie_object,\n cookie_header_value)\n\n # Find if the cookie introduces any vulnerability,\n # or discloses information\n self._analyze_cookie_security(request, response,\n cookie_object,\n cookie_header_value)",
"def __cookieFilter(self, request):\n if not self.__loaded:\n self.__load()\n \n if self.__acceptCookies == self.AcceptNever:\n res = self.__isOnDomainList(self.__exceptionsAllow,\n request.origin.host())\n if not res:\n return False\n \n if self.__acceptCookies == self.AcceptAlways:\n res = self.__isOnDomainList(self.__exceptionsBlock,\n request.origin.host())\n if res:\n return False\n \n if (\n self.__acceptCookies == self.AcceptOnlyFromSitesNavigatedTo and\n request.thirdParty\n ):\n return False\n \n return True",
"def getCookie(key):",
"def accept_cookies(driver):\r\n try:\r\n frame = driver.find_element_by_xpath('//*[@id=\"cnsw\"]/iframe') #Accept cookies button is element in <iframe>\r\n driver.switch_to.frame(frame) #Switch to locating elements in iframe\r\n accept_cookies = driver.find_element_by_xpath('//*[@id=\"introAgreeButton\"]') #Accept cookies button\r\n accept_cookies.click()\r\n except NoSuchElementException:\r\n accept_cookies = driver.find_element_by_xpath('//*[@id=\"zV9nZe\"]') #Sometimes accept cookies button has this id\r\n accept_cookies.click() #Click to accept cookies\r\n except NoSuchElementException:\r\n driver.quit()\r\n return",
"def _verify_session_cookies(self):\n if not self.session.cookies:\n return False\n for cookie_name in LOGIN_COOKIES:\n if cookie_name not in list(self.session.cookies.keys()):\n LOG.error('The cookie \"{}\" do not exist, it is not possible to check the expiration',\n cookie_name)\n return False\n for cookie in self.session.cookies.jar:\n if cookie.name != cookie_name:\n continue\n if cookie.expires <= int(time.time()):\n LOG.info('Login is expired')\n return False\n return True",
"def get_cookies_firefox(domname):\n cookpath = os.path.expanduser(udata.srcs['firefox']) + '/cookies.sqlite'\n\n # copy DB to prevent disk I/O error on Windows\n cookcopy = cookpath+'.copy'\n shutil.copy(cookpath, cookcopy)\n\n sqx = sqlite3.connect('%s' % (cookcopy))\n cks = sqx.execute('select name,value from moz_cookies where host = \"%s\"' % (domname)).fetchall()\n cookies = {}\n for cn, cv in cks:\n cookies[cn] = cv\n os.remove(cookcopy)\n return cookies",
"def get_cookies(domname):\n if 'firefox' in udata.srcs:\n cout = get_cookies_firefox(domname)\n elif 'chrome' in udata.srcs:\n cout = get_cookies_chrome(domname)\n else:\n print(\"Error: No cookie source defined. Define either `srcs.firefox` or `srcs.chrome`.\")\n cout = None\n return cout",
"async def check_session():\n\n if not Path(config.cookies_file).is_file():\n return False\n\n cookies = None\n with open(config.cookies_file, 'r') as f:\n cookies = json.loads(f.read())\n\n as_client = httpx.AsyncClient(cookies=cookies)\n\n req = await as_client.get(\"https://www.linkedin.com/in/me\", allow_redirects=False)\n if req.status_code == 200:\n return as_client\n else:\n await as_client.aclose()\n return False",
"def extract_cookie_info():\n\t# setup cookie jar\n\tcj = cookielib.CookieJar()\n\tlogin_data = urllib.urlencode({ID_USERNAME: USERNAME, \n\t\tID_PASSWORD: PASSWORD})\n\t# create url opener\n\topener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))\n\tresp = opener.open(LOGIN_URL, login_data)\n\n\t# Send login info\n\tfor cookie in cj:\n\t\tprint \"----first time cookie: %s --> %s\" % (cookie.name, cookie.value)\n\tprint \"Headers: %s\" % resp.headers",
"def cookie(date, page_offset, payload):\r\n \r\n print(\"cookie\")\r\n \r\n with session() as c: #Create a cookie session to login to the protected page \r\n c.post('https://data.terapeak.com/verify/', payload)\r\n url = \"http://data.terapeak.com/?id=0&search=1&view=item_browse&query=iphone+5s&date=2015-02-1&date_range=1&buyer_country_id=1&condition=rollup_3&type%5Bfixed%5D=1&from_start_price=100&to_start_price=800&from_end_price=100&to_end_price=800&seller_country_id=1&txn_site_id=0&numPages=12&siteID=0&offset={25}\"\r\n date_setup(date, page_offset, url,c)",
"def _get_cookie(self, name, domain):\n for cookie in self._cookiejar:\n if cookie.name == name and cookie.domain == domain:\n if cookie.is_expired():\n break\n return cookie",
"def read_secure_cookie(self, name):\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)",
"def read_secure_cookie(self, name):\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)",
"def read_secure_cookie(self, name):\n\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)",
"def get_cookies():\r\n home = expanduser('~')\r\n with open(home + '/config/edx-tools/cookie.txt') as f:\r\n lines = f.readlines()\r\n lines = [line.strip(' \\t\\n\\r') for line in lines if '✓' not in line]\r\n\r\n d = {}\r\n count = 0\r\n for line in lines:\r\n if count == 0:\r\n cookie_name = line\r\n if count == 1:\r\n cookie_val = line\r\n d[cookie_name] = cookie_val\r\n if count == 5:\r\n count = 0\r\n else:\r\n count += 1\r\n return d"
]
| [
"0.70106214",
"0.686315",
"0.66480744",
"0.6626341",
"0.652721",
"0.6525927",
"0.6513388",
"0.64766115",
"0.6431573",
"0.6409806",
"0.63803834",
"0.6351012",
"0.63405955",
"0.6314522",
"0.62200975",
"0.620027",
"0.61731374",
"0.61453736",
"0.61277217",
"0.612463",
"0.6109356",
"0.6105904",
"0.60492766",
"0.6040111",
"0.60390437",
"0.5990566",
"0.59808046",
"0.59808046",
"0.5971115",
"0.59514403"
]
| 0.70474845 | 0 |
This is the 1st version of movie reviews page browser In this function, we navigate to the movie reviews page in order to get the top ten prolifics reviews | def go_to_movie_reviews_page_v1(movie):
#get search bar input and send the movie name as key
search_bar = driver.find_element_by_xpath("//input[@id='header-search-input']")
search_bar.send_keys(movie)
sleep(5)
search_button = driver.find_element_by_xpath("//button[@class='header-search-submit icon icon-search']")
search_button.click() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def go_to_movie_reviews_page_V2(movie):\n #/film/fichefilm-249877/critiques/spectateurs/\n driver.get(search_url_base+movie)\n # driver.get(\"https://www.allocine.fr/rechercher/?q=yourname\")\n sleep(5)\n movie_link = driver.find_element_by_link_text(movie)\n movie_link.click()\n # sleep(5)\n # close_popup = driver.find_element_by_xpath(\"//button[@class='modal-close icon icon-cross light js-trigger-overlay-close']\")\n # close_popup.click()\n sleep(5)\n movie_reviews_link = driver.find_element_by_link_text(reviews_link_text)\n movie_reviews_link.click()",
"def get_reviews(rest_link):\n\tfilename = rest_link.split('/')[-1]\n\n\tcontents = None\n\n\tif contents is None:\n\t\tstart = time()\n\t\tdriver = init_chromedriver()\n\t\tdriver.get(rest_link + '/reviews')\n\n\t\t# print('There are {} reviews'.format(self.review_count))\n\n\t\t# click on the button 'All reviews'\n\t\tsleep(5)\n\t\tdriver.execute_script(\"window.scrollBy(0, 950);\")\n\t\twhile(1):\n\t\t\ttry:\n\t\t\t\tel = driver.find_element_by_css_selector('#selectors > a.item.default-section-title.everyone.empty')\n\t\t\t\twebdriver.ActionChains(driver).move_to_element(el).click(el).perform()\n\t\t\texcept TimeoutException:\n\t\t\t\tcontinue\t\t\n\t\t\texcept (NoSuchElementException):\n\t\t\t\tbreak\n\t\t\tbreak\n\n\t\tsleep(5)\t\n\t\tload_more = '#reviews-container > div.notifications-content > div.res-reviews-container.res-reviews-area > div > div > div.mt0.ui.segment.res-page-load-more.zs-load-more > div.load-more.bold.ttupper.tac.cursor-pointer.fontsize2'\n\t\tsleep(5)\n\t\twhile element_present(driver, load_more):\n\t\t\ttry:\n\t\t\t\tel2 = driver.find_element_by_css_selector(load_more)\n\t\t\t\tdriver.execute_script(\"return arguments[0].scrollIntoView();\", el2)\n\t\t\t\tdriver.execute_script(\"window.scrollBy(0, -150);\")\n\t\t\t\tsleep(0.5)\n\t\t\t\twebdriver.ActionChains(driver).move_to_element(el2).click(el2).perform()\n\t\t\texcept TimeoutException:\n\t\t\t\tcontinue\n\t\t\texcept (StaleElementReferenceException, NoSuchElementException):\n\t\t\t\tbreak\n\n\t\tsource = get_source(driver)\n\t\tdriver.quit()\n\n\telse:\n\t\tprint('Using cached page')\n\t\tsource = contents\n\n\tsoup = source_to_soup(source)\n\t#review_blocks = soup.find_all('div', class_=re.compile('ui segments res-review-body'))\n\n\treview_blocks = (soup.find_all('div', class_='ui segment clearfix brtop '))\n\tif len(review_blocks) == 0:\n\t\tprint('Error in parsing reviews...\\n Review blocks size is 0\\n')\n\t\twith open('not_parsed','a+') as f:\n\t\t\tf.write(rest_link)\n\t\treturn\n\tprint('Loaded {} reviews'.format(len(review_blocks)))\n\n\n\tlastreview = filename + '_last'\n\n\twith open(filename,'a+', encoding='utf-8') as f:\n\n\t\treviews = []\n\t\ti = start\n\t\tmy_str = None\n\t\tfor review in review_blocks[:]:\n\t\t\ttry:\n\t\t\t\tname_and_link = review.find('div', class_='header nowrap ui left')\n\t\t\t\t# print(name_and_link.contents)\n\n\t\t\t\tu_link = name_and_link.contents[1].attrs['href']\n\t\t\t\tu_entity_id = int(name_and_link.contents[1].attrs['data-entity_id'])\n\t\t\t\tu_name = name_and_link.contents[1].contents[0].strip()\n\t\t\t\t# print(u_name)\n\n\t\t\t\ttup = (u_name,u_entity_id)\n\t\t\t\t#userset.add(tup)\n\n\t\t\t\tuserset.add(u_link)\t\t\t\n\t\t\t\trating_and_rev_text = review.find('div', text='Rated')\n\t\t\t\tcomment_time = review.find('time').attrs['datetime']\n\t\t\t\trating = float(rating_and_rev_text.attrs['aria-label'].split()[-1])\n\t\t\t\treview_text = rating_and_rev_text.parent.contents[2].strip()\n\t\t\t\t#f.write('Review number '+str(my_ctr)+'\\n')\n\n\t\t\t\tif my_str is None:\n\t\t\t\t\tmy_str=comment_time\n\n\t\t\t\tf.write(str(comment_time)+'\\n')\n\t\t\t\tf.write(u_name+'\\n')\n\t\t\t\tf.write(str(u_entity_id)+'\\n')\n\t\t\t\tf.write(str(rating)+'\\n')\n\t\t\t\tf.write(review_text+'\\n\\n##\\n\\n')\n\t\t\t\tcomm_file = filename + 'last_review_date'\n\n\t\t\t\twith open (comm_file,'w') as myfile200:\n\t\t\t\t\tmyfile200.write(my_str)\n\t\t\t\t\n\t\t\texcept:\n\t\t\t\tpass\n\t\t\ti += 1",
"def get_reviews(review_url):\n print review_url\n html = urllib.urlopen(review_url).read()\n soup = bs4.BeautifulSoup(html, 'html.parser')\n\n rating_scores = soup.findAll(\"span\", \"ratingScore\")\n num_ratings = len(rating_scores) - 1\n\n current_reviews = soup.findAll(\"div\", \"currentVintageProfessinalReviews\")\n num_cur_reviews = str(current_reviews).count('ratingProvider')\n past_reviews = soup.findAll(\"ul\", \"pastVintagesProfessionalReviews\")\n num_past_reviews = str(past_reviews).count('ratingProvider')\n\n print 'There are {0} reviews for prior vintages of this wine.'.format(num_past_reviews)\n print 'There are {0} current reviews for this vintage.\\n'.format(num_cur_reviews)\n\n rating_provider = soup.findAll(\"span\", \"ratingProvider\")\n rating_score = soup.findAll(\"span\", \"ratingScore\")\n reviewers = re.findall('(?<![A-Z])[>]([A-Z]+(?![A-Z]))', str(rating_provider))\n ratings = re.findall('(?<![A-Z])[0-9]{2}(?![A-Z])', str(rating_score))\n\n print \"Ratings List:\", ratings\n print \"Current Reviews: \", num_cur_reviews\n\n currentreviews = []\n for j in range(num_cur_reviews):\n print \"Current Review #\"+str(j+1)+\":\", reviewers[j], ratings[j]\n currentreviews.append((reviewers[j], ratings[j]))\n print currentreviews\n\n print \"\\nPast Reviews: \", num_past_reviews\n past_review_ratings = []\n for k in range(num_cur_reviews, num_past_reviews+num_cur_reviews):\n #print \"Past Review #\"+str(k-num_cur_reviews+1)+\":\", reviewers[k], int(ratings[k])\n past_review_ratings.append(float(ratings[k]))\n if k > 30:\n break\n if num_past_reviews != 0:\n avg_past_reviews = sum(past_review_ratings)/len(past_review_ratings)\n round(avg_past_reviews, 2)\n else:\n avg_past_reviews = 0\n\n print \"Average of Past Reviews: \", avg_past_reviews\n\n return currentreviews, avg_past_reviews",
"def fetch(self):\n try:\n self.genre = 'Review'\n log.debug(self.log_msg(\"Fetching the prouct page url %s\"%self.currenturi))\n res=self._getHTML(self.currenturi) # Assuming self.currenturi is at the product page\n self.rawpage=res['result']\n self._setCurrentPage()\n try:\n self.parent_page_title = stripHtml(self.soup.find('h1',{'id':'pgTitleDetail'}).renderContents())\n except:\n self.parent_page_title =''\n try:\n self.__product_price = self.soup.find('tbody',{'class':'prices'}).td.renderContents().replace('$','')\n except:\n log.exception(\"Error in fetching product_price\")\n self.__product_price = None\n\n parent_page_url = self.task.instance_data['uri']\n review_first_page_url = self.soup.find('a',text=\"Show All Customer Reviews » \").parent['href']\n review_url_order = \"&sortReviewsBy=DateDescending\"\n self.currenturi = self.base_url + review_first_page_url + review_url_order\n log.info(self.log_msg('current_uri :: %s'%(self.currenturi)))\n self._getParentPage()\n self.next_url_links=[]\n self.fetch_next_link = True\n while self.fetch_next_link:\n self._iterateReviewPages(parent_page_url)\n return True\n except Exception,e:\n log.exception(self.log_msg(\"Exception occured in fetch()\"))\n return False",
"def top_ten(request):\n if request.method == 'GET':\n movies = Movie.objects.filter(date_of_release__lte=datetime.date.today())\n movies = movies.order_by('-rating')[:10]\n serializer = MovieSerializer(movies, many=True)\n return Response(serializer.data)",
"def extract_reviews(url, review_count):\n\n api_url = url + \"%3Fstart%3D40\"\n\n html_obj = retrieve_html(url)\n\n review_list = parse_page(html_obj)\n\n result = review_list\n\n num_pages = review_count // 20 + 1\n\n for i in range(1, num_pages):\n curr_offset = i * 20\n curr_url = api_url + \"&start=%d\" % curr_offset\n\n curr_page_reviews = parse_page(retrieve_html(curr_url)[1])\n\n result += curr_page_reviews\n\n return result",
"def go_product_reviews_next(self, driver, website):\n paginator = driver.find_element_by_class_name(\"BVRRPager\")\n next_link = paginator.find_element_by_class_name(\"BVRRNextPage\")\n next_link.find_element_by_name(\"BV_TrackingTag_Review_Display_NextPage\").click()\n time.sleep(1)",
"def check_ratings(self):\n\n self.browser.get('https://www.imdb.com/')\n\n for title in self.titles:\n input_bar = self.browser.find_element_by_id('navbar-query')\n input_bar.clear()\n\n input_bar.send_keys(title)\n input_bar.send_keys(Keys.RETURN)\n\n time.sleep(3)\n\n # Click on the first suggestion\n css_selector = \"div.findSection:nth-child(3) > table:nth-child(2) > tbody:nth-child(1) > tr:nth-child(1) > td:nth-child(2) > a:nth-child(1)\"\n self.browser.find_element_by_css_selector(css_selector).click()\n time.sleep(3)\n\n # Pull details that will always be available\n score = str(self.browser.find_element_by_class_name('ratingValue').text)\n score = score.split('/10')[0].replace(',', '.')\n\n time.sleep(3)\n\n summary = str(self.browser.find_element_by_class_name('summary_text').text)\n subtext = str(self.browser.find_element_by_class_name('subtext').text)\n\n # Pull details that differ between movies and series\n try:\n duration = str(self.browser.find_element_by_class_name('bp_sub_heading').text) # Only for series\n if 'episodes' not in duration:\n duration = 'Some episodes'\n except Exception:\n # bp_sub_heading won't be found on a movie page\n duration = 'movie'\n\n if subtext[0].isdigit():\n # Split up the details from the subtext\n subtext_list = subtext.split(' | ')\n else:\n # Some movies' subtext starts with 'R' / 'PG-13'\n subtext_list = subtext.split(' | ')\n del subtext_list[0]\n\n # Duration\n if duration == 'movie':\n show_type = 'Movie'\n duration = subtext_list[0]\n try:\n year = datetime.datetime.strptime(subtext_list[2].split(' (')[0], '%d %B %Y').strftime('%Y')\n except ValueError:\n year = str(subtext_list[2].split(' (')[0][-4:])\n\n else: # series\n show_type = 'Serie'\n # Retrieve last season and its release date\n season_tab = str(self.browser.find_element_by_class_name('seasons-and-year-nav').text).strip()\n\n numbers = re.findall('[0-9]+', season_tab)\n latest_season = int(numbers[0])\n latest_year = int(max(numbers, key=lambda x: int(x)))\n\n duration += ' (%d Seasons in %d), %s per episode' % (latest_season, latest_year, subtext_list[0])\n\n year = re.findall('[0-9]+', subtext_list[2])[0]\n\n # Pull some more data out from the subtext\n genres = subtext_list[1].split(', ')\n\n # Pull details that are not always available\n creds_list = []\n creds = self.browser.find_elements_by_class_name('credit_summary_item')\n for c in creds:\n temp = str(c.text)\n if '|' in temp:\n temp = temp.split('|')[0]\n\n creds_list.append(temp)\n\n self.data_dict[title] = {\n 'score': score,\n 'summary': summary,\n 'duration': duration,\n 'credits': creds_list,\n 'genres': genres,\n 'released': year,\n 'type': show_type,\n }",
"def review_pages(catalog):\n review_pages_list = list()\n errors = 0\n for ix, movie in enumerate(catalog.iloc[:, 0], 1):\n try:\n soup_2 = fetch(movie, \"/reviews/?page=1\").find_all(\"span\", {\"class\", \"pageInfo\"})\n if len(soup_2) >= 1:\n for n in range(1, int(soup_2[0].text[-2:]) + 1):\n review_pages_list.append(movie + \"/reviews/?page=\" + str(n))\n except:\n errors += 1\n print('\\r3/4 — {:.2%} of review page URLs scraped. Error rate: {:.2%}'.format(\n ix/len(catalog), errors/ix), end=' ')\n print('\\r{} review page URLs successfully scraped. Error rate: {:.2%}'.format(\n len(review_pages_list)-errors, errors/ix), end='\\n')\n return review_pages_list",
"def top_controversial(self, n):\n return top_movies",
"def get_review_page(review_link):\n\n session = r.Session()\n response = session.get(BASE_URL + '/music/albumreviews/' + review_link,\n headers=HEADERS)\n return response",
"def download_top_movies(genre: Genre) -> str:\n\n page_url = create_page_url(genre)\n response = requests.get(page_url)\n response.raise_for_status()\n return response.text",
"def top_by_num_of_ratings(self, n):\n return top_movies",
"def top_ten(subreddit):\n\n limit = \"10\"\n\n url = \"https://www.reddit.com/r/{}/hot.json?limit={}\".format(subreddit,\n limit)\n\n user_agent = {\"User-Agent\": \"Python\"}\n response = requests.get(url, headers=user_agent, allow_redirects=False)\n if response.status_code >= 300:\n print(\"None\")\n else:\n for elem in response.json().get(\"data\").get(\"children\"):\n print(elem.get(\"data\").get(\"title\"))",
"def scraper(storyid, reviews_num, rate_limit=3):\n\n # There may be up to 15 reviews on a single page, therefore the number of\n # pages the reviews are stored on is equal to the following:\n number_of_pages = (reviews_num // 15) + 1\n\n # Returns a list of tuples (based on the contents of _reviews_in_table)\n list_of_review_tuples = []\n\n for p in range(number_of_pages):\n\n soup = soupify('https://www.fanfiction.net/r/' + storyid +\n '/0/' + str(p+1) + '/',\n rate_limit=rate_limit)\n\n for review in _reviews_in_table(soup):\n list_of_review_tuples.append(review)\n\n return list_of_review_tuples",
"def top_ten(subreddit):\n url = \"https://www.reddit.com/r/\" + subreddit + \"/top/.json\"\n r = requests.get(url,\n headers={'User-agent': 'norman'},\n params={\"limit\": 10},\n allow_redirects=False)\n if r.status_code == 200:\n for dic in r.json().get('data').get('children'):\n print(dic.get('data').get('title'))\n else:\n print(\"None\")",
"def getTopMovies(endpoint, date, count=10):\n\n try:\n response = urlreq.urlopen(endpoint.format(date))\n soup = BeautifulSoup(response.read(), \"html.parser\")\n table = soup.find('table', border=\"0\", cellpadding=\"5\", cellspacing=\"1\")\n tdata = []\n\n for i, row in enumerate(table.find_all('tr')[1:], start=1):\n if i > count:\n break\n\n cells = row.find_all('td')\n tdict = {}\n\n tdict['rank'] = i\n tdict['title'] = cells[2].text.strip()\n tdict['daily_gross'] = int(re.sub(r'[^\\d]', '', cells[4].text))\n tdict['theaters'] = int(re.sub(r'[^\\d]', '', cells[7].text))\n tdict['todate_gross'] = int(re.sub(r'[^\\d]', '', cells[9].text))\n tdict['release_day'] = int(cells[10].text)\n\n tdata.append(tdict)\n\n tdata = pd.DataFrame(tdata)\n tdata['gross_date'] = date\n return tdata\n\n except urlerr.URLError as err:\n print(\"\\nThere was an error retrieving daily revenue information\")\n print(err)\n return None\n except Exception:\n print(\"\\nThere's something wrong with the BOMojo daily revenue page\")\n return None",
"def top_ten(subreddit):\n\n url = \"https://www.reddit.com/r/{}/hot.json?limit=10\".format(subreddit)\n headers = {\"user-agent\": \"Mozilla/5.0\"}\n\n res = requests.get(url, headers=headers, allow_redirects=False)\n\n if res.status_code == 200:\n for post in res.json()['data']['children']:\n print(post['data']['title'])\n else:\n print(None)",
"def top_ten(subreddit):\n\n if subreddit is None or not isinstance(subreddit, str):\n print(\"None\")\n\n user_agent = {'User-agent': 'Google Chrome Version 81.0.4044.129'}\n params = {'limit': 10}\n url = 'https://www.reddit.com/r/{}/hot/.json'.format(subreddit)\n\n response = get(url, headers=user_agent, params=params)\n all_data = response.json()\n\n try:\n raw1 = all_data.get('data').get('children')\n\n for i in raw1:\n print(i.get('data').get('title'))\n\n except:\n print(\"None\")",
"def top_ten(subreddit):\n\n user_agent = {'User-agent': 'Mozilla/5.0 (Macintosh; \\\nIntel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) \\\nChrome/39.0.2171.95 Safari/537.36'}\n\n res = requests.get('https://www.reddit.com/r/{}/hot.json?limit=10'.format(\n subreddit), headers=user_agent)\n\n if res.status_code == 404:\n print(None)\n\n else:\n for sub in res.json().get(\"data\").get(\"children\"):\n print(sub.get(\"data\").get(\"title\"))",
"def get_movies(iurl):\n movies = []\n \n if iurl[-3:] == '?s=':\n search_text = GetSearchQuery('WatchOnlineMovies')\n search_text = urllib.quote_plus(search_text)\n iurl += search_text\n\n html = requests.get(iurl, headers=mozhdr).text\n mlink = SoupStrainer('div', {'class':re.compile('postbox')})\n items = BeautifulSoup(html, parseOnlyThese=mlink)\n plink = SoupStrainer('div', {'class':'wp-pagenavi'})\n Paginator = BeautifulSoup(html, parseOnlyThese=plink)\n\n for item in items:\n title1 = item.h2.text\n try:\n title2 = title1.replace(\"Full Movie\", \"\")\n except:\n title2 = title1.replace(\"Watch Online\", \"\")\n try:\n title3 = title2.replace(\"Watch Online Placeholdernt\", \"\")\n except:\n title3 = title2.replace(\".\", \"\")\n try:\n title4 = title3.replace(\".\", \"\")\n except:\n title4 = title3.replace(\"Watch Online Placeholder\",\"\")\n try:\n title5 = title4.replace(\"Watch Online\", \"\")\n except:\n title5 = title4.replace(\"Download\",\"\")\n try:\n title6 = title5.replace(\"Watch Onlin\", \"\")\n except:\n title6 = title5.replace(\"Placeholder\",\"\")\n try:\n title7 = title6.replace(\"HD Pri\", \"\")\n except:\n title7 = title6.replace(\"Placeholder\",\"\")\n try:\n title8 = title7.replace(\" Watch On\", \"\")\n except:\n title8 = title7.replace(\"Placeholder\",\"\")\n try:\n title9 = title8.replace(\" Watch\", \"\")\n except:\n title9 = title8.replace(\"Placeholder\",\"\")\n try:\n title10 = title9.replace(\"Free Down\", \"\")\n except:\n title10 = title9.replace(\"Placeholder\",\"\")\n try:\n title11 = title10.replace(\"Free D\", \"\")\n except:\n title11 = title10.replace(\"Placeholder\",\"\")\n try:\n title12 = title11.replace(\"Free\", \"\")\n except:\n title12 = title11.replace(\"Placeholder\",\"\")\n try:\n title13 = title12.replace(\" F\", \"\")\n except:\n title13 = title12.replace(\"Placeholder\",\"\")\n try:\n title14 = title13.replace(\" Fr\", \"\")\n except:\n title14 = title13.replace(\"Placeholder\",\"\")\n try:\n title15 = title14.replace(\" Fre\", \"\")\n except:\n title15 = title14.replace(\"Placeholder\",\"\")\n try:\n title16 = title15.replace(\" HD\", \"\")\n except:\n title16 = title15.replace(\"Placeholder\",\"\")\n try:\n title17 = title16.replace(\" H\", \"\")\n except:\n title17 = title16.replace(\"Placeholder\",\"\")\n try:\n title18 = title17.replace(\" HD P\", \"\")\n except:\n title18 = title17.replace(\"Placeholder\",\"\")\n try:\n title19 = title18.replace(\" re\", \"\")\n except:\n title19 = title18.replace(\"Placeholder\",\"\")\n try:\n title120 = title19.replace(\" r\", \"\")\n except:\n title120 = title19.replace(\"Placeholder\",\"\")\n # Coloring Years\n try:\n title21 = title120.replace(\"(2018)\", \"[COLOR yellow](2018)[/COLOR]\")\n except:\n title21 = title120.replace(\"Placeholder\",\"\")\n try:\n title22 = title21.replace(\"(2016)\", \"[COLOR lightsalmon](2016)[/COLOR]\")\n except:\n title22 = title21.replace(\"Placeholder\",\"\")\n try:\n title23 = title22.replace(\"(2015)\", \"[COLOR lime](2016)[/COLOR]\")\n except:\n title23 = title22.replace(\"Placeholder\",\"\")\n # Language\n try:\n title24 = title23.replace(\"Hindi\", \"[COLOR green]Hindi[/COLOR]\")\n except:\n title24 = title23.replace(\"Placeholder\",\"\")\n try:\n title25 = title24.replace(\"Dubbed\", \"[COLOR cyan]Dubbed[/COLOR]\")\n except:\n title25 = title24.replace(\"Placeholder\",\"\")\n\n # Continued\n try:\n title26 = title25.replace(\" nt o\", \"\")\n except:\n title26 = title25.replace(\"Placeholder\",\"\")\n try:\n title27 = title26.replace(\" nt F\", 
\"\")\n except:\n title27 = title26.replace(\"Placeholder\",\"\")\n try:\n title28 = title27.replace(\" nt\", \"\")\n except:\n title28 = title27.replace(\"Placeholder\",\"\")\n try:\n title = title28.replace(\" Pr\", \"\")\n except:\n title = title28.replace(\"Placeholder\",\"\")\n\n url = item.h2.find('a')['href']\n try:\n thumb = item.find('img')['src'].strip()\n except:\n thumb = _icon\n movies.append((title, thumb, url))\n \n if 'next' in str(Paginator):\n\n nextli = Paginator.find('a', {'class':re.compile('page larger')})\n\n purl = nextli.get('href')\n pages = Paginator.findAll('span', {'class':re.compile('pages')})\n lastpg = pages[len(pages)-1].text\n title = 'Next Page.. (Currently in %s)' % (lastpg)\n movies.append((title, _icon, purl))\n \n return movies",
"def top_ten(subreddit):\n\n settings = {'allow_redirects': False, 'headers': {'User-agent': ''}}\n url = \"https://www.reddit.com/r/{}/hot.json\".format(subreddit)\n\n try:\n responses = get(url, **settings).json().get('data').get('children')\n for post in responses[:10]:\n print(post['data']['title'])\n except:\n print(\"None\")",
"def top_ten(subreddit):\n try:\n info = requests.get('https://www.reddit.com/r/{}/hot.json?limit=10'\n .format(subreddit), allow_redirects=False,\n headers={'User-Agent': 'Custom'}).json().get(\n 'data').get('children')\n for child in info:\n print(child.get('data').get('title'))\n except:\n print('None')",
"def top_by_ratings(self, n, metric=average):\n return top_movies",
"def top_ten(subreddit):\n url = \"https://api.reddit.com/r/{}/hot?limit=10\".format(subreddit)\n response = requests.get(url, headers={\"User-Agent\": \"Python3\"})\n if str(response) != \"<Response [200]>\": # response.status_code != 200\n print(None)\n return\n response = response.json()\n child = response[\"data\"][\"children\"]\n for tittle in child:\n print(tittle[\"data\"][\"title\"])",
"def top_ten(subreddit):\n header = {\"User-Agent\": \"Holberton\"}\n url = \"https://www.reddit.com/r/{}/hot.json?limit=10\".format(subreddit)\n response = requests.get(url, headers=header, allow_redirects=False)\n if response.status_code == 200:\n\n for item in response.json().get(\"data\", None).get(\"children\", None):\n print(item.get(\"data\", None).get(\"title\", None))\n else:\n print(None)\n return",
"def movie_page(movie_id):\n\n current_movie = Movie.query.filter_by(movie_id=movie_id).first()\n title = current_movie.title\n released = current_movie.released_at\n url = current_movie.imdb_url\n thing = current_movie.movie_id\n\n movie_rating = db.session.query(Rating.score).join(Movie).filter(\n Movie.movie_id==thing).all()\n\n return render_template('movie_page.html', current_movie=current_movie, \n title=title, released=released, url=url, movie_rating=movie_rating)",
"def top_ten(subreddit):\n url = \"https://www.reddit.com/r/\" + subreddit + \"/hot.json?limit=10\"\n identify = {\"User-Agent\": \"Requests library from Python\",\n \"From\": \"[email protected]\"}\n to_print = []\n hot = requests.get(url, headers=identify, allow_redirects=False)\n if hot.status_code == 404:\n print(\"None\")\n return 0\n if hot.status_code == 200:\n hot = hot.json()\n hot = hot[\"data\"]\n hot = hot[\"children\"]\n for items in hot:\n del items[\"kind\"]\n for data in hot:\n to_print.append(data[\"data\"])\n hot = to_print\n to_print = []\n for dictio in hot:\n to_print.append(dictio[\"title\"])\n for itera in to_print:\n print(itera)",
"def top_ten(subreddit):\n headers = {\"User-Agent\": \"Holberton\"}\n url = \"https://www.reddit.com/r/{}/hot.json?limit=10\".format(\n subreddit)\n req = requests.get(url, headers=headers)\n\n if req.status_code != 200:\n print(None)\n return\n redit = req.json().get(\"data\").get(\"children\")\n for chil in redit:\n print(chil.get(\"data\").get(\"title\"))",
"def top_ten(subreddit):\n req = get(\n \"https://www.reddit.com/r/{}/hot.json\".format(subreddit),\n headers={\n \"User-Agent\": \"alx_app\"},\n params={\n \"limit\": 10},\n allow_redirects=False)\n if req.status_code != 200:\n print(None)\n else:\n posts = req.json().get(\"data\").get(\"children\")\n for post in posts:\n print(post.get(\"data\").get(\"title\"))"
]
| [
"0.73674446",
"0.68199515",
"0.65317106",
"0.6462899",
"0.6443715",
"0.6410947",
"0.64017564",
"0.6366908",
"0.6355977",
"0.6304084",
"0.6208476",
"0.6156166",
"0.6142576",
"0.61343473",
"0.6131709",
"0.6118773",
"0.6112458",
"0.60929984",
"0.6059543",
"0.6041042",
"0.6027874",
"0.60079134",
"0.59947485",
"0.5975588",
"0.5959209",
"0.59537965",
"0.59512585",
"0.59477985",
"0.59443456",
"0.5944182"
]
| 0.70917654 | 1 |
This is the 2nd version of the movie reviews page browser. In this function, we navigate to the movie reviews page in order to get the top ten prolific reviews | def go_to_movie_reviews_page_V2(movie):
#/film/fichefilm-249877/critiques/spectateurs/
driver.get(search_url_base+movie)
# driver.get("https://www.allocine.fr/rechercher/?q=yourname")
sleep(5)
movie_link = driver.find_element_by_link_text(movie)
movie_link.click()
# sleep(5)
# close_popup = driver.find_element_by_xpath("//button[@class='modal-close icon icon-cross light js-trigger-overlay-close']")
# close_popup.click()
sleep(5)
movie_reviews_link = driver.find_element_by_link_text(reviews_link_text)
movie_reviews_link.click() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def go_to_movie_reviews_page_v1(movie):\n #get search bar input and send the movie name as key\n search_bar = driver.find_element_by_xpath(\"//input[@id='header-search-input']\")\n search_bar.send_keys(movie)\n sleep(5)\n search_button = driver.find_element_by_xpath(\"//button[@class='header-search-submit icon icon-search']\")\n search_button.click()",
"def get_reviews(rest_link):\n\tfilename = rest_link.split('/')[-1]\n\n\tcontents = None\n\n\tif contents is None:\n\t\tstart = time()\n\t\tdriver = init_chromedriver()\n\t\tdriver.get(rest_link + '/reviews')\n\n\t\t# print('There are {} reviews'.format(self.review_count))\n\n\t\t# click on the button 'All reviews'\n\t\tsleep(5)\n\t\tdriver.execute_script(\"window.scrollBy(0, 950);\")\n\t\twhile(1):\n\t\t\ttry:\n\t\t\t\tel = driver.find_element_by_css_selector('#selectors > a.item.default-section-title.everyone.empty')\n\t\t\t\twebdriver.ActionChains(driver).move_to_element(el).click(el).perform()\n\t\t\texcept TimeoutException:\n\t\t\t\tcontinue\t\t\n\t\t\texcept (NoSuchElementException):\n\t\t\t\tbreak\n\t\t\tbreak\n\n\t\tsleep(5)\t\n\t\tload_more = '#reviews-container > div.notifications-content > div.res-reviews-container.res-reviews-area > div > div > div.mt0.ui.segment.res-page-load-more.zs-load-more > div.load-more.bold.ttupper.tac.cursor-pointer.fontsize2'\n\t\tsleep(5)\n\t\twhile element_present(driver, load_more):\n\t\t\ttry:\n\t\t\t\tel2 = driver.find_element_by_css_selector(load_more)\n\t\t\t\tdriver.execute_script(\"return arguments[0].scrollIntoView();\", el2)\n\t\t\t\tdriver.execute_script(\"window.scrollBy(0, -150);\")\n\t\t\t\tsleep(0.5)\n\t\t\t\twebdriver.ActionChains(driver).move_to_element(el2).click(el2).perform()\n\t\t\texcept TimeoutException:\n\t\t\t\tcontinue\n\t\t\texcept (StaleElementReferenceException, NoSuchElementException):\n\t\t\t\tbreak\n\n\t\tsource = get_source(driver)\n\t\tdriver.quit()\n\n\telse:\n\t\tprint('Using cached page')\n\t\tsource = contents\n\n\tsoup = source_to_soup(source)\n\t#review_blocks = soup.find_all('div', class_=re.compile('ui segments res-review-body'))\n\n\treview_blocks = (soup.find_all('div', class_='ui segment clearfix brtop '))\n\tif len(review_blocks) == 0:\n\t\tprint('Error in parsing reviews...\\n Review blocks size is 0\\n')\n\t\twith open('not_parsed','a+') as f:\n\t\t\tf.write(rest_link)\n\t\treturn\n\tprint('Loaded {} reviews'.format(len(review_blocks)))\n\n\n\tlastreview = filename + '_last'\n\n\twith open(filename,'a+', encoding='utf-8') as f:\n\n\t\treviews = []\n\t\ti = start\n\t\tmy_str = None\n\t\tfor review in review_blocks[:]:\n\t\t\ttry:\n\t\t\t\tname_and_link = review.find('div', class_='header nowrap ui left')\n\t\t\t\t# print(name_and_link.contents)\n\n\t\t\t\tu_link = name_and_link.contents[1].attrs['href']\n\t\t\t\tu_entity_id = int(name_and_link.contents[1].attrs['data-entity_id'])\n\t\t\t\tu_name = name_and_link.contents[1].contents[0].strip()\n\t\t\t\t# print(u_name)\n\n\t\t\t\ttup = (u_name,u_entity_id)\n\t\t\t\t#userset.add(tup)\n\n\t\t\t\tuserset.add(u_link)\t\t\t\n\t\t\t\trating_and_rev_text = review.find('div', text='Rated')\n\t\t\t\tcomment_time = review.find('time').attrs['datetime']\n\t\t\t\trating = float(rating_and_rev_text.attrs['aria-label'].split()[-1])\n\t\t\t\treview_text = rating_and_rev_text.parent.contents[2].strip()\n\t\t\t\t#f.write('Review number '+str(my_ctr)+'\\n')\n\n\t\t\t\tif my_str is None:\n\t\t\t\t\tmy_str=comment_time\n\n\t\t\t\tf.write(str(comment_time)+'\\n')\n\t\t\t\tf.write(u_name+'\\n')\n\t\t\t\tf.write(str(u_entity_id)+'\\n')\n\t\t\t\tf.write(str(rating)+'\\n')\n\t\t\t\tf.write(review_text+'\\n\\n##\\n\\n')\n\t\t\t\tcomm_file = filename + 'last_review_date'\n\n\t\t\t\twith open (comm_file,'w') as myfile200:\n\t\t\t\t\tmyfile200.write(my_str)\n\t\t\t\t\n\t\t\texcept:\n\t\t\t\tpass\n\t\t\ti += 1",
"def get_reviews(review_url):\n print review_url\n html = urllib.urlopen(review_url).read()\n soup = bs4.BeautifulSoup(html, 'html.parser')\n\n rating_scores = soup.findAll(\"span\", \"ratingScore\")\n num_ratings = len(rating_scores) - 1\n\n current_reviews = soup.findAll(\"div\", \"currentVintageProfessinalReviews\")\n num_cur_reviews = str(current_reviews).count('ratingProvider')\n past_reviews = soup.findAll(\"ul\", \"pastVintagesProfessionalReviews\")\n num_past_reviews = str(past_reviews).count('ratingProvider')\n\n print 'There are {0} reviews for prior vintages of this wine.'.format(num_past_reviews)\n print 'There are {0} current reviews for this vintage.\\n'.format(num_cur_reviews)\n\n rating_provider = soup.findAll(\"span\", \"ratingProvider\")\n rating_score = soup.findAll(\"span\", \"ratingScore\")\n reviewers = re.findall('(?<![A-Z])[>]([A-Z]+(?![A-Z]))', str(rating_provider))\n ratings = re.findall('(?<![A-Z])[0-9]{2}(?![A-Z])', str(rating_score))\n\n print \"Ratings List:\", ratings\n print \"Current Reviews: \", num_cur_reviews\n\n currentreviews = []\n for j in range(num_cur_reviews):\n print \"Current Review #\"+str(j+1)+\":\", reviewers[j], ratings[j]\n currentreviews.append((reviewers[j], ratings[j]))\n print currentreviews\n\n print \"\\nPast Reviews: \", num_past_reviews\n past_review_ratings = []\n for k in range(num_cur_reviews, num_past_reviews+num_cur_reviews):\n #print \"Past Review #\"+str(k-num_cur_reviews+1)+\":\", reviewers[k], int(ratings[k])\n past_review_ratings.append(float(ratings[k]))\n if k > 30:\n break\n if num_past_reviews != 0:\n avg_past_reviews = sum(past_review_ratings)/len(past_review_ratings)\n round(avg_past_reviews, 2)\n else:\n avg_past_reviews = 0\n\n print \"Average of Past Reviews: \", avg_past_reviews\n\n return currentreviews, avg_past_reviews",
"def extract_reviews(url, review_count):\n\n api_url = url + \"%3Fstart%3D40\"\n\n html_obj = retrieve_html(url)\n\n review_list = parse_page(html_obj)\n\n result = review_list\n\n num_pages = review_count // 20 + 1\n\n for i in range(1, num_pages):\n curr_offset = i * 20\n curr_url = api_url + \"&start=%d\" % curr_offset\n\n curr_page_reviews = parse_page(retrieve_html(curr_url)[1])\n\n result += curr_page_reviews\n\n return result",
"def go_product_reviews_next(self, driver, website):\n paginator = driver.find_element_by_class_name(\"BVRRPager\")\n next_link = paginator.find_element_by_class_name(\"BVRRNextPage\")\n next_link.find_element_by_name(\"BV_TrackingTag_Review_Display_NextPage\").click()\n time.sleep(1)",
"def review_pages(catalog):\n review_pages_list = list()\n errors = 0\n for ix, movie in enumerate(catalog.iloc[:, 0], 1):\n try:\n soup_2 = fetch(movie, \"/reviews/?page=1\").find_all(\"span\", {\"class\", \"pageInfo\"})\n if len(soup_2) >= 1:\n for n in range(1, int(soup_2[0].text[-2:]) + 1):\n review_pages_list.append(movie + \"/reviews/?page=\" + str(n))\n except:\n errors += 1\n print('\\r3/4 — {:.2%} of review page URLs scraped. Error rate: {:.2%}'.format(\n ix/len(catalog), errors/ix), end=' ')\n print('\\r{} review page URLs successfully scraped. Error rate: {:.2%}'.format(\n len(review_pages_list)-errors, errors/ix), end='\\n')\n return review_pages_list",
"def top_ten(request):\n if request.method == 'GET':\n movies = Movie.objects.filter(date_of_release__lte=datetime.date.today())\n movies = movies.order_by('-rating')[:10]\n serializer = MovieSerializer(movies, many=True)\n return Response(serializer.data)",
"def fetch(self):\n try:\n self.genre = 'Review'\n log.debug(self.log_msg(\"Fetching the prouct page url %s\"%self.currenturi))\n res=self._getHTML(self.currenturi) # Assuming self.currenturi is at the product page\n self.rawpage=res['result']\n self._setCurrentPage()\n try:\n self.parent_page_title = stripHtml(self.soup.find('h1',{'id':'pgTitleDetail'}).renderContents())\n except:\n self.parent_page_title =''\n try:\n self.__product_price = self.soup.find('tbody',{'class':'prices'}).td.renderContents().replace('$','')\n except:\n log.exception(\"Error in fetching product_price\")\n self.__product_price = None\n\n parent_page_url = self.task.instance_data['uri']\n review_first_page_url = self.soup.find('a',text=\"Show All Customer Reviews » \").parent['href']\n review_url_order = \"&sortReviewsBy=DateDescending\"\n self.currenturi = self.base_url + review_first_page_url + review_url_order\n log.info(self.log_msg('current_uri :: %s'%(self.currenturi)))\n self._getParentPage()\n self.next_url_links=[]\n self.fetch_next_link = True\n while self.fetch_next_link:\n self._iterateReviewPages(parent_page_url)\n return True\n except Exception,e:\n log.exception(self.log_msg(\"Exception occured in fetch()\"))\n return False",
"def top_controversial(self, n):\n return top_movies",
"def check_ratings(self):\n\n self.browser.get('https://www.imdb.com/')\n\n for title in self.titles:\n input_bar = self.browser.find_element_by_id('navbar-query')\n input_bar.clear()\n\n input_bar.send_keys(title)\n input_bar.send_keys(Keys.RETURN)\n\n time.sleep(3)\n\n # Click on the first suggestion\n css_selector = \"div.findSection:nth-child(3) > table:nth-child(2) > tbody:nth-child(1) > tr:nth-child(1) > td:nth-child(2) > a:nth-child(1)\"\n self.browser.find_element_by_css_selector(css_selector).click()\n time.sleep(3)\n\n # Pull details that will always be available\n score = str(self.browser.find_element_by_class_name('ratingValue').text)\n score = score.split('/10')[0].replace(',', '.')\n\n time.sleep(3)\n\n summary = str(self.browser.find_element_by_class_name('summary_text').text)\n subtext = str(self.browser.find_element_by_class_name('subtext').text)\n\n # Pull details that differ between movies and series\n try:\n duration = str(self.browser.find_element_by_class_name('bp_sub_heading').text) # Only for series\n if 'episodes' not in duration:\n duration = 'Some episodes'\n except Exception:\n # bp_sub_heading won't be found on a movie page\n duration = 'movie'\n\n if subtext[0].isdigit():\n # Split up the details from the subtext\n subtext_list = subtext.split(' | ')\n else:\n # Some movies' subtext starts with 'R' / 'PG-13'\n subtext_list = subtext.split(' | ')\n del subtext_list[0]\n\n # Duration\n if duration == 'movie':\n show_type = 'Movie'\n duration = subtext_list[0]\n try:\n year = datetime.datetime.strptime(subtext_list[2].split(' (')[0], '%d %B %Y').strftime('%Y')\n except ValueError:\n year = str(subtext_list[2].split(' (')[0][-4:])\n\n else: # series\n show_type = 'Serie'\n # Retrieve last season and its release date\n season_tab = str(self.browser.find_element_by_class_name('seasons-and-year-nav').text).strip()\n\n numbers = re.findall('[0-9]+', season_tab)\n latest_season = int(numbers[0])\n latest_year = int(max(numbers, key=lambda x: int(x)))\n\n duration += ' (%d Seasons in %d), %s per episode' % (latest_season, latest_year, subtext_list[0])\n\n year = re.findall('[0-9]+', subtext_list[2])[0]\n\n # Pull some more data out from the subtext\n genres = subtext_list[1].split(', ')\n\n # Pull details that are not always available\n creds_list = []\n creds = self.browser.find_elements_by_class_name('credit_summary_item')\n for c in creds:\n temp = str(c.text)\n if '|' in temp:\n temp = temp.split('|')[0]\n\n creds_list.append(temp)\n\n self.data_dict[title] = {\n 'score': score,\n 'summary': summary,\n 'duration': duration,\n 'credits': creds_list,\n 'genres': genres,\n 'released': year,\n 'type': show_type,\n }",
"def top_by_num_of_ratings(self, n):\n return top_movies",
"def download_top_movies(genre: Genre) -> str:\n\n page_url = create_page_url(genre)\n response = requests.get(page_url)\n response.raise_for_status()\n return response.text",
"def get_review_page(review_link):\n\n session = r.Session()\n response = session.get(BASE_URL + '/music/albumreviews/' + review_link,\n headers=HEADERS)\n return response",
"def getTopMovies(endpoint, date, count=10):\n\n try:\n response = urlreq.urlopen(endpoint.format(date))\n soup = BeautifulSoup(response.read(), \"html.parser\")\n table = soup.find('table', border=\"0\", cellpadding=\"5\", cellspacing=\"1\")\n tdata = []\n\n for i, row in enumerate(table.find_all('tr')[1:], start=1):\n if i > count:\n break\n\n cells = row.find_all('td')\n tdict = {}\n\n tdict['rank'] = i\n tdict['title'] = cells[2].text.strip()\n tdict['daily_gross'] = int(re.sub(r'[^\\d]', '', cells[4].text))\n tdict['theaters'] = int(re.sub(r'[^\\d]', '', cells[7].text))\n tdict['todate_gross'] = int(re.sub(r'[^\\d]', '', cells[9].text))\n tdict['release_day'] = int(cells[10].text)\n\n tdata.append(tdict)\n\n tdata = pd.DataFrame(tdata)\n tdata['gross_date'] = date\n return tdata\n\n except urlerr.URLError as err:\n print(\"\\nThere was an error retrieving daily revenue information\")\n print(err)\n return None\n except Exception:\n print(\"\\nThere's something wrong with the BOMojo daily revenue page\")\n return None",
"def top_by_ratings(self, n, metric=average):\n return top_movies",
"def scraper(storyid, reviews_num, rate_limit=3):\n\n # There may be up to 15 reviews on a single page, therefore the number of\n # pages the reviews are stored on is equal to the following:\n number_of_pages = (reviews_num // 15) + 1\n\n # Returns a list of tuples (based on the contents of _reviews_in_table)\n list_of_review_tuples = []\n\n for p in range(number_of_pages):\n\n soup = soupify('https://www.fanfiction.net/r/' + storyid +\n '/0/' + str(p+1) + '/',\n rate_limit=rate_limit)\n\n for review in _reviews_in_table(soup):\n list_of_review_tuples.append(review)\n\n return list_of_review_tuples",
"def get_movies(iurl):\n movies = []\n \n if iurl[-3:] == '?s=':\n search_text = GetSearchQuery('WatchOnlineMovies')\n search_text = urllib.quote_plus(search_text)\n iurl += search_text\n\n html = requests.get(iurl, headers=mozhdr).text\n mlink = SoupStrainer('div', {'class':re.compile('postbox')})\n items = BeautifulSoup(html, parseOnlyThese=mlink)\n plink = SoupStrainer('div', {'class':'wp-pagenavi'})\n Paginator = BeautifulSoup(html, parseOnlyThese=plink)\n\n for item in items:\n title1 = item.h2.text\n try:\n title2 = title1.replace(\"Full Movie\", \"\")\n except:\n title2 = title1.replace(\"Watch Online\", \"\")\n try:\n title3 = title2.replace(\"Watch Online Placeholdernt\", \"\")\n except:\n title3 = title2.replace(\".\", \"\")\n try:\n title4 = title3.replace(\".\", \"\")\n except:\n title4 = title3.replace(\"Watch Online Placeholder\",\"\")\n try:\n title5 = title4.replace(\"Watch Online\", \"\")\n except:\n title5 = title4.replace(\"Download\",\"\")\n try:\n title6 = title5.replace(\"Watch Onlin\", \"\")\n except:\n title6 = title5.replace(\"Placeholder\",\"\")\n try:\n title7 = title6.replace(\"HD Pri\", \"\")\n except:\n title7 = title6.replace(\"Placeholder\",\"\")\n try:\n title8 = title7.replace(\" Watch On\", \"\")\n except:\n title8 = title7.replace(\"Placeholder\",\"\")\n try:\n title9 = title8.replace(\" Watch\", \"\")\n except:\n title9 = title8.replace(\"Placeholder\",\"\")\n try:\n title10 = title9.replace(\"Free Down\", \"\")\n except:\n title10 = title9.replace(\"Placeholder\",\"\")\n try:\n title11 = title10.replace(\"Free D\", \"\")\n except:\n title11 = title10.replace(\"Placeholder\",\"\")\n try:\n title12 = title11.replace(\"Free\", \"\")\n except:\n title12 = title11.replace(\"Placeholder\",\"\")\n try:\n title13 = title12.replace(\" F\", \"\")\n except:\n title13 = title12.replace(\"Placeholder\",\"\")\n try:\n title14 = title13.replace(\" Fr\", \"\")\n except:\n title14 = title13.replace(\"Placeholder\",\"\")\n try:\n title15 = title14.replace(\" Fre\", \"\")\n except:\n title15 = title14.replace(\"Placeholder\",\"\")\n try:\n title16 = title15.replace(\" HD\", \"\")\n except:\n title16 = title15.replace(\"Placeholder\",\"\")\n try:\n title17 = title16.replace(\" H\", \"\")\n except:\n title17 = title16.replace(\"Placeholder\",\"\")\n try:\n title18 = title17.replace(\" HD P\", \"\")\n except:\n title18 = title17.replace(\"Placeholder\",\"\")\n try:\n title19 = title18.replace(\" re\", \"\")\n except:\n title19 = title18.replace(\"Placeholder\",\"\")\n try:\n title120 = title19.replace(\" r\", \"\")\n except:\n title120 = title19.replace(\"Placeholder\",\"\")\n # Coloring Years\n try:\n title21 = title120.replace(\"(2018)\", \"[COLOR yellow](2018)[/COLOR]\")\n except:\n title21 = title120.replace(\"Placeholder\",\"\")\n try:\n title22 = title21.replace(\"(2016)\", \"[COLOR lightsalmon](2016)[/COLOR]\")\n except:\n title22 = title21.replace(\"Placeholder\",\"\")\n try:\n title23 = title22.replace(\"(2015)\", \"[COLOR lime](2016)[/COLOR]\")\n except:\n title23 = title22.replace(\"Placeholder\",\"\")\n # Language\n try:\n title24 = title23.replace(\"Hindi\", \"[COLOR green]Hindi[/COLOR]\")\n except:\n title24 = title23.replace(\"Placeholder\",\"\")\n try:\n title25 = title24.replace(\"Dubbed\", \"[COLOR cyan]Dubbed[/COLOR]\")\n except:\n title25 = title24.replace(\"Placeholder\",\"\")\n\n # Continued\n try:\n title26 = title25.replace(\" nt o\", \"\")\n except:\n title26 = title25.replace(\"Placeholder\",\"\")\n try:\n title27 = title26.replace(\" nt F\", 
\"\")\n except:\n title27 = title26.replace(\"Placeholder\",\"\")\n try:\n title28 = title27.replace(\" nt\", \"\")\n except:\n title28 = title27.replace(\"Placeholder\",\"\")\n try:\n title = title28.replace(\" Pr\", \"\")\n except:\n title = title28.replace(\"Placeholder\",\"\")\n\n url = item.h2.find('a')['href']\n try:\n thumb = item.find('img')['src'].strip()\n except:\n thumb = _icon\n movies.append((title, thumb, url))\n \n if 'next' in str(Paginator):\n\n nextli = Paginator.find('a', {'class':re.compile('page larger')})\n\n purl = nextli.get('href')\n pages = Paginator.findAll('span', {'class':re.compile('pages')})\n lastpg = pages[len(pages)-1].text\n title = 'Next Page.. (Currently in %s)' % (lastpg)\n movies.append((title, _icon, purl))\n \n return movies",
"def top_ten(subreddit):\n\n limit = \"10\"\n\n url = \"https://www.reddit.com/r/{}/hot.json?limit={}\".format(subreddit,\n limit)\n\n user_agent = {\"User-Agent\": \"Python\"}\n response = requests.get(url, headers=user_agent, allow_redirects=False)\n if response.status_code >= 300:\n print(\"None\")\n else:\n for elem in response.json().get(\"data\").get(\"children\"):\n print(elem.get(\"data\").get(\"title\"))",
"def movies(catalog):\n movies_url = list()\n errors = 0\n for ix, critic_profile in enumerate(catalog, 1):\n try:\n checker = fetch(website, critic_profile).find_all(\"h2\", {\"class\": \"panel-heading js-review-type\"})\n if len(checker) > 0:\n if checker[0].text == \"Movie Reviews Only\":\n for td in fetch(website, critic_profile).find_all(\"td\",\n {\"class\": \"col-xs-12 col-sm-6 critic-review-table__title-column\"}):\n for a in td.find_all(\"a\"):\n if a['href'] not in movies_url:\n movies_url.append(a['href'])\n except:\n errors += 1\n # print('\\r2/4 — {:.2%} of movie URLs scraped. Error rate: {:.2%}'.format(ix/len(catalog),\n # errors/ix), end=' ')\n # print('\\r{} movie URLs successfully scraped. Error rate: {:.2%}'.format(len(movies_url)-errors, errors/ix), end='\\n')\n return movies_url",
"def get_rating(text):\n movie = text\n page = requests.get('http://www.imdb.com/find?ref_=nv_sr_fn&q=' + movie + '&s=tt')\n soup1 = BeautifulSoup(page.content, 'html.parser')\n movieid = soup1.select(\".findList tr a\")[0].get('href')\n movielink = \"http://www.imdb.com\" + movieid\n mlinkpage = requests.get(movielink)\n soup2 = BeautifulSoup(mlinkpage.content, 'html.parser')\n movierating = soup2.select(\".ratingValue span\")[0].text\n metascore = soup2.select(\".metacriticScore\")\n reviewlink = movielink + 'reviews'\n linkpage = requests.get(reviewlink)\n soup3 = BeautifulSoup(linkpage.content, 'html.parser')\n \n return soup3, movierating",
"def top_ten(subreddit):\n url = \"https://www.reddit.com/r/\" + subreddit + \"/top/.json\"\n r = requests.get(url,\n headers={'User-agent': 'norman'},\n params={\"limit\": 10},\n allow_redirects=False)\n if r.status_code == 200:\n for dic in r.json().get('data').get('children'):\n print(dic.get('data').get('title'))\n else:\n print(\"None\")",
"def getTopTen():\n\n if moviesRanked > 10:\n return moviesRanked[0:10]\n else: \n return moviesRanked",
"def top_ten(subreddit):\n\n if subreddit is None or not isinstance(subreddit, str):\n print(\"None\")\n\n user_agent = {'User-agent': 'Google Chrome Version 81.0.4044.129'}\n params = {'limit': 10}\n url = 'https://www.reddit.com/r/{}/hot/.json'.format(subreddit)\n\n response = get(url, headers=user_agent, params=params)\n all_data = response.json()\n\n try:\n raw1 = all_data.get('data').get('children')\n\n for i in raw1:\n print(i.get('data').get('title'))\n\n except:\n print(\"None\")",
"def top_ten(subreddit):\n\n user_agent = {'User-agent': 'Mozilla/5.0 (Macintosh; \\\nIntel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) \\\nChrome/39.0.2171.95 Safari/537.36'}\n\n res = requests.get('https://www.reddit.com/r/{}/hot.json?limit=10'.format(\n subreddit), headers=user_agent)\n\n if res.status_code == 404:\n print(None)\n\n else:\n for sub in res.json().get(\"data\").get(\"children\"):\n print(sub.get(\"data\").get(\"title\"))",
"def scrape_trustpilot_reviews(company, PATH, n_pages):\n\n # Review properties\n names = []\n ratings = []\n headers = []\n reviews = []\n dates = []\n locations = []\n\n # Setup monitoring variables\n start_time = time()\n requests = 0\n request_limit = 50\n\n # For each page specified, get reviews\n for p in range(1, n_pages+1):\n\n url = f'{PATH}{p}'\n response = get(url)\n print(f'URL: {url}')\n\n # Pause the loop to limit access to the server\n sleep(randint(8, 15))\n\n # Monitor the request\n requests += 1\n elapsed_time = time() - start_time\n print('Request:{}; Frequency: {} requests/s'.format(requests, requests / elapsed_time))\n os.system('clear')\n\n if response.status_code != 200:\n warn(f'Request: {requests}l; Status Code: {response}')\n\n if requests > request_limit:\n warn('Number of requests have exceeded expectation')\n break\n\n # Identify page areas of interest\n page_html = BeautifulSoup(response.text, 'html.parser')\n review_containers = page_html.find_all('div', class_='review-content__body')\n user_containers = page_html.find_all('div', class_='consumer-information__details')\n rating_container = page_html.find_all('div', class_='review-content-header')\n dates_container = page_html.find_all(\"section\", {\"class\": \"review__content\"})\n profile_container = page_html.find_all('aside', class_='review__consumer-information')\n\n print(f'Containers for request: {len(rating_container)}')\n for x in range(len(rating_container)):\n review_c = review_containers[x]\n headers.append(review_c.h2.a.text)\n r = review_c.p\n if r:\n reviews.append(review_c.p.text)\n else:\n reviews.append('')\n\n reviewer = user_containers[x]\n names.append(reviewer.div.text)\n\n rating = rating_container[x]\n ratings.append(rating.img.get('alt'))\n\n date = dates_container[x]\n date_json = json.loads(date.find('script').string)\n date_j = date_json['publishedDate']\n dates.append(date_j)\n\n prof = profile_container[x]\n link = 'https://www.trustpilot.com' + prof.a['href']\n c_profile = get(f'{link}')\n if c_profile:\n profile_html = BeautifulSoup(c_profile.text, 'html.parser')\n cust_container = profile_html.find('div', class_='user-summary-location')\n locations.append(cust_container.text)\n\n reviews_df = pd.DataFrame(\n {\n 'Company': company,\n 'Header': headers,\n 'Review': reviews,\n 'Rating': ratings,\n 'Name': names,\n 'Location': locations,\n 'Date': dates\n }\n )\n\n reviews_df.Header = clean_string(reviews_df.Header)\n reviews_df.Review = clean_string(reviews_df.Review)\n reviews_df.Name = clean_string(reviews_df.Name)\n reviews_df.Location = clean_string(reviews_df.Location)\n reviews_df.Location = reviews_df.Location.apply(lambda x: x.split(',', 1)[-1])\n reviews_df.Date = pd.to_datetime(reviews_df.Date)\n\n return reviews_df",
"def top_ten(subreddit):\n\n url = \"https://www.reddit.com/r/{}/hot.json?limit=10\".format(subreddit)\n headers = {\"user-agent\": \"Mozilla/5.0\"}\n\n res = requests.get(url, headers=headers, allow_redirects=False)\n\n if res.status_code == 200:\n for post in res.json()['data']['children']:\n print(post['data']['title'])\n else:\n print(None)",
"def top_ten(subreddit):\n\n settings = {'allow_redirects': False, 'headers': {'User-agent': ''}}\n url = \"https://www.reddit.com/r/{}/hot.json\".format(subreddit)\n\n try:\n responses = get(url, **settings).json().get('data').get('children')\n for post in responses[:10]:\n print(post['data']['title'])\n except:\n print(\"None\")",
"def top_ten(subreddit):\n try:\n info = requests.get('https://www.reddit.com/r/{}/hot.json?limit=10'\n .format(subreddit), allow_redirects=False,\n headers={'User-Agent': 'Custom'}).json().get(\n 'data').get('children')\n for child in info:\n print(child.get('data').get('title'))\n except:\n print('None')",
"def top_ten(subreddit):\n url = \"https://www.reddit.com/r/\" + subreddit + \"/hot.json?limit=10\"\n identify = {\"User-Agent\": \"Requests library from Python\",\n \"From\": \"[email protected]\"}\n to_print = []\n hot = requests.get(url, headers=identify, allow_redirects=False)\n if hot.status_code == 404:\n print(\"None\")\n return 0\n if hot.status_code == 200:\n hot = hot.json()\n hot = hot[\"data\"]\n hot = hot[\"children\"]\n for items in hot:\n del items[\"kind\"]\n for data in hot:\n to_print.append(data[\"data\"])\n hot = to_print\n to_print = []\n for dictio in hot:\n to_print.append(dictio[\"title\"])\n for itera in to_print:\n print(itera)",
"def top_ten(subreddit):\n url = \"https://api.reddit.com/r/{}/hot?limit=10\".format(subreddit)\n response = requests.get(url, headers={\"User-Agent\": \"Python3\"})\n if str(response) != \"<Response [200]>\": # response.status_code != 200\n print(None)\n return\n response = response.json()\n child = response[\"data\"][\"children\"]\n for tittle in child:\n print(tittle[\"data\"][\"title\"])"
]
| [
"0.7084704",
"0.6852602",
"0.66241145",
"0.6481701",
"0.6451495",
"0.642058",
"0.641994",
"0.6399066",
"0.63749605",
"0.63677484",
"0.6301293",
"0.6188999",
"0.61817837",
"0.61139286",
"0.61109746",
"0.6109608",
"0.6074871",
"0.60566145",
"0.6026517",
"0.60226715",
"0.5995197",
"0.59723157",
"0.5971399",
"0.59636617",
"0.5960508",
"0.59479386",
"0.5883903",
"0.58804893",
"0.5874284",
"0.5871613"
]
| 0.7519103 | 0 |
Convert a keyboard event into a double2 movement code and return it; if an unrecognized key is obtained, return None ======KEYBOARD MANUAL====== W/up_arrow = forward S/down_arrow = backward A/left_arrow = turn left D/right_arrow = turn right P = parking V = stop ALL action I = pole up K = pole down ============================ | def db2_movement_convert(evtype, kname):
if evtype == 'down':
if kname == 'w' or kname == 'W' or kname == 'up':
return 'f'
elif kname == 's' or kname == 'S' or kname == 'down':
return 'b'
elif kname == 'a' or kname == 'A' or kname == 'left':
return 'l'
elif kname == 'd' or kname == 'D' or kname == 'right':
return 'r'
elif kname == 'i' or kname == 'I':
return 'u'
elif kname == 'k' or kname == 'K':
return 'd'
elif kname == 'p' or kname == 'P':
return 'p'
elif kname == 'v' or kname == 'V':
return 'x'
else:
return None
elif evtype == 'up':
if kname == 'w' or kname == 'W' or kname == 'up':
return 's'
elif kname == 's' or kname == 'S' or kname == 'down':
return 's'
elif kname == 'a' or kname == 'A' or kname == 'left':
return 't'
elif kname == 'd' or kname == 'D' or kname == 'right':
return 't'
elif kname == 'i' or kname == 'I':
return 'h'
elif kname == 'k' or kname == 'K':
return 'h'
else:
return None
else:
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def keyMove(self, event):\n UP = 111\n RIGHT = 114\n DOWN = 116\n LEFT = 113\n dct = {\n RIGHT: 0,\n DOWN: 1,\n LEFT: 2,\n UP: 3\n }[event.keycode]\n self.requestSegByDct(dct)",
"def get_event(self, event):\n\n if event.type == pg.KEYDOWN:\n if event.key == pg.K_RETURN:\n print(self.game.current_room_no)\n if event.key == pg.K_BACKSPACE:\n print(self.game.room.room_no_list)\n if event.key == pg.K_a:\n self.is_moving_left = True \n self.move_left()\n if event.key == pg.K_d:\n self.is_moving_right = True \n self.move_right()\n if event.key == pg.K_w:\n self.is_moving_up = True\n self.move_up() \n if event.key == pg.K_s:\n self.is_moving_down = True\n self.move_down() \n if event.type == pg.KEYUP:\n if event.key == pg.K_a:\n if self.is_moving_right == True:\n self.is_moving_left = False \n self.move_right()\n else:\n self.is_moving_left = False \n self.stopX()\n if event.key == pg.K_d:\n if self.is_moving_left == True:\n self.is_moving_right = False\n self.move_left() \n else:\n self.is_moving_right = False \n self.stopX()\n if event.key == pg.K_w:\n if self.is_moving_down == True:\n self.is_moving_up = False \n self.move_down()\n else:\n self.is_moving_up = False \n self.stopY()\n if event.key == pg.K_s:\n if self.is_moving_up == True:\n self.is_moving_down = False \n self.move_up()\n else:\n self.is_moving_down = False \n self.stopY()\n\n if event.type == pg.MOUSEBUTTONDOWN and event.button == 1:\n self.is_shooting = True\n elif event.type == pg.MOUSEBUTTONUP and event.button == 1:\n self.is_shooting = False",
"def handle_movement_keyup(self, key):\n def _opposite_dir(key):\n return {pygame.K_LEFT: pygame.K_RIGHT,\n pygame.K_RIGHT: pygame.K_LEFT,\n pygame.K_UP: pygame.K_DOWN}[key]\n try:\n log.debug(f'released: {key}')\n self.keys_down[key] = False\n if key in {pygame.K_LEFT, pygame.K_RIGHT} and \\\n not(self.keys_down[_opposite_dir(key)]):\n self.stop_movement()\n log.debug(f'keys down: {self.keys_down}')\n except AttributeError:\n log.error(\"you didn't pass a keyboard event!!\")",
"def processInput(direction):\n userinput = screen.getch()\n if userinput == curses.KEY_UP:\n direction = 3\n elif userinput == curses.KEY_DOWN:\n direction = 2\n elif userinput == curses.KEY_LEFT:\n direction = 1\n elif userinput == curses.KEY_RIGHT:\n direction = 0\n return direction",
"def handle_movement_keydown(self, key):\n try:\n log.debug(f'pressed: {key}')\n if key == pygame.K_LEFT:\n self.walk_left()\n elif key == pygame.K_RIGHT:\n self.walk_right()\n elif key == pygame.K_DOWN:\n pass\n elif key == pygame.K_UP:\n pass\n elif key == pygame.K_SPACE:\n self.jump()\n self.keys_down[key] = True\n except AttributeError:\n log.info(\"you didn't pass a keyboard event!!\")",
"def ev_keydown(self, event: tcod.event.KeyDown) -> T | None:",
"def handle_keystroke_direction(keystroke):\n return KEYSTROKES.get(keystroke)",
"def ev_keydown(self, event: KeyDown) -> None:",
"def keyCam(self, event):\n dct = {\n \"d\": 0,\n \"s\": 1,\n \"q\": 2,\n \"z\": 3\n }[event.char]\n self.moveAllSeg(dct)",
"def ev_KEYDOWN(self, event):",
"def ev_KEYUP(self, event):",
"def next(self):\n \n jump = 0\n \n for event in pudding.process_event():\n if event[0] == sdlconst.KEYDOWN:\n if (event[1] == sdlconst.K_q) or (event[1] == sdlconst.K_ESCAPE):\n tofu.GAME_INTERFACE.end_game() # Quit the game\n \n elif event[1] == sdlconst.K_m:\n print \"trying to change single to multiplayer mode\"\n tofu.GAME_INTERFACE.end_game('client')\n \n elif event[1] == sdlconst.K_LSHIFT:\n # Shift key is for jumping\n # Contrary to other action, jump is only performed once, at the beginning of\n # the jump.\n jump = 1\n \n elif event[1] == sdlconst.K_LEFT: self.left_key_down = 1\n elif event[1] == sdlconst.K_RIGHT: self.right_key_down = 1\n elif event[1] == sdlconst.K_UP: self.up_key_down = 1\n elif event[1] == sdlconst.K_DOWN: self.down_key_down = 1\n \n elif event[0] == sdlconst.KEYUP:\n if event[1] == sdlconst.K_LEFT: self.left_key_down = 0\n elif event[1] == sdlconst.K_RIGHT: self.right_key_down = 0\n elif event[1] == sdlconst.K_UP: self.up_key_down = 0\n elif event[1] == sdlconst.K_DOWN: self.down_key_down = 0\n \n if jump: return Action(ACTION_JUMP)\n \n # People saying that Python doesn't have switch/select case are wrong...\n # Remember this if you are coding a fighting game !\n return Action({\n (0, 0, 1, 0) : ACTION_ADVANCE,\n (1, 0, 1, 0) : ACTION_ADVANCE_LEFT,\n (0, 1, 1, 0) : ACTION_ADVANCE_RIGHT,\n (1, 0, 0, 0) : ACTION_TURN_LEFT,\n (0, 1, 0, 0) : ACTION_TURN_RIGHT,\n (0, 0, 0, 1) : ACTION_GO_BACK,\n (1, 0, 0, 1) : ACTION_GO_BACK_LEFT,\n (0, 1, 0, 1) : ACTION_GO_BACK_RIGHT,\n }.get((self.left_key_down, self.right_key_down, self.up_key_down, self.down_key_down), ACTION_WAIT))",
"def _process_key(evt):\n key = evt.GetKeyCode()\n if key in KEYMAP:\n return KEYMAP[key], ''\n if 97 <= key <= 122:\n key -= 32\n if key >= 32 and key <= 127:\n return keys.Key(chr(key)), chr(key)\n else:\n return None, None",
"def key_down(event, ai, var, screen, ship, shots, enemies, charges, shields, hub):\r\n\tif event.key == pygame.K_UP:\r\n\t\tship.move_up = 1\r\n\telif event.key == pygame.K_DOWN:\r\n\t\tship.move_down = 1\r\n\telif event.key == pygame.K_SPACE:\r\n\t\tshoot_bullet(ai, screen, ship, shots, enemies)\r\n\t\tbegin_charge(ai, var, screen, ship, charges)\r\n\telif event.key == pygame.K_RSHIFT or event.key == pygame.K_LSHIFT:\r\n\t\tcall_shield(ai, var, screen, ship, shields, hub)\r\n\telif event.key == pygame.K_q:\r\n\t\tsys.exit()\r\n\t#elif event.key == pygame.K_p:\r\n\t#\thub.pause = 1\r\n\telif event.key == pygame.K_z:\r\n\t\thub.za_wurado(ai)",
"def handle_keyboard_data(data):\n pass",
"def _check_keyup_events(self, event):\n if event.key == pygame.K_RIGHT:\n self.ship.moving_right = False # moving right key released, stop moving\n elif event.key == pygame.K_LEFT:\n self.ship.moving_left = False # moving left key released, stop moving",
"def process_keychange(self):\n # Process up/down\n if self.up_pressed and not self.down_pressed:\n if self.physics_engine.is_on_ladder():\n self.player_sprite.change_y = PLAYER_MOVEMENT_SPEED\n elif (\n self.physics_engine.can_jump(y_distance=10)\n and not self.jump_needs_reset\n ):\n self.player_sprite.change_y = PLAYER_JUMP_SPEED\n self.jump_needs_reset = True\n arcade.play_sound(self.jump_sound)\n elif self.down_pressed and not self.up_pressed:\n if self.physics_engine.is_on_ladder():\n self.player_sprite.change_y = -PLAYER_MOVEMENT_SPEED\n\n # Process up/down when on a ladder and no movement\n if self.physics_engine.is_on_ladder():\n if not self.up_pressed and not self.down_pressed:\n self.player_sprite.change_y = 0\n elif self.up_pressed and self.down_pressed:\n self.player_sprite.change_y = 0\n\n # Process left/right\n if self.right_pressed and not self.left_pressed:\n self.player_sprite.change_x = PLAYER_MOVEMENT_SPEED\n elif self.left_pressed and not self.right_pressed:\n self.player_sprite.change_x = -PLAYER_MOVEMENT_SPEED\n else:\n self.player_sprite.change_x = 0",
"def handle_input(self, event):\n if event.type == pygame.KEYDOWN and event.key == self.actions[\"right\"]:\n self.action = 2\n if event.type == pygame.KEYUP and event.key == self.actions[\"right\"] and self.action == 2:\n self.action = 1\n if event.type == pygame.KEYDOWN and event.key == self.actions[\"left\"]:\n self.action = 0\n if event.type == pygame.KEYUP and event.key == self.actions[\"left\"] and self.action == 0:\n self.action = 1",
"def _on_key_press(self, event):",
"def handle_key(self, key):\n direction = DIRECTIONS.get(key)\n if direction:\n self.move(direction)",
"def _check_keyup_events(self, event):\t\n\t\tif event.key == pygame.K_RIGHT:\n\t\t\tself.pigeon.moving_right = False\n\t\telif event.key == pygame.K_LEFT:\n\t\t\tself.pigeon.moving_left = False",
"def _check_keyup_events(self, event):\n if event.key == pygame.K_RIGHT:\n self.ship.moving_right = False\n elif event.key == pygame.K_LEFT:\n self.ship.moving_left = False",
"def keyReleaseEvent(self, event: QtGui.QKeyEvent) -> None:\n if event.key() in [Qt.Key_W, Qt.Key_S, Qt.Key_A, Qt.Key_D] and self.__enable_key:\n new_direction = self.__directions.index(event.text())\n # ignore opposite direction\n if (new_direction + 2) % 4 == self.__h_direction:\n return\n self.__h_direction = new_direction\n if event.isAutoRepeat():\n self.__change_speed(self.__acc_step)\n print(f'{event.text().capitalize()}:accelerate speed')\n else:\n self.__change_speed(self.__step)\n print(f'{event.text().capitalize()}:normal speed')",
"def handle(self, event):\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_LEFT:\r\n self.left()\r\n self.state['ldown'] = True\r\n ''' Previous functionality if desired, but this has been deprecated and moved to the update() function.\r\n if not self.state['falling']:\r\n mods = pygame.key.get_mods()\r\n # Hold shift (left or right) to run\r\n if mods & pygame.KMOD_LSHIFT or mods & pygame.KMOD_RSHIFT:\r\n self.run()\r\n self.nowalk()\r\n else:\r\n self.walk()\r\n self.norun()\r\n '''\r\n \r\n elif event.key == pygame.K_RIGHT:\r\n self.right()\r\n self.state['rdown'] = True\r\n if not (self.state['jumping'] or self.state['falling'] or self.state['fallingfast'] or self.state['landing']):\r\n ''' Previous functionality if desired, but this has been deprecated and moved to the update() function.\r\n \r\n mods = pygame.key.get_mods()\r\n # Hold shift (left or right) to run\r\n if mods & pygame.KMOD_LSHIFT or mods & pygame.KMOD_RSHIFT:\r\n self.run()\r\n else:\r\n self.walk()\r\n '''\r\n \r\n elif event.key == pygame.K_UP:\r\n if not (self.state['jumping'] or self.state['falling'] or self.state['fallingfast'] or self.state['landing']):\r\n self.jump()\r\n \r\n # TODO: Consider adding in Smash Bros. style falling through the floor.\r\n # elif event.key == pygame.K_DOWN:\r\n # self.fall()\r\n \r\n \r\n elif event.type == pygame.KEYUP:\r\n if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:\r\n if event.key == pygame.K_LEFT :\r\n self.state['ldown'] = False\r\n if self.state['rdown']:\r\n self.right()\r\n elif event.key == pygame.K_RIGHT:\r\n self.state['rdown'] = False\r\n if self.state['ldown']:\r\n self.left()\r\n if not (self.state['jumping'] or self.state['falling'] or self.state['fallingfast'] or self.state['landing']) and not self.state['ldown'] and not self.state['rdown']:\r\n self.stand()\r\n if not self.state['running']:\r\n self.state['steps'] = 0\r\n \r\n if self.state['ldown'] or self.state['rdown']: self.walk()",
"def OnKeyDown(self, event):\n\t\traw_code = event.GetRawKeyCode()\n\t\tmodifiers = event.GetModifiers()\n\t\t#print \"raw_code=\",raw_code,\";modifiers=\",modifiers\n\n\t\tif raw_code == 39 or raw_code == 73 : # <I> or -> = zoom in \n\t\t\tself.screenXsize += 20 \n\t\t\tprint \"X Zoom In\"\n\t\telif raw_code == 37 or raw_code ==79 :# <O> or <- = zomm out\n\t\t\tself.screenXsize -= 5 \n\t\t\tprint \"X Zoom Out\"\n\t\telif raw_code == 38:# <arrow up> = Y zomm in\n\t\t\tself.factorY += 0.2 \n\t\t\tprint \"Y Zoom In\"\n\t\telif raw_code == 40:# <arrow dn> = Y zomm out\n\t\t\tself.factorY -= 0.1 \n\t\t\tprint \"Y Zoom Out\"\n\t\telif raw_code ==33:# <PgUp> = Y move Up \n\t\t\tself.offsetY -= 10\n\t\t\tprint \"Y Move Up\"\n\t\telif raw_code ==34:# <PgDn> = Y move Down\n\t\t\tself.offsetY += 30 \n\t\t\tprint \"Y Move Down\"\n\t\telif raw_code == 90 and modifiers ==2 :# <ctrl>+<Z> = \n\t\t\tself.SetRenderDefault()\n\t\telif raw_code == 85 and modifiers ==2 :# <ctrl>+<U> = increase Vout \n\t\t\tself.AdjVout(100,'+')\n\t\telif raw_code == 88 and modifiers ==2 :# <ctrl>+<x> = clear debug_out \n\t\t\tself.window.clear_out()\n\t\telif raw_code == 85 and modifiers ==6 :# <ctrl>+<shift>+<U> = decrease Vout \n\t\t\tself.AdjVout(10,'-')\n\t\telif raw_code == 85 and modifiers ==3 :# <ctrl>+<alt>+<U> = [email protected] \n\t\t\tself.AdjVout(5.0,'=')\n\t\t\tprint \"Set Vout to 5.0V\"\n\t\telif raw_code == 74 and modifiers ==7 :# <ctrl>+<shift>+<alt>+<J> = [email protected]\n\t\t\tself.AdjVout(10.0,'=')\n\t\t\tprint \"Set Vout to 10.0V\"\n\t\telif raw_code == 77 and modifiers ==7 :# <ctrl>+<shift>+<alt>+<M> = [email protected]\n\t\t\tself.AdjVout(15.0,'=')\n\t\t\tprint \"Set Vout to 15.0V\"\n\t\telif (raw_code == 3 and modifiers ==2) or raw_code == 32 :# <ctrl>+<Pause> = run/pause\n\t\t\tself.OnRunStop(event)\n\t\telif raw_code == 114 :# <F3> = pause\n\t\t\tself.Pause()\n\t\telif raw_code == 113 :# <F2> = run\n\t\t\tself.Run()\n\t\telif raw_code == 115 :# <F4> = setup\n\t\t\tself.Setup()\n\t\telif raw_code == 116 :# <F5> = full screen\n\t\t\tself.FullScreen = not self.FullScreen \n\t\t\tself.window.ShowFullScreen(self.FullScreen)\n\t\telif raw_code == 27 :# <ESC> = NOT full screen\n\t\t\tself.FullScreen = False\n\t\t\tself.window.ShowFullScreen(self.FullScreen)\n\t\t\tapp=wx.GetApp()\n\t\t\tframe = app.GetTopWindow()\n\t\t\tframe.StopLogo()\n\t\telif raw_code == 118 :# <F7> = open/close debug window\n\t\t\tself.window.SetDebug()\n\t\telif raw_code == 119 :# <F8> = expand/shrink sheet window\n\t\t\tself.window.SetSheet()\n\t\telif raw_code == 120 :# <F9> = hide sheet field\n\t\t\tself.window.HideSheetField()\n\t\telif raw_code == 112 :# <F1> = Select EUT\n\t\t\tself.SelectEut()\n\t\tself.Refresh(True)",
"def process_keystroke(self, keystroke):\n import x84.bbs.session\n self.moved = False\n rstr = u''\n if keystroke in self.keyset['refresh']:\n rstr += self.refresh()\n elif keystroke in self.keyset['up']:\n rstr += self.move_up()\n elif keystroke in self.keyset['down']:\n rstr += self.move_down()\n elif keystroke in self.keyset['home']:\n rstr += self.move_home()\n elif keystroke in self.keyset['end']:\n rstr += self.move_end()\n elif keystroke in self.keyset['pgup']:\n rstr += self.move_pgup()\n elif keystroke in self.keyset['pgdown']:\n rstr += self.move_pgdown()\n elif keystroke in self.keyset['exit']:\n self._quit = True\n else:\n logger = logging.getLogger()\n logger.debug(\n 'unhandled, %r', keystroke if type(keystroke) is not int\n else x84.bbs.session.getterminal().keyname(keystroke))\n return rstr",
"def game_input(self):\n inp = \"\"\n while inp not in [\"DOWN\", \"RIGHT\", \"UP\", \"LEFT\"]:\n inp = input(\"Use the numeric keypad to choose a direction\").upper()\n if inp == \"Q\":\n break\n inp = self.pave_num(inp)\n return inp",
"def key_down_char(self, key):\n # Used to check if Logic.[direction] worked\n done = None\n # Need to check if tuple or not\n # Python sometimes sends key as tuple or char\n if isinstance(key, tuple):\n if key[0] == '\\'a\\'':\n self.matrix, done = Logic.left(self.matrix)\n if key[0] == '\\'s\\'':\n self.matrix, done = Logic.down(self.matrix)\n if key[0] == '\\'d\\'':\n self.matrix, done = Logic.right(self.matrix)\n if key[0] == '\\'w\\'':\n self.matrix, done = Logic.up(self.matrix)\n else:\n if key == '\\'a\\'':\n self.matrix, done = Logic.left(self.matrix)\n if key == '\\'s\\'':\n self.matrix, done = Logic.down(self.matrix)\n if key == '\\'d\\'':\n self.matrix, done = Logic.right(self.matrix)\n if key == '\\'w\\'':\n self.matrix, done = Logic.up(self.matrix)\n\n if done:\n # Logic.[direction] worked = add new tile (game rules)\n self.matrix = Logic.add_tile(self.matrix)\n # NOT USED, record last move for potential back track\n # self.history_matrix.append(self.matrix)\n # ONLY used in UI\n # self.update_grid_cells()\n # done = False",
"def check_keyup_events(event, ship):\r\n if event.key == pygame.K_RIGHT:\r\n ship.moving_right = False\r\n elif event.key == pygame.K_LEFT:\r\n ship.moving_left = False",
"def check_keyDown(event, ai_settings, screen, player, projectiles):\n if event.key == pygame.K_UP:\n player.moving_up = True\n elif event.key == pygame.K_DOWN:\n player.moving_down = True\n elif event.key == pygame.K_SPACE:\n player.moving_right = True\n player.not_moving = False\n elif event.key == pygame.K_v:\n fire_laser(ai_settings, screen, player, projectiles)\n elif event.key == pygame.K_q:\n sys.exit()"
]
| [
"0.7092827",
"0.6680352",
"0.65994185",
"0.65799326",
"0.6546511",
"0.64121026",
"0.64097077",
"0.63590294",
"0.6296625",
"0.62940115",
"0.62729996",
"0.6228549",
"0.6190524",
"0.6168614",
"0.61459535",
"0.6139373",
"0.6129849",
"0.61266243",
"0.61151725",
"0.61027867",
"0.6084264",
"0.60702884",
"0.6062135",
"0.6061094",
"0.60540193",
"0.6051625",
"0.60460216",
"0.60409653",
"0.60169935",
"0.5991159"
]
| 0.7269131 | 0 |
Testing M6 remeshing formula in 2D, 2 kernels, single precision, o2 splitting. | def test_2D_m6_2k():
scal, velo = setup_2D()
advec = Advection(velo, scal, discretization=d2d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: 'gpu_2k',
Splitting: 'o2'}
)
advec_py = Advection(velo, scal, discretization=d2d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: '',
Splitting: 'o2'},
)
assertion_2D_withPython(scal, velo, advec, advec_py) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_1_2_2D_cube_splits(self):\n check = [(0, 0), (1, 1), (1, 0), (0, 1), (0.5, 0.5), (0.0, 0.5),\n (0.5, 0.0),\n (0.25, 0.25), (1.0, 0.5), (0.5, 1.0), (0.75, 0.75),\n (0.75, 0.25),\n (0.25, 0.75), (0.5, 0.25), (0.25, 0.5), (0.375, 0.375),\n (0.0, 0.25),\n (0.25, 0.0), (0.125, 0.125), (0.125, 0.375), (0.375, 0.125),\n (0.5, 0.75), (0.75, 0.5), (0.625, 0.625), (1.0, 0.75),\n (0.75, 1.0),\n (0.875, 0.875), (0.875, 0.625), (0.625, 0.875), (0.625, 0.375),\n (1.0, 0.25), (0.75, 0.0), (0.875, 0.125), (0.875, 0.375),\n (0.625, 0.125), (0.375, 0.625), (0.0, 0.75), (0.25, 1.0),\n (0.125, 0.875), (0.125, 0.625), (0.375, 0.875)]\n\n nn_checks = {(0, 0): [(0.25, 0.0), (0.0, 0.25), (0.125, 0.125)],\n (0.625, 0.375): [(0.5, 0.5), (0.75, 0.25), (0.75, 0.5),\n (0.5, 0.25)],\n (0, 1): [(0.25, 1.0), (0.125, 0.875),(0.0, 0.75)],\n (0.625, 0.125): [(0.5, 0.0), (0.75, 0.25), (0.75, 0.0),\n (0.5, 0.25)]}\n\n\n init_triangulation(2, 2, check, nn_checks)",
"def test_2D_m6_1k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_multiple_case(self):\r\n\r\n shp = (3, 3)\r\n fx, fy, fz, fw = fmatrices('xyzw')\r\n dx, dy, dz, dw = dmatrices('xyzw')\r\n fv = fvector('r').dimshuffle('x', 0)\r\n dv = dvector('s').dimshuffle('x', 0)\r\n fxv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fyv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fzv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fwv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float32').reshape(1, shp[0])\r\n dxv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dyv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dzv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dwv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float64').reshape(1, shp[0])\r\n\r\n #We must be sure that the Canonizer is working, but that we don't have other\r\n # optimisation that could hide bug in the Canonizer as local_elemwise_fusion\r\n mode = compile.mode.get_default_mode()\r\n old_optimizer = mode._optimizer\r\n try:\r\n mode._optimizer = gof.Query([\"canonicalize\"])\r\n mode._optimizer = mode._optimizer.including('ShapeOpt')\r\n mode._optimizer = mode._optimizer.excluding(\r\n 'local_elemwise_fusion')\r\n\r\n #test x / x -> 1\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([(fx/fx,[fx],[fxv],'float32'),\r\n (dx/dx,[dx],[dxv],'float64'),\r\n (fv/fv,[fv],[fvv],'float32'),\r\n (dv/dv,[dv],[dvv],'float64'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert (out == numpy.ones(shp, dtype=out_dtype)).all()\r\n topo = f.maker.fgraph.toposort()\r\n if sym_inputs[0].broadcastable[0]:\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, tensor.Alloc)\r\n else:\r\n assert len(topo) == 3\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, Shape_i)\r\n assert isinstance(topo[2].op, tensor.Alloc)\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (x * y) / x -> y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx*dy)/dx,[dx,dy],[dxv,dyv],0,'float64'),\r\n ((fx*fy)/fx,[fx,fy],[fxv,fyv],0,'float32'),\r\n ((dv*dy)/dv,[dv,dy],[dvv,dyv],0,'float64'),\r\n ((fv*fy)/fv,[fv,fy],[fvv,fyv],0,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n ((dx*dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float64, row)>)]\r\n ((fx*fv)/fx,[fx,fv],[fxv,fvv],1,'float32')\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float32, row)>)]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert(out_dtype == out.dtype)\r\n assert numpy.allclose(out, val_inputs[1])\r\n topo = f.maker.fgraph.toposort()\r\n print \"ID TOPO\", id, topo, sym_inputs\r\n for r, t in f.maker.fgraph.shape_feature.shape_of.items():\r\n print ' ', r, t\r\n if topo and not(len(topo)==1 and topo[0].op==deep_copy_op):\r\n for node in topo[:-1]:\r\n assert isinstance(node.op, Shape_i)\r\n assert isinstance(topo[-1].op, tensor.Alloc)\r\n\r\n #test x / y / x -> 1 / y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx/dy)/dx,[dx,dy],[dxv,dyv],1,'float64'),\r\n ((fx/fy)/fx,[fx,fy],[fxv,fyv],1,'float32'),\r\n ((dv/dy)/dv,[dv,dy],[dvv,dyv],1,'float64'),\r\n 
((fv/fy)/fv,[fv,fy],[fvv,fyv],1,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n\r\n ((dx/dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float64, row)>), Alloc]\r\n ((fx/fv)/fx,[fx,fv],[fxv,fvv],1,'float32'),\r\n #topo:[Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float32, row)>), Alloc]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (1 / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n print topo\r\n elem = [t for t in topo if isinstance(t.op, T.Elemwise)]\r\n assert len(elem) == nb_elemwise\r\n assert isinstance(elem[0].op, (T.Elemwise, ))\r\n assert isinstance(elem[0].op.scalar_op, (\r\n theano.scalar.basic.Inv, theano.scalar.basic.TrueDiv))\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (a / b) * (b / c) * (c / d) -> a / d\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((dx / dy) * (dy / dz) * (dz / dw),[dx,dy,dz,dw],[dxv,dyv,dzv,dwv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fw),[fx,fy,fz,fw],[fxv,fyv,fzv,fwv],'float32'),\r\n ((dv / dy) * (dy / dz) * (dz / dw),[dv,dy,dz,dw],[dvv,dyv,dzv,dwv],'float64'),\r\n ((fv / fy) * (fy / fz) * (fz / fw),[fv,fy,fz,fw],[fvv,fyv,fzv,fwv],'float32'),\r\n ((dx / dv) * (dv / dz) * (dz / dw),[dx,dv,dz,dw],[dxv,dvv,dzv,dwv],'float64'),\r\n ((fx / fv) * (fv / fz) * (fz / fw),[fx,fv,fz,fw],[fxv,fvv,fzv,fwv],'float32'),\r\n ((dx / dy) * (dy / dv) * (dv / dw),[dx,dy,dv,dw],[dxv,dyv,dvv,dwv],'float64'),\r\n ((fx / fy) * (fy / fv) * (fv / fw),[fx,fy,fv,fw],[fxv,fyv,fvv,fwv],'float32'),\r\n ((dx / dy) * (dy / dz) * (dz / dv),[dx,dy,dz,dv],[dxv,dyv,dzv,dvv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fv),[fx,fy,fz,fv],[fxv,fyv,fzv,fvv],'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (val_inputs[0] / val_inputs[3]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[0].inputs) == 2\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (2.0 * x) / (4.0 * y) -> (0.5 * x) / y\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (((2.0*dx)/(4.0*dy)),[dx,dy],[dxv,dyv],'float64'),\r\n (((2.0*fx)/(4.0*fy)),[fx,fy],[fxv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dv)/(4.0*dy)),[dv,dy],[dvv,dyv],'float64'),\r\n (((2.0*fv)/(4.0*fy)),[fv,fy],[fvv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dx)/(4.0*dv)),[dx,dv],[dxv,dvv],'float64'),\r\n (((2.0*fx)/(4.0*fv)),[fx,fv],[fxv,fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (0.5 *\r\n val_inputs[0] / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.Mul)\r\n assert len(topo[0].inputs) == 2\r\n assert isinstance(topo[1].op, (T.Elemwise, ))\r\n assert isinstance(topo[1].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[1].inputs) == 2\r\n assert(out_dtype == 
out.dtype)\r\n\r\n #test 2 * x / 2 -> x\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2*dx)/2,[dx],[dxv],'float64'),\r\n ((2*fx)/2,[fx],[fxv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2*dv)/2,[dv],[dvv],'float64'),\r\n ((2*fv)/2,[fv],[fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, val_inputs[0])\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n topo[0].op == deep_copy_op\r\n assert(out_dtype == out.dtype)\r\n\r\n #test x / abs(x) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (dx/abs(dx),[dx],[0.5-dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.5-fxv], 'float32'),\r\n (dx/abs(dx),[dx],[0.1*dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.1*fxv], 'float32'),\r\n (dv/abs(dv),[dv],[0.5-dvv],'float64'),\r\n (fv/abs(fv),[fv],[0.5-fvv], 'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]))\r\n assert(out_dtype == out.dtype)\r\n assert len(f.maker.fgraph.toposort()) == 1\r\n\r\n #test (2*x) / (3*abs(x)) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.5 - dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.5 - fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.1 * dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.1 * fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dv) / (3 * abs(dv)), [dv], [0.5 - dvv], 'float64'),\r\n ((2 * fv) / (3 * abs(fv)), [fv], [0.5 - fvv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n topo = f.maker.fgraph.toposort()\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]) * 2 / 3)\r\n assert(out_dtype == out.dtype)\r\n finally:\r\n mode._optimizer = old_optimizer",
"def test_2D_m6_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_linear_2d_merwe_column():\n\n\n def fx(x, dt):\n F = np.array([[1, dt, 0, 0],\n [0, 1, 0, 0],\n [0, 0, 1, dt],\n [0, 0, 0, 1]], dtype=float)\n\n return np.dot(F, x)\n\n def hx(x):\n return np.array([x[0], x[2]])\n\n\n dt = 0.1\n points = MerweScaledSigmaPoints2(4, .1, 2., -1)\n kf = UKF2(dim_x=4, dim_z=2, dt=dt, fx=fx, hx=hx, points=points)\n\n\n kf.x = np.array([[-1., 1., -1., 1]]).T\n kf.P*=0.0001\n #kf.R *=0\n #kf.Q\n\n zs = []\n for i in range(20):\n z = np.array([[i+randn()*0.1],\n [i+randn()*0.1]])\n zs.append(z)\n\n Ms, Ps = kf.batch_filter(zs)\n smooth_x, _, _ = kf.rts_smoother(Ms, Ps, dt=dt)\n\n if DO_PLOT:\n plt.figure()\n zs = np.asarray(zs)\n plt.plot(zs[:,0], marker='+', c='b')\n plt.plot(Ms[:,0], c='b')\n plt.plot(smooth_x[:,0], smooth_x[:,2], c='r')\n print(smooth_x)",
"def test_3_2_4D_cube_splits(self):\n check = [(0, 0, 0, 0), (1, 1, 1, 1), (1, 0, 0, 0), (1, 1, 0, 0),\n (1, 1, 1, 0),\n (1, 1, 0, 1), (1, 0, 1, 0), (1, 0, 1, 1), (1, 0, 0, 1),\n (0, 1, 0, 0),\n (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 0, 1), (0, 0, 1, 0),\n (0, 0, 1, 1),\n (0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5), (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5), (0.0, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0), (0.25, 0.25, 0.25, 0.25),\n (1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0),\n (0.5, 1.0, 0.5, 1.0), (0.5, 0.5, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0),\n (1.0, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25),\n (1.0, 1.0, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.0, 0.5),\n (0.5, 1.0, 0.0, 0.0), (0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25),\n (1.0, 0.5, 1.0, 0.0), (0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.25), (1.0, 0.5, 0.0, 1.0),\n (0.5, 1.0, 0.0, 1.0),\n (0.5, 0.5, 0.0, 1.0), (0.75, 0.75, 0.25, 0.75),\n (1.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 1.0, 0.5), (0.5, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.25),\n (1.0, 0.0, 0.5, 1.0), (0.5, 0.0, 1.0, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0), (0.25, 0.75, 0.25, 0.25),\n (0.0, 1.0, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5), (0.0, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.25),\n (0.0, 1.0, 0.5, 1.0), (0.0, 0.5, 1.0, 1.0),\n (0.0, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75), (0.0, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(0, 0, 0, 0): [(0.0, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.25, 0.25, 0.25, 0.25),\n (0.5, 0.0, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0),\n (0.5, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0)],\n (1.0, 1.0, 0.5, 0.5): [(1.0, 1.0, 0.5, 1.0), (1, 1, 0, 1),\n (1.0, 1.0, 1.0, 0.5),\n (1.0, 0.5, 0.5, 0.5), (1, 1, 1, 0),\n (1.0, 1.0, 0.5, 0.0),\n (1.0, 1.0, 0.0, 0.5), (1, 1, 0, 0),\n (1, 1, 1, 1), (0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.75, 0.75, 0.75, 0.75),\n (0.75, 0.75, 0.25, 0.25),\n (0.75, 0.75, 0.75, 0.25),\n (0.75, 0.75, 0.25, 0.75)],\n (0.25, 0.25, 0.25, 0.75): [(0.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 0.0, 1.0),\n (0.5, 0.5, 0.5, 1.0),\n (0, 0, 0, 1),\n (0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0),\n (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5),\n (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5)]}\n\n init_triangulation(4, 1, check, nn_checks)",
"def test_linear_2d_merwe():\n\n\n def fx(x, dt):\n F = np.array([[1, dt, 0, 0],\n [0, 1, 0, 0],\n [0, 0, 1, dt],\n [0, 0, 0, 1]], dtype=float)\n\n return np.dot(F, x)\n\n def hx(x):\n return np.array([x[0], x[2]])\n\n\n dt = 0.1\n points = MerweScaledSigmaPoints(4, .1, 2., -1)\n kf = UKF(dim_x=4, dim_z=2, dt=dt, fx=fx, hx=hx, points=points)\n\n\n kf.x = np.array([-1., 1., -1., 1])\n kf.P*=0.0001\n #kf.R *=0\n #kf.Q\n\n zs = []\n for i in range(20):\n z = np.array([i+randn()*0.1, i+randn()*0.1])\n zs.append(z)\n\n Ms, Ps = kf.batch_filter(zs)\n smooth_x, _, _ = kf.rts_smoother(Ms, Ps, dt=dt)\n\n if DO_PLOT:\n plt.figure()\n zs = np.asarray(zs)\n plt.plot(zs[:,0], marker='+')\n plt.plot(Ms[:,0], c='b')\n plt.plot(smooth_x[:,0], smooth_x[:,2], c='r')\n print(smooth_x)",
"def test_2_2_3D_cube_splits(self):\n check = [(0, 0, 0), (1, 1, 1), (1, 0, 0), (1, 1, 0), (1, 0, 1),\n (0, 1, 0),\n (0, 1, 1), (0, 0, 1), (0.5, 0.5, 0.5), (0.0, 0.5, 0.5),\n (0.0, 0.0, 0.5), (0.0, 0.5, 0.0), (0.5, 0.0, 0.5),\n (0.5, 0.0, 0.0),\n (0.5, 0.5, 0.0), (0.25, 0.25, 0.25), (1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5),\n (1.0, 0.5, 1.0), (0.5, 1.0, 0.5), (0.5, 1.0, 1.0),\n (0.5, 0.5, 1.0),\n (0.75, 0.75, 0.75), (1.0, 0.0, 0.5), (1.0, 0.5, 0.0),\n (0.75, 0.25, 0.25), (0.5, 1.0, 0.0), (0.75, 0.75, 0.25),\n (0.5, 0.0, 1.0), (0.75, 0.25, 0.75), (0.0, 1.0, 0.5),\n (0.25, 0.75, 0.25), (0.0, 0.5, 1.0), (0.25, 0.75, 0.75),\n (0.25, 0.25, 0.75), (0.5, 0.25, 0.25), (0.5, 0.5, 0.25),\n (0.5, 0.25, 0.5), (0.25, 0.5, 0.25), (0.25, 0.5, 0.5),\n (0.25, 0.25, 0.5), (0.375, 0.375, 0.375), (0.0, 0.25, 0.25),\n (0.0, 0.0, 0.25), (0.0, 0.25, 0.0), (0.25, 0.0, 0.25),\n (0.25, 0.0, 0.0), (0.25, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.5, 0.25), (0.0, 0.25, 0.5), (0.125, 0.375, 0.375),\n (0.25, 0.0, 0.5), (0.125, 0.125, 0.375), (0.25, 0.5, 0.0),\n (0.125, 0.375, 0.125), (0.5, 0.0, 0.25), (0.375, 0.125, 0.375),\n (0.5, 0.25, 0.0), (0.375, 0.125, 0.125), (0.375, 0.375, 0.125),\n (0.5, 0.75, 0.75), (0.5, 0.5, 0.75), (0.5, 0.75, 0.5),\n (0.75, 0.5, 0.75), (0.75, 0.5, 0.5), (0.75, 0.75, 0.5),\n (0.625, 0.625, 0.625), (1.0, 0.75, 0.75), (1.0, 1.0, 0.75),\n (1.0, 0.75, 1.0), (0.75, 1.0, 0.75), (0.75, 1.0, 1.0),\n (0.75, 0.75, 1.0), (0.875, 0.875, 0.875), (1.0, 0.5, 0.75),\n (1.0, 0.75, 0.5), (0.875, 0.625, 0.625), (0.75, 1.0, 0.5),\n (0.875, 0.875, 0.625), (0.75, 0.5, 1.0), (0.875, 0.625, 0.875),\n (0.5, 1.0, 0.75), (0.625, 0.875, 0.625), (0.5, 0.75, 1.0),\n (0.625, 0.875, 0.875), (0.625, 0.625, 0.875),\n (0.75, 0.5, 0.25),\n (0.75, 0.25, 0.5), (0.625, 0.375, 0.375), (1.0, 0.25, 0.25),\n (1.0, 0.0, 0.25), (1.0, 0.25, 0.0), (0.75, 0.0, 0.25),\n (0.75, 0.0, 0.0), (0.75, 0.25, 0.0), (0.875, 0.125, 0.125),\n (1.0, 0.5, 0.25), (1.0, 0.25, 0.5), (0.875, 0.375, 0.375),\n (0.75, 0.0, 0.5), (0.875, 0.125, 0.375), (0.75, 0.5, 0.0),\n (0.875, 0.375, 0.125), (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.625, 0.375, 0.125), (0.5, 0.75, 0.25),\n (0.625, 0.625, 0.375),\n (1.0, 0.75, 0.25), (1.0, 1.0, 0.25), (1.0, 0.75, 0.0),\n (0.75, 1.0, 0.25), (0.75, 1.0, 0.0), (0.75, 0.75, 0.0),\n (0.875, 0.875, 0.125), (0.875, 0.625, 0.375),\n (0.875, 0.875, 0.375),\n (0.875, 0.625, 0.125), (0.5, 1.0, 0.25), (0.625, 0.875, 0.375),\n (0.5, 0.75, 0.0), (0.625, 0.875, 0.125), (0.625, 0.625, 0.125),\n (0.5, 0.25, 0.75), (0.625, 0.375, 0.625), (1.0, 0.25, 0.75),\n (1.0, 0.0, 0.75), (1.0, 0.25, 1.0), (0.75, 0.0, 0.75),\n (0.75, 0.0, 1.0), (0.75, 0.25, 1.0), (0.875, 0.125, 0.875),\n (0.875, 0.375, 0.625), (0.875, 0.125, 0.625),\n (0.875, 0.375, 0.875),\n (0.5, 0.0, 0.75), (0.625, 0.125, 0.625), (0.5, 0.25, 1.0),\n (0.625, 0.125, 0.875), (0.625, 0.375, 0.875),\n (0.25, 0.75, 0.5),\n (0.375, 0.625, 0.375), (0.0, 0.75, 0.25), (0.0, 1.0, 0.25),\n (0.0, 0.75, 0.0), (0.25, 1.0, 0.25), (0.25, 1.0, 0.0),\n (0.25, 0.75, 0.0), (0.125, 0.875, 0.125), (0.0, 0.75, 0.5),\n (0.125, 0.625, 0.375), (0.25, 1.0, 0.5), (0.125, 0.875, 0.375),\n (0.125, 0.625, 0.125), (0.375, 0.875, 0.375),\n (0.375, 0.875, 0.125),\n (0.375, 0.625, 0.125), (0.25, 0.5, 0.75),\n (0.375, 0.625, 0.625),\n (0.0, 0.75, 0.75), (0.0, 1.0, 0.75), (0.0, 0.75, 1.0),\n (0.25, 1.0, 0.75), (0.25, 1.0, 1.0), (0.25, 0.75, 1.0),\n (0.125, 0.875, 0.875), (0.0, 0.5, 0.75), (0.125, 0.625, 0.625),\n (0.125, 0.875, 0.625), (0.25, 0.5, 1.0), (0.125, 0.625, 0.875),\n (0.375, 0.875, 0.625), (0.375, 0.875, 
0.875),\n (0.375, 0.625, 0.875),\n (0.375, 0.375, 0.625), (0.0, 0.25, 0.75), (0.0, 0.0, 0.75),\n (0.0, 0.25, 1.0), (0.25, 0.0, 0.75), (0.25, 0.0, 1.0),\n (0.25, 0.25, 1.0), (0.125, 0.125, 0.875),\n (0.125, 0.375, 0.625),\n (0.125, 0.125, 0.625), (0.125, 0.375, 0.875),\n (0.375, 0.125, 0.625),\n (0.375, 0.125, 0.875), (0.375, 0.375, 0.875)]\n\n nn_checks = {(0.5, 0.25, 0.25): [(0.375, 0.375, 0.125), (0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25),\n (0.625, 0.375, 0.375),\n (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.5, 0.5, 0.25), (0.25, 0.25, 0.25),\n (0.375, 0.375, 0.375),\n (0.5, 0.25, 0.5), (0.5, 0.5, 0.5),\n (0.5, 0.0, 0.25),\n (0.375, 0.125, 0.375), (0.5, 0.0, 0.5),\n (0.5, 0.25, 0.0),\n (0.375, 0.125, 0.125), (0.5, 0.0, 0.0),\n (0.625, 0.375, 0.125)],\n (0.625, 0.625, 0.875): [(0.75, 0.5, 1.0),\n (0.75, 0.75, 1.0),\n (0.5, 0.75, 1.0), (0.5, 0.5, 1.0),\n (0.5, 0.5, 0.75),\n (0.5, 0.75, 0.75),\n (0.75, 0.5, 0.75),\n (0.75, 0.75, 0.75)],\n (0, 0, 0): [(0.0, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.0, 0.25), (0.25, 0.0, 0.0),\n (0.0, 0.25, 0.25), (0.25, 0.25, 0.0),\n (0.25, 0.0, 0.25)]}\n\n init_triangulation(3, 2, check, nn_checks)",
"def test_dmi_uses_unit_length_2dmesh():\n A = 8.78e-12 # J/m\n D = 1.58e-3 # J/m^2\n Ms = 3.84e5 # A/m\n\n energies = []\n\n # unit_lengths 1e-9 and 1 are common, let's throw in an intermediate length\n # just to challenge the system a little:\n for unit_length in (1, 1e-4, 1e-9):\n radius = 200e-9 / unit_length\n maxh = 5e-9 / unit_length\n helical_period = (4 * pi * A / D) / unit_length\n k = 2 * pi / helical_period\n # HF 27 April 2014: The next command fails in dolfin 1.3\n # mesh = df.CircleMesh(df.Point(0, 0), radius, maxh)\n # The actual shape of the domain shouldn't matter for the test,\n # so let's use a Rectangular mesh which should work the same:\n\n nx = ny = int(round(radius / maxh))\n mesh = df.RectangleMesh(df.Point(0, 0), df.Point(radius, radius), nx, ny)\n\n S3 = df.VectorFunctionSpace(mesh, \"CG\", 1, dim=3)\n m_expr = df.Expression((\"0\", \"cos(k * x[0])\", \"sin(k * x[0])\"), k=k, degree=1)\n m = Field(S3, m_expr, name='m')\n dmi = DMI(D)\n Ms_dg = Field(df.FunctionSpace(mesh, 'DG', 0), Ms)\n dmi.setup(m, Ms_dg, unit_length=unit_length)\n energies.append(dmi.compute_energy())\n\n H = df.Function(S3)\n H.vector()[:] = dmi.compute_field()\n print H(0.0, 0.0)\n\n print \"Using unit_length = {}.\".format(unit_length)\n print \"Helical period {}.\".format(helical_period)\n print \"Energy {}.\".format(dmi.compute_energy())\n\n rel_diff_energies = abs(energies[0] - energies[1]) / abs(energies[1])\n print \"Relative difference of energy {}.\".format(rel_diff_energies)\n assert rel_diff_energies < 1e-13\n\n rel_diff_energies2 = abs(energies[0] - energies[2]) / abs(energies[2])\n print \"Relative difference2 of energy {}.\".format(rel_diff_energies2)\n assert rel_diff_energies2 < 1e-13",
"def test_MeshMat_1group(self):\n\n MS_grp = self.meshsol.get_group(\"stator\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[0, 1, 2], [1, 2, 3]])\n result_tgl = cells_grp[\"triangle\"]\n testA = np.sum(abs(solution - result_tgl))\n msg = (\n \"Wrong output: returned \" + str(result_tgl) + \", expected: \" + str(solution)\n )\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)\n\n MS_grp = self.meshsol.get_group(\"rotor\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[3, 3], [1, 2], [2, 3]])\n results = cells_grp[\"triangle\"] # The point indices have changed !\n points = MS_grp.get_mesh().get_point(results)\n testA = np.sum(abs(solution - points))\n msg = \"Wrong output: returned \" + str(results) + \", expected: \" + str(solution)\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)",
"def init():\n\tN = np.int32(DIM) #prepare for stitching\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN)/DIM\n\tHII_scale = np.float32(BOX_LEN)/HII_DIM\n\tshape = (N,N,N)\n\t\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\n\tkernel_source = open(cmd_folder+\"/initialize.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'VOLUME': VOLUME,\n\t\t'DIM': DIM\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_kernel = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tadj_complex_conj = main_module.get_function(\"adj_complex_conj\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d, np.int32(DIM), block=block_size, grid=grid_size)\n\n\t#import IPython; IPython.embed()\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d_imag, np.int32(DIM), block=block_size, grid=grid_size)\n\n\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\n\t#adj_complex_conj(largebox_d, DIM, block=block_size, grid=grid_size)\n\tlargebox = largebox_d.get()\n\t#np.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox)\n\n\t#save real space box before smoothing\n\tplan = Plan(shape, dtype=np.complex64)\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox_d.real.get_async())\n\n\t#save real space box after smoothing and subsampling\n\t# host largebox is still in k space, no need to reload from disk\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tHII_filter(largebox_d, N, ZERO, smoothR, block=block_size, grid=grid_size);\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tsubsample_kernel(largebox_d.real, smallbox_d, N, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_grid_size) #subsample in real space\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), smallbox_d.get_async())\n\n\t# reload the k-space box for velocity boxes\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\t\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,DIM), dtype=np.complex64)\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(num), block=block_size, grid=grid_size)\n\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=grid_size)\n\t\tplan.execute(largevbox_d, inverse=True)\n\t\tlargevbox_d /= scale**3\n\t\t#import IPython; IPython.embed()\n\t\tsubsample_kernel(largevbox_d.real, 
smallbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_grid_size)\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallbox_d.get())\n\n\treturn",
"def test_4_2_5D_cube_splits(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0), (1, 1, 1, 0, 0), (1, 1, 1, 1, 0),\n (1, 1, 1, 0, 1), (1, 1, 0, 1, 0), (1, 1, 0, 1, 1),\n (1, 1, 0, 0, 1), (1, 0, 1, 0, 0), (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1), (1, 0, 0, 0, 1), (0, 1, 0, 0, 0),\n (0, 1, 1, 0, 0), (0, 1, 1, 1, 0), (0, 1, 1, 1, 1),\n (0, 1, 1, 0, 1), (0, 1, 0, 1, 0), (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1), (0, 0, 1, 0, 1), (0, 0, 0, 1, 0),\n (0, 0, 0, 1, 1), (0, 0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.0), (0.0, 0.0, 0.5, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5, 0.5), (0.0, 0.5, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.0, 0.0, 0.0), (0.0, 0.5, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.5, 0.0, 0.5), (0.0, 0.5, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0, 0.0), (0.5, 0.0, 0.0, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.0, 0.5), (0.5, 0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.0, 0.0), (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.25, 0.25, 0.25, 0.25, 0.25), (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5), (1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 0.5), (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 1.0, 0.5, 0.5, 1.0), (1.0, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0), (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 0.5, 1.0, 1.0), (1.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5), (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0), (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 1.0), (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5), (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0), (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5), (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5, 0.5), (1.0, 0.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.0, 0.5, 0.0),\n (1.0, 0.0, 0.5, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0, 0.0),\n (1.0, 0.0, 0.5, 0.5, 0.0), (1.0, 0.5, 0.0, 0.5, 0.5),\n (1.0, 0.5, 0.0, 0.0, 0.5), (1.0, 0.5, 0.0, 0.0, 0.0),\n (1.0, 0.5, 0.0, 0.5, 0.0), (1.0, 0.5, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25, 0.25), (1.0, 1.0, 0.0, 0.5, 0.5),\n (1.0, 1.0, 0.0, 0.0, 0.5), (1.0, 1.0, 0.0, 0.5, 0.0),\n (1.0, 1.0, 0.5, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0, 0.0),\n (1.0, 1.0, 0.5, 0.5, 0.0), (0.5, 1.0, 0.0, 0.5, 0.5),\n (0.5, 1.0, 0.0, 0.0, 0.5), (0.5, 1.0, 0.0, 0.0, 0.0),\n (0.5, 1.0, 0.0, 0.5, 0.0), (0.5, 1.0, 0.5, 0.0, 0.5),\n (0.5, 1.0, 0.5, 0.0, 0.0), (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25, 0.25), (1.0, 1.0, 1.0, 0.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.0), (1.0, 0.5, 1.0, 0.0, 0.5),\n (1.0, 0.5, 1.0, 0.0, 0.0), (1.0, 0.5, 1.0, 0.5, 0.0),\n (0.5, 1.0, 1.0, 0.0, 0.5), (0.5, 1.0, 1.0, 0.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0), (0.5, 0.5, 1.0, 0.0, 0.5),\n (0.5, 0.5, 1.0, 0.0, 0.0), (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.75, 0.25, 0.25), (1.0, 
1.0, 0.5, 1.0, 0.0),\n (1.0, 0.5, 1.0, 1.0, 0.0), (1.0, 0.5, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 1.0, 0.0), (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 0.5, 1.0, 1.0, 0.0), (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.75, 0.25), (1.0, 1.0, 0.5, 0.0, 1.0),\n (1.0, 0.5, 1.0, 0.0, 1.0), (1.0, 0.5, 0.5, 0.0, 1.0),\n (0.5, 1.0, 1.0, 0.0, 1.0), (0.5, 1.0, 0.5, 0.0, 1.0),\n (0.5, 0.5, 1.0, 0.0, 1.0), (0.5, 0.5, 0.5, 0.0, 1.0),\n (0.75, 0.75, 0.75, 0.25, 0.75), (1.0, 1.0, 0.0, 1.0, 0.5),\n (1.0, 0.5, 0.0, 1.0, 0.5), (1.0, 0.5, 0.0, 1.0, 0.0),\n (0.5, 1.0, 0.0, 1.0, 0.5), (0.5, 1.0, 0.0, 1.0, 0.0),\n (0.5, 0.5, 0.0, 1.0, 0.5), (0.5, 0.5, 0.0, 1.0, 0.0),\n (0.75, 0.75, 0.25, 0.75, 0.25), (1.0, 1.0, 0.0, 0.5, 1.0),\n (1.0, 0.5, 0.0, 1.0, 1.0), (1.0, 0.5, 0.0, 0.5, 1.0),\n (0.5, 1.0, 0.0, 1.0, 1.0), (0.5, 1.0, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0, 1.0), (0.5, 0.5, 0.0, 0.5, 1.0),\n (0.75, 0.75, 0.25, 0.75, 0.75), (1.0, 0.5, 0.0, 0.0, 1.0),\n (0.5, 1.0, 0.0, 0.0, 1.0), (0.5, 0.5, 0.0, 0.0, 1.0),\n (0.75, 0.75, 0.25, 0.25, 0.75), (1.0, 0.0, 1.0, 0.5, 0.5),\n (1.0, 0.0, 1.0, 0.0, 0.5), (1.0, 0.0, 1.0, 0.5, 0.0),\n (0.5, 0.0, 1.0, 0.5, 0.5), (0.5, 0.0, 1.0, 0.0, 0.5),\n (0.5, 0.0, 1.0, 0.0, 0.0), (0.5, 0.0, 1.0, 0.5, 0.0),\n (0.75, 0.25, 0.75, 0.25, 0.25), (1.0, 0.0, 1.0, 1.0, 0.5),\n (1.0, 0.0, 0.5, 1.0, 0.5), (1.0, 0.0, 0.5, 1.0, 0.0),\n (0.5, 0.0, 1.0, 1.0, 0.5), (0.5, 0.0, 1.0, 1.0, 0.0),\n (0.5, 0.0, 0.5, 1.0, 0.5), (0.5, 0.0, 0.5, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.75, 0.25), (1.0, 0.0, 1.0, 0.5, 1.0),\n (1.0, 0.0, 0.5, 1.0, 1.0), (1.0, 0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 1.0, 1.0, 1.0), (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0, 1.0), (0.5, 0.0, 0.5, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75, 0.75), (1.0, 0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 1.0, 0.0, 1.0), (0.5, 0.0, 0.5, 0.0, 1.0),\n (0.75, 0.25, 0.75, 0.25, 0.75), (1.0, 0.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 0.0, 1.0, 0.5), (0.5, 0.0, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.25, 0.75, 0.25), (1.0, 0.0, 0.0, 0.5, 1.0),\n (0.5, 0.0, 0.0, 1.0, 1.0), (0.5, 0.0, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.5), (0.0, 1.0, 0.0, 0.0, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.0), (0.0, 1.0, 0.5, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0, 0.0), (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.25, 0.75, 0.25, 0.25, 0.25), (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.0, 0.5), (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5), (0.0, 0.5, 1.0, 0.0, 0.5),\n (0.0, 0.5, 1.0, 0.0, 0.0), (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.25, 0.75, 0.75, 0.25, 0.25), (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5), (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5), (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.5), (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.75, 0.25), (0.0, 1.0, 1.0, 0.5, 1.0),\n (0.0, 1.0, 0.5, 1.0, 1.0), (0.0, 1.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 1.0, 1.0, 1.0), (0.0, 0.5, 1.0, 0.5, 1.0),\n (0.0, 0.5, 0.5, 1.0, 1.0), (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75, 0.75), (0.0, 1.0, 0.5, 0.0, 1.0),\n (0.0, 0.5, 1.0, 0.0, 1.0), (0.0, 0.5, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.75, 0.25, 0.75), (0.0, 1.0, 0.0, 1.0, 0.5),\n (0.0, 0.5, 0.0, 1.0, 0.5), (0.0, 0.5, 0.0, 1.0, 0.0),\n (0.25, 0.75, 0.25, 0.75, 0.25), (0.0, 1.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0, 1.0), (0.0, 0.5, 0.0, 0.5, 1.0),\n (0.25, 0.75, 0.25, 0.75, 0.75), (0.0, 0.5, 0.0, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.0, 0.5), (0.0, 0.0, 1.0, 0.5, 0.0),\n (0.25, 0.25, 
0.75, 0.25, 0.25), (0.0, 0.0, 1.0, 1.0, 0.5),\n (0.0, 0.0, 0.5, 1.0, 0.5), (0.0, 0.0, 0.5, 1.0, 0.0),\n (0.25, 0.25, 0.75, 0.75, 0.25), (0.0, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.0, 0.5, 1.0, 1.0), (0.0, 0.0, 0.5, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75, 0.75), (0.0, 0.0, 0.5, 0.0, 1.0),\n (0.25, 0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(1, 1, 1, 1, 1): [(1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0),\n (1.0, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0.75, 0.75, 0.75, 0.75, 0.75),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0),\n (0.5, 1.0, 0.5, 1.0, 1.0)],\n (0.25, 0.75, 0.75, 0.75, 0.25): [(0.5, 1.0, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0, 1, 1, 1, 0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (\n 0.5, 0.5, 1.0, 0.5, 0.5)],\n (0.0, 0.0, 1.0, 0.5, 1.0): [(0.5, 0.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.0, 0.0, 0.5, 0.5, 1.0),\n (0, 0, 1, 1, 1),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.5, 1.0, 0.5, 1.0),\n (0, 0, 1, 0, 1),\n (0.5, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.25, 0.25, 0.75, 0.75, 0.75),\n (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (\n 0.25, 0.25, 0.75, 0.25, 0.75)]}\n\n init_triangulation(5, 1, check, nn_checks)",
"def __init__(self, M, rat):\n self.M = M\n xc0, _ = np.polynomial.chebyshev.chebgauss(M-0)\n xc1, _ = np.polynomial.chebyshev.chebgauss(M-1)\n xc2, _ = np.polynomial.chebyshev.chebgauss(M-2)\n # vandermonde and inverse vandermonde matrices\n self.V0 = np.polynomial.chebyshev.chebvander(xc0, M-1)\n self.V1 = np.polynomial.chebyshev.chebvander(xc1, M-2)\n self.V2 = np.polynomial.chebyshev.chebvander(xc2, M-3)\n self.VI0 = np.linalg.inv(self.V0)\n self.VI1 = np.linalg.inv(self.V1)\n self.VI2 = np.linalg.inv(self.V2)\n # differentiation matrices\n DC01 = np.polynomial.chebyshev.chebder(np.eye(M-0)) / rat\n DC12 = np.polynomial.chebyshev.chebder(np.eye(M-1)) / rat\n DC00 = np.row_stack([DC01, np.zeros(M)])\n self.D00 = self.V0.dot(DC00.dot(self.VI0))\n self.D01 = self.V1.dot(DC01.dot(self.VI0))\n self.D12 = self.V2.dot(DC12.dot(self.VI1))\n # boundary condition operators\n self.ibc_dirichlet = np.polynomial.chebyshev.chebvander(1, M-1).dot(self.VI0)\n self.obc_dirichlet = np.polynomial.chebyshev.chebvander(-1, M-1).dot(self.VI0)\n self.ibc_neumann = self.ibc_dirichlet.dot(self.D00)\n self.obc_neumann = self.obc_dirichlet.dot(self.D00)\n # rank reduction operators\n temp = np.zeros([M-1, M-0], dtype=float)\n np.fill_diagonal(temp, 1.0)\n self.R01 = self.V1.dot(temp.dot(self.VI0))\n temp = np.zeros([M-2, M-1], dtype=float)\n np.fill_diagonal(temp, 1.0)\n self.R12 = self.V2.dot(temp.dot(self.VI1))\n self.R02 = self.R12.dot(self.R01)\n # get poof operator from M-1 --> M\n temp = np.zeros([M, M-1], dtype=float)\n np.fill_diagonal(temp, 1.0)\n self.P10 = self.V0.dot(temp.dot(self.VI1))",
"def test_uv_degrid_gaussian_kernel():\n\n layout = read_layout(layout_path=f\"{test_data}/test_mwa.txt\")\n xyz = enh_xyz(layout=layout, latitude=mwa_geo.latitude.radians)\n uvw = xyz_uvw(xyz=xyz, freq=freq, dec0=mwa_geo.latitude.radians, ha0=0)\n uv = uv_degrid(\n max_lambda=1400, nside=20, uvw=uvw, sigma=3, kersize=21, kernel=\"gaussian\"\n )\n\n assert uv.shape == (20, 20)\n assert uv[0, 0] == 1.295932713086053e-05",
"def reg2():\n data2 = np.load(\"./data/measure4_1.npy\")[2:]\n\n x2 = np.arange(0,len(data2),1)\n\n fit = True \n redistribute = True \n\n #x2 = 1.3149710372035508*x2 -22.617788714272098\n c2 = np.where(x2 < 135)\n\n data = data2[c2] \n x = x2[c2]\n print(\"datapoints:\",len(data))\n\n mass = 79/251/6080*52658\n if redistribute == True:\n\n # conserving the mass\n total_mass = mass * len(data)\n remaining = (data > 0)\n\n while True:\n print(\"new redistributing ...\")\n print(\"total mass:\",total_mass)\n # find those which are smaller\n q = (data[remaining] <= mass)\n remaining = ~q\n if len(np.nonzero(q)[0]) == 0:\n data[remaining] -= mass\n break\n print(\"number of smaller values:\",len(np.nonzero(q)[0]),\"\\n\")\n # subtract the mass of this data\n total_mass -= np.sum(data[q])\n mass = total_mass / len(np.nonzero(~remaining)[0]) \n data[q] = 0\n\n # redistribute total remaining mass to single channels\n print(\"number of nonzero:\",len(np.nonzero(data)[0]))\n\n c = np.nonzero(data) \n data = data[c]\n x = x[c]\n\n #scaling to time units\n x = 6.3149710372035508*x -22.617788714272098\n c = (x>0)\n x = x[c]\n data = data[c]\n\n x = x[::-1] - min(x)\n\n\n error = np.sqrt(data) \n # only fit for x < 135\n fig = plt.figure()\n ax = plt.subplot(111)\n plt.grid(True)\n\n if fit==True:\n\n def func(x, *p):\n a,b,c = p\n return a + b * c**x\n\n # p0 is the initial guess for the fitting coefficients \n p0 = [1., 1., 1.]\n\n p, cov = curve_fit(func, x, data, p0=p0, sigma = error)\n p_uc = uc.correlated_values(p, cov)\n c = p_uc[2]\n\n T12_lit = 98 \n lamb_lit = -(np.log(2)/T12_lit)\n print(\"lit\",lamb_lit)\n \n\n lamb = umath.log(c)\n print(lamb)\n T12 = -np.log(2) /lamb \n print(\"t12=\",T12)\n\n x_fit = np.linspace(min(x),max(x))\n\n data_fit = func(x_fit,*p) \n pmin = (p - np.sqrt(np.diag(cov)))\n pmax = (p + np.sqrt(np.diag(cov)))\n\n data_fit_min = func(x_fit, *pmin)\n data_fit_max = func(x_fit, *pmax)\n\n plt.plot(x_fit,data_fit)\n plt.plot(x_fit,90*np.exp(x_fit * lamb_lit))\n plt.fill_between(x_fit, data_fit_min , data_fit_max,facecolor=\"r\", color=\"b\", alpha=0.3 )\n\n # place a text box in upper left in axes coords\n props = dict(boxstyle='round', facecolor='white', alpha=0.5)\n textstr = '$a + b \\cdot c^x$ with\\n$a=%.2f$\\n$b=%.2f$\\n$c=%.2f$'%(p[0], p[1],p[2])\n ax.text(0.6, 0.85, textstr, transform=ax.transAxes, fontsize=18, va='top', bbox=props)\n\n ax.xaxis.set_tick_params(labelsize = 14)\n ax.yaxis.set_tick_params(labelsize = 14)\n\n ax.add_patch(plt.Rectangle((0,0.1),155,100,alpha = 0.2))\n\n plt.errorbar(x,data, yerr=error,fmt=\"x\")\n #plt.scatter(x,data,c=\"blue\",alpha = 0.9,s=100, marker=\"x\")\n plt.ylim(min(data)*0.8,max(data))\n #plt.yscale(\"log\")\n plt.xlim(min(x)*0.8,max(x))\n plt.xlabel(\"time in $ns$\", fontsize = 14)\n plt.ylabel(\"counts\", fontsize = 14)\n make_fig(fig,1,1,name=\"plot4_1_reg\")",
"def feature_processing(array2d):\n new_array2d = np.zeros([array2d.shape[0], 29])\n # items/ orders\n new_array2d[:, 0] = array2d[:, 4] / array2d[:, 3]\n # cancels / orders\n new_array2d[:, 1] = array2d[:, 5] / array2d[:, 3]\n # returns / items\n new_array2d[:, 2] = array2d[:, 6] / array2d[:, 4]\n # voucher / orders\n new_array2d[:, 3] = array2d[:, 10] / array2d[:, 3]\n # female_items / female_items + male_items\n new_array2d[:, 4] = array2d[:, 15] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # male_items / female_items + male_items\n new_array2d[:, 5] = array2d[:, 16] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # unisex_items / items\n new_array2d[:, 6] = array2d[:, 17] / array2d[:, 4]\n # wapp_items / items\n new_array2d[:, 7] = array2d[:, 18] / array2d[:, 4]\n # wftw_items / items\n new_array2d[:, 8] = array2d[:, 19] / array2d[:, 4]\n # mapp_items / items\n new_array2d[:, 9] = array2d[:, 20] / array2d[:, 4]\n # wacc_items / items\n new_array2d[:, 10] = array2d[:, 21] / array2d[:, 4]\n # macc_items / items\n new_array2d[:, 11] = array2d[:, 22] / array2d[:, 4]\n # mftw_items / items\n new_array2d[:, 12] = array2d[:, 23] / array2d[:, 4]\n # wspt_items / items\n new_array2d[:, 13] = array2d[:, 24] / array2d[:, 4]\n # mspt_items / items\n new_array2d[:, 14] = array2d[:, 25] / array2d[:, 4]\n # curvy_items / items\n # Curvy item has a strong correlation with gender, however they are very right-skewed use np.power(1/6) to smooth it\n new_array2d[:, 15] = np.power(array2d[:, 26] / array2d[:, 4], 1 / 6)\n # sacc_items / items\n new_array2d[:, 16] = array2d[:, 27] / array2d[:, 4]\n # msite_orders / orders\n new_array2d[:, 17] = array2d[:, 28] / array2d[:, 3]\n # desktop_orders / orders\n new_array2d[:, 18] = array2d[:, 29] / array2d[:, 3]\n # android_orders / orders\n new_array2d[:, 19] = array2d[:, 30] / array2d[:, 3]\n # ios_orders / orders\n new_array2d[:, 20] = array2d[:, 31] / array2d[:, 3]\n # other_device_orders / orders\n new_array2d[:, 21] = array2d[:, 32] / array2d[:, 3]\n # work_orders / orders\n new_array2d[:, 22] = array2d[:, 33] / array2d[:, 3]\n # home_orders / orders\n new_array2d[:, 23] = array2d[:, 34] / array2d[:, 3]\n # parcelpoint_orders / orders\n new_array2d[:, 24] = array2d[:, 35] / array2d[:, 3]\n # other_collection_orders / orders\n new_array2d[:, 25] = array2d[:, 36] / array2d[:, 3]\n # average_discount_onoffer\n new_array2d[:, 26] = array2d[:, 39]\n # average_discount_used\n new_array2d[:, 27] = array2d[:, 40]\n # revenue / order\n new_array2d[:, 28] = array2d[:, 41] / array2d[:, 3]\n\n # normalize by each feature\n new_array2d = normalize(new_array2d, axis=0, norm='max')\n return new_array2d",
"def setup(self):\n igd = self.options['input_grid_data']\n ogd = self.options['output_grid_data']\n output_subset = self.options['output_subset']\n\n if ogd is None:\n ogd = igd\n\n # Build the interpolation matrix which maps from the input grid to the output grid.\n # Rather than a single phase-wide interpolating polynomial, map each segment.\n # To do this, find the nodes in the output grid which fall in each segment of the input\n # grid. Then build a Lagrange interpolating polynomial for that segment\n L_blocks = []\n output_nodes_ptau = list(ogd.node_ptau[ogd.subset_node_indices[output_subset]])\n\n for iseg in range(igd.num_segments):\n i1, i2 = igd.segment_indices[iseg]\n iptau_segi = np.take(igd.node_ptau, (i1, i2-1))\n istau_segi = np.take(igd.node_stau, (i1, i2-1))\n\n # The indices of the output grid that fall within this segment of the input grid\n if ogd is igd:\n optau_segi = iptau_segi\n else:\n ptau_hi = igd.segment_ends[iseg+1]\n if iseg < igd.num_segments - 1:\n idxs_in_iseg = np.where(output_nodes_ptau <= ptau_hi)[0]\n else:\n idxs_in_iseg = np.arange(len(output_nodes_ptau))\n optau_segi = np.asarray(output_nodes_ptau)[idxs_in_iseg]\n # Remove the captured nodes so we don't accidentally include them again\n output_nodes_ptau = output_nodes_ptau[len(idxs_in_iseg):]\n\n # Now get the output nodes which fall in iseg in iseg's segment tau space.\n ostau_segi = 2.0 * (optau_segi - iptau_segi[0]) / (iptau_segi[-1] - iptau_segi[0]) - 1\n\n # Create the interpolation matrix and add it to the blocks\n L, _ = lagrange_matrices(istau_segi, ostau_segi)\n L_blocks.append(L)\n\n self.interpolation_matrix = block_diag(*L_blocks)\n r, c = np.nonzero(self.interpolation_matrix)\n\n output_num_nodes, input_num_nodes = self.interpolation_matrix.shape\n\n for (name, kwargs) in self._timeseries_outputs:\n\n input_kwargs = {k: kwargs[k] for k in ('units', 'desc')}\n input_name = 'input_values:{0}'.format(name)\n self.add_input(input_name,\n shape=(input_num_nodes,) + kwargs['shape'],\n **input_kwargs)\n\n output_name = name\n output_kwargs = {k: kwargs[k] for k in ('units', 'desc')}\n output_kwargs['shape'] = (output_num_nodes,) + kwargs['shape']\n self.add_output(output_name, **output_kwargs)\n\n self._vars.append((input_name, output_name, kwargs['shape']))\n\n size = np.prod(kwargs['shape'])\n val_jac = np.zeros((output_num_nodes, size, input_num_nodes, size))\n\n for i in range(size):\n val_jac[:, i, :, i] = self.interpolation_matrix\n\n val_jac = val_jac.reshape((output_num_nodes * size, input_num_nodes * size),\n order='C')\n\n val_jac_rows, val_jac_cols = np.where(val_jac != 0)\n\n rs, cs = val_jac_rows, val_jac_cols\n self.declare_partials(of=output_name,\n wrt=input_name,\n rows=rs, cols=cs, val=val_jac[rs, cs])",
"def init_stitch(N):\n\tif N is None:\n\t\tN = np.int32(HII_DIM) #prepare for stitching\n\tMETA_GRID_SIZE = DIM/N\n\tM = np.int32(HII_DIM/META_GRID_SIZE)\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN/DIM)\n\tprint 'scale', scale\n\tHII_scale = np.float32(BOX_LEN/HII_DIM)\n\tshape = (DIM,DIM,N)\n\tstitch_grid_size = (DIM/(block_size[0]),\n\t\t\t\t\t\tDIM/(block_size[0]),\n\t\t\t\t\t\tN/(block_size[0]))\n\tHII_stitch_grid_size = (HII_DIM/(block_size[0]),\n\t\t\t\t\t\tHII_DIM/(block_size[0]),\n\t\t\t\t\t\tM/(block_size[0]))\n\t#ratio of large box to small size\n\tkernel_source = open(cmd_folder+\"/initialize_stitch.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'DIM': DIM, \n\t\t'VOLUME': VOLUME,\n\t\t'META_BLOCKDIM': N\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_stitch = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\tplan2d = Plan((np.int64(DIM), np.int64(DIM)), dtype=np.complex64)\n\tplan1d = Plan((np.int64(DIM)), dtype=np.complex64)\n\tprint \"init pspec\"\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\t#hbox_large = pyfftw.empty_aligned((DIM, DIM, DIM), dtype='complex64')\n\thbox_large = np.zeros((DIM, DIM, DIM), dtype=np.complex64)\n\t#hbox_small = np.zeros(HII_shape, dtype=np.float32)\n\t#hbox_large = n\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\n\t# Set up pinned memory for transfer\n\t#largebox_hs = cuda.aligned_empty(shape=shape, dtype=np.float32, alignment=resource.getpagesize())\n\tlargebox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.float32)\n\tlargecbox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.complex64)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tprint \"init boxes\"\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t# MRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=meta_x*N**3)\n\t\tinit_stitch(largebox_d, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tinit_stitch(largebox_d_imag, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largecbox_pin.copy()\n\t#if want to get velocity need to use this\n\tif True:\n\t\tprint \"saving kbox\"\n\t\tnp.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\n\tprint \"Executing FFT on device\"\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint hbox_large.dtype\n\tprint \"Finished FFT on device\"\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\t\n\tif True:\n\t\tprint \"loading kbox\"\n\t\thbox_large = 
np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\t#cuda.memcpy_htod_async(largebox_d, largebox_pin)\n\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tHII_filter(largebox_d, DIM, np.int32(meta_z), ZERO, smoothR, block=block_size, grid=stitch_grid_size);\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largebox_d.get_async()\n\t#import IPython; IPython.embed()\n\tprint \"Executing FFT on host\"\n\t#hbox_large = hifft(hbox_large).astype(np.complex64).real\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint \"Finished FFT on host\"\n\t#import IPython; IPython.embed()\n\n\t# for meta_x in xrange(META_GRID_SIZE):\n\t# \tfor meta_y in xrange(META_GRID_SIZE):\n\t# \t\tfor meta_z in xrange(META_GRID_SIZE):\n\t# \t\t\tlargebox_d = gpuarray.to_gpu(hbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N])\n\t# \t\t\tHII_filter(largebox_d, N, np.int32(meta_x), np.int32(meta_y), np.int32(meta_z), ZERO, smoothR, block=block_size, grid=grid_size);\n\t# \t\t\thbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N] = largebox_d.get()\n\t#plan = Plan(shape, dtype=np.complex64)\n\t#plan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\n\n\t# This saves a large resolution deltax\n\n\t\n\tprint \"downsampling\"\n\tsmallbox_d = gpuarray.zeros((HII_DIM,HII_DIM,M), dtype=np.float32)\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\t#largebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tlargebox_d /= scale**3 #\n\t\tsubsample_kernel(largebox_d, smallbox_d, DIM, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size) #subsample in real space\n\t\thbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallbox_d.get_async()\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), hbox_small)\n\t#import IPython; IPython.embed()\n\n\n\t# To get velocities: reload the k-space box\n\thbox_large = np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\thvbox_large = np.zeros((DIM, DIM, DIM), dtype=np.float32)\n\thvbox_small = np.zeros(HII_shape, dtype=np.float32)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,N), dtype=np.complex64)\n\tsmallvbox_d = gpuarray.zeros((HII_DIM, HII_DIM, M), dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\t\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(meta_z), np.int32(num), block=block_size, grid=stitch_grid_size)\n\t\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=stitch_grid_size)\n\t\t\tprint hvbox_large.shape, largevbox_d.shape\n\t\t\thvbox_large[:, :, meta_z*N:(meta_z+1)*N] = largevbox_d.get_async()\n\t\thvbox_large = fft_stitch(N, plan2d, plan1d, hvbox_large, largevbox_d).real\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargevbox_d = 
gpuarray.to_gpu_async(hvbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\tsubsample_kernel(largevbox_d.real, smallvbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size)\n\t\t\thvbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallvbox_d.get_async()\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallvbox_d.get())\n\n\treturn",
"def kernel_test(slabs, data, backend):\n Q = data[:, 0]\n\n layers = []\n for thickness, rsld, isld, sigma in slabs:\n layers.append(\n model.Layer(\n b=(rsld - 1j * isld), dens=0.1, d=thickness, sigma=sigma\n )\n )\n layers.reverse()\n stack = model.Stack(Layers=list(layers[1:-1]), Repetitions=1)\n sample = model.Sample(\n Stacks=[stack], Ambient=layers[-1], Substrate=layers[0]\n )\n # print(sample)\n\n inst = model.Instrument(\n probe=backend,\n wavelength=1.54,\n coords=\"q\",\n I0=1,\n res=0,\n restype=\"no conv\",\n respoints=5,\n resintrange=2,\n beamw=0.1,\n footype=\"no corr\",\n samplelen=10,\n pol=\"uu\",\n )\n if data.shape[1] == 4:\n dQ = data[:, 3]\n inst.restype = \"full conv and varying res.\"\n inst.res = dQ\n if backend == \"neutron pol spin flip\":\n # memory issues in matrix formalism if too many data points\n inst.respoints = 101\n else:\n inst.respoints = (\n 10001 # try to use same convolution as ref1d when generating\n )\n inst.resintrange = 3.5\n\n # print(inst)\n R = sample.SimSpecular(Q, inst)\n\n assert R.shape == data[:, 1].shape\n if data.shape[1] == 4:\n # validation accuracy is reduced for resolution runs, as strongly\n # depends on numerical convolution scheme\n if backend == \"neutron pol spin flip\":\n np.testing.assert_allclose(R, data[:, 1], rtol=0.005)\n else:\n np.testing.assert_allclose(R, data[:, 1], rtol=0.001)\n else:\n np.testing.assert_allclose(R, data[:, 1], rtol=0.001)",
"def test_2_layer():\r\n # angular frequency in radians * THz\r\n w = 100 * nu.THz\r\n # Relative permittivity of metal and dielectric\r\n em = -4.56 + 0.12j\r\n ed = 1.23 + 0.01j\r\n ex_list = ez_list = [ed, em]\r\n # Relative permeabilities\r\n mu_list = [1,1]\r\n # Dictionary of input parameters\r\n input_params = {'w': w, 'd_list': [inf,inf], 'ex_list': ex_list,\r\n 'ez_list': ez_list, 'mu_list': mu_list}\r\n \r\n # Calculate the theoretical kx\r\n theo_kx = (w / nu.c0) * cmath.sqrt((em * ed) / (em + ed))\r\n if theo_kx.imag < 0:\r\n theo_kx *= -1\r\n print('Theoretical kx:',\r\n '(%.7g+%.7gj) rad/um' % (theo_kx.real / nu.um**-1, theo_kx.imag / nu.um**-1))\r\n \r\n # If I use the theoretical kx value, the mode should be correct and\r\n # all my tests should pass.\r\n params = deepcopy(input_params)\r\n params['kx'] = theo_kx\r\n params = find_all_params_from_kx(params)\r\n kzd, kzm = params['kz_list']\r\n # check that kz_list is correct\r\n assert_floats_are_equal(kzd**2, (w**2 / nu.c0**2) * ed**2 / (em + ed))\r\n assert_floats_are_equal(kzm**2, (w**2 / nu.c0**2) * em**2 / (em + ed))\r\n # check that layer_bottom_list is correct\r\n assert params['layer_bottom_list'][0] == -inf\r\n assert params['layer_bottom_list'][1] == 0\r\n # Check that the boundary condition matrix agrees with hand-calculation\r\n bc_mat = bc_matrix(params)\r\n # ...top-left is Ex0down / H0down\r\n assert_floats_are_equal(bc_mat[0,0], -kzd / (w * ed * nu.eps0))\r\n # ...top-right is -Ex1up / H1up\r\n assert_floats_are_equal(bc_mat[0,1], -kzm / (w * em * nu.eps0))\r\n # ...bottom-left is eps0 * Ez0down / H0down\r\n assert_floats_are_equal(bc_mat[1,0], ed * -theo_kx / (w * ed * nu.eps0))\r\n # ...bottom-right is -eps1 * Ez1up / H1up\r\n assert_floats_are_equal(bc_mat[1,1], -em * -theo_kx / (w * em * nu.eps0))\r\n # Check that one of the eigenvalues is almost zero (compared to the size\r\n # of the matrix elements).\r\n eigenvalues = np.linalg.eig(bc_mat)[0]\r\n assert abs(eigenvalues).min() / abs(bc_mat).max() < 1e-6\r\n # Check that the mode passes all tests.\r\n assert check_mode(params, thorough=True) is True\r\n # Check that I can scale the fields and it still passes all tests.\r\n params_scaled = rescale_fields(1.23+4.56j, params)\r\n assert check_mode(params_scaled, thorough=True) is True\r\n \r\n # Now try my kx-finding algorithm, to see if it finds the right value.\r\n kx_list = find_kx(input_params)\r\n print('kx_list:',\r\n ['(%.7g+%.7gj) rad/um' % (kx.real / nu.um**-1, kx.imag / nu.um**-1)\r\n for kx in kx_list])\r\n kx = kx_list[0]\r\n assert_floats_are_equal(theo_kx, kx)\r\n \r\n plot_mode(params)\r\n \r\n print('If you see this message, all the tests succeeded!!')",
"def test_matrix22(gridsize=50):\n\n v1 = vec2(3,0)\n v2 = vec2(0,3)\n\n #rotate 45 degrees \n m22 = matrix22()\n m22.from_euler(45)\n\n # make a second matrix, also 45 degrees, should give us 90 total \n m22_2 = matrix22()\n m22_2.from_euler(45)\n m22 = m22_2 * m22\n\n # mutliply a vector by the matrix \n v3 = m22 * v2 \n\n fb = pixel_op() \n fb.create_buffer(800, 800)\n fb.graticule(gridsize)\n \n pts = [ (0,0), (0,1), (2,1), (0,2) ]\n #bloody_simple_2drender('2d_rotation.png', pts=pts, gridsize=50, pfb=fb)\n\n vecs = [v2,v3]\n bloody_simple_2drender('2d_rotation.png', vecs=vecs, gridsize=50, pfb=fb)\n\n #rotate the points by matrix multiplication \n pts = m22.batch_mult_pts(pts) \n bloody_simple_2drender('2d_rotation.png', pts=pts, gridsize=50, pfb=fb)\n fb.save('2d_rotation.png')",
"def test_mueller_product(self, ):\n mdims = ('mueller_v', 'mueller_h')\n mm_1 = xr.DataArray(np.random.rand(4, 4, ), dims=mdims, )\n mm_2 = xr.DataArray(np.identity(4, ), dims=mdims, )\n sv_1 = xr.DataArray(np.random.rand(4, ), dims=('stokes', ), )\n\n assert_almost_equal(mm_1.values, mueller_product(mm_1, mm_2).values, )\n assert_almost_equal(mm_1.values, mueller_product(mm_2, mm_1).values, )\n assert_almost_equal(sv_1.values, mueller_product(mm_2, sv_1).data, )",
"def test_gauss_kernel():\n\n gauss = gauss_kernel(2, 5)\n\n assert gauss.shape == (5, 5)\n assert gauss[2, 2] == 0.039788735772973836",
"def test_filter_l2_1():\n box = Box(length=L, origin=O)\n f = Field(box, formula=func, name='f0')\n d_fine = Discretization([513, 513, 513])\n d_coarse = Discretization([257, 257, 257], ghosts=[2, 2, 2])\n op = MultiresolutionFilter(d_in=d_fine, d_out=d_coarse,\n variables={f: d_coarse},\n method={Remesh: L2_1, })\n op.discretize()\n op.setup()\n topo_coarse = op.discreteFields[f].topology\n topo_fine = [t for t in f.discreteFields.keys()\n if not t is topo_coarse][0]\n f.initialize(topo=topo_fine)\n f_out = f.discreteFields[topo_coarse]\n op.apply(simu)\n valid = [npw.zeros(f_out[0].shape), ]\n valid = func(valid, *topo_coarse.mesh.coords)\n assert np.allclose(valid[0][topo_coarse.mesh.iCompute],\n f_out[0][topo_coarse.mesh.iCompute]), \\\n np.max(np.abs(valid[0][topo_coarse.mesh.iCompute] -\n f_out[0][topo_coarse.mesh.iCompute]))",
"def regularize_fwd(X, y, mu0, mu1, v1, nz, K, verbose=False, showpath=False, fignum=1):\n \n if verbose: sss=0#print '\\ncompute path between mu=%.4f and mu=%.4f'%(mu0, mu1)\n \n n, m = X.shape\n X_nz = np.atleast_2d(X[:, nz])\n b = np.dot(X.T, y)\n G = np.dot(X.T, X)\n \n nbr = 0\n mu = mu0\n trans_type = -1\n trans_sign = 0\n trans_ind = -1\n if verbose: sss=0#print 'initial active features =', nz\n if showpath:\n import matplotlib.pyplot as plt\n pth = np.linspace(mu0, mu1, 100)\n thetapth = np.zeros((m, 100))\n fig = plt.figure(fignum)\n plt.clf()\n allbr = []\n \n while mu < mu1:\n \n # find the breakpoints where coefficients become zero\n b_nz = b[nz]\n Kv1 = np.dot(K, v1)\n Kb_nz = np.dot(K, b_nz)\n mu_0 = Kb_nz / Kv1\n \n # find the breakpoints where new coefficients become active\n z = np.setdiff1d(np.arange(m), nz)\n X_z = np.atleast_2d(X[:, z])\n b_z = b[z]\n M = G[np.ix_(z, nz)]\n MKb_nz = np.dot(M, Kb_nz)\n MKv1 = np.dot(M, Kv1)\n mu_1 = (b_z - MKb_nz) / (1 - MKv1)\n mu_m1 = (b_z - MKb_nz) / (-1 - MKv1)\n \n if trans_type > 0: mu_0[-1] = mu1\n mu_0[mu_0 <= mu] = mu1\n if len(mu_0) > 0: \n mu_0_argmin = mu_0.argmin()\n mu_0_min = mu_0[mu_0_argmin][0]\n else:\n mu_0_min = mu1\n if trans_type == 0:\n if trans_sign == 1: mu_1[np.where(z == trans_ind)[0]] = mu1 + 1\n else: mu_m1[np.where(z == trans_ind)[0]] = mu1 + 1\n mu_1[mu_1 <= mu] = mu1\n if len(mu_1) > 0: \n mu_1_argmin = mu_1.argmin()\n mu_1_min = mu_1[mu_1_argmin][0]\n else:\n mu_1_min = mu1\n mu_m1[mu_m1 <= mu] = mu1\n if len(mu_m1) > 0: \n mu_m1_argmin = mu_m1.argmin()\n mu_m1_min = mu_m1[mu_m1_argmin][0]\n else:\n mu_m1_min = mu1\n \n # compute the breakpoint\n mu_br_all = np.array([mu_0_min, mu_1_min, mu_m1_min])\n trans_type = mu_br_all.argmin()\n mu_br = mu_br_all[trans_type]\n \n if mu_br < mu1:\n \n if showpath:\n if len(nz) > 0:\n inds = np.intersect1d(np.where(pth >= mu)[0], np.where(pth < mu_br)[0])\n thetapth[np.ix_(nz, inds)] = np.tile(Kb_nz, (1, len(inds))) - np.tile(Kv1, (1, len(inds))) * \\\n np.tile(pth[inds], (len(nz), 1))\n allbr.append(mu_br)\n \n nbr += 1\n mu = mu_br\n \n if trans_type == 0: # an element of theta(t) goes to zero\n trans_ind = nz[mu_0_argmin]\n trans_sign = v1[mu_0_argmin]\n if verbose: nbr=nbr#print 'transition point :: mu = %.4f :: feature %d is inactive'%(mu, trans_ind)\n nzind = range(len(nz))\n ####################################\n index=np.where(nz==trans_ind)[0][0]\n ####################################\n #print '1)', nzind, nz, trans_ind, index\n nzind=np.delete(nzind,np.where(nzind==index))#nzind.remove(index)\n #print '2)', nzind\n #nzind.remove(nz.index(trans_ind))\n v1 = v1[nzind]\n nz=np.delete(nz,np.where(nz==trans_ind))\n #print '3)', nz\n #nz.remove(trans_ind)\n X_nz = X[:, nz]\n K = invupdatered(K, mu_0_argmin)\n else: # new active element\n if trans_type == 1: # it is positive\n trans_ind = z[mu_1_argmin]\n if verbose: K=K#print 'transition point :: mu = %.4f :: feature %d is positive'%(mu, trans_ind)\n nz=np.append(nz,trans_ind)\n #nz.append(trans_ind)\n v1 = np.vstack([v1, 1])\n else: # it is negative\n trans_ind = z[mu_m1_argmin]\n if verbose: K=K#print 'transition point :: mu = %.4f :: feature %d is negative'%(mu, trans_ind)\n nz=np.append(nz,trans_ind)\n #nz.append(trans_ind)\n v1 = np.vstack([v1, -1])\n X_new = np.atleast_2d(X[:, trans_ind]).T\n K = invupdateapp(K, np.dot(X_nz.T, X_new), np.dot(X_new.T, X_nz), \n np.dot(X_new.T, X_new))\n X_nz = X[:, nz]\n \n else: # compute solution at mu1\n \n if verbose: sss=0#print 'compute solution at mu =', mu1\n if 
showpath and len(nz) > 0:\n inds = np.intersect1d(np.where(pth >= mu)[0], np.where(pth <= mu1)[0])\n thetapth[np.ix_(nz, inds)] = np.tile(Kb_nz, (1, len(inds))) - np.tile(Kv1, (1, len(inds))) * \\\n np.tile(pth[inds], (len(nz), 1))\n \n theta_nz = Kb_nz - mu1*Kv1\n mu = mu1\n \n if showpath:\n fig = plt.figure(fignum)\n leg = []\n for i in range(m):\n plt.plot(pth, thetapth[i, :])\n leg.append(r'$\\theta_%d(\\mu)$'%(i+1))\n plt.plot(pth, np.zeros(len(pth),), 'k')\n plt.xlabel(r'$\\mu$', fontsize=16)\n plt.title(r'Step 1: homotopy in $\\mu$', fontsize=16)\n plt.legend(leg, loc='best')\n plt.plot(allbr, np.zeros(nbr), 'ko')\n plt.xlim(mu0, mu1)\n plt.show()\n \n return theta_nz, nz, K, nbr",
"def test_power_spectral_density_from_spatially_resolved_magnetisation_confined_to_mesh_region(tmpdir, debug=False):\n os.chdir(str(tmpdir))\n RTOL = 1e-10\n\n H1 = 1e6 # external field in A/m\n alpha1 = 0.5 # some sort of damping constant\n omega1 = gamma * H1 # precession frequency\n\n H2 = 2.8e4 # external field in A/m\n alpha2 = 0.3 # some sort of damping constant\n omega2 = gamma * H2 # precession frequency\n\n ##\n # Step 1: Construct a time series of artificial magnetisation\n # data and save it to a bunch of .npy files.\n ##\n t_step = 1e-11\n t_ini = 0\n t_end = 10e-9\n\n N1 = 42 # in a real application this would be the number of mesh vertices\n N2 = 23 # in a real application this would be the number of mesh vertices\n fft_test_helpers.create_test_npy_files_with_two_regions(\n str(tmpdir), t_step, t_ini, t_end, omega1, alpha1, N1, omega2, alpha2, N2)\n\n ##\n # Step 2: compute the FFT of a resampled time series, both by\n # hand and using FFT_m.\n ##\n # XXX TODO: Resampling timesteps is not supported when using .npy\n # files. Either simplify the code below, or implement saving to\n # .h5 files so that it's easier to implement resampling for\n # spatially resolved data, too.\n ##\n t_step_res = t_step\n t_ini_res = t_ini\n t_end_res = t_end\n ts_resampled = np.arange(t_ini_res, t_end_res, t_step_res)\n\n # Compute time series based on resampled timesteps\n mx_res = exp(-ts_resampled * 1e8 / alpha1) * sin(omega1 * ts_resampled)\n my_res = exp(-ts_resampled * 1e8 / alpha1) * cos(omega1 * ts_resampled)\n mz_res = 1 - sqrt(mx_res ** 2 + my_res ** 2)\n\n # Compute 'analytical' Fourier transform of resampled time series and\n # determine the power of the spectrum for each component. We also need\n # to multiply by the number of mesh nodes because the numerical algorithm\n # sums up all contributions at the individual nodes (but we can just\n # multiply because they are all identical by construction).\n psd_mx_expected = N1 * np.absolute(np.fft.rfft(mx_res)) ** 2\n psd_my_expected = N1 * np.absolute(np.fft.rfft(my_res)) ** 2\n psd_mz_expected = N1 * np.absolute(np.fft.rfft(mz_res)) ** 2\n\n # Compute Fourier transform of resampled time series using FFT_m\n freqs_computed, psd_mx_computed, psd_my_computed, psd_mz_computed = \\\n compute_power_spectral_density('m_ringdown*.npy', t_step_res, t_ini=t_ini_res,\n t_end=t_end_res, subtract_values=None, restrict_to_vertices=xrange(N1))\n\n # Check that the analytically determined power spectra are the same as the\n # computed ones.\n assert(np.allclose(psd_mx_expected, psd_mx_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_my_expected, psd_my_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_mz_expected, psd_mz_computed, atol=0, rtol=RTOL))\n\n if debug:\n # Plot the spectra for debugging\n fig = plt.figure(figsize=(20, 5))\n ax = fig.gca()\n ax.plot(freqs_computed, psd_mx_expected, label='psd_mx_expected')\n ax.plot(freqs_computed, psd_my_expected, label='psd_my_expected')\n ax.plot(freqs_computed, psd_mz_expected, label='psd_mz_expected')\n ax.plot(freqs_computed, psd_mx_computed, label='psd_mx_computed')\n ax.plot(freqs_computed, psd_my_computed, label='psd_my_computed')\n ax.plot(freqs_computed, psd_mz_computed, label='psd_mz_computed')\n ax.legend(loc='best')\n fig.savefig('psd_m_McMichaelStiles.png')",
"def test_unroll_kern(self):\r\n # 6 filters is a multiple of 2 and 3. Should work.\r\n self.validate((2, 3, 3, 3), (6, 3, 2, 2), 'valid', unroll_kern=2,\r\n verify_grad=False)\r\n self.validate((2, 3, 3, 3), (6, 3, 2, 2), 'valid', unroll_kern=3,\r\n verify_grad=False)",
"def test052_2d_numerical_comparison_on_vs_np_mp(\n self,\n batch_size=8,\n num_features=16,\n height=45,\n width=64,\n alpha_fwd=0.99,\n alpha_bkw=0.99,\n itrs=2,\n ):\n # create inputs\n np_inputs = np.random.randn(batch_size, num_features, height, width) + .25\n # instantiate gradient at the output\n np_grad_out = np.random.randn(batch_size, num_features, height, width) + .125\n\n tf.keras.backend.set_floatx('float16')\n\n self.template_numerical_comparison_on_vs_np(\n np_inputs,\n np_grad_out=np_grad_out,\n axis=1,\n alpha_fwd=alpha_fwd,\n alpha_bkw=alpha_bkw,\n itrs=itrs,\n dtype=Policy('infer_float32_vars'),\n )",
"def test_1_2_2D_rec_splits(self):\n check = [(3.0, -2.0), (7.0, -1.0), (7.0, -2.0), (3.0, -1.0),\n (5.0, -1.5), (3.0, -1.5), (5.0, -2.0), (4.0, -1.75),\n (7.0, -1.5), (5.0, -1.0), (6.0, -1.25), (6.0, -1.75),\n (4.0, -1.25), (5.0, -1.75), (4.0, -1.5), (4.5, -1.625),\n (3.0, -1.75), (4.0, -2.0), (3.5, -1.875), (3.5, -1.625),\n (4.5, -1.875), (5.0, -1.25), (6.0, -1.5), (5.5, -1.375),\n (7.0, -1.25), (6.0, -1.0), (6.5, -1.125), (6.5, -1.375),\n (5.5, -1.125), (5.5, -1.625), (7.0, -1.75), (6.0, -2.0),\n (6.5, -1.875), (6.5, -1.625), (5.5, -1.875), (4.5, -1.375),\n (3.0, -1.25), (4.0, -1.0), (3.5, -1.125), (3.5, -1.375),\n (4.5, -1.125)]\n nn_checks = {(3.0, -2.0): [(3.0, -1.75), (3.5, -1.875), (4.0, -2.0)],\n (5.0, -1.75): [(5.0, -2.0), (5.0, -1.5), (5.5, -1.625),\n (5.5, -1.875), (4.5, -1.625), (6.0, -1.75),\n (4.5, -1.875), (4.0, -1.75)],\n (6.0, -2.0): [(5.0, -2.0), (5.5, -1.875), (6.0, -1.75),\n (6.5, -1.875), (7, -2)],\n (4.5, -1.125): [(5.0, -1.0), (4.0, -1.25), (5.0, -1.25),\n (4.0, -1.0)]}\n\n init_triangulation(2, 2, check, nn_checks, bounds=[(3, 7), (-2, -1)])"
]
| [
"0.6104809",
"0.60860753",
"0.60539633",
"0.60334903",
"0.60219675",
"0.5804723",
"0.5646907",
"0.5639585",
"0.56350946",
"0.5630155",
"0.56274045",
"0.56215525",
"0.5603267",
"0.55760777",
"0.55667526",
"0.55426526",
"0.55111784",
"0.5501427",
"0.5493366",
"0.5487018",
"0.5486849",
"0.54854685",
"0.5454987",
"0.5453282",
"0.5446921",
"0.54316026",
"0.54193985",
"0.541347",
"0.54095644",
"0.54088074"
]
| 0.6174399 | 0 |
Testing M6 remeshing formula in 2D, 1 kernel, single precision, o2_FullHalf splitting. | def test_2D_m6_1k_sFH():
scal, velo = setup_2D()
advec = Advection(velo, scal, discretization=d2d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: 'gpu_2k',
Splitting: 'o2_FullHalf'}
)
advec_py = Advection(velo, scal, discretization=d2d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: '',
Splitting: 'o2_FullHalf'}
)
assertion_2D_withPython(scal, velo, advec, advec_py) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_2D_m6_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_1_2_2D_cube_splits(self):\n check = [(0, 0), (1, 1), (1, 0), (0, 1), (0.5, 0.5), (0.0, 0.5),\n (0.5, 0.0),\n (0.25, 0.25), (1.0, 0.5), (0.5, 1.0), (0.75, 0.75),\n (0.75, 0.25),\n (0.25, 0.75), (0.5, 0.25), (0.25, 0.5), (0.375, 0.375),\n (0.0, 0.25),\n (0.25, 0.0), (0.125, 0.125), (0.125, 0.375), (0.375, 0.125),\n (0.5, 0.75), (0.75, 0.5), (0.625, 0.625), (1.0, 0.75),\n (0.75, 1.0),\n (0.875, 0.875), (0.875, 0.625), (0.625, 0.875), (0.625, 0.375),\n (1.0, 0.25), (0.75, 0.0), (0.875, 0.125), (0.875, 0.375),\n (0.625, 0.125), (0.375, 0.625), (0.0, 0.75), (0.25, 1.0),\n (0.125, 0.875), (0.125, 0.625), (0.375, 0.875)]\n\n nn_checks = {(0, 0): [(0.25, 0.0), (0.0, 0.25), (0.125, 0.125)],\n (0.625, 0.375): [(0.5, 0.5), (0.75, 0.25), (0.75, 0.5),\n (0.5, 0.25)],\n (0, 1): [(0.25, 1.0), (0.125, 0.875),(0.0, 0.75)],\n (0.625, 0.125): [(0.5, 0.0), (0.75, 0.25), (0.75, 0.0),\n (0.5, 0.25)]}\n\n\n init_triangulation(2, 2, check, nn_checks)",
"def test_multiple_case(self):\r\n\r\n shp = (3, 3)\r\n fx, fy, fz, fw = fmatrices('xyzw')\r\n dx, dy, dz, dw = dmatrices('xyzw')\r\n fv = fvector('r').dimshuffle('x', 0)\r\n dv = dvector('s').dimshuffle('x', 0)\r\n fxv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fyv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fzv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fwv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float32').reshape(1, shp[0])\r\n dxv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dyv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dzv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dwv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float64').reshape(1, shp[0])\r\n\r\n #We must be sure that the Canonizer is working, but that we don't have other\r\n # optimisation that could hide bug in the Canonizer as local_elemwise_fusion\r\n mode = compile.mode.get_default_mode()\r\n old_optimizer = mode._optimizer\r\n try:\r\n mode._optimizer = gof.Query([\"canonicalize\"])\r\n mode._optimizer = mode._optimizer.including('ShapeOpt')\r\n mode._optimizer = mode._optimizer.excluding(\r\n 'local_elemwise_fusion')\r\n\r\n #test x / x -> 1\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([(fx/fx,[fx],[fxv],'float32'),\r\n (dx/dx,[dx],[dxv],'float64'),\r\n (fv/fv,[fv],[fvv],'float32'),\r\n (dv/dv,[dv],[dvv],'float64'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert (out == numpy.ones(shp, dtype=out_dtype)).all()\r\n topo = f.maker.fgraph.toposort()\r\n if sym_inputs[0].broadcastable[0]:\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, tensor.Alloc)\r\n else:\r\n assert len(topo) == 3\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, Shape_i)\r\n assert isinstance(topo[2].op, tensor.Alloc)\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (x * y) / x -> y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx*dy)/dx,[dx,dy],[dxv,dyv],0,'float64'),\r\n ((fx*fy)/fx,[fx,fy],[fxv,fyv],0,'float32'),\r\n ((dv*dy)/dv,[dv,dy],[dvv,dyv],0,'float64'),\r\n ((fv*fy)/fv,[fv,fy],[fvv,fyv],0,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n ((dx*dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float64, row)>)]\r\n ((fx*fv)/fx,[fx,fv],[fxv,fvv],1,'float32')\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float32, row)>)]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert(out_dtype == out.dtype)\r\n assert numpy.allclose(out, val_inputs[1])\r\n topo = f.maker.fgraph.toposort()\r\n print \"ID TOPO\", id, topo, sym_inputs\r\n for r, t in f.maker.fgraph.shape_feature.shape_of.items():\r\n print ' ', r, t\r\n if topo and not(len(topo)==1 and topo[0].op==deep_copy_op):\r\n for node in topo[:-1]:\r\n assert isinstance(node.op, Shape_i)\r\n assert isinstance(topo[-1].op, tensor.Alloc)\r\n\r\n #test x / y / x -> 1 / y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx/dy)/dx,[dx,dy],[dxv,dyv],1,'float64'),\r\n ((fx/fy)/fx,[fx,fy],[fxv,fyv],1,'float32'),\r\n ((dv/dy)/dv,[dv,dy],[dvv,dyv],1,'float64'),\r\n 
((fv/fy)/fv,[fv,fy],[fvv,fyv],1,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n\r\n ((dx/dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float64, row)>), Alloc]\r\n ((fx/fv)/fx,[fx,fv],[fxv,fvv],1,'float32'),\r\n #topo:[Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float32, row)>), Alloc]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (1 / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n print topo\r\n elem = [t for t in topo if isinstance(t.op, T.Elemwise)]\r\n assert len(elem) == nb_elemwise\r\n assert isinstance(elem[0].op, (T.Elemwise, ))\r\n assert isinstance(elem[0].op.scalar_op, (\r\n theano.scalar.basic.Inv, theano.scalar.basic.TrueDiv))\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (a / b) * (b / c) * (c / d) -> a / d\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((dx / dy) * (dy / dz) * (dz / dw),[dx,dy,dz,dw],[dxv,dyv,dzv,dwv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fw),[fx,fy,fz,fw],[fxv,fyv,fzv,fwv],'float32'),\r\n ((dv / dy) * (dy / dz) * (dz / dw),[dv,dy,dz,dw],[dvv,dyv,dzv,dwv],'float64'),\r\n ((fv / fy) * (fy / fz) * (fz / fw),[fv,fy,fz,fw],[fvv,fyv,fzv,fwv],'float32'),\r\n ((dx / dv) * (dv / dz) * (dz / dw),[dx,dv,dz,dw],[dxv,dvv,dzv,dwv],'float64'),\r\n ((fx / fv) * (fv / fz) * (fz / fw),[fx,fv,fz,fw],[fxv,fvv,fzv,fwv],'float32'),\r\n ((dx / dy) * (dy / dv) * (dv / dw),[dx,dy,dv,dw],[dxv,dyv,dvv,dwv],'float64'),\r\n ((fx / fy) * (fy / fv) * (fv / fw),[fx,fy,fv,fw],[fxv,fyv,fvv,fwv],'float32'),\r\n ((dx / dy) * (dy / dz) * (dz / dv),[dx,dy,dz,dv],[dxv,dyv,dzv,dvv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fv),[fx,fy,fz,fv],[fxv,fyv,fzv,fvv],'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (val_inputs[0] / val_inputs[3]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[0].inputs) == 2\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (2.0 * x) / (4.0 * y) -> (0.5 * x) / y\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (((2.0*dx)/(4.0*dy)),[dx,dy],[dxv,dyv],'float64'),\r\n (((2.0*fx)/(4.0*fy)),[fx,fy],[fxv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dv)/(4.0*dy)),[dv,dy],[dvv,dyv],'float64'),\r\n (((2.0*fv)/(4.0*fy)),[fv,fy],[fvv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dx)/(4.0*dv)),[dx,dv],[dxv,dvv],'float64'),\r\n (((2.0*fx)/(4.0*fv)),[fx,fv],[fxv,fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (0.5 *\r\n val_inputs[0] / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.Mul)\r\n assert len(topo[0].inputs) == 2\r\n assert isinstance(topo[1].op, (T.Elemwise, ))\r\n assert isinstance(topo[1].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[1].inputs) == 2\r\n assert(out_dtype == 
out.dtype)\r\n\r\n #test 2 * x / 2 -> x\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2*dx)/2,[dx],[dxv],'float64'),\r\n ((2*fx)/2,[fx],[fxv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2*dv)/2,[dv],[dvv],'float64'),\r\n ((2*fv)/2,[fv],[fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, val_inputs[0])\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n topo[0].op == deep_copy_op\r\n assert(out_dtype == out.dtype)\r\n\r\n #test x / abs(x) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (dx/abs(dx),[dx],[0.5-dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.5-fxv], 'float32'),\r\n (dx/abs(dx),[dx],[0.1*dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.1*fxv], 'float32'),\r\n (dv/abs(dv),[dv],[0.5-dvv],'float64'),\r\n (fv/abs(fv),[fv],[0.5-fvv], 'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]))\r\n assert(out_dtype == out.dtype)\r\n assert len(f.maker.fgraph.toposort()) == 1\r\n\r\n #test (2*x) / (3*abs(x)) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.5 - dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.5 - fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.1 * dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.1 * fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dv) / (3 * abs(dv)), [dv], [0.5 - dvv], 'float64'),\r\n ((2 * fv) / (3 * abs(fv)), [fv], [0.5 - fvv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n topo = f.maker.fgraph.toposort()\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]) * 2 / 3)\r\n assert(out_dtype == out.dtype)\r\n finally:\r\n mode._optimizer = old_optimizer",
"def test_2D_m6_1k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_2k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def _apply_array_spatial12_halffilling(self, h1e: 'Nparray',\n h2e: 'Nparray') -> 'Nparray':\n if fqe.settings.use_accelerated_code:\n return self._apply_array_spatial12_lm(h1e, h2e)\n else:\n h1e = copy.deepcopy(h1e)\n h2e = numpy.moveaxis(copy.deepcopy(h2e), 1, 2) * (-1.0)\n norb = self.norb()\n for k in range(norb):\n h1e[:, :] -= h2e[:, k, k, :]\n\n if numpy.iscomplex(h1e).any() or numpy.iscomplex(h2e).any():\n dvec = self.calculate_dvec_spatial()\n out = numpy.einsum(\"ij,ijkl->kl\", h1e, dvec)\n dvec = numpy.einsum(\"ijkl,klmn->ijmn\", h2e, dvec)\n out += self._calculate_coeff_spatial_with_dvec(dvec)\n else:\n nij = norb * (norb + 1) // 2\n h1ec = numpy.zeros((nij), dtype=self._dtype)\n h2ec = numpy.zeros((nij, nij), dtype=self._dtype)\n for i in range(norb):\n for j in range(i + 1):\n ijn = j + i * (i + 1) // 2\n h1ec[ijn] = h1e[i, j]\n for k in range(norb):\n for l in range(k + 1):\n kln = l + k * (k + 1) // 2\n h2ec[ijn, kln] = h2e[i, j, k, l]\n dvec = self._calculate_dvec_spatial_compressed()\n out = numpy.einsum(\"i,ikl->kl\", h1ec, dvec)\n dvec = numpy.einsum(\"ik,kmn->imn\", h2ec, dvec)\n for i in range(self.norb()):\n for j in range(self.norb()):\n ijn = min(i, j) + max(i, j) * (max(i, j) + 1) // 2\n work = self._core.alpha_map(j, i)\n for source, target, parity in work:\n out[source, :] += dvec[ijn, target, :] * parity\n work = self._core.beta_map(j, i)\n for source, target, parity in work:\n out[:, source] += dvec[ijn, :, target] * parity\n\n return out",
"def test_2D_m4_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_3_2_4D_cube_splits(self):\n check = [(0, 0, 0, 0), (1, 1, 1, 1), (1, 0, 0, 0), (1, 1, 0, 0),\n (1, 1, 1, 0),\n (1, 1, 0, 1), (1, 0, 1, 0), (1, 0, 1, 1), (1, 0, 0, 1),\n (0, 1, 0, 0),\n (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 0, 1), (0, 0, 1, 0),\n (0, 0, 1, 1),\n (0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5), (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5), (0.0, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0), (0.25, 0.25, 0.25, 0.25),\n (1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0),\n (0.5, 1.0, 0.5, 1.0), (0.5, 0.5, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0),\n (1.0, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25),\n (1.0, 1.0, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.0, 0.5),\n (0.5, 1.0, 0.0, 0.0), (0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25),\n (1.0, 0.5, 1.0, 0.0), (0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.25), (1.0, 0.5, 0.0, 1.0),\n (0.5, 1.0, 0.0, 1.0),\n (0.5, 0.5, 0.0, 1.0), (0.75, 0.75, 0.25, 0.75),\n (1.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 1.0, 0.5), (0.5, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.25),\n (1.0, 0.0, 0.5, 1.0), (0.5, 0.0, 1.0, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0), (0.25, 0.75, 0.25, 0.25),\n (0.0, 1.0, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5), (0.0, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.25),\n (0.0, 1.0, 0.5, 1.0), (0.0, 0.5, 1.0, 1.0),\n (0.0, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75), (0.0, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(0, 0, 0, 0): [(0.0, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.25, 0.25, 0.25, 0.25),\n (0.5, 0.0, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0),\n (0.5, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0)],\n (1.0, 1.0, 0.5, 0.5): [(1.0, 1.0, 0.5, 1.0), (1, 1, 0, 1),\n (1.0, 1.0, 1.0, 0.5),\n (1.0, 0.5, 0.5, 0.5), (1, 1, 1, 0),\n (1.0, 1.0, 0.5, 0.0),\n (1.0, 1.0, 0.0, 0.5), (1, 1, 0, 0),\n (1, 1, 1, 1), (0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.75, 0.75, 0.75, 0.75),\n (0.75, 0.75, 0.25, 0.25),\n (0.75, 0.75, 0.75, 0.25),\n (0.75, 0.75, 0.25, 0.75)],\n (0.25, 0.25, 0.25, 0.75): [(0.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 0.0, 1.0),\n (0.5, 0.5, 0.5, 1.0),\n (0, 0, 0, 1),\n (0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0),\n (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5),\n (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5)]}\n\n init_triangulation(4, 1, check, nn_checks)",
"def test_2D_m8_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_4_2_5D_cube_splits(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0), (1, 1, 1, 0, 0), (1, 1, 1, 1, 0),\n (1, 1, 1, 0, 1), (1, 1, 0, 1, 0), (1, 1, 0, 1, 1),\n (1, 1, 0, 0, 1), (1, 0, 1, 0, 0), (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1), (1, 0, 0, 0, 1), (0, 1, 0, 0, 0),\n (0, 1, 1, 0, 0), (0, 1, 1, 1, 0), (0, 1, 1, 1, 1),\n (0, 1, 1, 0, 1), (0, 1, 0, 1, 0), (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1), (0, 0, 1, 0, 1), (0, 0, 0, 1, 0),\n (0, 0, 0, 1, 1), (0, 0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.0), (0.0, 0.0, 0.5, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5, 0.5), (0.0, 0.5, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.0, 0.0, 0.0), (0.0, 0.5, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.5, 0.0, 0.5), (0.0, 0.5, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0, 0.0), (0.5, 0.0, 0.0, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.0, 0.5), (0.5, 0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.0, 0.0), (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.25, 0.25, 0.25, 0.25, 0.25), (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5), (1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 0.5), (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 1.0, 0.5, 0.5, 1.0), (1.0, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0), (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 0.5, 1.0, 1.0), (1.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5), (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0), (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 1.0), (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5), (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0), (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5), (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5, 0.5), (1.0, 0.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.0, 0.5, 0.0),\n (1.0, 0.0, 0.5, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0, 0.0),\n (1.0, 0.0, 0.5, 0.5, 0.0), (1.0, 0.5, 0.0, 0.5, 0.5),\n (1.0, 0.5, 0.0, 0.0, 0.5), (1.0, 0.5, 0.0, 0.0, 0.0),\n (1.0, 0.5, 0.0, 0.5, 0.0), (1.0, 0.5, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25, 0.25), (1.0, 1.0, 0.0, 0.5, 0.5),\n (1.0, 1.0, 0.0, 0.0, 0.5), (1.0, 1.0, 0.0, 0.5, 0.0),\n (1.0, 1.0, 0.5, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0, 0.0),\n (1.0, 1.0, 0.5, 0.5, 0.0), (0.5, 1.0, 0.0, 0.5, 0.5),\n (0.5, 1.0, 0.0, 0.0, 0.5), (0.5, 1.0, 0.0, 0.0, 0.0),\n (0.5, 1.0, 0.0, 0.5, 0.0), (0.5, 1.0, 0.5, 0.0, 0.5),\n (0.5, 1.0, 0.5, 0.0, 0.0), (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25, 0.25), (1.0, 1.0, 1.0, 0.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.0), (1.0, 0.5, 1.0, 0.0, 0.5),\n (1.0, 0.5, 1.0, 0.0, 0.0), (1.0, 0.5, 1.0, 0.5, 0.0),\n (0.5, 1.0, 1.0, 0.0, 0.5), (0.5, 1.0, 1.0, 0.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0), (0.5, 0.5, 1.0, 0.0, 0.5),\n (0.5, 0.5, 1.0, 0.0, 0.0), (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.75, 0.25, 0.25), (1.0, 
1.0, 0.5, 1.0, 0.0),\n (1.0, 0.5, 1.0, 1.0, 0.0), (1.0, 0.5, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 1.0, 0.0), (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 0.5, 1.0, 1.0, 0.0), (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.75, 0.25), (1.0, 1.0, 0.5, 0.0, 1.0),\n (1.0, 0.5, 1.0, 0.0, 1.0), (1.0, 0.5, 0.5, 0.0, 1.0),\n (0.5, 1.0, 1.0, 0.0, 1.0), (0.5, 1.0, 0.5, 0.0, 1.0),\n (0.5, 0.5, 1.0, 0.0, 1.0), (0.5, 0.5, 0.5, 0.0, 1.0),\n (0.75, 0.75, 0.75, 0.25, 0.75), (1.0, 1.0, 0.0, 1.0, 0.5),\n (1.0, 0.5, 0.0, 1.0, 0.5), (1.0, 0.5, 0.0, 1.0, 0.0),\n (0.5, 1.0, 0.0, 1.0, 0.5), (0.5, 1.0, 0.0, 1.0, 0.0),\n (0.5, 0.5, 0.0, 1.0, 0.5), (0.5, 0.5, 0.0, 1.0, 0.0),\n (0.75, 0.75, 0.25, 0.75, 0.25), (1.0, 1.0, 0.0, 0.5, 1.0),\n (1.0, 0.5, 0.0, 1.0, 1.0), (1.0, 0.5, 0.0, 0.5, 1.0),\n (0.5, 1.0, 0.0, 1.0, 1.0), (0.5, 1.0, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0, 1.0), (0.5, 0.5, 0.0, 0.5, 1.0),\n (0.75, 0.75, 0.25, 0.75, 0.75), (1.0, 0.5, 0.0, 0.0, 1.0),\n (0.5, 1.0, 0.0, 0.0, 1.0), (0.5, 0.5, 0.0, 0.0, 1.0),\n (0.75, 0.75, 0.25, 0.25, 0.75), (1.0, 0.0, 1.0, 0.5, 0.5),\n (1.0, 0.0, 1.0, 0.0, 0.5), (1.0, 0.0, 1.0, 0.5, 0.0),\n (0.5, 0.0, 1.0, 0.5, 0.5), (0.5, 0.0, 1.0, 0.0, 0.5),\n (0.5, 0.0, 1.0, 0.0, 0.0), (0.5, 0.0, 1.0, 0.5, 0.0),\n (0.75, 0.25, 0.75, 0.25, 0.25), (1.0, 0.0, 1.0, 1.0, 0.5),\n (1.0, 0.0, 0.5, 1.0, 0.5), (1.0, 0.0, 0.5, 1.0, 0.0),\n (0.5, 0.0, 1.0, 1.0, 0.5), (0.5, 0.0, 1.0, 1.0, 0.0),\n (0.5, 0.0, 0.5, 1.0, 0.5), (0.5, 0.0, 0.5, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.75, 0.25), (1.0, 0.0, 1.0, 0.5, 1.0),\n (1.0, 0.0, 0.5, 1.0, 1.0), (1.0, 0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 1.0, 1.0, 1.0), (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0, 1.0), (0.5, 0.0, 0.5, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75, 0.75), (1.0, 0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 1.0, 0.0, 1.0), (0.5, 0.0, 0.5, 0.0, 1.0),\n (0.75, 0.25, 0.75, 0.25, 0.75), (1.0, 0.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 0.0, 1.0, 0.5), (0.5, 0.0, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.25, 0.75, 0.25), (1.0, 0.0, 0.0, 0.5, 1.0),\n (0.5, 0.0, 0.0, 1.0, 1.0), (0.5, 0.0, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.5), (0.0, 1.0, 0.0, 0.0, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.0), (0.0, 1.0, 0.5, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0, 0.0), (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.25, 0.75, 0.25, 0.25, 0.25), (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.0, 0.5), (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5), (0.0, 0.5, 1.0, 0.0, 0.5),\n (0.0, 0.5, 1.0, 0.0, 0.0), (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.25, 0.75, 0.75, 0.25, 0.25), (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5), (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5), (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.5), (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.75, 0.25), (0.0, 1.0, 1.0, 0.5, 1.0),\n (0.0, 1.0, 0.5, 1.0, 1.0), (0.0, 1.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 1.0, 1.0, 1.0), (0.0, 0.5, 1.0, 0.5, 1.0),\n (0.0, 0.5, 0.5, 1.0, 1.0), (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75, 0.75), (0.0, 1.0, 0.5, 0.0, 1.0),\n (0.0, 0.5, 1.0, 0.0, 1.0), (0.0, 0.5, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.75, 0.25, 0.75), (0.0, 1.0, 0.0, 1.0, 0.5),\n (0.0, 0.5, 0.0, 1.0, 0.5), (0.0, 0.5, 0.0, 1.0, 0.0),\n (0.25, 0.75, 0.25, 0.75, 0.25), (0.0, 1.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0, 1.0), (0.0, 0.5, 0.0, 0.5, 1.0),\n (0.25, 0.75, 0.25, 0.75, 0.75), (0.0, 0.5, 0.0, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.0, 0.5), (0.0, 0.0, 1.0, 0.5, 0.0),\n (0.25, 0.25, 
0.75, 0.25, 0.25), (0.0, 0.0, 1.0, 1.0, 0.5),\n (0.0, 0.0, 0.5, 1.0, 0.5), (0.0, 0.0, 0.5, 1.0, 0.0),\n (0.25, 0.25, 0.75, 0.75, 0.25), (0.0, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.0, 0.5, 1.0, 1.0), (0.0, 0.0, 0.5, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75, 0.75), (0.0, 0.0, 0.5, 0.0, 1.0),\n (0.25, 0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(1, 1, 1, 1, 1): [(1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0),\n (1.0, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0.75, 0.75, 0.75, 0.75, 0.75),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0),\n (0.5, 1.0, 0.5, 1.0, 1.0)],\n (0.25, 0.75, 0.75, 0.75, 0.25): [(0.5, 1.0, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0, 1, 1, 1, 0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (\n 0.5, 0.5, 1.0, 0.5, 0.5)],\n (0.0, 0.0, 1.0, 0.5, 1.0): [(0.5, 0.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.0, 0.0, 0.5, 0.5, 1.0),\n (0, 0, 1, 1, 1),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.5, 1.0, 0.5, 1.0),\n (0, 0, 1, 0, 1),\n (0.5, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.25, 0.25, 0.75, 0.75, 0.75),\n (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (\n 0.25, 0.25, 0.75, 0.25, 0.75)]}\n\n init_triangulation(5, 1, check, nn_checks)",
"def test_MeshMat_1group(self):\n\n MS_grp = self.meshsol.get_group(\"stator\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[0, 1, 2], [1, 2, 3]])\n result_tgl = cells_grp[\"triangle\"]\n testA = np.sum(abs(solution - result_tgl))\n msg = (\n \"Wrong output: returned \" + str(result_tgl) + \", expected: \" + str(solution)\n )\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)\n\n MS_grp = self.meshsol.get_group(\"rotor\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[3, 3], [1, 2], [2, 3]])\n results = cells_grp[\"triangle\"] # The point indices have changed !\n points = MS_grp.get_mesh().get_point(results)\n testA = np.sum(abs(solution - points))\n msg = \"Wrong output: returned \" + str(results) + \", expected: \" + str(solution)\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)",
"def test_2D_m4_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2_2_3D_cube_splits(self):\n check = [(0, 0, 0), (1, 1, 1), (1, 0, 0), (1, 1, 0), (1, 0, 1),\n (0, 1, 0),\n (0, 1, 1), (0, 0, 1), (0.5, 0.5, 0.5), (0.0, 0.5, 0.5),\n (0.0, 0.0, 0.5), (0.0, 0.5, 0.0), (0.5, 0.0, 0.5),\n (0.5, 0.0, 0.0),\n (0.5, 0.5, 0.0), (0.25, 0.25, 0.25), (1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5),\n (1.0, 0.5, 1.0), (0.5, 1.0, 0.5), (0.5, 1.0, 1.0),\n (0.5, 0.5, 1.0),\n (0.75, 0.75, 0.75), (1.0, 0.0, 0.5), (1.0, 0.5, 0.0),\n (0.75, 0.25, 0.25), (0.5, 1.0, 0.0), (0.75, 0.75, 0.25),\n (0.5, 0.0, 1.0), (0.75, 0.25, 0.75), (0.0, 1.0, 0.5),\n (0.25, 0.75, 0.25), (0.0, 0.5, 1.0), (0.25, 0.75, 0.75),\n (0.25, 0.25, 0.75), (0.5, 0.25, 0.25), (0.5, 0.5, 0.25),\n (0.5, 0.25, 0.5), (0.25, 0.5, 0.25), (0.25, 0.5, 0.5),\n (0.25, 0.25, 0.5), (0.375, 0.375, 0.375), (0.0, 0.25, 0.25),\n (0.0, 0.0, 0.25), (0.0, 0.25, 0.0), (0.25, 0.0, 0.25),\n (0.25, 0.0, 0.0), (0.25, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.5, 0.25), (0.0, 0.25, 0.5), (0.125, 0.375, 0.375),\n (0.25, 0.0, 0.5), (0.125, 0.125, 0.375), (0.25, 0.5, 0.0),\n (0.125, 0.375, 0.125), (0.5, 0.0, 0.25), (0.375, 0.125, 0.375),\n (0.5, 0.25, 0.0), (0.375, 0.125, 0.125), (0.375, 0.375, 0.125),\n (0.5, 0.75, 0.75), (0.5, 0.5, 0.75), (0.5, 0.75, 0.5),\n (0.75, 0.5, 0.75), (0.75, 0.5, 0.5), (0.75, 0.75, 0.5),\n (0.625, 0.625, 0.625), (1.0, 0.75, 0.75), (1.0, 1.0, 0.75),\n (1.0, 0.75, 1.0), (0.75, 1.0, 0.75), (0.75, 1.0, 1.0),\n (0.75, 0.75, 1.0), (0.875, 0.875, 0.875), (1.0, 0.5, 0.75),\n (1.0, 0.75, 0.5), (0.875, 0.625, 0.625), (0.75, 1.0, 0.5),\n (0.875, 0.875, 0.625), (0.75, 0.5, 1.0), (0.875, 0.625, 0.875),\n (0.5, 1.0, 0.75), (0.625, 0.875, 0.625), (0.5, 0.75, 1.0),\n (0.625, 0.875, 0.875), (0.625, 0.625, 0.875),\n (0.75, 0.5, 0.25),\n (0.75, 0.25, 0.5), (0.625, 0.375, 0.375), (1.0, 0.25, 0.25),\n (1.0, 0.0, 0.25), (1.0, 0.25, 0.0), (0.75, 0.0, 0.25),\n (0.75, 0.0, 0.0), (0.75, 0.25, 0.0), (0.875, 0.125, 0.125),\n (1.0, 0.5, 0.25), (1.0, 0.25, 0.5), (0.875, 0.375, 0.375),\n (0.75, 0.0, 0.5), (0.875, 0.125, 0.375), (0.75, 0.5, 0.0),\n (0.875, 0.375, 0.125), (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.625, 0.375, 0.125), (0.5, 0.75, 0.25),\n (0.625, 0.625, 0.375),\n (1.0, 0.75, 0.25), (1.0, 1.0, 0.25), (1.0, 0.75, 0.0),\n (0.75, 1.0, 0.25), (0.75, 1.0, 0.0), (0.75, 0.75, 0.0),\n (0.875, 0.875, 0.125), (0.875, 0.625, 0.375),\n (0.875, 0.875, 0.375),\n (0.875, 0.625, 0.125), (0.5, 1.0, 0.25), (0.625, 0.875, 0.375),\n (0.5, 0.75, 0.0), (0.625, 0.875, 0.125), (0.625, 0.625, 0.125),\n (0.5, 0.25, 0.75), (0.625, 0.375, 0.625), (1.0, 0.25, 0.75),\n (1.0, 0.0, 0.75), (1.0, 0.25, 1.0), (0.75, 0.0, 0.75),\n (0.75, 0.0, 1.0), (0.75, 0.25, 1.0), (0.875, 0.125, 0.875),\n (0.875, 0.375, 0.625), (0.875, 0.125, 0.625),\n (0.875, 0.375, 0.875),\n (0.5, 0.0, 0.75), (0.625, 0.125, 0.625), (0.5, 0.25, 1.0),\n (0.625, 0.125, 0.875), (0.625, 0.375, 0.875),\n (0.25, 0.75, 0.5),\n (0.375, 0.625, 0.375), (0.0, 0.75, 0.25), (0.0, 1.0, 0.25),\n (0.0, 0.75, 0.0), (0.25, 1.0, 0.25), (0.25, 1.0, 0.0),\n (0.25, 0.75, 0.0), (0.125, 0.875, 0.125), (0.0, 0.75, 0.5),\n (0.125, 0.625, 0.375), (0.25, 1.0, 0.5), (0.125, 0.875, 0.375),\n (0.125, 0.625, 0.125), (0.375, 0.875, 0.375),\n (0.375, 0.875, 0.125),\n (0.375, 0.625, 0.125), (0.25, 0.5, 0.75),\n (0.375, 0.625, 0.625),\n (0.0, 0.75, 0.75), (0.0, 1.0, 0.75), (0.0, 0.75, 1.0),\n (0.25, 1.0, 0.75), (0.25, 1.0, 1.0), (0.25, 0.75, 1.0),\n (0.125, 0.875, 0.875), (0.0, 0.5, 0.75), (0.125, 0.625, 0.625),\n (0.125, 0.875, 0.625), (0.25, 0.5, 1.0), (0.125, 0.625, 0.875),\n (0.375, 0.875, 0.625), (0.375, 0.875, 
0.875),\n (0.375, 0.625, 0.875),\n (0.375, 0.375, 0.625), (0.0, 0.25, 0.75), (0.0, 0.0, 0.75),\n (0.0, 0.25, 1.0), (0.25, 0.0, 0.75), (0.25, 0.0, 1.0),\n (0.25, 0.25, 1.0), (0.125, 0.125, 0.875),\n (0.125, 0.375, 0.625),\n (0.125, 0.125, 0.625), (0.125, 0.375, 0.875),\n (0.375, 0.125, 0.625),\n (0.375, 0.125, 0.875), (0.375, 0.375, 0.875)]\n\n nn_checks = {(0.5, 0.25, 0.25): [(0.375, 0.375, 0.125), (0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25),\n (0.625, 0.375, 0.375),\n (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.5, 0.5, 0.25), (0.25, 0.25, 0.25),\n (0.375, 0.375, 0.375),\n (0.5, 0.25, 0.5), (0.5, 0.5, 0.5),\n (0.5, 0.0, 0.25),\n (0.375, 0.125, 0.375), (0.5, 0.0, 0.5),\n (0.5, 0.25, 0.0),\n (0.375, 0.125, 0.125), (0.5, 0.0, 0.0),\n (0.625, 0.375, 0.125)],\n (0.625, 0.625, 0.875): [(0.75, 0.5, 1.0),\n (0.75, 0.75, 1.0),\n (0.5, 0.75, 1.0), (0.5, 0.5, 1.0),\n (0.5, 0.5, 0.75),\n (0.5, 0.75, 0.75),\n (0.75, 0.5, 0.75),\n (0.75, 0.75, 0.75)],\n (0, 0, 0): [(0.0, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.0, 0.25), (0.25, 0.0, 0.0),\n (0.0, 0.25, 0.25), (0.25, 0.25, 0.0),\n (0.25, 0.0, 0.25)]}\n\n init_triangulation(3, 2, check, nn_checks)",
"def test_dmi_uses_unit_length_2dmesh():\n A = 8.78e-12 # J/m\n D = 1.58e-3 # J/m^2\n Ms = 3.84e5 # A/m\n\n energies = []\n\n # unit_lengths 1e-9 and 1 are common, let's throw in an intermediate length\n # just to challenge the system a little:\n for unit_length in (1, 1e-4, 1e-9):\n radius = 200e-9 / unit_length\n maxh = 5e-9 / unit_length\n helical_period = (4 * pi * A / D) / unit_length\n k = 2 * pi / helical_period\n # HF 27 April 2014: The next command fails in dolfin 1.3\n # mesh = df.CircleMesh(df.Point(0, 0), radius, maxh)\n # The actual shape of the domain shouldn't matter for the test,\n # so let's use a Rectangular mesh which should work the same:\n\n nx = ny = int(round(radius / maxh))\n mesh = df.RectangleMesh(df.Point(0, 0), df.Point(radius, radius), nx, ny)\n\n S3 = df.VectorFunctionSpace(mesh, \"CG\", 1, dim=3)\n m_expr = df.Expression((\"0\", \"cos(k * x[0])\", \"sin(k * x[0])\"), k=k, degree=1)\n m = Field(S3, m_expr, name='m')\n dmi = DMI(D)\n Ms_dg = Field(df.FunctionSpace(mesh, 'DG', 0), Ms)\n dmi.setup(m, Ms_dg, unit_length=unit_length)\n energies.append(dmi.compute_energy())\n\n H = df.Function(S3)\n H.vector()[:] = dmi.compute_field()\n print H(0.0, 0.0)\n\n print \"Using unit_length = {}.\".format(unit_length)\n print \"Helical period {}.\".format(helical_period)\n print \"Energy {}.\".format(dmi.compute_energy())\n\n rel_diff_energies = abs(energies[0] - energies[1]) / abs(energies[1])\n print \"Relative difference of energy {}.\".format(rel_diff_energies)\n assert rel_diff_energies < 1e-13\n\n rel_diff_energies2 = abs(energies[0] - energies[2]) / abs(energies[2])\n print \"Relative difference2 of energy {}.\".format(rel_diff_energies2)\n assert rel_diff_energies2 < 1e-13",
"def test_3D_m6_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m8_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_linear_2d_merwe_column():\n\n\n def fx(x, dt):\n F = np.array([[1, dt, 0, 0],\n [0, 1, 0, 0],\n [0, 0, 1, dt],\n [0, 0, 0, 1]], dtype=float)\n\n return np.dot(F, x)\n\n def hx(x):\n return np.array([x[0], x[2]])\n\n\n dt = 0.1\n points = MerweScaledSigmaPoints2(4, .1, 2., -1)\n kf = UKF2(dim_x=4, dim_z=2, dt=dt, fx=fx, hx=hx, points=points)\n\n\n kf.x = np.array([[-1., 1., -1., 1]]).T\n kf.P*=0.0001\n #kf.R *=0\n #kf.Q\n\n zs = []\n for i in range(20):\n z = np.array([[i+randn()*0.1],\n [i+randn()*0.1]])\n zs.append(z)\n\n Ms, Ps = kf.batch_filter(zs)\n smooth_x, _, _ = kf.rts_smoother(Ms, Ps, dt=dt)\n\n if DO_PLOT:\n plt.figure()\n zs = np.asarray(zs)\n plt.plot(zs[:,0], marker='+', c='b')\n plt.plot(Ms[:,0], c='b')\n plt.plot(smooth_x[:,0], smooth_x[:,2], c='r')\n print(smooth_x)",
"def init_stitch(N):\n\tif N is None:\n\t\tN = np.int32(HII_DIM) #prepare for stitching\n\tMETA_GRID_SIZE = DIM/N\n\tM = np.int32(HII_DIM/META_GRID_SIZE)\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN/DIM)\n\tprint 'scale', scale\n\tHII_scale = np.float32(BOX_LEN/HII_DIM)\n\tshape = (DIM,DIM,N)\n\tstitch_grid_size = (DIM/(block_size[0]),\n\t\t\t\t\t\tDIM/(block_size[0]),\n\t\t\t\t\t\tN/(block_size[0]))\n\tHII_stitch_grid_size = (HII_DIM/(block_size[0]),\n\t\t\t\t\t\tHII_DIM/(block_size[0]),\n\t\t\t\t\t\tM/(block_size[0]))\n\t#ratio of large box to small size\n\tkernel_source = open(cmd_folder+\"/initialize_stitch.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'DIM': DIM, \n\t\t'VOLUME': VOLUME,\n\t\t'META_BLOCKDIM': N\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_stitch = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\tplan2d = Plan((np.int64(DIM), np.int64(DIM)), dtype=np.complex64)\n\tplan1d = Plan((np.int64(DIM)), dtype=np.complex64)\n\tprint \"init pspec\"\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\t#hbox_large = pyfftw.empty_aligned((DIM, DIM, DIM), dtype='complex64')\n\thbox_large = np.zeros((DIM, DIM, DIM), dtype=np.complex64)\n\t#hbox_small = np.zeros(HII_shape, dtype=np.float32)\n\t#hbox_large = n\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\n\t# Set up pinned memory for transfer\n\t#largebox_hs = cuda.aligned_empty(shape=shape, dtype=np.float32, alignment=resource.getpagesize())\n\tlargebox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.float32)\n\tlargecbox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.complex64)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tprint \"init boxes\"\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t# MRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=meta_x*N**3)\n\t\tinit_stitch(largebox_d, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tinit_stitch(largebox_d_imag, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largecbox_pin.copy()\n\t#if want to get velocity need to use this\n\tif True:\n\t\tprint \"saving kbox\"\n\t\tnp.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\n\tprint \"Executing FFT on device\"\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint hbox_large.dtype\n\tprint \"Finished FFT on device\"\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\t\n\tif True:\n\t\tprint \"loading kbox\"\n\t\thbox_large = 
np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\t#cuda.memcpy_htod_async(largebox_d, largebox_pin)\n\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tHII_filter(largebox_d, DIM, np.int32(meta_z), ZERO, smoothR, block=block_size, grid=stitch_grid_size);\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largebox_d.get_async()\n\t#import IPython; IPython.embed()\n\tprint \"Executing FFT on host\"\n\t#hbox_large = hifft(hbox_large).astype(np.complex64).real\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint \"Finished FFT on host\"\n\t#import IPython; IPython.embed()\n\n\t# for meta_x in xrange(META_GRID_SIZE):\n\t# \tfor meta_y in xrange(META_GRID_SIZE):\n\t# \t\tfor meta_z in xrange(META_GRID_SIZE):\n\t# \t\t\tlargebox_d = gpuarray.to_gpu(hbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N])\n\t# \t\t\tHII_filter(largebox_d, N, np.int32(meta_x), np.int32(meta_y), np.int32(meta_z), ZERO, smoothR, block=block_size, grid=grid_size);\n\t# \t\t\thbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N] = largebox_d.get()\n\t#plan = Plan(shape, dtype=np.complex64)\n\t#plan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\n\n\t# This saves a large resolution deltax\n\n\t\n\tprint \"downsampling\"\n\tsmallbox_d = gpuarray.zeros((HII_DIM,HII_DIM,M), dtype=np.float32)\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\t#largebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tlargebox_d /= scale**3 #\n\t\tsubsample_kernel(largebox_d, smallbox_d, DIM, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size) #subsample in real space\n\t\thbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallbox_d.get_async()\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), hbox_small)\n\t#import IPython; IPython.embed()\n\n\n\t# To get velocities: reload the k-space box\n\thbox_large = np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\thvbox_large = np.zeros((DIM, DIM, DIM), dtype=np.float32)\n\thvbox_small = np.zeros(HII_shape, dtype=np.float32)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,N), dtype=np.complex64)\n\tsmallvbox_d = gpuarray.zeros((HII_DIM, HII_DIM, M), dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\t\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(meta_z), np.int32(num), block=block_size, grid=stitch_grid_size)\n\t\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=stitch_grid_size)\n\t\t\tprint hvbox_large.shape, largevbox_d.shape\n\t\t\thvbox_large[:, :, meta_z*N:(meta_z+1)*N] = largevbox_d.get_async()\n\t\thvbox_large = fft_stitch(N, plan2d, plan1d, hvbox_large, largevbox_d).real\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargevbox_d = 
gpuarray.to_gpu_async(hvbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\tsubsample_kernel(largevbox_d.real, smallvbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size)\n\t\t\thvbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallvbox_d.get_async()\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallvbox_d.get())\n\n\treturn",
"def test_3D_m6_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_power_spectral_density_from_spatially_resolved_magnetisation_confined_to_mesh_region(tmpdir, debug=False):\n os.chdir(str(tmpdir))\n RTOL = 1e-10\n\n H1 = 1e6 # external field in A/m\n alpha1 = 0.5 # some sort of damping constant\n omega1 = gamma * H1 # precession frequency\n\n H2 = 2.8e4 # external field in A/m\n alpha2 = 0.3 # some sort of damping constant\n omega2 = gamma * H2 # precession frequency\n\n ##\n # Step 1: Construct a time series of artificial magnetisation\n # data and save it to a bunch of .npy files.\n ##\n t_step = 1e-11\n t_ini = 0\n t_end = 10e-9\n\n N1 = 42 # in a real application this would be the number of mesh vertices\n N2 = 23 # in a real application this would be the number of mesh vertices\n fft_test_helpers.create_test_npy_files_with_two_regions(\n str(tmpdir), t_step, t_ini, t_end, omega1, alpha1, N1, omega2, alpha2, N2)\n\n ##\n # Step 2: compute the FFT of a resampled time series, both by\n # hand and using FFT_m.\n ##\n # XXX TODO: Resampling timesteps is not supported when using .npy\n # files. Either simplify the code below, or implement saving to\n # .h5 files so that it's easier to implement resampling for\n # spatially resolved data, too.\n ##\n t_step_res = t_step\n t_ini_res = t_ini\n t_end_res = t_end\n ts_resampled = np.arange(t_ini_res, t_end_res, t_step_res)\n\n # Compute time series based on resampled timesteps\n mx_res = exp(-ts_resampled * 1e8 / alpha1) * sin(omega1 * ts_resampled)\n my_res = exp(-ts_resampled * 1e8 / alpha1) * cos(omega1 * ts_resampled)\n mz_res = 1 - sqrt(mx_res ** 2 + my_res ** 2)\n\n # Compute 'analytical' Fourier transform of resampled time series and\n # determine the power of the spectrum for each component. We also need\n # to multiply by the number of mesh nodes because the numerical algorithm\n # sums up all contributions at the individual nodes (but we can just\n # multiply because they are all identical by construction).\n psd_mx_expected = N1 * np.absolute(np.fft.rfft(mx_res)) ** 2\n psd_my_expected = N1 * np.absolute(np.fft.rfft(my_res)) ** 2\n psd_mz_expected = N1 * np.absolute(np.fft.rfft(mz_res)) ** 2\n\n # Compute Fourier transform of resampled time series using FFT_m\n freqs_computed, psd_mx_computed, psd_my_computed, psd_mz_computed = \\\n compute_power_spectral_density('m_ringdown*.npy', t_step_res, t_ini=t_ini_res,\n t_end=t_end_res, subtract_values=None, restrict_to_vertices=xrange(N1))\n\n # Check that the analytically determined power spectra are the same as the\n # computed ones.\n assert(np.allclose(psd_mx_expected, psd_mx_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_my_expected, psd_my_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_mz_expected, psd_mz_computed, atol=0, rtol=RTOL))\n\n if debug:\n # Plot the spectra for debugging\n fig = plt.figure(figsize=(20, 5))\n ax = fig.gca()\n ax.plot(freqs_computed, psd_mx_expected, label='psd_mx_expected')\n ax.plot(freqs_computed, psd_my_expected, label='psd_my_expected')\n ax.plot(freqs_computed, psd_mz_expected, label='psd_mz_expected')\n ax.plot(freqs_computed, psd_mx_computed, label='psd_mx_computed')\n ax.plot(freqs_computed, psd_my_computed, label='psd_my_computed')\n ax.plot(freqs_computed, psd_mz_computed, label='psd_mz_computed')\n ax.legend(loc='best')\n fig.savefig('psd_m_McMichaelStiles.png')",
"def test_1_2_2D_rec_splits(self):\n check = [(3.0, -2.0), (7.0, -1.0), (7.0, -2.0), (3.0, -1.0),\n (5.0, -1.5), (3.0, -1.5), (5.0, -2.0), (4.0, -1.75),\n (7.0, -1.5), (5.0, -1.0), (6.0, -1.25), (6.0, -1.75),\n (4.0, -1.25), (5.0, -1.75), (4.0, -1.5), (4.5, -1.625),\n (3.0, -1.75), (4.0, -2.0), (3.5, -1.875), (3.5, -1.625),\n (4.5, -1.875), (5.0, -1.25), (6.0, -1.5), (5.5, -1.375),\n (7.0, -1.25), (6.0, -1.0), (6.5, -1.125), (6.5, -1.375),\n (5.5, -1.125), (5.5, -1.625), (7.0, -1.75), (6.0, -2.0),\n (6.5, -1.875), (6.5, -1.625), (5.5, -1.875), (4.5, -1.375),\n (3.0, -1.25), (4.0, -1.0), (3.5, -1.125), (3.5, -1.375),\n (4.5, -1.125)]\n nn_checks = {(3.0, -2.0): [(3.0, -1.75), (3.5, -1.875), (4.0, -2.0)],\n (5.0, -1.75): [(5.0, -2.0), (5.0, -1.5), (5.5, -1.625),\n (5.5, -1.875), (4.5, -1.625), (6.0, -1.75),\n (4.5, -1.875), (4.0, -1.75)],\n (6.0, -2.0): [(5.0, -2.0), (5.5, -1.875), (6.0, -1.75),\n (6.5, -1.875), (7, -2)],\n (4.5, -1.125): [(5.0, -1.0), (4.0, -1.25), (5.0, -1.25),\n (4.0, -1.0)]}\n\n init_triangulation(2, 2, check, nn_checks, bounds=[(3, 7), (-2, -1)])",
"def feature_processing(array2d):\n new_array2d = np.zeros([array2d.shape[0], 29])\n # items/ orders\n new_array2d[:, 0] = array2d[:, 4] / array2d[:, 3]\n # cancels / orders\n new_array2d[:, 1] = array2d[:, 5] / array2d[:, 3]\n # returns / items\n new_array2d[:, 2] = array2d[:, 6] / array2d[:, 4]\n # voucher / orders\n new_array2d[:, 3] = array2d[:, 10] / array2d[:, 3]\n # female_items / female_items + male_items\n new_array2d[:, 4] = array2d[:, 15] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # male_items / female_items + male_items\n new_array2d[:, 5] = array2d[:, 16] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # unisex_items / items\n new_array2d[:, 6] = array2d[:, 17] / array2d[:, 4]\n # wapp_items / items\n new_array2d[:, 7] = array2d[:, 18] / array2d[:, 4]\n # wftw_items / items\n new_array2d[:, 8] = array2d[:, 19] / array2d[:, 4]\n # mapp_items / items\n new_array2d[:, 9] = array2d[:, 20] / array2d[:, 4]\n # wacc_items / items\n new_array2d[:, 10] = array2d[:, 21] / array2d[:, 4]\n # macc_items / items\n new_array2d[:, 11] = array2d[:, 22] / array2d[:, 4]\n # mftw_items / items\n new_array2d[:, 12] = array2d[:, 23] / array2d[:, 4]\n # wspt_items / items\n new_array2d[:, 13] = array2d[:, 24] / array2d[:, 4]\n # mspt_items / items\n new_array2d[:, 14] = array2d[:, 25] / array2d[:, 4]\n # curvy_items / items\n # Curvy item has a strong correlation with gender, however they are very right-skewed use np.power(1/6) to smooth it\n new_array2d[:, 15] = np.power(array2d[:, 26] / array2d[:, 4], 1 / 6)\n # sacc_items / items\n new_array2d[:, 16] = array2d[:, 27] / array2d[:, 4]\n # msite_orders / orders\n new_array2d[:, 17] = array2d[:, 28] / array2d[:, 3]\n # desktop_orders / orders\n new_array2d[:, 18] = array2d[:, 29] / array2d[:, 3]\n # android_orders / orders\n new_array2d[:, 19] = array2d[:, 30] / array2d[:, 3]\n # ios_orders / orders\n new_array2d[:, 20] = array2d[:, 31] / array2d[:, 3]\n # other_device_orders / orders\n new_array2d[:, 21] = array2d[:, 32] / array2d[:, 3]\n # work_orders / orders\n new_array2d[:, 22] = array2d[:, 33] / array2d[:, 3]\n # home_orders / orders\n new_array2d[:, 23] = array2d[:, 34] / array2d[:, 3]\n # parcelpoint_orders / orders\n new_array2d[:, 24] = array2d[:, 35] / array2d[:, 3]\n # other_collection_orders / orders\n new_array2d[:, 25] = array2d[:, 36] / array2d[:, 3]\n # average_discount_onoffer\n new_array2d[:, 26] = array2d[:, 39]\n # average_discount_used\n new_array2d[:, 27] = array2d[:, 40]\n # revenue / order\n new_array2d[:, 28] = array2d[:, 41] / array2d[:, 3]\n\n # normalize by each feature\n new_array2d = normalize(new_array2d, axis=0, norm='max')\n return new_array2d",
"def _apply_array_spin12_halffilling(self, h1e: 'Nparray',\n h2e: 'Nparray') -> 'Nparray':\n if fqe.settings.use_accelerated_code:\n #return self._apply_array_spin12_blocked(h1e, h2e)\n return self._apply_array_spin12_lm(h1e, h2e)\n else:\n h1e = copy.deepcopy(h1e)\n h2e = numpy.moveaxis(copy.deepcopy(h2e), 1, 2) * (-1.0)\n norb = self.norb()\n for k in range(norb * 2):\n h1e[:, :] -= h2e[:, k, k, :]\n\n (dveca, dvecb) = self.calculate_dvec_spin()\n out = numpy.einsum(\"ij,ijkl->kl\", h1e[:norb, :norb], dveca) \\\n + numpy.einsum(\"ij,ijkl->kl\", h1e[norb:, norb:], dvecb)\n ndveca = numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[:norb, :norb, :norb, :norb], dveca) \\\n + numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[:norb, :norb, norb:, norb:], dvecb)\n ndvecb = numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[norb:, norb:, :norb, :norb], dveca) \\\n + numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[norb:, norb:, norb:, norb:], dvecb)\n out += self._calculate_coeff_spin_with_dvec((ndveca, ndvecb))\n return out",
"def init():\n\tN = np.int32(DIM) #prepare for stitching\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN)/DIM\n\tHII_scale = np.float32(BOX_LEN)/HII_DIM\n\tshape = (N,N,N)\n\t\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\n\tkernel_source = open(cmd_folder+\"/initialize.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'VOLUME': VOLUME,\n\t\t'DIM': DIM\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_kernel = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tadj_complex_conj = main_module.get_function(\"adj_complex_conj\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d, np.int32(DIM), block=block_size, grid=grid_size)\n\n\t#import IPython; IPython.embed()\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d_imag, np.int32(DIM), block=block_size, grid=grid_size)\n\n\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\n\t#adj_complex_conj(largebox_d, DIM, block=block_size, grid=grid_size)\n\tlargebox = largebox_d.get()\n\t#np.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox)\n\n\t#save real space box before smoothing\n\tplan = Plan(shape, dtype=np.complex64)\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox_d.real.get_async())\n\n\t#save real space box after smoothing and subsampling\n\t# host largebox is still in k space, no need to reload from disk\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tHII_filter(largebox_d, N, ZERO, smoothR, block=block_size, grid=grid_size);\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tsubsample_kernel(largebox_d.real, smallbox_d, N, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_grid_size) #subsample in real space\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), smallbox_d.get_async())\n\n\t# reload the k-space box for velocity boxes\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\t\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,DIM), dtype=np.complex64)\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(num), block=block_size, grid=grid_size)\n\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=grid_size)\n\t\tplan.execute(largevbox_d, inverse=True)\n\t\tlargevbox_d /= scale**3\n\t\t#import IPython; IPython.embed()\n\t\tsubsample_kernel(largevbox_d.real, 
smallbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_grid_size)\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallbox_d.get())\n\n\treturn",
"def test_2_2_3D_rec_splits(self):\n check = [(-3.0, -2.0, 0.0), (4.0, 10.0, 1.0), (4.0, -2.0, 0.0),\n (4.0, 10.0, 0.0), (4.0, -2.0, 1.0), (-3.0, 10.0, 0.0),\n (-3.0, 10.0, 1.0), (-3.0, -2.0, 1.0), (0.5, 4.0, 0.5),\n (-3.0, 4.0, 0.5), (-3.0, -2.0, 0.5), (-3.0, 4.0, 0.0),\n (0.5, -2.0, 0.5), (0.5, -2.0, 0.0), (0.5, 4.0, 0.0),\n (-1.25, 1.0, 0.25), (4.0, 4.0, 0.5), (4.0, 10.0, 0.5),\n (4.0, 4.0, 1.0), (0.5, 10.0, 0.5), (0.5, 10.0, 1.0),\n (0.5, 4.0, 1.0), (2.25, 7.0, 0.75), (4.0, -2.0, 0.5),\n (4.0, 4.0, 0.0), (2.25, 1.0, 0.25), (0.5, 10.0, 0.0),\n (2.25, 7.0, 0.25), (0.5, -2.0, 1.0), (2.25, 1.0, 0.75),\n (-3.0, 10.0, 0.5), (-1.25, 7.0, 0.25), (-3.0, 4.0, 1.0),\n (-1.25, 7.0, 0.75), (-1.25, 1.0, 0.75), (0.5, 1.0, 0.25),\n (0.5, 4.0, 0.25), (0.5, 1.0, 0.5), (-1.25, 4.0, 0.25),\n (-1.25, 4.0, 0.5), (-1.25, 1.0, 0.5), (-0.375, 2.5, 0.375),\n (-3.0, 1.0, 0.25), (-3.0, -2.0, 0.25), (-3.0, 1.0, 0.0),\n (-1.25, -2.0, 0.25), (-1.25, -2.0, 0.0), (-1.25, 1.0, 0.0),\n (-2.125, -0.5, 0.125), (-3.0, 4.0, 0.25), (-3.0, 1.0, 0.5),\n (-2.125, 2.5, 0.375), (-1.25, -2.0, 0.5),\n (-2.125, -0.5, 0.375), (-1.25, 4.0, 0.0), (-2.125, 2.5, 0.125),\n (0.5, -2.0, 0.25), (-0.375, -0.5, 0.375), (0.5, 1.0, 0.0),\n (-0.375, -0.5, 0.125), (-0.375, 2.5, 0.125), (0.5, 7.0, 0.75),\n (0.5, 4.0, 0.75), (0.5, 7.0, 0.5), (2.25, 4.0, 0.75),\n (2.25, 4.0, 0.5), (2.25, 7.0, 0.5), (1.375, 5.5, 0.625),\n (4.0, 7.0, 0.75), (4.0, 10.0, 0.75), (4.0, 7.0, 1.0),\n (2.25, 10.0, 0.75), (2.25, 10.0, 1.0), (2.25, 7.0, 1.0),\n (3.125, 8.5, 0.875), (4.0, 4.0, 0.75), (4.0, 7.0, 0.5),\n (3.125, 5.5, 0.625), (2.25, 10.0, 0.5), (3.125, 8.5, 0.625),\n (2.25, 4.0, 1.0), (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (1.375, 8.5, 0.625), (0.5, 7.0, 1.0), (1.375, 8.5, 0.875),\n (1.375, 5.5, 0.875), (2.25, 4.0, 0.25), (2.25, 1.0, 0.5),\n (1.375, 2.5, 0.375), (4.0, 1.0, 0.25), (4.0, -2.0, 0.25),\n (4.0, 1.0, 0.0), (2.25, -2.0, 0.25), (2.25, -2.0, 0.0),\n (2.25, 1.0, 0.0), (3.125, -0.5, 0.125), (4.0, 4.0, 0.25),\n (4.0, 1.0, 0.5), (3.125, 2.5, 0.375), (2.25, -2.0, 0.5),\n (3.125, -0.5, 0.375), (2.25, 4.0, 0.0), (3.125, 2.5, 0.125),\n (1.375, -0.5, 0.375), (1.375, -0.5, 0.125),\n (1.375, 2.5, 0.125), (0.5, 7.0, 0.25), (1.375, 5.5, 0.375),\n (4.0, 7.0, 0.25), (4.0, 10.0, 0.25), (4.0, 7.0, 0.0),\n (2.25, 10.0, 0.25), (2.25, 10.0, 0.0), (2.25, 7.0, 0.0),\n (3.125, 8.5, 0.125), (3.125, 5.5, 0.375), (3.125, 8.5, 0.375),\n (3.125, 5.5, 0.125), (0.5, 10.0, 0.25), (1.375, 8.5, 0.375),\n (0.5, 7.0, 0.0), (1.375, 8.5, 0.125), (1.375, 5.5, 0.125),\n (0.5, 1.0, 0.75), (1.375, 2.5, 0.625), (4.0, 1.0, 0.75),\n (4.0, -2.0, 0.75), (4.0, 1.0, 1.0), (2.25, -2.0, 0.75),\n (2.25, -2.0, 1.0), (2.25, 1.0, 1.0), (3.125, -0.5, 0.875),\n (3.125, 2.5, 0.625), (3.125, -0.5, 0.625), (3.125, 2.5, 0.875),\n (0.5, -2.0, 0.75), (1.375, -0.5, 0.625), (0.5, 1.0, 1.0),\n (1.375, -0.5, 0.875), (1.375, 2.5, 0.875), (-1.25, 7.0, 0.5),\n (-0.375, 5.5, 0.375), (-3.0, 7.0, 0.25), (-3.0, 10.0, 0.25),\n (-3.0, 7.0, 0.0), (-1.25, 10.0, 0.25), (-1.25, 10.0, 0.0),\n (-1.25, 7.0, 0.0), (-2.125, 8.5, 0.125), (-3.0, 7.0, 0.5),\n (-2.125, 5.5, 0.375), (-1.25, 10.0, 0.5), (-2.125, 8.5, 0.375),\n (-2.125, 5.5, 0.125), (-0.375, 8.5, 0.375),\n (-0.375, 8.5, 0.125), (-0.375, 5.5, 0.125), (-1.25, 4.0, 0.75),\n (-0.375, 5.5, 0.625), (-3.0, 7.0, 0.75), (-3.0, 10.0, 0.75),\n (-3.0, 7.0, 1.0), (-1.25, 10.0, 0.75), (-1.25, 10.0, 1.0),\n (-1.25, 7.0, 1.0), (-2.125, 8.5, 0.875), (-3.0, 4.0, 0.75),\n (-2.125, 5.5, 0.625), (-2.125, 8.5, 0.625), (-1.25, 4.0, 1.0),\n (-2.125, 5.5, 0.875), (-0.375, 8.5, 0.625),\n (-0.375, 8.5, 
0.875), (-0.375, 5.5, 0.875),\n (-0.375, 2.5, 0.625), (-3.0, 1.0, 0.75), (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-1.25, -2.0, 0.75), (-1.25, -2.0, 1.0),\n (-1.25, 1.0, 1.0), (-2.125, -0.5, 0.875), (-2.125, 2.5, 0.625),\n (-2.125, -0.5, 0.625), (-2.125, 2.5, 0.875),\n (-0.375, -0.5, 0.625), (-0.375, -0.5, 0.875),\n (-0.375, 2.5, 0.875)]\n nn_checks = {(2.25, 7.0, 0.75): [(4.0, 7.0, 0.75), (2.25, 7.0, 1.0),\n (4.0, 7.0, 0.5), (4.0, 7.0, 1.0),\n (4.0, 4.0, 0.75), (1.375, 5.5, 0.875),\n (2.25, 4.0, 1.0), (2.25, 4.0, 0.5),\n (2.25, 4.0, 0.75), (3.125, 8.5, 0.875),\n (3.125, 8.5, 0.625), (4.0, 10.0, 0.75),\n (2.25, 10.0, 1.0), (2.25, 10.0, 0.75),\n (2.25, 10.0, 0.5), (1.375, 8.5, 0.625),\n (1.375, 8.5, 0.875), (0.5, 7.0, 0.75),\n (0.5, 7.0, 0.5), (3.125, 5.5, 0.625),\n (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (0.5, 7.0, 1.0), (0.5, 4.0, 0.75),\n (2.25, 7.0, 0.5), (1.375, 5.5, 0.625)],\n (4.0, -2.0, 0.5): [(4.0, -2.0, 0.75), (4.0, -2.0, 0.25),\n (2.25, 1.0, 0.5), (2.25, -2.0, 0.75),\n (2.25, -2.0, 0.5), (2.25, -2.0, 0.25),\n (4.0, 1.0, 0.25), (4.0, 1.0, 0.75),\n (4.0, 1.0, 0.5), (3.125, -0.5, 0.375),\n (3.125, -0.5, 0.625)],\n (-2.125, -0.5, 0.875): [(-1.25, 1.0, 1.0),\n (-1.25, 1.0, 0.75),\n (-1.25, -2.0, 0.75),\n (-1.25, -2.0, 1.0),\n (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-3, -2, 1),\n (-3.0, 1.0, 0.75)]}\n\n init_triangulation(3, 2, check, nn_checks, bounds=[(-3, 4), (-2, 10), (0, 1)])",
"def test_filter_l2_1():\n box = Box(length=L, origin=O)\n f = Field(box, formula=func, name='f0')\n d_fine = Discretization([513, 513, 513])\n d_coarse = Discretization([257, 257, 257], ghosts=[2, 2, 2])\n op = MultiresolutionFilter(d_in=d_fine, d_out=d_coarse,\n variables={f: d_coarse},\n method={Remesh: L2_1, })\n op.discretize()\n op.setup()\n topo_coarse = op.discreteFields[f].topology\n topo_fine = [t for t in f.discreteFields.keys()\n if not t is topo_coarse][0]\n f.initialize(topo=topo_fine)\n f_out = f.discreteFields[topo_coarse]\n op.apply(simu)\n valid = [npw.zeros(f_out[0].shape), ]\n valid = func(valid, *topo_coarse.mesh.coords)\n assert np.allclose(valid[0][topo_coarse.mesh.iCompute],\n f_out[0][topo_coarse.mesh.iCompute]), \\\n np.max(np.abs(valid[0][topo_coarse.mesh.iCompute] -\n f_out[0][topo_coarse.mesh.iCompute]))",
"def test_linear_2d_merwe():\n\n\n def fx(x, dt):\n F = np.array([[1, dt, 0, 0],\n [0, 1, 0, 0],\n [0, 0, 1, dt],\n [0, 0, 0, 1]], dtype=float)\n\n return np.dot(F, x)\n\n def hx(x):\n return np.array([x[0], x[2]])\n\n\n dt = 0.1\n points = MerweScaledSigmaPoints(4, .1, 2., -1)\n kf = UKF(dim_x=4, dim_z=2, dt=dt, fx=fx, hx=hx, points=points)\n\n\n kf.x = np.array([-1., 1., -1., 1])\n kf.P*=0.0001\n #kf.R *=0\n #kf.Q\n\n zs = []\n for i in range(20):\n z = np.array([i+randn()*0.1, i+randn()*0.1])\n zs.append(z)\n\n Ms, Ps = kf.batch_filter(zs)\n smooth_x, _, _ = kf.rts_smoother(Ms, Ps, dt=dt)\n\n if DO_PLOT:\n plt.figure()\n zs = np.asarray(zs)\n plt.plot(zs[:,0], marker='+')\n plt.plot(Ms[:,0], c='b')\n plt.plot(smooth_x[:,0], smooth_x[:,2], c='r')\n print(smooth_x)",
"def test_uv_degrid_gaussian_kernel():\n\n layout = read_layout(layout_path=f\"{test_data}/test_mwa.txt\")\n xyz = enh_xyz(layout=layout, latitude=mwa_geo.latitude.radians)\n uvw = xyz_uvw(xyz=xyz, freq=freq, dec0=mwa_geo.latitude.radians, ha0=0)\n uv = uv_degrid(\n max_lambda=1400, nside=20, uvw=uvw, sigma=3, kersize=21, kernel=\"gaussian\"\n )\n\n assert uv.shape == (20, 20)\n assert uv[0, 0] == 1.295932713086053e-05",
"def test_sw2():\n B1 = 100\n B2 = 200\n h = 18\n t = 1\n H = h + 2 * t\n E1 = 20000\n E2 = 10000\n sections = ((B1, t, 0, E1), (B2, t, h + t, E2))\n EI, top, bot = bm.EI(sections, E1)\n EIc = E1 * B1 * (H ** 3 - h ** 3) / 12\n assert 0.99 < EI / EIc < 1.01",
"def test_2_layer():\r\n # angular frequency in radians * THz\r\n w = 100 * nu.THz\r\n # Relative permittivity of metal and dielectric\r\n em = -4.56 + 0.12j\r\n ed = 1.23 + 0.01j\r\n ex_list = ez_list = [ed, em]\r\n # Relative permeabilities\r\n mu_list = [1,1]\r\n # Dictionary of input parameters\r\n input_params = {'w': w, 'd_list': [inf,inf], 'ex_list': ex_list,\r\n 'ez_list': ez_list, 'mu_list': mu_list}\r\n \r\n # Calculate the theoretical kx\r\n theo_kx = (w / nu.c0) * cmath.sqrt((em * ed) / (em + ed))\r\n if theo_kx.imag < 0:\r\n theo_kx *= -1\r\n print('Theoretical kx:',\r\n '(%.7g+%.7gj) rad/um' % (theo_kx.real / nu.um**-1, theo_kx.imag / nu.um**-1))\r\n \r\n # If I use the theoretical kx value, the mode should be correct and\r\n # all my tests should pass.\r\n params = deepcopy(input_params)\r\n params['kx'] = theo_kx\r\n params = find_all_params_from_kx(params)\r\n kzd, kzm = params['kz_list']\r\n # check that kz_list is correct\r\n assert_floats_are_equal(kzd**2, (w**2 / nu.c0**2) * ed**2 / (em + ed))\r\n assert_floats_are_equal(kzm**2, (w**2 / nu.c0**2) * em**2 / (em + ed))\r\n # check that layer_bottom_list is correct\r\n assert params['layer_bottom_list'][0] == -inf\r\n assert params['layer_bottom_list'][1] == 0\r\n # Check that the boundary condition matrix agrees with hand-calculation\r\n bc_mat = bc_matrix(params)\r\n # ...top-left is Ex0down / H0down\r\n assert_floats_are_equal(bc_mat[0,0], -kzd / (w * ed * nu.eps0))\r\n # ...top-right is -Ex1up / H1up\r\n assert_floats_are_equal(bc_mat[0,1], -kzm / (w * em * nu.eps0))\r\n # ...bottom-left is eps0 * Ez0down / H0down\r\n assert_floats_are_equal(bc_mat[1,0], ed * -theo_kx / (w * ed * nu.eps0))\r\n # ...bottom-right is -eps1 * Ez1up / H1up\r\n assert_floats_are_equal(bc_mat[1,1], -em * -theo_kx / (w * em * nu.eps0))\r\n # Check that one of the eigenvalues is almost zero (compared to the size\r\n # of the matrix elements).\r\n eigenvalues = np.linalg.eig(bc_mat)[0]\r\n assert abs(eigenvalues).min() / abs(bc_mat).max() < 1e-6\r\n # Check that the mode passes all tests.\r\n assert check_mode(params, thorough=True) is True\r\n # Check that I can scale the fields and it still passes all tests.\r\n params_scaled = rescale_fields(1.23+4.56j, params)\r\n assert check_mode(params_scaled, thorough=True) is True\r\n \r\n # Now try my kx-finding algorithm, to see if it finds the right value.\r\n kx_list = find_kx(input_params)\r\n print('kx_list:',\r\n ['(%.7g+%.7gj) rad/um' % (kx.real / nu.um**-1, kx.imag / nu.um**-1)\r\n for kx in kx_list])\r\n kx = kx_list[0]\r\n assert_floats_are_equal(theo_kx, kx)\r\n \r\n plot_mode(params)\r\n \r\n print('If you see this message, all the tests succeeded!!')"
]
| [
"0.6300875",
"0.60879",
"0.60446244",
"0.6017783",
"0.5991836",
"0.5873277",
"0.5863614",
"0.576695",
"0.5719629",
"0.5708563",
"0.56913555",
"0.5682705",
"0.56802297",
"0.563788",
"0.56305665",
"0.55943346",
"0.5587067",
"0.5576163",
"0.5573613",
"0.5570954",
"0.5570553",
"0.5540334",
"0.55210084",
"0.54895604",
"0.5474874",
"0.54736507",
"0.5427908",
"0.5419212",
"0.5414014",
"0.54095596"
]
| 0.63488454 | 0 |
Testing M6 remeshing formula in 2D, 2 kernels, single precision, o2_FullHalf splitting. | def test_2D_m6_2k_sFH():
scal, velo = setup_2D()
advec = Advection(velo, scal, discretization=d2d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: 'gpu_2k',
Splitting: 'o2_FullHalf'}
)
advec_py = Advection(velo, scal, discretization=d2d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: '',
Splitting: 'o2_FullHalf'}
)
assertion_2D_withPython(scal, velo, advec, advec_py) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_2D_m6_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_2k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_1_2_2D_cube_splits(self):\n check = [(0, 0), (1, 1), (1, 0), (0, 1), (0.5, 0.5), (0.0, 0.5),\n (0.5, 0.0),\n (0.25, 0.25), (1.0, 0.5), (0.5, 1.0), (0.75, 0.75),\n (0.75, 0.25),\n (0.25, 0.75), (0.5, 0.25), (0.25, 0.5), (0.375, 0.375),\n (0.0, 0.25),\n (0.25, 0.0), (0.125, 0.125), (0.125, 0.375), (0.375, 0.125),\n (0.5, 0.75), (0.75, 0.5), (0.625, 0.625), (1.0, 0.75),\n (0.75, 1.0),\n (0.875, 0.875), (0.875, 0.625), (0.625, 0.875), (0.625, 0.375),\n (1.0, 0.25), (0.75, 0.0), (0.875, 0.125), (0.875, 0.375),\n (0.625, 0.125), (0.375, 0.625), (0.0, 0.75), (0.25, 1.0),\n (0.125, 0.875), (0.125, 0.625), (0.375, 0.875)]\n\n nn_checks = {(0, 0): [(0.25, 0.0), (0.0, 0.25), (0.125, 0.125)],\n (0.625, 0.375): [(0.5, 0.5), (0.75, 0.25), (0.75, 0.5),\n (0.5, 0.25)],\n (0, 1): [(0.25, 1.0), (0.125, 0.875),(0.0, 0.75)],\n (0.625, 0.125): [(0.5, 0.0), (0.75, 0.25), (0.75, 0.0),\n (0.5, 0.25)]}\n\n\n init_triangulation(2, 2, check, nn_checks)",
"def test_2D_m6_1k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_multiple_case(self):\r\n\r\n shp = (3, 3)\r\n fx, fy, fz, fw = fmatrices('xyzw')\r\n dx, dy, dz, dw = dmatrices('xyzw')\r\n fv = fvector('r').dimshuffle('x', 0)\r\n dv = dvector('s').dimshuffle('x', 0)\r\n fxv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fyv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fzv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fwv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float32').reshape(1, shp[0])\r\n dxv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dyv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dzv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dwv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float64').reshape(1, shp[0])\r\n\r\n #We must be sure that the Canonizer is working, but that we don't have other\r\n # optimisation that could hide bug in the Canonizer as local_elemwise_fusion\r\n mode = compile.mode.get_default_mode()\r\n old_optimizer = mode._optimizer\r\n try:\r\n mode._optimizer = gof.Query([\"canonicalize\"])\r\n mode._optimizer = mode._optimizer.including('ShapeOpt')\r\n mode._optimizer = mode._optimizer.excluding(\r\n 'local_elemwise_fusion')\r\n\r\n #test x / x -> 1\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([(fx/fx,[fx],[fxv],'float32'),\r\n (dx/dx,[dx],[dxv],'float64'),\r\n (fv/fv,[fv],[fvv],'float32'),\r\n (dv/dv,[dv],[dvv],'float64'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert (out == numpy.ones(shp, dtype=out_dtype)).all()\r\n topo = f.maker.fgraph.toposort()\r\n if sym_inputs[0].broadcastable[0]:\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, tensor.Alloc)\r\n else:\r\n assert len(topo) == 3\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, Shape_i)\r\n assert isinstance(topo[2].op, tensor.Alloc)\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (x * y) / x -> y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx*dy)/dx,[dx,dy],[dxv,dyv],0,'float64'),\r\n ((fx*fy)/fx,[fx,fy],[fxv,fyv],0,'float32'),\r\n ((dv*dy)/dv,[dv,dy],[dvv,dyv],0,'float64'),\r\n ((fv*fy)/fv,[fv,fy],[fvv,fyv],0,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n ((dx*dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float64, row)>)]\r\n ((fx*fv)/fx,[fx,fv],[fxv,fvv],1,'float32')\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float32, row)>)]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert(out_dtype == out.dtype)\r\n assert numpy.allclose(out, val_inputs[1])\r\n topo = f.maker.fgraph.toposort()\r\n print \"ID TOPO\", id, topo, sym_inputs\r\n for r, t in f.maker.fgraph.shape_feature.shape_of.items():\r\n print ' ', r, t\r\n if topo and not(len(topo)==1 and topo[0].op==deep_copy_op):\r\n for node in topo[:-1]:\r\n assert isinstance(node.op, Shape_i)\r\n assert isinstance(topo[-1].op, tensor.Alloc)\r\n\r\n #test x / y / x -> 1 / y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx/dy)/dx,[dx,dy],[dxv,dyv],1,'float64'),\r\n ((fx/fy)/fx,[fx,fy],[fxv,fyv],1,'float32'),\r\n ((dv/dy)/dv,[dv,dy],[dvv,dyv],1,'float64'),\r\n 
((fv/fy)/fv,[fv,fy],[fvv,fyv],1,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n\r\n ((dx/dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float64, row)>), Alloc]\r\n ((fx/fv)/fx,[fx,fv],[fxv,fvv],1,'float32'),\r\n #topo:[Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float32, row)>), Alloc]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (1 / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n print topo\r\n elem = [t for t in topo if isinstance(t.op, T.Elemwise)]\r\n assert len(elem) == nb_elemwise\r\n assert isinstance(elem[0].op, (T.Elemwise, ))\r\n assert isinstance(elem[0].op.scalar_op, (\r\n theano.scalar.basic.Inv, theano.scalar.basic.TrueDiv))\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (a / b) * (b / c) * (c / d) -> a / d\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((dx / dy) * (dy / dz) * (dz / dw),[dx,dy,dz,dw],[dxv,dyv,dzv,dwv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fw),[fx,fy,fz,fw],[fxv,fyv,fzv,fwv],'float32'),\r\n ((dv / dy) * (dy / dz) * (dz / dw),[dv,dy,dz,dw],[dvv,dyv,dzv,dwv],'float64'),\r\n ((fv / fy) * (fy / fz) * (fz / fw),[fv,fy,fz,fw],[fvv,fyv,fzv,fwv],'float32'),\r\n ((dx / dv) * (dv / dz) * (dz / dw),[dx,dv,dz,dw],[dxv,dvv,dzv,dwv],'float64'),\r\n ((fx / fv) * (fv / fz) * (fz / fw),[fx,fv,fz,fw],[fxv,fvv,fzv,fwv],'float32'),\r\n ((dx / dy) * (dy / dv) * (dv / dw),[dx,dy,dv,dw],[dxv,dyv,dvv,dwv],'float64'),\r\n ((fx / fy) * (fy / fv) * (fv / fw),[fx,fy,fv,fw],[fxv,fyv,fvv,fwv],'float32'),\r\n ((dx / dy) * (dy / dz) * (dz / dv),[dx,dy,dz,dv],[dxv,dyv,dzv,dvv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fv),[fx,fy,fz,fv],[fxv,fyv,fzv,fvv],'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (val_inputs[0] / val_inputs[3]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[0].inputs) == 2\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (2.0 * x) / (4.0 * y) -> (0.5 * x) / y\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (((2.0*dx)/(4.0*dy)),[dx,dy],[dxv,dyv],'float64'),\r\n (((2.0*fx)/(4.0*fy)),[fx,fy],[fxv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dv)/(4.0*dy)),[dv,dy],[dvv,dyv],'float64'),\r\n (((2.0*fv)/(4.0*fy)),[fv,fy],[fvv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dx)/(4.0*dv)),[dx,dv],[dxv,dvv],'float64'),\r\n (((2.0*fx)/(4.0*fv)),[fx,fv],[fxv,fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (0.5 *\r\n val_inputs[0] / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.Mul)\r\n assert len(topo[0].inputs) == 2\r\n assert isinstance(topo[1].op, (T.Elemwise, ))\r\n assert isinstance(topo[1].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[1].inputs) == 2\r\n assert(out_dtype == 
out.dtype)\r\n\r\n #test 2 * x / 2 -> x\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2*dx)/2,[dx],[dxv],'float64'),\r\n ((2*fx)/2,[fx],[fxv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2*dv)/2,[dv],[dvv],'float64'),\r\n ((2*fv)/2,[fv],[fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, val_inputs[0])\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n topo[0].op == deep_copy_op\r\n assert(out_dtype == out.dtype)\r\n\r\n #test x / abs(x) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (dx/abs(dx),[dx],[0.5-dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.5-fxv], 'float32'),\r\n (dx/abs(dx),[dx],[0.1*dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.1*fxv], 'float32'),\r\n (dv/abs(dv),[dv],[0.5-dvv],'float64'),\r\n (fv/abs(fv),[fv],[0.5-fvv], 'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]))\r\n assert(out_dtype == out.dtype)\r\n assert len(f.maker.fgraph.toposort()) == 1\r\n\r\n #test (2*x) / (3*abs(x)) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.5 - dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.5 - fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.1 * dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.1 * fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dv) / (3 * abs(dv)), [dv], [0.5 - dvv], 'float64'),\r\n ((2 * fv) / (3 * abs(fv)), [fv], [0.5 - fvv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n topo = f.maker.fgraph.toposort()\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]) * 2 / 3)\r\n assert(out_dtype == out.dtype)\r\n finally:\r\n mode._optimizer = old_optimizer",
"def test_2D_m4_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def _apply_array_spatial12_halffilling(self, h1e: 'Nparray',\n h2e: 'Nparray') -> 'Nparray':\n if fqe.settings.use_accelerated_code:\n return self._apply_array_spatial12_lm(h1e, h2e)\n else:\n h1e = copy.deepcopy(h1e)\n h2e = numpy.moveaxis(copy.deepcopy(h2e), 1, 2) * (-1.0)\n norb = self.norb()\n for k in range(norb):\n h1e[:, :] -= h2e[:, k, k, :]\n\n if numpy.iscomplex(h1e).any() or numpy.iscomplex(h2e).any():\n dvec = self.calculate_dvec_spatial()\n out = numpy.einsum(\"ij,ijkl->kl\", h1e, dvec)\n dvec = numpy.einsum(\"ijkl,klmn->ijmn\", h2e, dvec)\n out += self._calculate_coeff_spatial_with_dvec(dvec)\n else:\n nij = norb * (norb + 1) // 2\n h1ec = numpy.zeros((nij), dtype=self._dtype)\n h2ec = numpy.zeros((nij, nij), dtype=self._dtype)\n for i in range(norb):\n for j in range(i + 1):\n ijn = j + i * (i + 1) // 2\n h1ec[ijn] = h1e[i, j]\n for k in range(norb):\n for l in range(k + 1):\n kln = l + k * (k + 1) // 2\n h2ec[ijn, kln] = h2e[i, j, k, l]\n dvec = self._calculate_dvec_spatial_compressed()\n out = numpy.einsum(\"i,ikl->kl\", h1ec, dvec)\n dvec = numpy.einsum(\"ik,kmn->imn\", h2ec, dvec)\n for i in range(self.norb()):\n for j in range(self.norb()):\n ijn = min(i, j) + max(i, j) * (max(i, j) + 1) // 2\n work = self._core.alpha_map(j, i)\n for source, target, parity in work:\n out[source, :] += dvec[ijn, target, :] * parity\n work = self._core.beta_map(j, i)\n for source, target, parity in work:\n out[:, source] += dvec[ijn, :, target] * parity\n\n return out",
"def test_2D_m4_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m8_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m8_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_3_2_4D_cube_splits(self):\n check = [(0, 0, 0, 0), (1, 1, 1, 1), (1, 0, 0, 0), (1, 1, 0, 0),\n (1, 1, 1, 0),\n (1, 1, 0, 1), (1, 0, 1, 0), (1, 0, 1, 1), (1, 0, 0, 1),\n (0, 1, 0, 0),\n (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 0, 1), (0, 0, 1, 0),\n (0, 0, 1, 1),\n (0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5), (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5), (0.0, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0), (0.25, 0.25, 0.25, 0.25),\n (1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0),\n (0.5, 1.0, 0.5, 1.0), (0.5, 0.5, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0),\n (1.0, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25),\n (1.0, 1.0, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.0, 0.5),\n (0.5, 1.0, 0.0, 0.0), (0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25),\n (1.0, 0.5, 1.0, 0.0), (0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.25), (1.0, 0.5, 0.0, 1.0),\n (0.5, 1.0, 0.0, 1.0),\n (0.5, 0.5, 0.0, 1.0), (0.75, 0.75, 0.25, 0.75),\n (1.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 1.0, 0.5), (0.5, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.25),\n (1.0, 0.0, 0.5, 1.0), (0.5, 0.0, 1.0, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0), (0.25, 0.75, 0.25, 0.25),\n (0.0, 1.0, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5), (0.0, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.25),\n (0.0, 1.0, 0.5, 1.0), (0.0, 0.5, 1.0, 1.0),\n (0.0, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75), (0.0, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(0, 0, 0, 0): [(0.0, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.25, 0.25, 0.25, 0.25),\n (0.5, 0.0, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0),\n (0.5, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0)],\n (1.0, 1.0, 0.5, 0.5): [(1.0, 1.0, 0.5, 1.0), (1, 1, 0, 1),\n (1.0, 1.0, 1.0, 0.5),\n (1.0, 0.5, 0.5, 0.5), (1, 1, 1, 0),\n (1.0, 1.0, 0.5, 0.0),\n (1.0, 1.0, 0.0, 0.5), (1, 1, 0, 0),\n (1, 1, 1, 1), (0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.75, 0.75, 0.75, 0.75),\n (0.75, 0.75, 0.25, 0.25),\n (0.75, 0.75, 0.75, 0.25),\n (0.75, 0.75, 0.25, 0.75)],\n (0.25, 0.25, 0.25, 0.75): [(0.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 0.0, 1.0),\n (0.5, 0.5, 0.5, 1.0),\n (0, 0, 0, 1),\n (0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0),\n (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5),\n (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5)]}\n\n init_triangulation(4, 1, check, nn_checks)",
"def test_dmi_uses_unit_length_2dmesh():\n A = 8.78e-12 # J/m\n D = 1.58e-3 # J/m^2\n Ms = 3.84e5 # A/m\n\n energies = []\n\n # unit_lengths 1e-9 and 1 are common, let's throw in an intermediate length\n # just to challenge the system a little:\n for unit_length in (1, 1e-4, 1e-9):\n radius = 200e-9 / unit_length\n maxh = 5e-9 / unit_length\n helical_period = (4 * pi * A / D) / unit_length\n k = 2 * pi / helical_period\n # HF 27 April 2014: The next command fails in dolfin 1.3\n # mesh = df.CircleMesh(df.Point(0, 0), radius, maxh)\n # The actual shape of the domain shouldn't matter for the test,\n # so let's use a Rectangular mesh which should work the same:\n\n nx = ny = int(round(radius / maxh))\n mesh = df.RectangleMesh(df.Point(0, 0), df.Point(radius, radius), nx, ny)\n\n S3 = df.VectorFunctionSpace(mesh, \"CG\", 1, dim=3)\n m_expr = df.Expression((\"0\", \"cos(k * x[0])\", \"sin(k * x[0])\"), k=k, degree=1)\n m = Field(S3, m_expr, name='m')\n dmi = DMI(D)\n Ms_dg = Field(df.FunctionSpace(mesh, 'DG', 0), Ms)\n dmi.setup(m, Ms_dg, unit_length=unit_length)\n energies.append(dmi.compute_energy())\n\n H = df.Function(S3)\n H.vector()[:] = dmi.compute_field()\n print H(0.0, 0.0)\n\n print \"Using unit_length = {}.\".format(unit_length)\n print \"Helical period {}.\".format(helical_period)\n print \"Energy {}.\".format(dmi.compute_energy())\n\n rel_diff_energies = abs(energies[0] - energies[1]) / abs(energies[1])\n print \"Relative difference of energy {}.\".format(rel_diff_energies)\n assert rel_diff_energies < 1e-13\n\n rel_diff_energies2 = abs(energies[0] - energies[2]) / abs(energies[2])\n print \"Relative difference2 of energy {}.\".format(rel_diff_energies2)\n assert rel_diff_energies2 < 1e-13",
"def test_4_2_5D_cube_splits(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0), (1, 1, 1, 0, 0), (1, 1, 1, 1, 0),\n (1, 1, 1, 0, 1), (1, 1, 0, 1, 0), (1, 1, 0, 1, 1),\n (1, 1, 0, 0, 1), (1, 0, 1, 0, 0), (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1), (1, 0, 0, 0, 1), (0, 1, 0, 0, 0),\n (0, 1, 1, 0, 0), (0, 1, 1, 1, 0), (0, 1, 1, 1, 1),\n (0, 1, 1, 0, 1), (0, 1, 0, 1, 0), (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1), (0, 0, 1, 0, 1), (0, 0, 0, 1, 0),\n (0, 0, 0, 1, 1), (0, 0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.0), (0.0, 0.0, 0.5, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5, 0.5), (0.0, 0.5, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.0, 0.0, 0.0), (0.0, 0.5, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.5, 0.0, 0.5), (0.0, 0.5, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0, 0.0), (0.5, 0.0, 0.0, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.0, 0.5), (0.5, 0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.0, 0.0), (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.25, 0.25, 0.25, 0.25, 0.25), (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5), (1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 0.5), (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 1.0, 0.5, 0.5, 1.0), (1.0, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0), (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 0.5, 1.0, 1.0), (1.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5), (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0), (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 1.0), (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5), (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0), (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5), (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5, 0.5), (1.0, 0.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.0, 0.5, 0.0),\n (1.0, 0.0, 0.5, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0, 0.0),\n (1.0, 0.0, 0.5, 0.5, 0.0), (1.0, 0.5, 0.0, 0.5, 0.5),\n (1.0, 0.5, 0.0, 0.0, 0.5), (1.0, 0.5, 0.0, 0.0, 0.0),\n (1.0, 0.5, 0.0, 0.5, 0.0), (1.0, 0.5, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25, 0.25), (1.0, 1.0, 0.0, 0.5, 0.5),\n (1.0, 1.0, 0.0, 0.0, 0.5), (1.0, 1.0, 0.0, 0.5, 0.0),\n (1.0, 1.0, 0.5, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0, 0.0),\n (1.0, 1.0, 0.5, 0.5, 0.0), (0.5, 1.0, 0.0, 0.5, 0.5),\n (0.5, 1.0, 0.0, 0.0, 0.5), (0.5, 1.0, 0.0, 0.0, 0.0),\n (0.5, 1.0, 0.0, 0.5, 0.0), (0.5, 1.0, 0.5, 0.0, 0.5),\n (0.5, 1.0, 0.5, 0.0, 0.0), (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25, 0.25), (1.0, 1.0, 1.0, 0.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.0), (1.0, 0.5, 1.0, 0.0, 0.5),\n (1.0, 0.5, 1.0, 0.0, 0.0), (1.0, 0.5, 1.0, 0.5, 0.0),\n (0.5, 1.0, 1.0, 0.0, 0.5), (0.5, 1.0, 1.0, 0.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0), (0.5, 0.5, 1.0, 0.0, 0.5),\n (0.5, 0.5, 1.0, 0.0, 0.0), (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.75, 0.25, 0.25), (1.0, 
1.0, 0.5, 1.0, 0.0),\n (1.0, 0.5, 1.0, 1.0, 0.0), (1.0, 0.5, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 1.0, 0.0), (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 0.5, 1.0, 1.0, 0.0), (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.75, 0.25), (1.0, 1.0, 0.5, 0.0, 1.0),\n (1.0, 0.5, 1.0, 0.0, 1.0), (1.0, 0.5, 0.5, 0.0, 1.0),\n (0.5, 1.0, 1.0, 0.0, 1.0), (0.5, 1.0, 0.5, 0.0, 1.0),\n (0.5, 0.5, 1.0, 0.0, 1.0), (0.5, 0.5, 0.5, 0.0, 1.0),\n (0.75, 0.75, 0.75, 0.25, 0.75), (1.0, 1.0, 0.0, 1.0, 0.5),\n (1.0, 0.5, 0.0, 1.0, 0.5), (1.0, 0.5, 0.0, 1.0, 0.0),\n (0.5, 1.0, 0.0, 1.0, 0.5), (0.5, 1.0, 0.0, 1.0, 0.0),\n (0.5, 0.5, 0.0, 1.0, 0.5), (0.5, 0.5, 0.0, 1.0, 0.0),\n (0.75, 0.75, 0.25, 0.75, 0.25), (1.0, 1.0, 0.0, 0.5, 1.0),\n (1.0, 0.5, 0.0, 1.0, 1.0), (1.0, 0.5, 0.0, 0.5, 1.0),\n (0.5, 1.0, 0.0, 1.0, 1.0), (0.5, 1.0, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0, 1.0), (0.5, 0.5, 0.0, 0.5, 1.0),\n (0.75, 0.75, 0.25, 0.75, 0.75), (1.0, 0.5, 0.0, 0.0, 1.0),\n (0.5, 1.0, 0.0, 0.0, 1.0), (0.5, 0.5, 0.0, 0.0, 1.0),\n (0.75, 0.75, 0.25, 0.25, 0.75), (1.0, 0.0, 1.0, 0.5, 0.5),\n (1.0, 0.0, 1.0, 0.0, 0.5), (1.0, 0.0, 1.0, 0.5, 0.0),\n (0.5, 0.0, 1.0, 0.5, 0.5), (0.5, 0.0, 1.0, 0.0, 0.5),\n (0.5, 0.0, 1.0, 0.0, 0.0), (0.5, 0.0, 1.0, 0.5, 0.0),\n (0.75, 0.25, 0.75, 0.25, 0.25), (1.0, 0.0, 1.0, 1.0, 0.5),\n (1.0, 0.0, 0.5, 1.0, 0.5), (1.0, 0.0, 0.5, 1.0, 0.0),\n (0.5, 0.0, 1.0, 1.0, 0.5), (0.5, 0.0, 1.0, 1.0, 0.0),\n (0.5, 0.0, 0.5, 1.0, 0.5), (0.5, 0.0, 0.5, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.75, 0.25), (1.0, 0.0, 1.0, 0.5, 1.0),\n (1.0, 0.0, 0.5, 1.0, 1.0), (1.0, 0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 1.0, 1.0, 1.0), (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0, 1.0), (0.5, 0.0, 0.5, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75, 0.75), (1.0, 0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 1.0, 0.0, 1.0), (0.5, 0.0, 0.5, 0.0, 1.0),\n (0.75, 0.25, 0.75, 0.25, 0.75), (1.0, 0.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 0.0, 1.0, 0.5), (0.5, 0.0, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.25, 0.75, 0.25), (1.0, 0.0, 0.0, 0.5, 1.0),\n (0.5, 0.0, 0.0, 1.0, 1.0), (0.5, 0.0, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.5), (0.0, 1.0, 0.0, 0.0, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.0), (0.0, 1.0, 0.5, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0, 0.0), (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.25, 0.75, 0.25, 0.25, 0.25), (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.0, 0.5), (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5), (0.0, 0.5, 1.0, 0.0, 0.5),\n (0.0, 0.5, 1.0, 0.0, 0.0), (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.25, 0.75, 0.75, 0.25, 0.25), (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5), (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5), (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.5), (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.75, 0.25), (0.0, 1.0, 1.0, 0.5, 1.0),\n (0.0, 1.0, 0.5, 1.0, 1.0), (0.0, 1.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 1.0, 1.0, 1.0), (0.0, 0.5, 1.0, 0.5, 1.0),\n (0.0, 0.5, 0.5, 1.0, 1.0), (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75, 0.75), (0.0, 1.0, 0.5, 0.0, 1.0),\n (0.0, 0.5, 1.0, 0.0, 1.0), (0.0, 0.5, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.75, 0.25, 0.75), (0.0, 1.0, 0.0, 1.0, 0.5),\n (0.0, 0.5, 0.0, 1.0, 0.5), (0.0, 0.5, 0.0, 1.0, 0.0),\n (0.25, 0.75, 0.25, 0.75, 0.25), (0.0, 1.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0, 1.0), (0.0, 0.5, 0.0, 0.5, 1.0),\n (0.25, 0.75, 0.25, 0.75, 0.75), (0.0, 0.5, 0.0, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.0, 0.5), (0.0, 0.0, 1.0, 0.5, 0.0),\n (0.25, 0.25, 
0.75, 0.25, 0.25), (0.0, 0.0, 1.0, 1.0, 0.5),\n (0.0, 0.0, 0.5, 1.0, 0.5), (0.0, 0.0, 0.5, 1.0, 0.0),\n (0.25, 0.25, 0.75, 0.75, 0.25), (0.0, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.0, 0.5, 1.0, 1.0), (0.0, 0.0, 0.5, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75, 0.75), (0.0, 0.0, 0.5, 0.0, 1.0),\n (0.25, 0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(1, 1, 1, 1, 1): [(1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0),\n (1.0, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0.75, 0.75, 0.75, 0.75, 0.75),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0),\n (0.5, 1.0, 0.5, 1.0, 1.0)],\n (0.25, 0.75, 0.75, 0.75, 0.25): [(0.5, 1.0, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0, 1, 1, 1, 0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (\n 0.5, 0.5, 1.0, 0.5, 0.5)],\n (0.0, 0.0, 1.0, 0.5, 1.0): [(0.5, 0.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.0, 0.0, 0.5, 0.5, 1.0),\n (0, 0, 1, 1, 1),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.5, 1.0, 0.5, 1.0),\n (0, 0, 1, 0, 1),\n (0.5, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.25, 0.25, 0.75, 0.75, 0.75),\n (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (\n 0.25, 0.25, 0.75, 0.25, 0.75)]}\n\n init_triangulation(5, 1, check, nn_checks)",
"def test_1_2_2D_rec_splits(self):\n check = [(3.0, -2.0), (7.0, -1.0), (7.0, -2.0), (3.0, -1.0),\n (5.0, -1.5), (3.0, -1.5), (5.0, -2.0), (4.0, -1.75),\n (7.0, -1.5), (5.0, -1.0), (6.0, -1.25), (6.0, -1.75),\n (4.0, -1.25), (5.0, -1.75), (4.0, -1.5), (4.5, -1.625),\n (3.0, -1.75), (4.0, -2.0), (3.5, -1.875), (3.5, -1.625),\n (4.5, -1.875), (5.0, -1.25), (6.0, -1.5), (5.5, -1.375),\n (7.0, -1.25), (6.0, -1.0), (6.5, -1.125), (6.5, -1.375),\n (5.5, -1.125), (5.5, -1.625), (7.0, -1.75), (6.0, -2.0),\n (6.5, -1.875), (6.5, -1.625), (5.5, -1.875), (4.5, -1.375),\n (3.0, -1.25), (4.0, -1.0), (3.5, -1.125), (3.5, -1.375),\n (4.5, -1.125)]\n nn_checks = {(3.0, -2.0): [(3.0, -1.75), (3.5, -1.875), (4.0, -2.0)],\n (5.0, -1.75): [(5.0, -2.0), (5.0, -1.5), (5.5, -1.625),\n (5.5, -1.875), (4.5, -1.625), (6.0, -1.75),\n (4.5, -1.875), (4.0, -1.75)],\n (6.0, -2.0): [(5.0, -2.0), (5.5, -1.875), (6.0, -1.75),\n (6.5, -1.875), (7, -2)],\n (4.5, -1.125): [(5.0, -1.0), (4.0, -1.25), (5.0, -1.25),\n (4.0, -1.0)]}\n\n init_triangulation(2, 2, check, nn_checks, bounds=[(3, 7), (-2, -1)])",
"def test_2_2_3D_cube_splits(self):\n check = [(0, 0, 0), (1, 1, 1), (1, 0, 0), (1, 1, 0), (1, 0, 1),\n (0, 1, 0),\n (0, 1, 1), (0, 0, 1), (0.5, 0.5, 0.5), (0.0, 0.5, 0.5),\n (0.0, 0.0, 0.5), (0.0, 0.5, 0.0), (0.5, 0.0, 0.5),\n (0.5, 0.0, 0.0),\n (0.5, 0.5, 0.0), (0.25, 0.25, 0.25), (1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5),\n (1.0, 0.5, 1.0), (0.5, 1.0, 0.5), (0.5, 1.0, 1.0),\n (0.5, 0.5, 1.0),\n (0.75, 0.75, 0.75), (1.0, 0.0, 0.5), (1.0, 0.5, 0.0),\n (0.75, 0.25, 0.25), (0.5, 1.0, 0.0), (0.75, 0.75, 0.25),\n (0.5, 0.0, 1.0), (0.75, 0.25, 0.75), (0.0, 1.0, 0.5),\n (0.25, 0.75, 0.25), (0.0, 0.5, 1.0), (0.25, 0.75, 0.75),\n (0.25, 0.25, 0.75), (0.5, 0.25, 0.25), (0.5, 0.5, 0.25),\n (0.5, 0.25, 0.5), (0.25, 0.5, 0.25), (0.25, 0.5, 0.5),\n (0.25, 0.25, 0.5), (0.375, 0.375, 0.375), (0.0, 0.25, 0.25),\n (0.0, 0.0, 0.25), (0.0, 0.25, 0.0), (0.25, 0.0, 0.25),\n (0.25, 0.0, 0.0), (0.25, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.5, 0.25), (0.0, 0.25, 0.5), (0.125, 0.375, 0.375),\n (0.25, 0.0, 0.5), (0.125, 0.125, 0.375), (0.25, 0.5, 0.0),\n (0.125, 0.375, 0.125), (0.5, 0.0, 0.25), (0.375, 0.125, 0.375),\n (0.5, 0.25, 0.0), (0.375, 0.125, 0.125), (0.375, 0.375, 0.125),\n (0.5, 0.75, 0.75), (0.5, 0.5, 0.75), (0.5, 0.75, 0.5),\n (0.75, 0.5, 0.75), (0.75, 0.5, 0.5), (0.75, 0.75, 0.5),\n (0.625, 0.625, 0.625), (1.0, 0.75, 0.75), (1.0, 1.0, 0.75),\n (1.0, 0.75, 1.0), (0.75, 1.0, 0.75), (0.75, 1.0, 1.0),\n (0.75, 0.75, 1.0), (0.875, 0.875, 0.875), (1.0, 0.5, 0.75),\n (1.0, 0.75, 0.5), (0.875, 0.625, 0.625), (0.75, 1.0, 0.5),\n (0.875, 0.875, 0.625), (0.75, 0.5, 1.0), (0.875, 0.625, 0.875),\n (0.5, 1.0, 0.75), (0.625, 0.875, 0.625), (0.5, 0.75, 1.0),\n (0.625, 0.875, 0.875), (0.625, 0.625, 0.875),\n (0.75, 0.5, 0.25),\n (0.75, 0.25, 0.5), (0.625, 0.375, 0.375), (1.0, 0.25, 0.25),\n (1.0, 0.0, 0.25), (1.0, 0.25, 0.0), (0.75, 0.0, 0.25),\n (0.75, 0.0, 0.0), (0.75, 0.25, 0.0), (0.875, 0.125, 0.125),\n (1.0, 0.5, 0.25), (1.0, 0.25, 0.5), (0.875, 0.375, 0.375),\n (0.75, 0.0, 0.5), (0.875, 0.125, 0.375), (0.75, 0.5, 0.0),\n (0.875, 0.375, 0.125), (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.625, 0.375, 0.125), (0.5, 0.75, 0.25),\n (0.625, 0.625, 0.375),\n (1.0, 0.75, 0.25), (1.0, 1.0, 0.25), (1.0, 0.75, 0.0),\n (0.75, 1.0, 0.25), (0.75, 1.0, 0.0), (0.75, 0.75, 0.0),\n (0.875, 0.875, 0.125), (0.875, 0.625, 0.375),\n (0.875, 0.875, 0.375),\n (0.875, 0.625, 0.125), (0.5, 1.0, 0.25), (0.625, 0.875, 0.375),\n (0.5, 0.75, 0.0), (0.625, 0.875, 0.125), (0.625, 0.625, 0.125),\n (0.5, 0.25, 0.75), (0.625, 0.375, 0.625), (1.0, 0.25, 0.75),\n (1.0, 0.0, 0.75), (1.0, 0.25, 1.0), (0.75, 0.0, 0.75),\n (0.75, 0.0, 1.0), (0.75, 0.25, 1.0), (0.875, 0.125, 0.875),\n (0.875, 0.375, 0.625), (0.875, 0.125, 0.625),\n (0.875, 0.375, 0.875),\n (0.5, 0.0, 0.75), (0.625, 0.125, 0.625), (0.5, 0.25, 1.0),\n (0.625, 0.125, 0.875), (0.625, 0.375, 0.875),\n (0.25, 0.75, 0.5),\n (0.375, 0.625, 0.375), (0.0, 0.75, 0.25), (0.0, 1.0, 0.25),\n (0.0, 0.75, 0.0), (0.25, 1.0, 0.25), (0.25, 1.0, 0.0),\n (0.25, 0.75, 0.0), (0.125, 0.875, 0.125), (0.0, 0.75, 0.5),\n (0.125, 0.625, 0.375), (0.25, 1.0, 0.5), (0.125, 0.875, 0.375),\n (0.125, 0.625, 0.125), (0.375, 0.875, 0.375),\n (0.375, 0.875, 0.125),\n (0.375, 0.625, 0.125), (0.25, 0.5, 0.75),\n (0.375, 0.625, 0.625),\n (0.0, 0.75, 0.75), (0.0, 1.0, 0.75), (0.0, 0.75, 1.0),\n (0.25, 1.0, 0.75), (0.25, 1.0, 1.0), (0.25, 0.75, 1.0),\n (0.125, 0.875, 0.875), (0.0, 0.5, 0.75), (0.125, 0.625, 0.625),\n (0.125, 0.875, 0.625), (0.25, 0.5, 1.0), (0.125, 0.625, 0.875),\n (0.375, 0.875, 0.625), (0.375, 0.875, 
0.875),\n (0.375, 0.625, 0.875),\n (0.375, 0.375, 0.625), (0.0, 0.25, 0.75), (0.0, 0.0, 0.75),\n (0.0, 0.25, 1.0), (0.25, 0.0, 0.75), (0.25, 0.0, 1.0),\n (0.25, 0.25, 1.0), (0.125, 0.125, 0.875),\n (0.125, 0.375, 0.625),\n (0.125, 0.125, 0.625), (0.125, 0.375, 0.875),\n (0.375, 0.125, 0.625),\n (0.375, 0.125, 0.875), (0.375, 0.375, 0.875)]\n\n nn_checks = {(0.5, 0.25, 0.25): [(0.375, 0.375, 0.125), (0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25),\n (0.625, 0.375, 0.375),\n (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.5, 0.5, 0.25), (0.25, 0.25, 0.25),\n (0.375, 0.375, 0.375),\n (0.5, 0.25, 0.5), (0.5, 0.5, 0.5),\n (0.5, 0.0, 0.25),\n (0.375, 0.125, 0.375), (0.5, 0.0, 0.5),\n (0.5, 0.25, 0.0),\n (0.375, 0.125, 0.125), (0.5, 0.0, 0.0),\n (0.625, 0.375, 0.125)],\n (0.625, 0.625, 0.875): [(0.75, 0.5, 1.0),\n (0.75, 0.75, 1.0),\n (0.5, 0.75, 1.0), (0.5, 0.5, 1.0),\n (0.5, 0.5, 0.75),\n (0.5, 0.75, 0.75),\n (0.75, 0.5, 0.75),\n (0.75, 0.75, 0.75)],\n (0, 0, 0): [(0.0, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.0, 0.25), (0.25, 0.0, 0.0),\n (0.0, 0.25, 0.25), (0.25, 0.25, 0.0),\n (0.25, 0.0, 0.25)]}\n\n init_triangulation(3, 2, check, nn_checks)",
"def init_stitch(N):\n\tif N is None:\n\t\tN = np.int32(HII_DIM) #prepare for stitching\n\tMETA_GRID_SIZE = DIM/N\n\tM = np.int32(HII_DIM/META_GRID_SIZE)\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN/DIM)\n\tprint 'scale', scale\n\tHII_scale = np.float32(BOX_LEN/HII_DIM)\n\tshape = (DIM,DIM,N)\n\tstitch_grid_size = (DIM/(block_size[0]),\n\t\t\t\t\t\tDIM/(block_size[0]),\n\t\t\t\t\t\tN/(block_size[0]))\n\tHII_stitch_grid_size = (HII_DIM/(block_size[0]),\n\t\t\t\t\t\tHII_DIM/(block_size[0]),\n\t\t\t\t\t\tM/(block_size[0]))\n\t#ratio of large box to small size\n\tkernel_source = open(cmd_folder+\"/initialize_stitch.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'DIM': DIM, \n\t\t'VOLUME': VOLUME,\n\t\t'META_BLOCKDIM': N\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_stitch = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\tplan2d = Plan((np.int64(DIM), np.int64(DIM)), dtype=np.complex64)\n\tplan1d = Plan((np.int64(DIM)), dtype=np.complex64)\n\tprint \"init pspec\"\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\t#hbox_large = pyfftw.empty_aligned((DIM, DIM, DIM), dtype='complex64')\n\thbox_large = np.zeros((DIM, DIM, DIM), dtype=np.complex64)\n\t#hbox_small = np.zeros(HII_shape, dtype=np.float32)\n\t#hbox_large = n\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\n\t# Set up pinned memory for transfer\n\t#largebox_hs = cuda.aligned_empty(shape=shape, dtype=np.float32, alignment=resource.getpagesize())\n\tlargebox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.float32)\n\tlargecbox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.complex64)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tprint \"init boxes\"\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t# MRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=meta_x*N**3)\n\t\tinit_stitch(largebox_d, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tinit_stitch(largebox_d_imag, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largecbox_pin.copy()\n\t#if want to get velocity need to use this\n\tif True:\n\t\tprint \"saving kbox\"\n\t\tnp.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\n\tprint \"Executing FFT on device\"\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint hbox_large.dtype\n\tprint \"Finished FFT on device\"\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\t\n\tif True:\n\t\tprint \"loading kbox\"\n\t\thbox_large = 
np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\t#cuda.memcpy_htod_async(largebox_d, largebox_pin)\n\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tHII_filter(largebox_d, DIM, np.int32(meta_z), ZERO, smoothR, block=block_size, grid=stitch_grid_size);\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largebox_d.get_async()\n\t#import IPython; IPython.embed()\n\tprint \"Executing FFT on host\"\n\t#hbox_large = hifft(hbox_large).astype(np.complex64).real\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint \"Finished FFT on host\"\n\t#import IPython; IPython.embed()\n\n\t# for meta_x in xrange(META_GRID_SIZE):\n\t# \tfor meta_y in xrange(META_GRID_SIZE):\n\t# \t\tfor meta_z in xrange(META_GRID_SIZE):\n\t# \t\t\tlargebox_d = gpuarray.to_gpu(hbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N])\n\t# \t\t\tHII_filter(largebox_d, N, np.int32(meta_x), np.int32(meta_y), np.int32(meta_z), ZERO, smoothR, block=block_size, grid=grid_size);\n\t# \t\t\thbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N] = largebox_d.get()\n\t#plan = Plan(shape, dtype=np.complex64)\n\t#plan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\n\n\t# This saves a large resolution deltax\n\n\t\n\tprint \"downsampling\"\n\tsmallbox_d = gpuarray.zeros((HII_DIM,HII_DIM,M), dtype=np.float32)\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\t#largebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tlargebox_d /= scale**3 #\n\t\tsubsample_kernel(largebox_d, smallbox_d, DIM, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size) #subsample in real space\n\t\thbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallbox_d.get_async()\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), hbox_small)\n\t#import IPython; IPython.embed()\n\n\n\t# To get velocities: reload the k-space box\n\thbox_large = np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\thvbox_large = np.zeros((DIM, DIM, DIM), dtype=np.float32)\n\thvbox_small = np.zeros(HII_shape, dtype=np.float32)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,N), dtype=np.complex64)\n\tsmallvbox_d = gpuarray.zeros((HII_DIM, HII_DIM, M), dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\t\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(meta_z), np.int32(num), block=block_size, grid=stitch_grid_size)\n\t\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=stitch_grid_size)\n\t\t\tprint hvbox_large.shape, largevbox_d.shape\n\t\t\thvbox_large[:, :, meta_z*N:(meta_z+1)*N] = largevbox_d.get_async()\n\t\thvbox_large = fft_stitch(N, plan2d, plan1d, hvbox_large, largevbox_d).real\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargevbox_d = 
gpuarray.to_gpu_async(hvbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\tsubsample_kernel(largevbox_d.real, smallvbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size)\n\t\t\thvbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallvbox_d.get_async()\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallvbox_d.get())\n\n\treturn",
"def test_3D_m6_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_linear_2d_merwe_column():\n\n\n def fx(x, dt):\n F = np.array([[1, dt, 0, 0],\n [0, 1, 0, 0],\n [0, 0, 1, dt],\n [0, 0, 0, 1]], dtype=float)\n\n return np.dot(F, x)\n\n def hx(x):\n return np.array([x[0], x[2]])\n\n\n dt = 0.1\n points = MerweScaledSigmaPoints2(4, .1, 2., -1)\n kf = UKF2(dim_x=4, dim_z=2, dt=dt, fx=fx, hx=hx, points=points)\n\n\n kf.x = np.array([[-1., 1., -1., 1]]).T\n kf.P*=0.0001\n #kf.R *=0\n #kf.Q\n\n zs = []\n for i in range(20):\n z = np.array([[i+randn()*0.1],\n [i+randn()*0.1]])\n zs.append(z)\n\n Ms, Ps = kf.batch_filter(zs)\n smooth_x, _, _ = kf.rts_smoother(Ms, Ps, dt=dt)\n\n if DO_PLOT:\n plt.figure()\n zs = np.asarray(zs)\n plt.plot(zs[:,0], marker='+', c='b')\n plt.plot(Ms[:,0], c='b')\n plt.plot(smooth_x[:,0], smooth_x[:,2], c='r')\n print(smooth_x)",
"def _apply_array_spin12_halffilling(self, h1e: 'Nparray',\n h2e: 'Nparray') -> 'Nparray':\n if fqe.settings.use_accelerated_code:\n #return self._apply_array_spin12_blocked(h1e, h2e)\n return self._apply_array_spin12_lm(h1e, h2e)\n else:\n h1e = copy.deepcopy(h1e)\n h2e = numpy.moveaxis(copy.deepcopy(h2e), 1, 2) * (-1.0)\n norb = self.norb()\n for k in range(norb * 2):\n h1e[:, :] -= h2e[:, k, k, :]\n\n (dveca, dvecb) = self.calculate_dvec_spin()\n out = numpy.einsum(\"ij,ijkl->kl\", h1e[:norb, :norb], dveca) \\\n + numpy.einsum(\"ij,ijkl->kl\", h1e[norb:, norb:], dvecb)\n ndveca = numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[:norb, :norb, :norb, :norb], dveca) \\\n + numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[:norb, :norb, norb:, norb:], dvecb)\n ndvecb = numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[norb:, norb:, :norb, :norb], dveca) \\\n + numpy.einsum(\"ijkl,klmn->ijmn\",\n h2e[norb:, norb:, norb:, norb:], dvecb)\n out += self._calculate_coeff_spin_with_dvec((ndveca, ndvecb))\n return out",
"def feature_processing(array2d):\n new_array2d = np.zeros([array2d.shape[0], 29])\n # items/ orders\n new_array2d[:, 0] = array2d[:, 4] / array2d[:, 3]\n # cancels / orders\n new_array2d[:, 1] = array2d[:, 5] / array2d[:, 3]\n # returns / items\n new_array2d[:, 2] = array2d[:, 6] / array2d[:, 4]\n # voucher / orders\n new_array2d[:, 3] = array2d[:, 10] / array2d[:, 3]\n # female_items / female_items + male_items\n new_array2d[:, 4] = array2d[:, 15] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # male_items / female_items + male_items\n new_array2d[:, 5] = array2d[:, 16] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # unisex_items / items\n new_array2d[:, 6] = array2d[:, 17] / array2d[:, 4]\n # wapp_items / items\n new_array2d[:, 7] = array2d[:, 18] / array2d[:, 4]\n # wftw_items / items\n new_array2d[:, 8] = array2d[:, 19] / array2d[:, 4]\n # mapp_items / items\n new_array2d[:, 9] = array2d[:, 20] / array2d[:, 4]\n # wacc_items / items\n new_array2d[:, 10] = array2d[:, 21] / array2d[:, 4]\n # macc_items / items\n new_array2d[:, 11] = array2d[:, 22] / array2d[:, 4]\n # mftw_items / items\n new_array2d[:, 12] = array2d[:, 23] / array2d[:, 4]\n # wspt_items / items\n new_array2d[:, 13] = array2d[:, 24] / array2d[:, 4]\n # mspt_items / items\n new_array2d[:, 14] = array2d[:, 25] / array2d[:, 4]\n # curvy_items / items\n # Curvy item has a strong correlation with gender, however they are very right-skewed use np.power(1/6) to smooth it\n new_array2d[:, 15] = np.power(array2d[:, 26] / array2d[:, 4], 1 / 6)\n # sacc_items / items\n new_array2d[:, 16] = array2d[:, 27] / array2d[:, 4]\n # msite_orders / orders\n new_array2d[:, 17] = array2d[:, 28] / array2d[:, 3]\n # desktop_orders / orders\n new_array2d[:, 18] = array2d[:, 29] / array2d[:, 3]\n # android_orders / orders\n new_array2d[:, 19] = array2d[:, 30] / array2d[:, 3]\n # ios_orders / orders\n new_array2d[:, 20] = array2d[:, 31] / array2d[:, 3]\n # other_device_orders / orders\n new_array2d[:, 21] = array2d[:, 32] / array2d[:, 3]\n # work_orders / orders\n new_array2d[:, 22] = array2d[:, 33] / array2d[:, 3]\n # home_orders / orders\n new_array2d[:, 23] = array2d[:, 34] / array2d[:, 3]\n # parcelpoint_orders / orders\n new_array2d[:, 24] = array2d[:, 35] / array2d[:, 3]\n # other_collection_orders / orders\n new_array2d[:, 25] = array2d[:, 36] / array2d[:, 3]\n # average_discount_onoffer\n new_array2d[:, 26] = array2d[:, 39]\n # average_discount_used\n new_array2d[:, 27] = array2d[:, 40]\n # revenue / order\n new_array2d[:, 28] = array2d[:, 41] / array2d[:, 3]\n\n # normalize by each feature\n new_array2d = normalize(new_array2d, axis=0, norm='max')\n return new_array2d",
"def test_filter_l2_1():\n box = Box(length=L, origin=O)\n f = Field(box, formula=func, name='f0')\n d_fine = Discretization([513, 513, 513])\n d_coarse = Discretization([257, 257, 257], ghosts=[2, 2, 2])\n op = MultiresolutionFilter(d_in=d_fine, d_out=d_coarse,\n variables={f: d_coarse},\n method={Remesh: L2_1, })\n op.discretize()\n op.setup()\n topo_coarse = op.discreteFields[f].topology\n topo_fine = [t for t in f.discreteFields.keys()\n if not t is topo_coarse][0]\n f.initialize(topo=topo_fine)\n f_out = f.discreteFields[topo_coarse]\n op.apply(simu)\n valid = [npw.zeros(f_out[0].shape), ]\n valid = func(valid, *topo_coarse.mesh.coords)\n assert np.allclose(valid[0][topo_coarse.mesh.iCompute],\n f_out[0][topo_coarse.mesh.iCompute]), \\\n np.max(np.abs(valid[0][topo_coarse.mesh.iCompute] -\n f_out[0][topo_coarse.mesh.iCompute]))",
"def test_sw2():\n B1 = 100\n B2 = 200\n h = 18\n t = 1\n H = h + 2 * t\n E1 = 20000\n E2 = 10000\n sections = ((B1, t, 0, E1), (B2, t, h + t, E2))\n EI, top, bot = bm.EI(sections, E1)\n EIc = E1 * B1 * (H ** 3 - h ** 3) / 12\n assert 0.99 < EI / EIc < 1.01",
"def init():\n\tN = np.int32(DIM) #prepare for stitching\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN)/DIM\n\tHII_scale = np.float32(BOX_LEN)/HII_DIM\n\tshape = (N,N,N)\n\t\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\n\tkernel_source = open(cmd_folder+\"/initialize.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'VOLUME': VOLUME,\n\t\t'DIM': DIM\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_kernel = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tadj_complex_conj = main_module.get_function(\"adj_complex_conj\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d, np.int32(DIM), block=block_size, grid=grid_size)\n\n\t#import IPython; IPython.embed()\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d_imag, np.int32(DIM), block=block_size, grid=grid_size)\n\n\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\n\t#adj_complex_conj(largebox_d, DIM, block=block_size, grid=grid_size)\n\tlargebox = largebox_d.get()\n\t#np.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox)\n\n\t#save real space box before smoothing\n\tplan = Plan(shape, dtype=np.complex64)\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox_d.real.get_async())\n\n\t#save real space box after smoothing and subsampling\n\t# host largebox is still in k space, no need to reload from disk\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tHII_filter(largebox_d, N, ZERO, smoothR, block=block_size, grid=grid_size);\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tsubsample_kernel(largebox_d.real, smallbox_d, N, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_grid_size) #subsample in real space\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), smallbox_d.get_async())\n\n\t# reload the k-space box for velocity boxes\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\t\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,DIM), dtype=np.complex64)\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(num), block=block_size, grid=grid_size)\n\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=grid_size)\n\t\tplan.execute(largevbox_d, inverse=True)\n\t\tlargevbox_d /= scale**3\n\t\t#import IPython; IPython.embed()\n\t\tsubsample_kernel(largevbox_d.real, 
smallbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_grid_size)\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallbox_d.get())\n\n\treturn",
"def test_MeshMat_1group(self):\n\n MS_grp = self.meshsol.get_group(\"stator\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[0, 1, 2], [1, 2, 3]])\n result_tgl = cells_grp[\"triangle\"]\n testA = np.sum(abs(solution - result_tgl))\n msg = (\n \"Wrong output: returned \" + str(result_tgl) + \", expected: \" + str(solution)\n )\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)\n\n MS_grp = self.meshsol.get_group(\"rotor\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[3, 3], [1, 2], [2, 3]])\n results = cells_grp[\"triangle\"] # The point indices have changed !\n points = MS_grp.get_mesh().get_point(results)\n testA = np.sum(abs(solution - points))\n msg = \"Wrong output: returned \" + str(results) + \", expected: \" + str(solution)\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)",
"def test_power_spectral_density_from_spatially_resolved_magnetisation_confined_to_mesh_region(tmpdir, debug=False):\n os.chdir(str(tmpdir))\n RTOL = 1e-10\n\n H1 = 1e6 # external field in A/m\n alpha1 = 0.5 # some sort of damping constant\n omega1 = gamma * H1 # precession frequency\n\n H2 = 2.8e4 # external field in A/m\n alpha2 = 0.3 # some sort of damping constant\n omega2 = gamma * H2 # precession frequency\n\n ##\n # Step 1: Construct a time series of artificial magnetisation\n # data and save it to a bunch of .npy files.\n ##\n t_step = 1e-11\n t_ini = 0\n t_end = 10e-9\n\n N1 = 42 # in a real application this would be the number of mesh vertices\n N2 = 23 # in a real application this would be the number of mesh vertices\n fft_test_helpers.create_test_npy_files_with_two_regions(\n str(tmpdir), t_step, t_ini, t_end, omega1, alpha1, N1, omega2, alpha2, N2)\n\n ##\n # Step 2: compute the FFT of a resampled time series, both by\n # hand and using FFT_m.\n ##\n # XXX TODO: Resampling timesteps is not supported when using .npy\n # files. Either simplify the code below, or implement saving to\n # .h5 files so that it's easier to implement resampling for\n # spatially resolved data, too.\n ##\n t_step_res = t_step\n t_ini_res = t_ini\n t_end_res = t_end\n ts_resampled = np.arange(t_ini_res, t_end_res, t_step_res)\n\n # Compute time series based on resampled timesteps\n mx_res = exp(-ts_resampled * 1e8 / alpha1) * sin(omega1 * ts_resampled)\n my_res = exp(-ts_resampled * 1e8 / alpha1) * cos(omega1 * ts_resampled)\n mz_res = 1 - sqrt(mx_res ** 2 + my_res ** 2)\n\n # Compute 'analytical' Fourier transform of resampled time series and\n # determine the power of the spectrum for each component. We also need\n # to multiply by the number of mesh nodes because the numerical algorithm\n # sums up all contributions at the individual nodes (but we can just\n # multiply because they are all identical by construction).\n psd_mx_expected = N1 * np.absolute(np.fft.rfft(mx_res)) ** 2\n psd_my_expected = N1 * np.absolute(np.fft.rfft(my_res)) ** 2\n psd_mz_expected = N1 * np.absolute(np.fft.rfft(mz_res)) ** 2\n\n # Compute Fourier transform of resampled time series using FFT_m\n freqs_computed, psd_mx_computed, psd_my_computed, psd_mz_computed = \\\n compute_power_spectral_density('m_ringdown*.npy', t_step_res, t_ini=t_ini_res,\n t_end=t_end_res, subtract_values=None, restrict_to_vertices=xrange(N1))\n\n # Check that the analytically determined power spectra are the same as the\n # computed ones.\n assert(np.allclose(psd_mx_expected, psd_mx_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_my_expected, psd_my_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_mz_expected, psd_mz_computed, atol=0, rtol=RTOL))\n\n if debug:\n # Plot the spectra for debugging\n fig = plt.figure(figsize=(20, 5))\n ax = fig.gca()\n ax.plot(freqs_computed, psd_mx_expected, label='psd_mx_expected')\n ax.plot(freqs_computed, psd_my_expected, label='psd_my_expected')\n ax.plot(freqs_computed, psd_mz_expected, label='psd_mz_expected')\n ax.plot(freqs_computed, psd_mx_computed, label='psd_mx_computed')\n ax.plot(freqs_computed, psd_my_computed, label='psd_my_computed')\n ax.plot(freqs_computed, psd_mz_computed, label='psd_mz_computed')\n ax.legend(loc='best')\n fig.savefig('psd_m_McMichaelStiles.png')",
"def test_2_2_3D_rec_splits(self):\n check = [(-3.0, -2.0, 0.0), (4.0, 10.0, 1.0), (4.0, -2.0, 0.0),\n (4.0, 10.0, 0.0), (4.0, -2.0, 1.0), (-3.0, 10.0, 0.0),\n (-3.0, 10.0, 1.0), (-3.0, -2.0, 1.0), (0.5, 4.0, 0.5),\n (-3.0, 4.0, 0.5), (-3.0, -2.0, 0.5), (-3.0, 4.0, 0.0),\n (0.5, -2.0, 0.5), (0.5, -2.0, 0.0), (0.5, 4.0, 0.0),\n (-1.25, 1.0, 0.25), (4.0, 4.0, 0.5), (4.0, 10.0, 0.5),\n (4.0, 4.0, 1.0), (0.5, 10.0, 0.5), (0.5, 10.0, 1.0),\n (0.5, 4.0, 1.0), (2.25, 7.0, 0.75), (4.0, -2.0, 0.5),\n (4.0, 4.0, 0.0), (2.25, 1.0, 0.25), (0.5, 10.0, 0.0),\n (2.25, 7.0, 0.25), (0.5, -2.0, 1.0), (2.25, 1.0, 0.75),\n (-3.0, 10.0, 0.5), (-1.25, 7.0, 0.25), (-3.0, 4.0, 1.0),\n (-1.25, 7.0, 0.75), (-1.25, 1.0, 0.75), (0.5, 1.0, 0.25),\n (0.5, 4.0, 0.25), (0.5, 1.0, 0.5), (-1.25, 4.0, 0.25),\n (-1.25, 4.0, 0.5), (-1.25, 1.0, 0.5), (-0.375, 2.5, 0.375),\n (-3.0, 1.0, 0.25), (-3.0, -2.0, 0.25), (-3.0, 1.0, 0.0),\n (-1.25, -2.0, 0.25), (-1.25, -2.0, 0.0), (-1.25, 1.0, 0.0),\n (-2.125, -0.5, 0.125), (-3.0, 4.0, 0.25), (-3.0, 1.0, 0.5),\n (-2.125, 2.5, 0.375), (-1.25, -2.0, 0.5),\n (-2.125, -0.5, 0.375), (-1.25, 4.0, 0.0), (-2.125, 2.5, 0.125),\n (0.5, -2.0, 0.25), (-0.375, -0.5, 0.375), (0.5, 1.0, 0.0),\n (-0.375, -0.5, 0.125), (-0.375, 2.5, 0.125), (0.5, 7.0, 0.75),\n (0.5, 4.0, 0.75), (0.5, 7.0, 0.5), (2.25, 4.0, 0.75),\n (2.25, 4.0, 0.5), (2.25, 7.0, 0.5), (1.375, 5.5, 0.625),\n (4.0, 7.0, 0.75), (4.0, 10.0, 0.75), (4.0, 7.0, 1.0),\n (2.25, 10.0, 0.75), (2.25, 10.0, 1.0), (2.25, 7.0, 1.0),\n (3.125, 8.5, 0.875), (4.0, 4.0, 0.75), (4.0, 7.0, 0.5),\n (3.125, 5.5, 0.625), (2.25, 10.0, 0.5), (3.125, 8.5, 0.625),\n (2.25, 4.0, 1.0), (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (1.375, 8.5, 0.625), (0.5, 7.0, 1.0), (1.375, 8.5, 0.875),\n (1.375, 5.5, 0.875), (2.25, 4.0, 0.25), (2.25, 1.0, 0.5),\n (1.375, 2.5, 0.375), (4.0, 1.0, 0.25), (4.0, -2.0, 0.25),\n (4.0, 1.0, 0.0), (2.25, -2.0, 0.25), (2.25, -2.0, 0.0),\n (2.25, 1.0, 0.0), (3.125, -0.5, 0.125), (4.0, 4.0, 0.25),\n (4.0, 1.0, 0.5), (3.125, 2.5, 0.375), (2.25, -2.0, 0.5),\n (3.125, -0.5, 0.375), (2.25, 4.0, 0.0), (3.125, 2.5, 0.125),\n (1.375, -0.5, 0.375), (1.375, -0.5, 0.125),\n (1.375, 2.5, 0.125), (0.5, 7.0, 0.25), (1.375, 5.5, 0.375),\n (4.0, 7.0, 0.25), (4.0, 10.0, 0.25), (4.0, 7.0, 0.0),\n (2.25, 10.0, 0.25), (2.25, 10.0, 0.0), (2.25, 7.0, 0.0),\n (3.125, 8.5, 0.125), (3.125, 5.5, 0.375), (3.125, 8.5, 0.375),\n (3.125, 5.5, 0.125), (0.5, 10.0, 0.25), (1.375, 8.5, 0.375),\n (0.5, 7.0, 0.0), (1.375, 8.5, 0.125), (1.375, 5.5, 0.125),\n (0.5, 1.0, 0.75), (1.375, 2.5, 0.625), (4.0, 1.0, 0.75),\n (4.0, -2.0, 0.75), (4.0, 1.0, 1.0), (2.25, -2.0, 0.75),\n (2.25, -2.0, 1.0), (2.25, 1.0, 1.0), (3.125, -0.5, 0.875),\n (3.125, 2.5, 0.625), (3.125, -0.5, 0.625), (3.125, 2.5, 0.875),\n (0.5, -2.0, 0.75), (1.375, -0.5, 0.625), (0.5, 1.0, 1.0),\n (1.375, -0.5, 0.875), (1.375, 2.5, 0.875), (-1.25, 7.0, 0.5),\n (-0.375, 5.5, 0.375), (-3.0, 7.0, 0.25), (-3.0, 10.0, 0.25),\n (-3.0, 7.0, 0.0), (-1.25, 10.0, 0.25), (-1.25, 10.0, 0.0),\n (-1.25, 7.0, 0.0), (-2.125, 8.5, 0.125), (-3.0, 7.0, 0.5),\n (-2.125, 5.5, 0.375), (-1.25, 10.0, 0.5), (-2.125, 8.5, 0.375),\n (-2.125, 5.5, 0.125), (-0.375, 8.5, 0.375),\n (-0.375, 8.5, 0.125), (-0.375, 5.5, 0.125), (-1.25, 4.0, 0.75),\n (-0.375, 5.5, 0.625), (-3.0, 7.0, 0.75), (-3.0, 10.0, 0.75),\n (-3.0, 7.0, 1.0), (-1.25, 10.0, 0.75), (-1.25, 10.0, 1.0),\n (-1.25, 7.0, 1.0), (-2.125, 8.5, 0.875), (-3.0, 4.0, 0.75),\n (-2.125, 5.5, 0.625), (-2.125, 8.5, 0.625), (-1.25, 4.0, 1.0),\n (-2.125, 5.5, 0.875), (-0.375, 8.5, 0.625),\n (-0.375, 8.5, 
0.875), (-0.375, 5.5, 0.875),\n (-0.375, 2.5, 0.625), (-3.0, 1.0, 0.75), (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-1.25, -2.0, 0.75), (-1.25, -2.0, 1.0),\n (-1.25, 1.0, 1.0), (-2.125, -0.5, 0.875), (-2.125, 2.5, 0.625),\n (-2.125, -0.5, 0.625), (-2.125, 2.5, 0.875),\n (-0.375, -0.5, 0.625), (-0.375, -0.5, 0.875),\n (-0.375, 2.5, 0.875)]\n nn_checks = {(2.25, 7.0, 0.75): [(4.0, 7.0, 0.75), (2.25, 7.0, 1.0),\n (4.0, 7.0, 0.5), (4.0, 7.0, 1.0),\n (4.0, 4.0, 0.75), (1.375, 5.5, 0.875),\n (2.25, 4.0, 1.0), (2.25, 4.0, 0.5),\n (2.25, 4.0, 0.75), (3.125, 8.5, 0.875),\n (3.125, 8.5, 0.625), (4.0, 10.0, 0.75),\n (2.25, 10.0, 1.0), (2.25, 10.0, 0.75),\n (2.25, 10.0, 0.5), (1.375, 8.5, 0.625),\n (1.375, 8.5, 0.875), (0.5, 7.0, 0.75),\n (0.5, 7.0, 0.5), (3.125, 5.5, 0.625),\n (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (0.5, 7.0, 1.0), (0.5, 4.0, 0.75),\n (2.25, 7.0, 0.5), (1.375, 5.5, 0.625)],\n (4.0, -2.0, 0.5): [(4.0, -2.0, 0.75), (4.0, -2.0, 0.25),\n (2.25, 1.0, 0.5), (2.25, -2.0, 0.75),\n (2.25, -2.0, 0.5), (2.25, -2.0, 0.25),\n (4.0, 1.0, 0.25), (4.0, 1.0, 0.75),\n (4.0, 1.0, 0.5), (3.125, -0.5, 0.375),\n (3.125, -0.5, 0.625)],\n (-2.125, -0.5, 0.875): [(-1.25, 1.0, 1.0),\n (-1.25, 1.0, 0.75),\n (-1.25, -2.0, 0.75),\n (-1.25, -2.0, 1.0),\n (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-3, -2, 1),\n (-3.0, 1.0, 0.75)]}\n\n init_triangulation(3, 2, check, nn_checks, bounds=[(-3, 4), (-2, 10), (0, 1)])",
"def test_matrix22(gridsize=50):\n\n v1 = vec2(3,0)\n v2 = vec2(0,3)\n\n #rotate 45 degrees \n m22 = matrix22()\n m22.from_euler(45)\n\n # make a second matrix, also 45 degrees, should give us 90 total \n m22_2 = matrix22()\n m22_2.from_euler(45)\n m22 = m22_2 * m22\n\n # mutliply a vector by the matrix \n v3 = m22 * v2 \n\n fb = pixel_op() \n fb.create_buffer(800, 800)\n fb.graticule(gridsize)\n \n pts = [ (0,0), (0,1), (2,1), (0,2) ]\n #bloody_simple_2drender('2d_rotation.png', pts=pts, gridsize=50, pfb=fb)\n\n vecs = [v2,v3]\n bloody_simple_2drender('2d_rotation.png', vecs=vecs, gridsize=50, pfb=fb)\n\n #rotate the points by matrix multiplication \n pts = m22.batch_mult_pts(pts) \n bloody_simple_2drender('2d_rotation.png', pts=pts, gridsize=50, pfb=fb)\n fb.save('2d_rotation.png')",
"def test_3D_m6_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_2_layer():\r\n # angular frequency in radians * THz\r\n w = 100 * nu.THz\r\n # Relative permittivity of metal and dielectric\r\n em = -4.56 + 0.12j\r\n ed = 1.23 + 0.01j\r\n ex_list = ez_list = [ed, em]\r\n # Relative permeabilities\r\n mu_list = [1,1]\r\n # Dictionary of input parameters\r\n input_params = {'w': w, 'd_list': [inf,inf], 'ex_list': ex_list,\r\n 'ez_list': ez_list, 'mu_list': mu_list}\r\n \r\n # Calculate the theoretical kx\r\n theo_kx = (w / nu.c0) * cmath.sqrt((em * ed) / (em + ed))\r\n if theo_kx.imag < 0:\r\n theo_kx *= -1\r\n print('Theoretical kx:',\r\n '(%.7g+%.7gj) rad/um' % (theo_kx.real / nu.um**-1, theo_kx.imag / nu.um**-1))\r\n \r\n # If I use the theoretical kx value, the mode should be correct and\r\n # all my tests should pass.\r\n params = deepcopy(input_params)\r\n params['kx'] = theo_kx\r\n params = find_all_params_from_kx(params)\r\n kzd, kzm = params['kz_list']\r\n # check that kz_list is correct\r\n assert_floats_are_equal(kzd**2, (w**2 / nu.c0**2) * ed**2 / (em + ed))\r\n assert_floats_are_equal(kzm**2, (w**2 / nu.c0**2) * em**2 / (em + ed))\r\n # check that layer_bottom_list is correct\r\n assert params['layer_bottom_list'][0] == -inf\r\n assert params['layer_bottom_list'][1] == 0\r\n # Check that the boundary condition matrix agrees with hand-calculation\r\n bc_mat = bc_matrix(params)\r\n # ...top-left is Ex0down / H0down\r\n assert_floats_are_equal(bc_mat[0,0], -kzd / (w * ed * nu.eps0))\r\n # ...top-right is -Ex1up / H1up\r\n assert_floats_are_equal(bc_mat[0,1], -kzm / (w * em * nu.eps0))\r\n # ...bottom-left is eps0 * Ez0down / H0down\r\n assert_floats_are_equal(bc_mat[1,0], ed * -theo_kx / (w * ed * nu.eps0))\r\n # ...bottom-right is -eps1 * Ez1up / H1up\r\n assert_floats_are_equal(bc_mat[1,1], -em * -theo_kx / (w * em * nu.eps0))\r\n # Check that one of the eigenvalues is almost zero (compared to the size\r\n # of the matrix elements).\r\n eigenvalues = np.linalg.eig(bc_mat)[0]\r\n assert abs(eigenvalues).min() / abs(bc_mat).max() < 1e-6\r\n # Check that the mode passes all tests.\r\n assert check_mode(params, thorough=True) is True\r\n # Check that I can scale the fields and it still passes all tests.\r\n params_scaled = rescale_fields(1.23+4.56j, params)\r\n assert check_mode(params_scaled, thorough=True) is True\r\n \r\n # Now try my kx-finding algorithm, to see if it finds the right value.\r\n kx_list = find_kx(input_params)\r\n print('kx_list:',\r\n ['(%.7g+%.7gj) rad/um' % (kx.real / nu.um**-1, kx.imag / nu.um**-1)\r\n for kx in kx_list])\r\n kx = kx_list[0]\r\n assert_floats_are_equal(theo_kx, kx)\r\n \r\n plot_mode(params)\r\n \r\n print('If you see this message, all the tests succeeded!!')",
"def test_uv_degrid_gaussian_kernel():\n\n layout = read_layout(layout_path=f\"{test_data}/test_mwa.txt\")\n xyz = enh_xyz(layout=layout, latitude=mwa_geo.latitude.radians)\n uvw = xyz_uvw(xyz=xyz, freq=freq, dec0=mwa_geo.latitude.radians, ha0=0)\n uv = uv_degrid(\n max_lambda=1400, nside=20, uvw=uvw, sigma=3, kersize=21, kernel=\"gaussian\"\n )\n\n assert uv.shape == (20, 20)\n assert uv[0, 0] == 1.295932713086053e-05"
]
| [
"0.64069176",
"0.61746925",
"0.615539",
"0.60826963",
"0.5995368",
"0.59154975",
"0.58935124",
"0.5843884",
"0.57958406",
"0.5770665",
"0.5739061",
"0.56951934",
"0.5687086",
"0.5672048",
"0.5666419",
"0.56554663",
"0.56394726",
"0.563752",
"0.5588909",
"0.55872774",
"0.5571702",
"0.555686",
"0.5553663",
"0.5535326",
"0.5525987",
"0.5488485",
"0.54828733",
"0.5481193",
"0.5452498",
"0.5448989"
]
| 0.64465004 | 0 |
Testing M6 remeshing formula in 3D, 1 kernel, simple precision, o2_FullHalf splitting. | def test_3D_m6_1k_sFH():
scal, velo = setup_3D()
advec = Advection(velo, scal, discretization=d3d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: 'gpu_1k',
Splitting: 'o2_FullHalf'}
)
advec_py = Advection(velo, scal, discretization=d3d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: '',
Splitting: 'o2_FullHalf'}
)
assertion_3D_withPython(scal, velo, advec, advec_py) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_2_2_3D_cube_splits(self):\n check = [(0, 0, 0), (1, 1, 1), (1, 0, 0), (1, 1, 0), (1, 0, 1),\n (0, 1, 0),\n (0, 1, 1), (0, 0, 1), (0.5, 0.5, 0.5), (0.0, 0.5, 0.5),\n (0.0, 0.0, 0.5), (0.0, 0.5, 0.0), (0.5, 0.0, 0.5),\n (0.5, 0.0, 0.0),\n (0.5, 0.5, 0.0), (0.25, 0.25, 0.25), (1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5),\n (1.0, 0.5, 1.0), (0.5, 1.0, 0.5), (0.5, 1.0, 1.0),\n (0.5, 0.5, 1.0),\n (0.75, 0.75, 0.75), (1.0, 0.0, 0.5), (1.0, 0.5, 0.0),\n (0.75, 0.25, 0.25), (0.5, 1.0, 0.0), (0.75, 0.75, 0.25),\n (0.5, 0.0, 1.0), (0.75, 0.25, 0.75), (0.0, 1.0, 0.5),\n (0.25, 0.75, 0.25), (0.0, 0.5, 1.0), (0.25, 0.75, 0.75),\n (0.25, 0.25, 0.75), (0.5, 0.25, 0.25), (0.5, 0.5, 0.25),\n (0.5, 0.25, 0.5), (0.25, 0.5, 0.25), (0.25, 0.5, 0.5),\n (0.25, 0.25, 0.5), (0.375, 0.375, 0.375), (0.0, 0.25, 0.25),\n (0.0, 0.0, 0.25), (0.0, 0.25, 0.0), (0.25, 0.0, 0.25),\n (0.25, 0.0, 0.0), (0.25, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.5, 0.25), (0.0, 0.25, 0.5), (0.125, 0.375, 0.375),\n (0.25, 0.0, 0.5), (0.125, 0.125, 0.375), (0.25, 0.5, 0.0),\n (0.125, 0.375, 0.125), (0.5, 0.0, 0.25), (0.375, 0.125, 0.375),\n (0.5, 0.25, 0.0), (0.375, 0.125, 0.125), (0.375, 0.375, 0.125),\n (0.5, 0.75, 0.75), (0.5, 0.5, 0.75), (0.5, 0.75, 0.5),\n (0.75, 0.5, 0.75), (0.75, 0.5, 0.5), (0.75, 0.75, 0.5),\n (0.625, 0.625, 0.625), (1.0, 0.75, 0.75), (1.0, 1.0, 0.75),\n (1.0, 0.75, 1.0), (0.75, 1.0, 0.75), (0.75, 1.0, 1.0),\n (0.75, 0.75, 1.0), (0.875, 0.875, 0.875), (1.0, 0.5, 0.75),\n (1.0, 0.75, 0.5), (0.875, 0.625, 0.625), (0.75, 1.0, 0.5),\n (0.875, 0.875, 0.625), (0.75, 0.5, 1.0), (0.875, 0.625, 0.875),\n (0.5, 1.0, 0.75), (0.625, 0.875, 0.625), (0.5, 0.75, 1.0),\n (0.625, 0.875, 0.875), (0.625, 0.625, 0.875),\n (0.75, 0.5, 0.25),\n (0.75, 0.25, 0.5), (0.625, 0.375, 0.375), (1.0, 0.25, 0.25),\n (1.0, 0.0, 0.25), (1.0, 0.25, 0.0), (0.75, 0.0, 0.25),\n (0.75, 0.0, 0.0), (0.75, 0.25, 0.0), (0.875, 0.125, 0.125),\n (1.0, 0.5, 0.25), (1.0, 0.25, 0.5), (0.875, 0.375, 0.375),\n (0.75, 0.0, 0.5), (0.875, 0.125, 0.375), (0.75, 0.5, 0.0),\n (0.875, 0.375, 0.125), (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.625, 0.375, 0.125), (0.5, 0.75, 0.25),\n (0.625, 0.625, 0.375),\n (1.0, 0.75, 0.25), (1.0, 1.0, 0.25), (1.0, 0.75, 0.0),\n (0.75, 1.0, 0.25), (0.75, 1.0, 0.0), (0.75, 0.75, 0.0),\n (0.875, 0.875, 0.125), (0.875, 0.625, 0.375),\n (0.875, 0.875, 0.375),\n (0.875, 0.625, 0.125), (0.5, 1.0, 0.25), (0.625, 0.875, 0.375),\n (0.5, 0.75, 0.0), (0.625, 0.875, 0.125), (0.625, 0.625, 0.125),\n (0.5, 0.25, 0.75), (0.625, 0.375, 0.625), (1.0, 0.25, 0.75),\n (1.0, 0.0, 0.75), (1.0, 0.25, 1.0), (0.75, 0.0, 0.75),\n (0.75, 0.0, 1.0), (0.75, 0.25, 1.0), (0.875, 0.125, 0.875),\n (0.875, 0.375, 0.625), (0.875, 0.125, 0.625),\n (0.875, 0.375, 0.875),\n (0.5, 0.0, 0.75), (0.625, 0.125, 0.625), (0.5, 0.25, 1.0),\n (0.625, 0.125, 0.875), (0.625, 0.375, 0.875),\n (0.25, 0.75, 0.5),\n (0.375, 0.625, 0.375), (0.0, 0.75, 0.25), (0.0, 1.0, 0.25),\n (0.0, 0.75, 0.0), (0.25, 1.0, 0.25), (0.25, 1.0, 0.0),\n (0.25, 0.75, 0.0), (0.125, 0.875, 0.125), (0.0, 0.75, 0.5),\n (0.125, 0.625, 0.375), (0.25, 1.0, 0.5), (0.125, 0.875, 0.375),\n (0.125, 0.625, 0.125), (0.375, 0.875, 0.375),\n (0.375, 0.875, 0.125),\n (0.375, 0.625, 0.125), (0.25, 0.5, 0.75),\n (0.375, 0.625, 0.625),\n (0.0, 0.75, 0.75), (0.0, 1.0, 0.75), (0.0, 0.75, 1.0),\n (0.25, 1.0, 0.75), (0.25, 1.0, 1.0), (0.25, 0.75, 1.0),\n (0.125, 0.875, 0.875), (0.0, 0.5, 0.75), (0.125, 0.625, 0.625),\n (0.125, 0.875, 0.625), (0.25, 0.5, 1.0), (0.125, 0.625, 0.875),\n (0.375, 0.875, 0.625), (0.375, 0.875, 
0.875),\n (0.375, 0.625, 0.875),\n (0.375, 0.375, 0.625), (0.0, 0.25, 0.75), (0.0, 0.0, 0.75),\n (0.0, 0.25, 1.0), (0.25, 0.0, 0.75), (0.25, 0.0, 1.0),\n (0.25, 0.25, 1.0), (0.125, 0.125, 0.875),\n (0.125, 0.375, 0.625),\n (0.125, 0.125, 0.625), (0.125, 0.375, 0.875),\n (0.375, 0.125, 0.625),\n (0.375, 0.125, 0.875), (0.375, 0.375, 0.875)]\n\n nn_checks = {(0.5, 0.25, 0.25): [(0.375, 0.375, 0.125), (0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25),\n (0.625, 0.375, 0.375),\n (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.5, 0.5, 0.25), (0.25, 0.25, 0.25),\n (0.375, 0.375, 0.375),\n (0.5, 0.25, 0.5), (0.5, 0.5, 0.5),\n (0.5, 0.0, 0.25),\n (0.375, 0.125, 0.375), (0.5, 0.0, 0.5),\n (0.5, 0.25, 0.0),\n (0.375, 0.125, 0.125), (0.5, 0.0, 0.0),\n (0.625, 0.375, 0.125)],\n (0.625, 0.625, 0.875): [(0.75, 0.5, 1.0),\n (0.75, 0.75, 1.0),\n (0.5, 0.75, 1.0), (0.5, 0.5, 1.0),\n (0.5, 0.5, 0.75),\n (0.5, 0.75, 0.75),\n (0.75, 0.5, 0.75),\n (0.75, 0.75, 0.75)],\n (0, 0, 0): [(0.0, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.0, 0.25), (0.25, 0.0, 0.0),\n (0.0, 0.25, 0.25), (0.25, 0.25, 0.0),\n (0.25, 0.0, 0.25)]}\n\n init_triangulation(3, 2, check, nn_checks)",
"def test_3D_m6_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_3_2_4D_cube_splits(self):\n check = [(0, 0, 0, 0), (1, 1, 1, 1), (1, 0, 0, 0), (1, 1, 0, 0),\n (1, 1, 1, 0),\n (1, 1, 0, 1), (1, 0, 1, 0), (1, 0, 1, 1), (1, 0, 0, 1),\n (0, 1, 0, 0),\n (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 0, 1), (0, 0, 1, 0),\n (0, 0, 1, 1),\n (0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5), (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5), (0.0, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0), (0.25, 0.25, 0.25, 0.25),\n (1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0),\n (0.5, 1.0, 0.5, 1.0), (0.5, 0.5, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0),\n (1.0, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25),\n (1.0, 1.0, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.0, 0.5),\n (0.5, 1.0, 0.0, 0.0), (0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25),\n (1.0, 0.5, 1.0, 0.0), (0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.25), (1.0, 0.5, 0.0, 1.0),\n (0.5, 1.0, 0.0, 1.0),\n (0.5, 0.5, 0.0, 1.0), (0.75, 0.75, 0.25, 0.75),\n (1.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 1.0, 0.5), (0.5, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.25),\n (1.0, 0.0, 0.5, 1.0), (0.5, 0.0, 1.0, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0), (0.25, 0.75, 0.25, 0.25),\n (0.0, 1.0, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5), (0.0, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.25),\n (0.0, 1.0, 0.5, 1.0), (0.0, 0.5, 1.0, 1.0),\n (0.0, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75), (0.0, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(0, 0, 0, 0): [(0.0, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.25, 0.25, 0.25, 0.25),\n (0.5, 0.0, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0),\n (0.5, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0)],\n (1.0, 1.0, 0.5, 0.5): [(1.0, 1.0, 0.5, 1.0), (1, 1, 0, 1),\n (1.0, 1.0, 1.0, 0.5),\n (1.0, 0.5, 0.5, 0.5), (1, 1, 1, 0),\n (1.0, 1.0, 0.5, 0.0),\n (1.0, 1.0, 0.0, 0.5), (1, 1, 0, 0),\n (1, 1, 1, 1), (0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.75, 0.75, 0.75, 0.75),\n (0.75, 0.75, 0.25, 0.25),\n (0.75, 0.75, 0.75, 0.25),\n (0.75, 0.75, 0.25, 0.75)],\n (0.25, 0.25, 0.25, 0.75): [(0.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 0.0, 1.0),\n (0.5, 0.5, 0.5, 1.0),\n (0, 0, 0, 1),\n (0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0),\n (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5),\n (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5)]}\n\n init_triangulation(4, 1, check, nn_checks)",
"def test_multiple_case(self):\r\n\r\n shp = (3, 3)\r\n fx, fy, fz, fw = fmatrices('xyzw')\r\n dx, dy, dz, dw = dmatrices('xyzw')\r\n fv = fvector('r').dimshuffle('x', 0)\r\n dv = dvector('s').dimshuffle('x', 0)\r\n fxv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fyv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fzv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fwv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float32').reshape(1, shp[0])\r\n dxv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dyv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dzv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dwv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float64').reshape(1, shp[0])\r\n\r\n #We must be sure that the Canonizer is working, but that we don't have other\r\n # optimisation that could hide bug in the Canonizer as local_elemwise_fusion\r\n mode = compile.mode.get_default_mode()\r\n old_optimizer = mode._optimizer\r\n try:\r\n mode._optimizer = gof.Query([\"canonicalize\"])\r\n mode._optimizer = mode._optimizer.including('ShapeOpt')\r\n mode._optimizer = mode._optimizer.excluding(\r\n 'local_elemwise_fusion')\r\n\r\n #test x / x -> 1\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([(fx/fx,[fx],[fxv],'float32'),\r\n (dx/dx,[dx],[dxv],'float64'),\r\n (fv/fv,[fv],[fvv],'float32'),\r\n (dv/dv,[dv],[dvv],'float64'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert (out == numpy.ones(shp, dtype=out_dtype)).all()\r\n topo = f.maker.fgraph.toposort()\r\n if sym_inputs[0].broadcastable[0]:\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, tensor.Alloc)\r\n else:\r\n assert len(topo) == 3\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, Shape_i)\r\n assert isinstance(topo[2].op, tensor.Alloc)\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (x * y) / x -> y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx*dy)/dx,[dx,dy],[dxv,dyv],0,'float64'),\r\n ((fx*fy)/fx,[fx,fy],[fxv,fyv],0,'float32'),\r\n ((dv*dy)/dv,[dv,dy],[dvv,dyv],0,'float64'),\r\n ((fv*fy)/fv,[fv,fy],[fvv,fyv],0,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n ((dx*dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float64, row)>)]\r\n ((fx*fv)/fx,[fx,fv],[fxv,fvv],1,'float32')\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float32, row)>)]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert(out_dtype == out.dtype)\r\n assert numpy.allclose(out, val_inputs[1])\r\n topo = f.maker.fgraph.toposort()\r\n print \"ID TOPO\", id, topo, sym_inputs\r\n for r, t in f.maker.fgraph.shape_feature.shape_of.items():\r\n print ' ', r, t\r\n if topo and not(len(topo)==1 and topo[0].op==deep_copy_op):\r\n for node in topo[:-1]:\r\n assert isinstance(node.op, Shape_i)\r\n assert isinstance(topo[-1].op, tensor.Alloc)\r\n\r\n #test x / y / x -> 1 / y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx/dy)/dx,[dx,dy],[dxv,dyv],1,'float64'),\r\n ((fx/fy)/fx,[fx,fy],[fxv,fyv],1,'float32'),\r\n ((dv/dy)/dv,[dv,dy],[dvv,dyv],1,'float64'),\r\n 
((fv/fy)/fv,[fv,fy],[fvv,fyv],1,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n\r\n ((dx/dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float64, row)>), Alloc]\r\n ((fx/fv)/fx,[fx,fv],[fxv,fvv],1,'float32'),\r\n #topo:[Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float32, row)>), Alloc]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (1 / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n print topo\r\n elem = [t for t in topo if isinstance(t.op, T.Elemwise)]\r\n assert len(elem) == nb_elemwise\r\n assert isinstance(elem[0].op, (T.Elemwise, ))\r\n assert isinstance(elem[0].op.scalar_op, (\r\n theano.scalar.basic.Inv, theano.scalar.basic.TrueDiv))\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (a / b) * (b / c) * (c / d) -> a / d\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((dx / dy) * (dy / dz) * (dz / dw),[dx,dy,dz,dw],[dxv,dyv,dzv,dwv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fw),[fx,fy,fz,fw],[fxv,fyv,fzv,fwv],'float32'),\r\n ((dv / dy) * (dy / dz) * (dz / dw),[dv,dy,dz,dw],[dvv,dyv,dzv,dwv],'float64'),\r\n ((fv / fy) * (fy / fz) * (fz / fw),[fv,fy,fz,fw],[fvv,fyv,fzv,fwv],'float32'),\r\n ((dx / dv) * (dv / dz) * (dz / dw),[dx,dv,dz,dw],[dxv,dvv,dzv,dwv],'float64'),\r\n ((fx / fv) * (fv / fz) * (fz / fw),[fx,fv,fz,fw],[fxv,fvv,fzv,fwv],'float32'),\r\n ((dx / dy) * (dy / dv) * (dv / dw),[dx,dy,dv,dw],[dxv,dyv,dvv,dwv],'float64'),\r\n ((fx / fy) * (fy / fv) * (fv / fw),[fx,fy,fv,fw],[fxv,fyv,fvv,fwv],'float32'),\r\n ((dx / dy) * (dy / dz) * (dz / dv),[dx,dy,dz,dv],[dxv,dyv,dzv,dvv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fv),[fx,fy,fz,fv],[fxv,fyv,fzv,fvv],'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (val_inputs[0] / val_inputs[3]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[0].inputs) == 2\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (2.0 * x) / (4.0 * y) -> (0.5 * x) / y\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (((2.0*dx)/(4.0*dy)),[dx,dy],[dxv,dyv],'float64'),\r\n (((2.0*fx)/(4.0*fy)),[fx,fy],[fxv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dv)/(4.0*dy)),[dv,dy],[dvv,dyv],'float64'),\r\n (((2.0*fv)/(4.0*fy)),[fv,fy],[fvv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dx)/(4.0*dv)),[dx,dv],[dxv,dvv],'float64'),\r\n (((2.0*fx)/(4.0*fv)),[fx,fv],[fxv,fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (0.5 *\r\n val_inputs[0] / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.Mul)\r\n assert len(topo[0].inputs) == 2\r\n assert isinstance(topo[1].op, (T.Elemwise, ))\r\n assert isinstance(topo[1].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[1].inputs) == 2\r\n assert(out_dtype == 
out.dtype)\r\n\r\n #test 2 * x / 2 -> x\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2*dx)/2,[dx],[dxv],'float64'),\r\n ((2*fx)/2,[fx],[fxv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2*dv)/2,[dv],[dvv],'float64'),\r\n ((2*fv)/2,[fv],[fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, val_inputs[0])\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n topo[0].op == deep_copy_op\r\n assert(out_dtype == out.dtype)\r\n\r\n #test x / abs(x) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (dx/abs(dx),[dx],[0.5-dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.5-fxv], 'float32'),\r\n (dx/abs(dx),[dx],[0.1*dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.1*fxv], 'float32'),\r\n (dv/abs(dv),[dv],[0.5-dvv],'float64'),\r\n (fv/abs(fv),[fv],[0.5-fvv], 'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]))\r\n assert(out_dtype == out.dtype)\r\n assert len(f.maker.fgraph.toposort()) == 1\r\n\r\n #test (2*x) / (3*abs(x)) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.5 - dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.5 - fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.1 * dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.1 * fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dv) / (3 * abs(dv)), [dv], [0.5 - dvv], 'float64'),\r\n ((2 * fv) / (3 * abs(fv)), [fv], [0.5 - fvv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n topo = f.maker.fgraph.toposort()\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]) * 2 / 3)\r\n assert(out_dtype == out.dtype)\r\n finally:\r\n mode._optimizer = old_optimizer",
"def test_1_2_2D_cube_splits(self):\n check = [(0, 0), (1, 1), (1, 0), (0, 1), (0.5, 0.5), (0.0, 0.5),\n (0.5, 0.0),\n (0.25, 0.25), (1.0, 0.5), (0.5, 1.0), (0.75, 0.75),\n (0.75, 0.25),\n (0.25, 0.75), (0.5, 0.25), (0.25, 0.5), (0.375, 0.375),\n (0.0, 0.25),\n (0.25, 0.0), (0.125, 0.125), (0.125, 0.375), (0.375, 0.125),\n (0.5, 0.75), (0.75, 0.5), (0.625, 0.625), (1.0, 0.75),\n (0.75, 1.0),\n (0.875, 0.875), (0.875, 0.625), (0.625, 0.875), (0.625, 0.375),\n (1.0, 0.25), (0.75, 0.0), (0.875, 0.125), (0.875, 0.375),\n (0.625, 0.125), (0.375, 0.625), (0.0, 0.75), (0.25, 1.0),\n (0.125, 0.875), (0.125, 0.625), (0.375, 0.875)]\n\n nn_checks = {(0, 0): [(0.25, 0.0), (0.0, 0.25), (0.125, 0.125)],\n (0.625, 0.375): [(0.5, 0.5), (0.75, 0.25), (0.75, 0.5),\n (0.5, 0.25)],\n (0, 1): [(0.25, 1.0), (0.125, 0.875),(0.0, 0.75)],\n (0.625, 0.125): [(0.5, 0.0), (0.75, 0.25), (0.75, 0.0),\n (0.5, 0.25)]}\n\n\n init_triangulation(2, 2, check, nn_checks)",
"def test_MeshMat_1group(self):\n\n MS_grp = self.meshsol.get_group(\"stator\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[0, 1, 2], [1, 2, 3]])\n result_tgl = cells_grp[\"triangle\"]\n testA = np.sum(abs(solution - result_tgl))\n msg = (\n \"Wrong output: returned \" + str(result_tgl) + \", expected: \" + str(solution)\n )\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)\n\n MS_grp = self.meshsol.get_group(\"rotor\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[3, 3], [1, 2], [2, 3]])\n results = cells_grp[\"triangle\"] # The point indices have changed !\n points = MS_grp.get_mesh().get_point(results)\n testA = np.sum(abs(solution - points))\n msg = \"Wrong output: returned \" + str(results) + \", expected: \" + str(solution)\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)",
"def test_2_2_3D_rec_splits(self):\n check = [(-3.0, -2.0, 0.0), (4.0, 10.0, 1.0), (4.0, -2.0, 0.0),\n (4.0, 10.0, 0.0), (4.0, -2.0, 1.0), (-3.0, 10.0, 0.0),\n (-3.0, 10.0, 1.0), (-3.0, -2.0, 1.0), (0.5, 4.0, 0.5),\n (-3.0, 4.0, 0.5), (-3.0, -2.0, 0.5), (-3.0, 4.0, 0.0),\n (0.5, -2.0, 0.5), (0.5, -2.0, 0.0), (0.5, 4.0, 0.0),\n (-1.25, 1.0, 0.25), (4.0, 4.0, 0.5), (4.0, 10.0, 0.5),\n (4.0, 4.0, 1.0), (0.5, 10.0, 0.5), (0.5, 10.0, 1.0),\n (0.5, 4.0, 1.0), (2.25, 7.0, 0.75), (4.0, -2.0, 0.5),\n (4.0, 4.0, 0.0), (2.25, 1.0, 0.25), (0.5, 10.0, 0.0),\n (2.25, 7.0, 0.25), (0.5, -2.0, 1.0), (2.25, 1.0, 0.75),\n (-3.0, 10.0, 0.5), (-1.25, 7.0, 0.25), (-3.0, 4.0, 1.0),\n (-1.25, 7.0, 0.75), (-1.25, 1.0, 0.75), (0.5, 1.0, 0.25),\n (0.5, 4.0, 0.25), (0.5, 1.0, 0.5), (-1.25, 4.0, 0.25),\n (-1.25, 4.0, 0.5), (-1.25, 1.0, 0.5), (-0.375, 2.5, 0.375),\n (-3.0, 1.0, 0.25), (-3.0, -2.0, 0.25), (-3.0, 1.0, 0.0),\n (-1.25, -2.0, 0.25), (-1.25, -2.0, 0.0), (-1.25, 1.0, 0.0),\n (-2.125, -0.5, 0.125), (-3.0, 4.0, 0.25), (-3.0, 1.0, 0.5),\n (-2.125, 2.5, 0.375), (-1.25, -2.0, 0.5),\n (-2.125, -0.5, 0.375), (-1.25, 4.0, 0.0), (-2.125, 2.5, 0.125),\n (0.5, -2.0, 0.25), (-0.375, -0.5, 0.375), (0.5, 1.0, 0.0),\n (-0.375, -0.5, 0.125), (-0.375, 2.5, 0.125), (0.5, 7.0, 0.75),\n (0.5, 4.0, 0.75), (0.5, 7.0, 0.5), (2.25, 4.0, 0.75),\n (2.25, 4.0, 0.5), (2.25, 7.0, 0.5), (1.375, 5.5, 0.625),\n (4.0, 7.0, 0.75), (4.0, 10.0, 0.75), (4.0, 7.0, 1.0),\n (2.25, 10.0, 0.75), (2.25, 10.0, 1.0), (2.25, 7.0, 1.0),\n (3.125, 8.5, 0.875), (4.0, 4.0, 0.75), (4.0, 7.0, 0.5),\n (3.125, 5.5, 0.625), (2.25, 10.0, 0.5), (3.125, 8.5, 0.625),\n (2.25, 4.0, 1.0), (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (1.375, 8.5, 0.625), (0.5, 7.0, 1.0), (1.375, 8.5, 0.875),\n (1.375, 5.5, 0.875), (2.25, 4.0, 0.25), (2.25, 1.0, 0.5),\n (1.375, 2.5, 0.375), (4.0, 1.0, 0.25), (4.0, -2.0, 0.25),\n (4.0, 1.0, 0.0), (2.25, -2.0, 0.25), (2.25, -2.0, 0.0),\n (2.25, 1.0, 0.0), (3.125, -0.5, 0.125), (4.0, 4.0, 0.25),\n (4.0, 1.0, 0.5), (3.125, 2.5, 0.375), (2.25, -2.0, 0.5),\n (3.125, -0.5, 0.375), (2.25, 4.0, 0.0), (3.125, 2.5, 0.125),\n (1.375, -0.5, 0.375), (1.375, -0.5, 0.125),\n (1.375, 2.5, 0.125), (0.5, 7.0, 0.25), (1.375, 5.5, 0.375),\n (4.0, 7.0, 0.25), (4.0, 10.0, 0.25), (4.0, 7.0, 0.0),\n (2.25, 10.0, 0.25), (2.25, 10.0, 0.0), (2.25, 7.0, 0.0),\n (3.125, 8.5, 0.125), (3.125, 5.5, 0.375), (3.125, 8.5, 0.375),\n (3.125, 5.5, 0.125), (0.5, 10.0, 0.25), (1.375, 8.5, 0.375),\n (0.5, 7.0, 0.0), (1.375, 8.5, 0.125), (1.375, 5.5, 0.125),\n (0.5, 1.0, 0.75), (1.375, 2.5, 0.625), (4.0, 1.0, 0.75),\n (4.0, -2.0, 0.75), (4.0, 1.0, 1.0), (2.25, -2.0, 0.75),\n (2.25, -2.0, 1.0), (2.25, 1.0, 1.0), (3.125, -0.5, 0.875),\n (3.125, 2.5, 0.625), (3.125, -0.5, 0.625), (3.125, 2.5, 0.875),\n (0.5, -2.0, 0.75), (1.375, -0.5, 0.625), (0.5, 1.0, 1.0),\n (1.375, -0.5, 0.875), (1.375, 2.5, 0.875), (-1.25, 7.0, 0.5),\n (-0.375, 5.5, 0.375), (-3.0, 7.0, 0.25), (-3.0, 10.0, 0.25),\n (-3.0, 7.0, 0.0), (-1.25, 10.0, 0.25), (-1.25, 10.0, 0.0),\n (-1.25, 7.0, 0.0), (-2.125, 8.5, 0.125), (-3.0, 7.0, 0.5),\n (-2.125, 5.5, 0.375), (-1.25, 10.0, 0.5), (-2.125, 8.5, 0.375),\n (-2.125, 5.5, 0.125), (-0.375, 8.5, 0.375),\n (-0.375, 8.5, 0.125), (-0.375, 5.5, 0.125), (-1.25, 4.0, 0.75),\n (-0.375, 5.5, 0.625), (-3.0, 7.0, 0.75), (-3.0, 10.0, 0.75),\n (-3.0, 7.0, 1.0), (-1.25, 10.0, 0.75), (-1.25, 10.0, 1.0),\n (-1.25, 7.0, 1.0), (-2.125, 8.5, 0.875), (-3.0, 4.0, 0.75),\n (-2.125, 5.5, 0.625), (-2.125, 8.5, 0.625), (-1.25, 4.0, 1.0),\n (-2.125, 5.5, 0.875), (-0.375, 8.5, 0.625),\n (-0.375, 8.5, 
0.875), (-0.375, 5.5, 0.875),\n (-0.375, 2.5, 0.625), (-3.0, 1.0, 0.75), (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-1.25, -2.0, 0.75), (-1.25, -2.0, 1.0),\n (-1.25, 1.0, 1.0), (-2.125, -0.5, 0.875), (-2.125, 2.5, 0.625),\n (-2.125, -0.5, 0.625), (-2.125, 2.5, 0.875),\n (-0.375, -0.5, 0.625), (-0.375, -0.5, 0.875),\n (-0.375, 2.5, 0.875)]\n nn_checks = {(2.25, 7.0, 0.75): [(4.0, 7.0, 0.75), (2.25, 7.0, 1.0),\n (4.0, 7.0, 0.5), (4.0, 7.0, 1.0),\n (4.0, 4.0, 0.75), (1.375, 5.5, 0.875),\n (2.25, 4.0, 1.0), (2.25, 4.0, 0.5),\n (2.25, 4.0, 0.75), (3.125, 8.5, 0.875),\n (3.125, 8.5, 0.625), (4.0, 10.0, 0.75),\n (2.25, 10.0, 1.0), (2.25, 10.0, 0.75),\n (2.25, 10.0, 0.5), (1.375, 8.5, 0.625),\n (1.375, 8.5, 0.875), (0.5, 7.0, 0.75),\n (0.5, 7.0, 0.5), (3.125, 5.5, 0.625),\n (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (0.5, 7.0, 1.0), (0.5, 4.0, 0.75),\n (2.25, 7.0, 0.5), (1.375, 5.5, 0.625)],\n (4.0, -2.0, 0.5): [(4.0, -2.0, 0.75), (4.0, -2.0, 0.25),\n (2.25, 1.0, 0.5), (2.25, -2.0, 0.75),\n (2.25, -2.0, 0.5), (2.25, -2.0, 0.25),\n (4.0, 1.0, 0.25), (4.0, 1.0, 0.75),\n (4.0, 1.0, 0.5), (3.125, -0.5, 0.375),\n (3.125, -0.5, 0.625)],\n (-2.125, -0.5, 0.875): [(-1.25, 1.0, 1.0),\n (-1.25, 1.0, 0.75),\n (-1.25, -2.0, 0.75),\n (-1.25, -2.0, 1.0),\n (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-3, -2, 1),\n (-3.0, 1.0, 0.75)]}\n\n init_triangulation(3, 2, check, nn_checks, bounds=[(-3, 4), (-2, 10), (0, 1)])",
"def test_4_2_5D_cube_splits(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0), (1, 1, 1, 0, 0), (1, 1, 1, 1, 0),\n (1, 1, 1, 0, 1), (1, 1, 0, 1, 0), (1, 1, 0, 1, 1),\n (1, 1, 0, 0, 1), (1, 0, 1, 0, 0), (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1), (1, 0, 0, 0, 1), (0, 1, 0, 0, 0),\n (0, 1, 1, 0, 0), (0, 1, 1, 1, 0), (0, 1, 1, 1, 1),\n (0, 1, 1, 0, 1), (0, 1, 0, 1, 0), (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1), (0, 0, 1, 0, 1), (0, 0, 0, 1, 0),\n (0, 0, 0, 1, 1), (0, 0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.0), (0.0, 0.0, 0.5, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5, 0.5), (0.0, 0.5, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.0, 0.0, 0.0), (0.0, 0.5, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.5, 0.0, 0.5), (0.0, 0.5, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0, 0.0), (0.5, 0.0, 0.0, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.0, 0.5), (0.5, 0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.0, 0.0), (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.25, 0.25, 0.25, 0.25, 0.25), (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5), (1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 0.5), (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 1.0, 0.5, 0.5, 1.0), (1.0, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0), (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 0.5, 1.0, 1.0), (1.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5), (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0), (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 1.0), (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5), (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0), (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5), (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5, 0.5), (1.0, 0.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.0, 0.5, 0.0),\n (1.0, 0.0, 0.5, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0, 0.0),\n (1.0, 0.0, 0.5, 0.5, 0.0), (1.0, 0.5, 0.0, 0.5, 0.5),\n (1.0, 0.5, 0.0, 0.0, 0.5), (1.0, 0.5, 0.0, 0.0, 0.0),\n (1.0, 0.5, 0.0, 0.5, 0.0), (1.0, 0.5, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25, 0.25), (1.0, 1.0, 0.0, 0.5, 0.5),\n (1.0, 1.0, 0.0, 0.0, 0.5), (1.0, 1.0, 0.0, 0.5, 0.0),\n (1.0, 1.0, 0.5, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0, 0.0),\n (1.0, 1.0, 0.5, 0.5, 0.0), (0.5, 1.0, 0.0, 0.5, 0.5),\n (0.5, 1.0, 0.0, 0.0, 0.5), (0.5, 1.0, 0.0, 0.0, 0.0),\n (0.5, 1.0, 0.0, 0.5, 0.0), (0.5, 1.0, 0.5, 0.0, 0.5),\n (0.5, 1.0, 0.5, 0.0, 0.0), (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25, 0.25), (1.0, 1.0, 1.0, 0.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.0), (1.0, 0.5, 1.0, 0.0, 0.5),\n (1.0, 0.5, 1.0, 0.0, 0.0), (1.0, 0.5, 1.0, 0.5, 0.0),\n (0.5, 1.0, 1.0, 0.0, 0.5), (0.5, 1.0, 1.0, 0.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0), (0.5, 0.5, 1.0, 0.0, 0.5),\n (0.5, 0.5, 1.0, 0.0, 0.0), (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.75, 0.25, 0.25), (1.0, 
1.0, 0.5, 1.0, 0.0),\n (1.0, 0.5, 1.0, 1.0, 0.0), (1.0, 0.5, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 1.0, 0.0), (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 0.5, 1.0, 1.0, 0.0), (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.75, 0.25), (1.0, 1.0, 0.5, 0.0, 1.0),\n (1.0, 0.5, 1.0, 0.0, 1.0), (1.0, 0.5, 0.5, 0.0, 1.0),\n (0.5, 1.0, 1.0, 0.0, 1.0), (0.5, 1.0, 0.5, 0.0, 1.0),\n (0.5, 0.5, 1.0, 0.0, 1.0), (0.5, 0.5, 0.5, 0.0, 1.0),\n (0.75, 0.75, 0.75, 0.25, 0.75), (1.0, 1.0, 0.0, 1.0, 0.5),\n (1.0, 0.5, 0.0, 1.0, 0.5), (1.0, 0.5, 0.0, 1.0, 0.0),\n (0.5, 1.0, 0.0, 1.0, 0.5), (0.5, 1.0, 0.0, 1.0, 0.0),\n (0.5, 0.5, 0.0, 1.0, 0.5), (0.5, 0.5, 0.0, 1.0, 0.0),\n (0.75, 0.75, 0.25, 0.75, 0.25), (1.0, 1.0, 0.0, 0.5, 1.0),\n (1.0, 0.5, 0.0, 1.0, 1.0), (1.0, 0.5, 0.0, 0.5, 1.0),\n (0.5, 1.0, 0.0, 1.0, 1.0), (0.5, 1.0, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0, 1.0), (0.5, 0.5, 0.0, 0.5, 1.0),\n (0.75, 0.75, 0.25, 0.75, 0.75), (1.0, 0.5, 0.0, 0.0, 1.0),\n (0.5, 1.0, 0.0, 0.0, 1.0), (0.5, 0.5, 0.0, 0.0, 1.0),\n (0.75, 0.75, 0.25, 0.25, 0.75), (1.0, 0.0, 1.0, 0.5, 0.5),\n (1.0, 0.0, 1.0, 0.0, 0.5), (1.0, 0.0, 1.0, 0.5, 0.0),\n (0.5, 0.0, 1.0, 0.5, 0.5), (0.5, 0.0, 1.0, 0.0, 0.5),\n (0.5, 0.0, 1.0, 0.0, 0.0), (0.5, 0.0, 1.0, 0.5, 0.0),\n (0.75, 0.25, 0.75, 0.25, 0.25), (1.0, 0.0, 1.0, 1.0, 0.5),\n (1.0, 0.0, 0.5, 1.0, 0.5), (1.0, 0.0, 0.5, 1.0, 0.0),\n (0.5, 0.0, 1.0, 1.0, 0.5), (0.5, 0.0, 1.0, 1.0, 0.0),\n (0.5, 0.0, 0.5, 1.0, 0.5), (0.5, 0.0, 0.5, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.75, 0.25), (1.0, 0.0, 1.0, 0.5, 1.0),\n (1.0, 0.0, 0.5, 1.0, 1.0), (1.0, 0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 1.0, 1.0, 1.0), (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0, 1.0), (0.5, 0.0, 0.5, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75, 0.75), (1.0, 0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 1.0, 0.0, 1.0), (0.5, 0.0, 0.5, 0.0, 1.0),\n (0.75, 0.25, 0.75, 0.25, 0.75), (1.0, 0.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 0.0, 1.0, 0.5), (0.5, 0.0, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.25, 0.75, 0.25), (1.0, 0.0, 0.0, 0.5, 1.0),\n (0.5, 0.0, 0.0, 1.0, 1.0), (0.5, 0.0, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.5), (0.0, 1.0, 0.0, 0.0, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.0), (0.0, 1.0, 0.5, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0, 0.0), (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.25, 0.75, 0.25, 0.25, 0.25), (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.0, 0.5), (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5), (0.0, 0.5, 1.0, 0.0, 0.5),\n (0.0, 0.5, 1.0, 0.0, 0.0), (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.25, 0.75, 0.75, 0.25, 0.25), (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5), (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5), (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.5), (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.75, 0.25), (0.0, 1.0, 1.0, 0.5, 1.0),\n (0.0, 1.0, 0.5, 1.0, 1.0), (0.0, 1.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 1.0, 1.0, 1.0), (0.0, 0.5, 1.0, 0.5, 1.0),\n (0.0, 0.5, 0.5, 1.0, 1.0), (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75, 0.75), (0.0, 1.0, 0.5, 0.0, 1.0),\n (0.0, 0.5, 1.0, 0.0, 1.0), (0.0, 0.5, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.75, 0.25, 0.75), (0.0, 1.0, 0.0, 1.0, 0.5),\n (0.0, 0.5, 0.0, 1.0, 0.5), (0.0, 0.5, 0.0, 1.0, 0.0),\n (0.25, 0.75, 0.25, 0.75, 0.25), (0.0, 1.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0, 1.0), (0.0, 0.5, 0.0, 0.5, 1.0),\n (0.25, 0.75, 0.25, 0.75, 0.75), (0.0, 0.5, 0.0, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.0, 0.5), (0.0, 0.0, 1.0, 0.5, 0.0),\n (0.25, 0.25, 
0.75, 0.25, 0.25), (0.0, 0.0, 1.0, 1.0, 0.5),\n (0.0, 0.0, 0.5, 1.0, 0.5), (0.0, 0.0, 0.5, 1.0, 0.0),\n (0.25, 0.25, 0.75, 0.75, 0.25), (0.0, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.0, 0.5, 1.0, 1.0), (0.0, 0.0, 0.5, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75, 0.75), (0.0, 0.0, 0.5, 0.0, 1.0),\n (0.25, 0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(1, 1, 1, 1, 1): [(1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0),\n (1.0, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0.75, 0.75, 0.75, 0.75, 0.75),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0),\n (0.5, 1.0, 0.5, 1.0, 1.0)],\n (0.25, 0.75, 0.75, 0.75, 0.25): [(0.5, 1.0, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0, 1, 1, 1, 0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (\n 0.5, 0.5, 1.0, 0.5, 0.5)],\n (0.0, 0.0, 1.0, 0.5, 1.0): [(0.5, 0.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.0, 0.0, 0.5, 0.5, 1.0),\n (0, 0, 1, 1, 1),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.5, 1.0, 0.5, 1.0),\n (0, 0, 1, 0, 1),\n (0.5, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.25, 0.25, 0.75, 0.75, 0.75),\n (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (\n 0.25, 0.25, 0.75, 0.25, 0.75)]}\n\n init_triangulation(5, 1, check, nn_checks)",
"def test_3D_m6_2k():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_3D_m6_1k():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_1k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_3D_m4_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def magnetic_reynolds(uu, param, grid, aa=list(), bb=list(), jj=list(),\n nghost=3, lmix=True):\n if len(bb) ==0 and len(aa) ==0 and len(jj) ==0:\n print('magnetic_reynolds WARNING: no aa, bb nor jj provided\\n'+\n 'aa or bb must be provided or aa for only hyper resistivity') \n #resistive force\n lres, lhyper3 = False, False\n for iresi in param.iresistivity:\n iresi = str.strip(iresi,'\\n')\n if 'hyper' not in iresi and len(iresi) > 0:\n lres = True\n if 'hyper3' in iresi:\n lhyper3 = True\n fresi = np.zeros_like(uu)\n if lres:\n if lhyper3:\n lhyper3 = lhyper3==lmix\n if len(jj) == 0:\n if len(aa) == 0:\n print('magnetic_reynolds WARNING: calculating jj without aa\\n',\n 'provide aa or jj directly for accurate boundary values')\n jj = curl(bb,grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y, \n coordinate_system=param.coord_system)\n else:\n jj = curl2(aa,grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y, \n coordinate_system=param.coord_system)\n for j in range(0,3):\n jj[j, :nghost,:,:] = jj[j,-2*nghost:-nghost,:,:]\n jj[j,-nghost:,:,:] = jj[j, nghost: 2*nghost,:,:]\n jj[j,:, :nghost,:] = jj[j,:,-2*nghost:-nghost,:]\n jj[j,:,-nghost:,:] = jj[j,:, nghost: 2*nghost,:]\n jj[j,:,:, :nghost] = jj[j,:,:,-2*nghost:-nghost]\n jj[j,:,:,-nghost:] = jj[j,:,:, nghost: 2*nghost]\n fresi = fresi + param.eta*param.mu0*jj\n for iresi in param.iresistivity:\n iresi = str.strip(iresi,'\\n')\n if 'eta-const' not in iresi and 'hyper' not in iresi\\\n and len(iresi) > 0:\n print('magnetic_reynolds WARNING: '+iresi+' not implemented\\n'+\n 'terms may be missing from the standard resistive forces')\n if lhyper3:\n if len(aa) == 0:\n print('magnetic_reynolds WARNING: no aa provided\\n'+\n 'aa must be provided for hyper resistivity')\n return 1\n else:\n del6a = np.zeros_like(aa)\n for j in range(0,3):\n del6a[j] = del6(aa[j],grid.dx,grid.dy,grid.dz)\n del6a[j, :nghost,:,:] = del6a[j,-2*nghost:-nghost,:,:]\n del6a[j,-nghost:,:,:] = del6a[j, nghost: 2*nghost,:,:]\n del6a[j,:, :nghost,:] = del6a[j,:,-2*nghost:-nghost,:]\n del6a[j,:,-nghost:,:] = del6a[j,:, nghost: 2*nghost,:]\n del6a[j,:,:, :nghost] = del6a[j,:,:,-2*nghost:-nghost]\n del6a[j,:,:,-nghost:] = del6a[j,:,:, nghost: 2*nghost]\n #del6 for non-cartesian tba\n #del6a[j] = del6(aa[j],grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y,\n # coordinate_system=param.coord_system)\n #effective at l > 5 grid.dx? \n fresi = fresi + param.eta_hyper3*del6a\n del(del6a)\n fresi2 = np.sqrt(dot2(fresi))\n del(fresi)\n #advective force\n if len(bb) == 0:\n if len(aa) == 0:\n print('magnetic_reynolds WARNING: calculating uu x bb without bb\\n',\n 'provide aa or bb directly to proceed')\n return 1\n else:\n bb = curl(aa,grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y, \n coordinate_system=param.coord_system)\n for j in range(0,3):\n bb[j, :nghost,:,:] = bb[j,-2*nghost:-nghost,:,:]\n bb[j,-nghost:,:,:] = bb[j, nghost: 2*nghost,:,:]\n bb[j,:, :nghost,:] = bb[j,:,-2*nghost:-nghost,:]\n bb[j,:,-nghost:,:] = bb[j,:, nghost: 2*nghost,:]\n bb[j,:,:, :nghost] = bb[j,:,:,-2*nghost:-nghost]\n bb[j,:,:,-nghost:] = bb[j,:,:, nghost: 2*nghost]\n advec = cross(uu,bb)\n advec2 = np.sqrt(dot2(advec))\n del(advec)\n #avoid division by zero\n if fresi2.max() > 0:\n fresi2[np.where(fresi2==0)] = fresi2[np.where(fresi2>0)].min()\n Rm = advec2/fresi2\n #set minimum floor to exclude zero-valued Rm \n if Rm.max() > 0:\n Rm[np.where(Rm==0)] = Rm[np.where(Rm>0)].min()\n else:\n print('Rm undefined')\n else:\n Rm = advec2\n print('Rm undefined')\n return Rm",
"def test_3D_m4_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_3D_m8_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def subdivision(mesh):\n\t\n\t\n\t# 1. generate new nodes in the centre of quad\n\t# 1/4 o-------o 1/4 o: existing vertices\n\t# | | *: newly-generated vertices\n\t# | * |\n\t# | |\n\t# 1/4 o-------o 1/4\n\n\tnew_coor = mesh.give_nodes().give_coor()\n\t\n\tfor face_index in range(mesh.give_model_inf()[2]): \n\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\tfor vertex_index in range(4):\n\t\t\tmesh.give_faces()\n\t\t\tnode_index = mesh.give_faces().give_node_list(face_index)[vertex_index]\n\n\t\t\tnew_x += 0.25*mesh.give_nodes().give_coor(node_index)[0]\n\t\t\tnew_y += 0.25*mesh.give_nodes().give_coor(node_index)[1]\n\t\t\tnew_z += 0.25*mesh.give_nodes().give_coor(node_index)[2]\n\t\t\t\n\t\tnew_coor.append((new_x, new_y, new_z))\n\t\t\n\t# generating new nodes on the edge\n\t# figure out one edge is shared by how many surfaces\n\tedge_shared_by_faces_list = helper.find_edge_shared_by_which_faces(mesh.give_edges(), mesh.give_faces())\n\t\n\tfor edge_index in range(mesh.give_model_inf()[1]):\n\n\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\n\t# 2. generate new node on boundary edge\n\t# o: existing vertices\n\t# 1/2 o---*---o 1/2 *: newly-generated vertices\n\t# \n\n\t\tnew_coor = mesh.give_nodes().give_coor()\n\t\tif len(edge_shared_by_faces_list[edge_index]) == 1:\t\n\t\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\tfor vertex_index in range(2):\n\t\t\t\tthis_node = mesh.give_edges().give_node(edge_index)[vertex_index]\n\t\t\t\tnew_x += 0.5*mesh.give_nodes().give_coor()[this_node][0]\n\t\t\t\tnew_y += 0.5*mesh.give_nodes().give_coor()[this_node][1]\n\t\t\t\tnew_z += 0.5*mesh.give_nodes().give_coor()[this_node][2]\n\t\t\t\t\n\t\t\tnew_coor.append((new_x, new_y, new_z))\n\t\t\t\t\n\t# 3. generate new node on interior edge\n\t# 1/16 o-------o 1/16 o: existing vertices\n\t# | | *: newly-generated vertices\n\t# 3/8 o---*---o 3/8\n\t# | |\n\t# 1/16 o-------o 1/16\n\n\t\telse:\n\t\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\tconsidered_node = []\n\t\t\tfor vertex_index in range(2):\n\t\t\t\tthis_node = mesh.give_edges().give_node(edge_index)[vertex_index]\n\t\t\t\tconsidered_node.append(this_node)\n\t\t\t\tnew_x += 3./8.*mesh.give_nodes().give_coor()[this_node][0]\n\t\t\t\tnew_y += 3./8.*mesh.give_nodes().give_coor()[this_node][1]\n\t\t\t\tnew_z += 3./8.*mesh.give_nodes().give_coor()[this_node][2]\n\t\t\t\n\t\t\t# faces contain this node\n\t\t\tpotential_node = []\n\t\t\tfor face_index in edge_shared_by_faces_list[edge_index]:\t\t\n\t\t\t\tfor vertex_index in range(4):\n\t\t\t\t\t\tpotential_node.append(mesh.give_faces().give_node_list(face_index)[vertex_index])\n\t\t\t\n\t\t\touter_node = []\n\t\t\tfor node in potential_node:\n\t\t\t\tif (node not in considered_node) & (node not in outer_node):\n\t\t\t\t\touter_node.append(node)\n\t\t\t\t\t\n\t\t\tfor vertex_index in outer_node:\n\t\t\t\tnew_x += 1./16.*mesh.give_nodes().give_coor()[vertex_index][0]\n\t\t\t\tnew_y += 1./16.*mesh.give_nodes().give_coor()[vertex_index][1]\n\t\t\t\tnew_z += 1./16.*mesh.give_nodes().give_coor()[vertex_index][2]\n\t\t\t\n\t\t\tnew_coor.append((new_x, new_y, new_z))\n\n\t# update the links of edges and surfaces\n\tnew_edge_list = []\n\tnew_face_list = []\n\tfor face_index in range(mesh.give_model_inf()[2]):\n\t\told_node0 = mesh.give_faces().give_node_list(face_index)[0]\n\t\told_node1 = mesh.give_faces().give_node_list(face_index)[1]\n\t\told_node2 = mesh.give_faces().give_node_list(face_index)[2]\n\t\told_node3 = mesh.give_faces().give_node_list(face_index)[3]\n\t\t\n\t\told_edge0 = 
mesh.give_faces().give_edge_list(face_index)[0]\n\t\told_edge1 = mesh.give_faces().give_edge_list(face_index)[1]\n\t\told_edge2 = mesh.give_faces().give_edge_list(face_index)[2]\n\t\told_edge3 = mesh.give_faces().give_edge_list(face_index)[3]\n\t\t\n\t\tnew_node4 = old_edge0 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2] \n\t\tnew_node5 = old_edge1 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\n\t\tnew_node6 = old_edge2 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\n\t\tnew_node7 = old_edge3 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\t\n\t\tnew_node8 = mesh.give_model_inf()[0] + face_index\n\t\t\n\t\tif helper.in_list((old_node0, new_node4), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node0, new_node4))\n\t\tif helper.in_list((new_node4, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node4, new_node8))\n\t\tif helper.in_list((new_node8, new_node7), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node8, new_node7))\n\t\tif helper.in_list((new_node7, old_node0), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node7, old_node0))\n\t\tif helper.in_list((new_node4, old_node1), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node4, old_node1))\n\t\tif helper.in_list((old_node1, new_node5), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node1, new_node5))\n\t\tif helper.in_list((new_node5, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node5, new_node8))\n\t\tif helper.in_list((new_node7, old_node3), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node7, old_node3))\n\t\tif helper.in_list((old_node3, new_node6), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node3, new_node6))\n\t\tif helper.in_list((new_node6, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node6, new_node8))\n\t\tif helper.in_list((new_node6, old_node2), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node6, old_node2))\n\t\tif helper.in_list((old_node2, new_node5), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node2, new_node5))\n\t\n\t\tnew_face_list.append((old_node0, new_node4, new_node8, new_node7))\n\t\tnew_face_list.append((new_node4, old_node1, new_node5, new_node8))\n\t\tnew_face_list.append((new_node7, new_node8, new_node6, old_node3))\n\t\tnew_face_list.append((new_node8, new_node5, old_node2, new_node6))\n\t\t\n\tnew_edges = geo.Edge(new_edge_list)\n\t\n\tnew_faces = geo.Face(new_face_list, new_edges)\n\t\t\n\t# update existing nodes\t\n\tfor node_index in range(mesh.give_model_inf()[0]):\n\t\t\n\t\tring1, ring2 = helper.find_neighbour_node(new_edges, new_faces, node_index)\n\t\tvalence = helper.find_valence(node_index, new_faces) \n\t\t#: valence: the number of faces sharing on specific edge\n\n\t# 4. 
update existing corner vertex\n\t# 2/4 @---* 1/4 *: newly-generated vertices\n\t# | | @: existing vertices to be updated\n\t# 1/4 *---* 0 The higher mask values on neighbouring vertices, \n\t# the more likely a square mesh will be refined into a sphere.\n\t \n\t\tif valence == 1:\n\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tprint\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tnew_x += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\tnew_y += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\tnew_z += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\n\t\t\tfor node_in_ring2 in ring2:\n\t\t\t\tnew_x += 0.*mesh.give_nodes().give_coor()[node_in_ring2][0]\n\t\t\t\tnew_y += 0.*mesh.give_nodes().give_coor()[node_in_ring2][1]\n\t\t\t\tnew_z += 0.*mesh.give_nodes().give_coor()[node_in_ring2][2]\n\t\t\t\t\n\t\t\tnew_x += 2./4.*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += 2./4.*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += 2./4.*mesh.give_nodes().give_coor()[node_index][2]\n\n\t# 5. update existing boundary joint vertex\n\t# 3/4\n\t# 1/8 *---*---* 1/8 *: newly-generated vertices\n\t# | | | @: existing vertices to be updated\n\t# 0 *---*---* 0\n\n\t\telif valence == 2:\n\t\t\t\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tif helper.find_valence(node_in_ring1, new_faces) <= 2: \n\t\t\t\t\tnew_x += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\t\tnew_y += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\t\tnew_z += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\t\t\t\t\t\n\t\t\tnew_x += 3./4.*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += 3./4.*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += 3./4.*mesh.give_nodes().give_coor()[node_index][2]\n\t\n\t# 6. update new node on interior edge\n\t# * r/k\n\t# /\\ b/k*\n\t# *__/ \\___ r/k\n\t# \\ \\ /¬¬/ *: newly-generated vertices: \n\t# \\ \\/ / b = 3/2/valence, r = 1/4/valence\n\t# *--@--* b/k\t @: existing vertices to be updated: 1-b-r\t\t\n\t# / /\\ \\\n\t# /__/ \\__\\\n\t# * \\ / * r/k\n\t# \\/\n\t\t\n\t\telse:\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tbeta = 3./2./valence\n\t\t\tgamma = 1./4./valence\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tnew_x += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\tnew_y += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\tnew_z += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\t\t\t\n\t\t\tfor node_in_ring2 in ring2:\n\t\t\t\tnew_x += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][0]\n\t\t\t\tnew_y += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][1]\n\t\t\t\tnew_z += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][2]\n\t\t\t\n\t\t\tnew_x += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][2]\n\t\t\n\t\tnew_coor[node_index] = (new_x, new_y, new_z)\n\t\n\tnew_nodes = geo.Node(new_coor)\n\t\n\tmesh.update(new_nodes, new_edges, new_faces)\n\t\n\t# return new_mesh\n\treturn mesh",
"def init_stitch(N):\n\tif N is None:\n\t\tN = np.int32(HII_DIM) #prepare for stitching\n\tMETA_GRID_SIZE = DIM/N\n\tM = np.int32(HII_DIM/META_GRID_SIZE)\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN/DIM)\n\tprint 'scale', scale\n\tHII_scale = np.float32(BOX_LEN/HII_DIM)\n\tshape = (DIM,DIM,N)\n\tstitch_grid_size = (DIM/(block_size[0]),\n\t\t\t\t\t\tDIM/(block_size[0]),\n\t\t\t\t\t\tN/(block_size[0]))\n\tHII_stitch_grid_size = (HII_DIM/(block_size[0]),\n\t\t\t\t\t\tHII_DIM/(block_size[0]),\n\t\t\t\t\t\tM/(block_size[0]))\n\t#ratio of large box to small size\n\tkernel_source = open(cmd_folder+\"/initialize_stitch.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'DIM': DIM, \n\t\t'VOLUME': VOLUME,\n\t\t'META_BLOCKDIM': N\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_stitch = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\tplan2d = Plan((np.int64(DIM), np.int64(DIM)), dtype=np.complex64)\n\tplan1d = Plan((np.int64(DIM)), dtype=np.complex64)\n\tprint \"init pspec\"\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\t#hbox_large = pyfftw.empty_aligned((DIM, DIM, DIM), dtype='complex64')\n\thbox_large = np.zeros((DIM, DIM, DIM), dtype=np.complex64)\n\t#hbox_small = np.zeros(HII_shape, dtype=np.float32)\n\t#hbox_large = n\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\n\t# Set up pinned memory for transfer\n\t#largebox_hs = cuda.aligned_empty(shape=shape, dtype=np.float32, alignment=resource.getpagesize())\n\tlargebox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.float32)\n\tlargecbox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.complex64)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tprint \"init boxes\"\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t# MRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=meta_x*N**3)\n\t\tinit_stitch(largebox_d, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tinit_stitch(largebox_d_imag, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largecbox_pin.copy()\n\t#if want to get velocity need to use this\n\tif True:\n\t\tprint \"saving kbox\"\n\t\tnp.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\n\tprint \"Executing FFT on device\"\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint hbox_large.dtype\n\tprint \"Finished FFT on device\"\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\t\n\tif True:\n\t\tprint \"loading kbox\"\n\t\thbox_large = 
np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\t#cuda.memcpy_htod_async(largebox_d, largebox_pin)\n\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tHII_filter(largebox_d, DIM, np.int32(meta_z), ZERO, smoothR, block=block_size, grid=stitch_grid_size);\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largebox_d.get_async()\n\t#import IPython; IPython.embed()\n\tprint \"Executing FFT on host\"\n\t#hbox_large = hifft(hbox_large).astype(np.complex64).real\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint \"Finished FFT on host\"\n\t#import IPython; IPython.embed()\n\n\t# for meta_x in xrange(META_GRID_SIZE):\n\t# \tfor meta_y in xrange(META_GRID_SIZE):\n\t# \t\tfor meta_z in xrange(META_GRID_SIZE):\n\t# \t\t\tlargebox_d = gpuarray.to_gpu(hbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N])\n\t# \t\t\tHII_filter(largebox_d, N, np.int32(meta_x), np.int32(meta_y), np.int32(meta_z), ZERO, smoothR, block=block_size, grid=grid_size);\n\t# \t\t\thbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N] = largebox_d.get()\n\t#plan = Plan(shape, dtype=np.complex64)\n\t#plan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\n\n\t# This saves a large resolution deltax\n\n\t\n\tprint \"downsampling\"\n\tsmallbox_d = gpuarray.zeros((HII_DIM,HII_DIM,M), dtype=np.float32)\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\t#largebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tlargebox_d /= scale**3 #\n\t\tsubsample_kernel(largebox_d, smallbox_d, DIM, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size) #subsample in real space\n\t\thbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallbox_d.get_async()\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), hbox_small)\n\t#import IPython; IPython.embed()\n\n\n\t# To get velocities: reload the k-space box\n\thbox_large = np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\thvbox_large = np.zeros((DIM, DIM, DIM), dtype=np.float32)\n\thvbox_small = np.zeros(HII_shape, dtype=np.float32)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,N), dtype=np.complex64)\n\tsmallvbox_d = gpuarray.zeros((HII_DIM, HII_DIM, M), dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\t\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(meta_z), np.int32(num), block=block_size, grid=stitch_grid_size)\n\t\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=stitch_grid_size)\n\t\t\tprint hvbox_large.shape, largevbox_d.shape\n\t\t\thvbox_large[:, :, meta_z*N:(meta_z+1)*N] = largevbox_d.get_async()\n\t\thvbox_large = fft_stitch(N, plan2d, plan1d, hvbox_large, largevbox_d).real\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargevbox_d = 
gpuarray.to_gpu_async(hvbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\tsubsample_kernel(largevbox_d.real, smallvbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size)\n\t\t\thvbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallvbox_d.get_async()\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallvbox_d.get())\n\n\treturn",
"def init():\n\tN = np.int32(DIM) #prepare for stitching\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN)/DIM\n\tHII_scale = np.float32(BOX_LEN)/HII_DIM\n\tshape = (N,N,N)\n\t\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\n\tkernel_source = open(cmd_folder+\"/initialize.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'VOLUME': VOLUME,\n\t\t'DIM': DIM\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_kernel = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tadj_complex_conj = main_module.get_function(\"adj_complex_conj\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d, np.int32(DIM), block=block_size, grid=grid_size)\n\n\t#import IPython; IPython.embed()\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d_imag, np.int32(DIM), block=block_size, grid=grid_size)\n\n\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\n\t#adj_complex_conj(largebox_d, DIM, block=block_size, grid=grid_size)\n\tlargebox = largebox_d.get()\n\t#np.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox)\n\n\t#save real space box before smoothing\n\tplan = Plan(shape, dtype=np.complex64)\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox_d.real.get_async())\n\n\t#save real space box after smoothing and subsampling\n\t# host largebox is still in k space, no need to reload from disk\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tHII_filter(largebox_d, N, ZERO, smoothR, block=block_size, grid=grid_size);\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tsubsample_kernel(largebox_d.real, smallbox_d, N, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_grid_size) #subsample in real space\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), smallbox_d.get_async())\n\n\t# reload the k-space box for velocity boxes\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\t\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,DIM), dtype=np.complex64)\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(num), block=block_size, grid=grid_size)\n\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=grid_size)\n\t\tplan.execute(largevbox_d, inverse=True)\n\t\tlargevbox_d /= scale**3\n\t\t#import IPython; IPython.embed()\n\t\tsubsample_kernel(largevbox_d.real, 
smallbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_grid_size)\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallbox_d.get())\n\n\treturn",
"def test_3D_m8_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_spatial_smoothing_xesmf_reduce_spatial_dims_MPI_curv(\r\n PM_ds_control_3d_full,\r\n):\r\n da = PM_ds_control_3d_full\r\n step = 5\r\n actual = spatial_smoothing_xesmf(\r\n da,\r\n d_lon_lat_kws={\"lon\": step},\r\n )\r\n expected_lat_size = 180 // step\r\n assert actual[\"lon\"].size < da.lon.size\r\n assert actual[\"lat\"].size == expected_lat_size",
"def test_reconstruction_against_simulation(subarray_and_event_gamma_off_axis_500_gev):\n\n # 4-LST bright event already calibrated\n # we'll clean it and parametrize it again in the TelescopeFrame\n subarray, event = subarray_and_event_gamma_off_axis_500_gev\n\n # define reconstructor\n reconstructor = HillasReconstructor(subarray)\n\n hillas_dict = {}\n telescope_pointings = {}\n\n for tel_id, dl1 in event.dl1.tel.items():\n\n telescope_pointings[tel_id] = SkyCoord(\n alt=event.pointing.tel[tel_id].altitude,\n az=event.pointing.tel[tel_id].azimuth,\n frame=AltAz(),\n )\n\n geom_CameraFrame = subarray.tel[tel_id].camera.geometry\n\n # this could be done also out of this loop,\n # but in case of real data each telescope would have a\n # different telescope_pointing\n geom_TelescopeFrame = geom_CameraFrame.transform_to(\n TelescopeFrame(telescope_pointing=telescope_pointings[tel_id])\n )\n\n mask = tailcuts_clean(\n geom_TelescopeFrame,\n dl1.image,\n picture_thresh=5.0,\n boundary_thresh=2.5,\n keep_isolated_pixels=False,\n min_number_picture_neighbors=2,\n )\n\n try:\n hillas_dict[tel_id] = hillas_parameters(\n geom_TelescopeFrame[mask], dl1.image[mask]\n )\n\n # the original event is created from a\n # pytest fixture with \"session\" scope, so it's always the same\n # and if we used the same event we would overwrite the image\n # parameters for the next tests, thus causing their failure\n test_event = deepcopy(event)\n test_event.dl1.tel[tel_id].parameters = ImageParametersContainer()\n test_event.dl1.tel[tel_id].parameters.hillas = hillas_dict[tel_id]\n\n except HillasParameterizationError as e:\n print(e)\n continue\n\n # Get shower geometry\n reconstructor(event)\n # get the result from the correct DL2 container\n result = event.dl2.stereo.geometry[\"HillasReconstructor\"]\n\n # get the reconstructed coordinates in the sky\n reco_coord = SkyCoord(alt=result.alt, az=result.az, frame=AltAz())\n # get the simulated coordinates in the sky\n true_coord = SkyCoord(\n alt=event.simulation.shower.alt, az=event.simulation.shower.az, frame=AltAz()\n )\n\n # check that we are not more far than 0.1 degrees\n assert reco_coord.separation(true_coord) < 0.1 * u.deg",
"def test_2D_m6_1k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2_layer():\r\n # angular frequency in radians * THz\r\n w = 100 * nu.THz\r\n # Relative permittivity of metal and dielectric\r\n em = -4.56 + 0.12j\r\n ed = 1.23 + 0.01j\r\n ex_list = ez_list = [ed, em]\r\n # Relative permeabilities\r\n mu_list = [1,1]\r\n # Dictionary of input parameters\r\n input_params = {'w': w, 'd_list': [inf,inf], 'ex_list': ex_list,\r\n 'ez_list': ez_list, 'mu_list': mu_list}\r\n \r\n # Calculate the theoretical kx\r\n theo_kx = (w / nu.c0) * cmath.sqrt((em * ed) / (em + ed))\r\n if theo_kx.imag < 0:\r\n theo_kx *= -1\r\n print('Theoretical kx:',\r\n '(%.7g+%.7gj) rad/um' % (theo_kx.real / nu.um**-1, theo_kx.imag / nu.um**-1))\r\n \r\n # If I use the theoretical kx value, the mode should be correct and\r\n # all my tests should pass.\r\n params = deepcopy(input_params)\r\n params['kx'] = theo_kx\r\n params = find_all_params_from_kx(params)\r\n kzd, kzm = params['kz_list']\r\n # check that kz_list is correct\r\n assert_floats_are_equal(kzd**2, (w**2 / nu.c0**2) * ed**2 / (em + ed))\r\n assert_floats_are_equal(kzm**2, (w**2 / nu.c0**2) * em**2 / (em + ed))\r\n # check that layer_bottom_list is correct\r\n assert params['layer_bottom_list'][0] == -inf\r\n assert params['layer_bottom_list'][1] == 0\r\n # Check that the boundary condition matrix agrees with hand-calculation\r\n bc_mat = bc_matrix(params)\r\n # ...top-left is Ex0down / H0down\r\n assert_floats_are_equal(bc_mat[0,0], -kzd / (w * ed * nu.eps0))\r\n # ...top-right is -Ex1up / H1up\r\n assert_floats_are_equal(bc_mat[0,1], -kzm / (w * em * nu.eps0))\r\n # ...bottom-left is eps0 * Ez0down / H0down\r\n assert_floats_are_equal(bc_mat[1,0], ed * -theo_kx / (w * ed * nu.eps0))\r\n # ...bottom-right is -eps1 * Ez1up / H1up\r\n assert_floats_are_equal(bc_mat[1,1], -em * -theo_kx / (w * em * nu.eps0))\r\n # Check that one of the eigenvalues is almost zero (compared to the size\r\n # of the matrix elements).\r\n eigenvalues = np.linalg.eig(bc_mat)[0]\r\n assert abs(eigenvalues).min() / abs(bc_mat).max() < 1e-6\r\n # Check that the mode passes all tests.\r\n assert check_mode(params, thorough=True) is True\r\n # Check that I can scale the fields and it still passes all tests.\r\n params_scaled = rescale_fields(1.23+4.56j, params)\r\n assert check_mode(params_scaled, thorough=True) is True\r\n \r\n # Now try my kx-finding algorithm, to see if it finds the right value.\r\n kx_list = find_kx(input_params)\r\n print('kx_list:',\r\n ['(%.7g+%.7gj) rad/um' % (kx.real / nu.um**-1, kx.imag / nu.um**-1)\r\n for kx in kx_list])\r\n kx = kx_list[0]\r\n assert_floats_are_equal(theo_kx, kx)\r\n \r\n plot_mode(params)\r\n \r\n print('If you see this message, all the tests succeeded!!')",
"def test_power_spectral_density_from_spatially_resolved_magnetisation_confined_to_mesh_region(tmpdir, debug=False):\n os.chdir(str(tmpdir))\n RTOL = 1e-10\n\n H1 = 1e6 # external field in A/m\n alpha1 = 0.5 # some sort of damping constant\n omega1 = gamma * H1 # precession frequency\n\n H2 = 2.8e4 # external field in A/m\n alpha2 = 0.3 # some sort of damping constant\n omega2 = gamma * H2 # precession frequency\n\n ##\n # Step 1: Construct a time series of artificial magnetisation\n # data and save it to a bunch of .npy files.\n ##\n t_step = 1e-11\n t_ini = 0\n t_end = 10e-9\n\n N1 = 42 # in a real application this would be the number of mesh vertices\n N2 = 23 # in a real application this would be the number of mesh vertices\n fft_test_helpers.create_test_npy_files_with_two_regions(\n str(tmpdir), t_step, t_ini, t_end, omega1, alpha1, N1, omega2, alpha2, N2)\n\n ##\n # Step 2: compute the FFT of a resampled time series, both by\n # hand and using FFT_m.\n ##\n # XXX TODO: Resampling timesteps is not supported when using .npy\n # files. Either simplify the code below, or implement saving to\n # .h5 files so that it's easier to implement resampling for\n # spatially resolved data, too.\n ##\n t_step_res = t_step\n t_ini_res = t_ini\n t_end_res = t_end\n ts_resampled = np.arange(t_ini_res, t_end_res, t_step_res)\n\n # Compute time series based on resampled timesteps\n mx_res = exp(-ts_resampled * 1e8 / alpha1) * sin(omega1 * ts_resampled)\n my_res = exp(-ts_resampled * 1e8 / alpha1) * cos(omega1 * ts_resampled)\n mz_res = 1 - sqrt(mx_res ** 2 + my_res ** 2)\n\n # Compute 'analytical' Fourier transform of resampled time series and\n # determine the power of the spectrum for each component. We also need\n # to multiply by the number of mesh nodes because the numerical algorithm\n # sums up all contributions at the individual nodes (but we can just\n # multiply because they are all identical by construction).\n psd_mx_expected = N1 * np.absolute(np.fft.rfft(mx_res)) ** 2\n psd_my_expected = N1 * np.absolute(np.fft.rfft(my_res)) ** 2\n psd_mz_expected = N1 * np.absolute(np.fft.rfft(mz_res)) ** 2\n\n # Compute Fourier transform of resampled time series using FFT_m\n freqs_computed, psd_mx_computed, psd_my_computed, psd_mz_computed = \\\n compute_power_spectral_density('m_ringdown*.npy', t_step_res, t_ini=t_ini_res,\n t_end=t_end_res, subtract_values=None, restrict_to_vertices=xrange(N1))\n\n # Check that the analytically determined power spectra are the same as the\n # computed ones.\n assert(np.allclose(psd_mx_expected, psd_mx_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_my_expected, psd_my_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_mz_expected, psd_mz_computed, atol=0, rtol=RTOL))\n\n if debug:\n # Plot the spectra for debugging\n fig = plt.figure(figsize=(20, 5))\n ax = fig.gca()\n ax.plot(freqs_computed, psd_mx_expected, label='psd_mx_expected')\n ax.plot(freqs_computed, psd_my_expected, label='psd_my_expected')\n ax.plot(freqs_computed, psd_mz_expected, label='psd_mz_expected')\n ax.plot(freqs_computed, psd_mx_computed, label='psd_mx_computed')\n ax.plot(freqs_computed, psd_my_computed, label='psd_my_computed')\n ax.plot(freqs_computed, psd_mz_computed, label='psd_mz_computed')\n ax.legend(loc='best')\n fig.savefig('psd_m_McMichaelStiles.png')",
"def test_2():\n\n n1 = 10\n n2 = 100\n ndim = 3\n\n semi_axes = np.random.random((n1,ndim))\n coords = np.array([sample_ellipsoidal_volume(n2, semi_axes[i]) for i in range(0,n1)])\n\n Is = iterative_inertia_tensors_3D(coords)\n\n assert np.shape(Is)==(n1,ndim,ndim)",
"def MeshMachine(main):\n\n # oDesign definition\n oDesign = main['ANSYS']['oDesign']\n\n # Data for the rotor mesh\n RotorName = main['ANSYS']['Rotor&Magnets']['Name'][0]\n RotorNumMaxElem = main['ANSYS']['Mesh']['Rotor']['NumMaxElem']\n RotorMaxLength = main['ANSYS']['Mesh']['Rotor']['MaxLength']\n\n # Data for the magnets mesh\n PMNames = main['ANSYS']['Rotor&Magnets']['PMNames']\n PMNumMaxElem = main['ANSYS']['Mesh']['Magnets']['NumMaxElem']\n PMMaxLength = main['ANSYS']['Mesh']['Magnets']['MaxLength']\n\n # Data for the Stator mesh\n StatorName = main['ANSYS']['Stator']['Name']\n StatorNormalDev = main['ANSYS']['Mesh']['Stator']['NormalDev']\n StatorAspectRatio = main['ANSYS']['Mesh']['Stator']['AspectRatio']\n\n # Data for the Stator mesh\n CoilNames = main['ANSYS']['Winding']['CoilNames']\n WindingNumMaxElem = main['ANSYS']['Mesh']['Winding']['NumMaxElem']\n WindingMaxLength = main['ANSYS']['Mesh']['Winding']['MaxLength']\n\n WindingName = []\n for phase in CoilNames:\n for direction in phase:\n WindingName += direction\n\n # Creating meshes\n oModule = oDesign.GetModule(\"MeshSetup\")\n\n # Rotor meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Rotor\",\n \"RefineInside:=\", True,\n \"Enabled:=\", True,\n \"Objects:=\", [RotorName],\n \"RestrictElem:=\", False,\n \"NumMaxElem:=\", str(RotorNumMaxElem),\n \"RestrictLength:=\", True,\n \"MaxLength:=\", str(RotorMaxLength)+\"mm\"\n ]\n )\n # Magnet meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Magnets\",\n \"RefineInside:=\", True,\n \"Enabled:=\", True,\n \"Objects:=\", PMNames,\n \"RestrictElem:=\", False,\n \"NumMaxElem:=\", str(PMNumMaxElem),\n \"RestrictLength:=\", True,\n \"MaxLength:=\", str(PMMaxLength)+\"mm\"\n ]\n )\n # Stator meshes\n oModule.AssignTrueSurfOp(\n [\n \"NAME:Stator\",\n \"Objects:=\", [StatorName],\n \"CurvedSurfaceApproxChoice:=\", \"ManualSettings\",\n \"SurfDevChoice:=\", 0,\n \"NormalDevChoice:=\", 2,\n \"NormalDev:=\", str(StatorNormalDev) + \"deg\",\n \"AspectRatioChoice:=\", 2,\n \"AspectRatio:=\", str(StatorAspectRatio)\n ]\n )\n\n # Coil meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Coils\",\n \"RefineInside:=\"\t, True,\n \"Enabled:=\"\t\t, True,\n \"Objects:=\"\t\t, WindingName,\n \"RestrictElem:=\"\t, False,\n \"NumMaxElem:=\"\t\t, str(WindingNumMaxElem),\n \"RestrictLength:=\"\t, True,\n \"MaxLength:=\"\t\t, str(WindingMaxLength) +\"mm\"\n ]\n )\n\n return main",
"def lpt_prototype(mesh,\n nc=FLAGS.nc,\n bs=FLAGS.box_size,\n batch_size=FLAGS.batch_size,\n a0=FLAGS.a0,\n a=FLAGS.af,\n nsteps=FLAGS.nsteps):\n\n stages = np.linspace(a0, a, nsteps, endpoint=True)\n klin = np.loadtxt('../flowpm/data/Planck15_a1p00.txt').T[0]\n plin = np.loadtxt('../flowpm/data/Planck15_a1p00.txt').T[1]\n ipklin = iuspline(klin, plin)\n\n # Define the named dimensions\n # Parameters of the small scales decomposition\n n_block_x = FLAGS.nx\n n_block_y = FLAGS.ny\n n_block_z = 1\n halo_size = FLAGS.hsize\n\n if halo_size >= 0.5 * min(nc // n_block_x, nc // n_block_y, nc // n_block_z):\n new_size = int(0.5 *\n min(nc // n_block_x, nc // n_block_y, nc // n_block_z))\n print('WARNING: REDUCING HALO SIZE from %d to %d' % (halo_size, new_size))\n halo_size = new_size\n\n # Parameters of the large scales decomposition\n downsampling_factor = FLAGS.dsample\n lnc = nc // 2**downsampling_factor\n\n #\n\n fx_dim = mtf.Dimension(\"nx\", nc)\n fy_dim = mtf.Dimension(\"ny\", nc)\n fz_dim = mtf.Dimension(\"nz\", nc)\n\n tfx_dim = mtf.Dimension(\"tx\", nc)\n tfy_dim = mtf.Dimension(\"ty\", nc)\n tfz_dim = mtf.Dimension(\"tz\", nc)\n\n # Dimensions of the low resolution grid\n x_dim = mtf.Dimension(\"nx_lr\", lnc)\n y_dim = mtf.Dimension(\"ny_lr\", lnc)\n z_dim = mtf.Dimension(\"nz_lr\", lnc)\n\n tx_dim = mtf.Dimension(\"tx_lr\", lnc)\n ty_dim = mtf.Dimension(\"ty_lr\", lnc)\n tz_dim = mtf.Dimension(\"tz_lr\", lnc)\n\n nx_dim = mtf.Dimension('nx_block', n_block_x)\n ny_dim = mtf.Dimension('ny_block', n_block_y)\n nz_dim = mtf.Dimension('nz_block', n_block_z)\n\n sx_dim = mtf.Dimension('sx_block', nc // n_block_x)\n sy_dim = mtf.Dimension('sy_block', nc // n_block_y)\n sz_dim = mtf.Dimension('sz_block', nc // n_block_z)\n\n k_dims = [tx_dim, ty_dim, tz_dim]\n\n batch_dim = mtf.Dimension(\"batch\", batch_size)\n pk_dim = mtf.Dimension(\"npk\", len(plin))\n pk = mtf.import_tf_tensor(mesh, plin.astype('float32'), shape=[pk_dim])\n\n # Compute necessary Fourier kernels\n kvec = flowpm.kernels.fftk((nc, nc, nc), symmetric=False)\n kx = mtf.import_tf_tensor(mesh,\n kvec[0].squeeze().astype('float32'),\n shape=[tfx_dim])\n ky = mtf.import_tf_tensor(mesh,\n kvec[1].squeeze().astype('float32'),\n shape=[tfy_dim])\n kz = mtf.import_tf_tensor(mesh,\n kvec[2].squeeze().astype('float32'),\n shape=[tfz_dim])\n kv = [ky, kz, kx]\n\n # kvec for low resolution grid\n kvec_lr = flowpm.kernels.fftk([lnc, lnc, lnc], symmetric=False)\n\n kx_lr = mtf.import_tf_tensor(mesh,\n kvec_lr[0].squeeze().astype('float32') /\n 2**downsampling_factor,\n shape=[tx_dim])\n ky_lr = mtf.import_tf_tensor(mesh,\n kvec_lr[1].squeeze().astype('float32') /\n 2**downsampling_factor,\n shape=[ty_dim])\n kz_lr = mtf.import_tf_tensor(mesh,\n kvec_lr[2].squeeze().astype('float32') /\n 2**downsampling_factor,\n shape=[tz_dim])\n kv_lr = [ky_lr, kz_lr, kx_lr]\n\n # kvec for high resolution blocks\n padded_sx_dim = mtf.Dimension('padded_sx_block',\n nc // n_block_x + 2 * halo_size)\n padded_sy_dim = mtf.Dimension('padded_sy_block',\n nc // n_block_y + 2 * halo_size)\n padded_sz_dim = mtf.Dimension('padded_sz_block',\n nc // n_block_z + 2 * halo_size)\n kvec_hr = flowpm.kernels.fftk([\n nc // n_block_x + 2 * halo_size, nc // n_block_y + 2 * halo_size,\n nc // n_block_z + 2 * halo_size\n ],\n symmetric=False)\n\n kx_hr = mtf.import_tf_tensor(mesh,\n kvec_hr[0].squeeze().astype('float32'),\n shape=[padded_sx_dim])\n ky_hr = mtf.import_tf_tensor(mesh,\n kvec_hr[1].squeeze().astype('float32'),\n shape=[padded_sy_dim])\n kz_hr = 
mtf.import_tf_tensor(mesh,\n kvec_hr[2].squeeze().astype('float32'),\n shape=[padded_sz_dim])\n kv_hr = [ky_hr, kz_hr, kx_hr]\n\n shape = [batch_dim, fx_dim, fy_dim, fz_dim]\n lr_shape = [batch_dim, x_dim, y_dim, z_dim]\n hr_shape = [batch_dim, nx_dim, ny_dim, nz_dim, sx_dim, sy_dim, sz_dim]\n part_shape = [batch_dim, fx_dim, fy_dim, fz_dim]\n\n # Begin simulation\n\n initc = mtfpm.linear_field(mesh, shape, bs, nc, pk, kv)\n\n # Reshaping array into high resolution mesh\n field = mtf.slicewise(lambda x: tf.expand_dims(\n tf.expand_dims(tf.expand_dims(x, axis=1), axis=1), axis=1), [initc],\n output_dtype=tf.float32,\n output_shape=hr_shape,\n name='my_reshape',\n splittable_dims=lr_shape[:-1] + hr_shape[1:4] +\n part_shape[1:3])\n\n for block_size_dim in hr_shape[-3:]:\n field = mtf.pad(field, [halo_size, halo_size], block_size_dim.name)\n\n for blocks_dim, block_size_dim in zip(hr_shape[1:4], field.shape[-3:]):\n field = mpm.halo_reduce(field, blocks_dim, block_size_dim, halo_size)\n\n field = mtf.reshape(field, field.shape + [mtf.Dimension('h_dim', 1)])\n high = field\n low = mesh_utils.downsample(field, downsampling_factor, antialias=True)\n\n low = mtf.reshape(low, low.shape[:-1])\n high = mtf.reshape(high, high.shape[:-1])\n\n for block_size_dim in hr_shape[-3:]:\n low = mtf.slice(low, halo_size // 2**downsampling_factor,\n block_size_dim.size // 2**downsampling_factor,\n block_size_dim.name)\n # Hack usisng custom reshape because mesh is pretty dumb\n low = mtf.slicewise(lambda x: x[:, 0, 0, 0], [low],\n output_dtype=tf.float32,\n output_shape=lr_shape,\n name='my_dumb_reshape',\n splittable_dims=lr_shape[:-1] + hr_shape[:4])\n\n state = mtfpm.lpt_init(\n low,\n high,\n 0.1,\n kv_lr,\n kv_hr,\n halo_size,\n hr_shape,\n lr_shape,\n part_shape[1:],\n downsampling_factor=downsampling_factor,\n antialias=True,\n )\n\n # Here we can run our nbody\n final_state = state #mtfpm.nbody(state, stages, lr_shape, hr_shape, k_dims, kv_lr, kv_hr, halo_size, downsampling_factor=downsampling_factor)\n\n # paint the field\n final_field = mtf.zeros(mesh, shape=hr_shape)\n for block_size_dim in hr_shape[-3:]:\n final_field = mtf.pad(final_field, [halo_size, halo_size],\n block_size_dim.name)\n final_field = mesh_utils.cic_paint(final_field, final_state[0], halo_size)\n # Halo exchange\n for blocks_dim, block_size_dim in zip(hr_shape[1:4], final_field.shape[-3:]):\n final_field = mpm.halo_reduce(final_field, blocks_dim, block_size_dim,\n halo_size)\n # Remove borders\n for block_size_dim in hr_shape[-3:]:\n final_field = mtf.slice(final_field, halo_size, block_size_dim.size,\n block_size_dim.name)\n\n #final_field = mtf.reshape(final_field, [batch_dim, fx_dim, fy_dim, fz_dim])\n # Hack usisng custom reshape because mesh is pretty dumb\n final_field = mtf.slicewise(lambda x: x[:, 0, 0, 0], [final_field],\n output_dtype=tf.float32,\n output_shape=[batch_dim, fx_dim, fy_dim, fz_dim],\n name='my_dumb_reshape',\n splittable_dims=part_shape[:-1] + hr_shape[:4])\n\n return initc, final_field\n\n ##",
"def refugia_adj_5_simsplit(params, ns):\n #20 parameters \n nu1a, nu2a, nu3a, nu1b, nu2b, nu3b, m2_12, m2_13, m2_21, m2_23, m2_31, m2_32, m3_12, m3_13, m3_21, m3_23, m3_31, m3_32, T2, T3 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2]) \n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function and migration matrix for T2\n nu_T2 = [nu1a, nu2a, nu3a]\n mig2 = numpy.array([[0, m2_12, m2_13],[m2_21, 0, m2_23], [m2_31, m2_32, 0]]) \n fs.integrate(nu_T2, T2, m=mig2)\n ## Population function and migration matrix for T3\n nu_T3 = [nu1b, nu2b, nu3b]\n mig3 = numpy.array([[0, m3_12, m3_13],[m3_21, 0, m3_23], [m3_31, m3_32, 0]]) \n fs.integrate(nu_T3, T3, m=mig3) \n return fs",
"def _apply_array_spatial12_halffilling(self, h1e: 'Nparray',\n h2e: 'Nparray') -> 'Nparray':\n if fqe.settings.use_accelerated_code:\n return self._apply_array_spatial12_lm(h1e, h2e)\n else:\n h1e = copy.deepcopy(h1e)\n h2e = numpy.moveaxis(copy.deepcopy(h2e), 1, 2) * (-1.0)\n norb = self.norb()\n for k in range(norb):\n h1e[:, :] -= h2e[:, k, k, :]\n\n if numpy.iscomplex(h1e).any() or numpy.iscomplex(h2e).any():\n dvec = self.calculate_dvec_spatial()\n out = numpy.einsum(\"ij,ijkl->kl\", h1e, dvec)\n dvec = numpy.einsum(\"ijkl,klmn->ijmn\", h2e, dvec)\n out += self._calculate_coeff_spatial_with_dvec(dvec)\n else:\n nij = norb * (norb + 1) // 2\n h1ec = numpy.zeros((nij), dtype=self._dtype)\n h2ec = numpy.zeros((nij, nij), dtype=self._dtype)\n for i in range(norb):\n for j in range(i + 1):\n ijn = j + i * (i + 1) // 2\n h1ec[ijn] = h1e[i, j]\n for k in range(norb):\n for l in range(k + 1):\n kln = l + k * (k + 1) // 2\n h2ec[ijn, kln] = h2e[i, j, k, l]\n dvec = self._calculate_dvec_spatial_compressed()\n out = numpy.einsum(\"i,ikl->kl\", h1ec, dvec)\n dvec = numpy.einsum(\"ik,kmn->imn\", h2ec, dvec)\n for i in range(self.norb()):\n for j in range(self.norb()):\n ijn = min(i, j) + max(i, j) * (max(i, j) + 1) // 2\n work = self._core.alpha_map(j, i)\n for source, target, parity in work:\n out[source, :] += dvec[ijn, target, :] * parity\n work = self._core.beta_map(j, i)\n for source, target, parity in work:\n out[:, source] += dvec[ijn, :, target] * parity\n\n return out"
]
| [
"0.62784815",
"0.62468976",
"0.61536515",
"0.6124293",
"0.6061565",
"0.5950473",
"0.59444374",
"0.59142166",
"0.59101874",
"0.59000397",
"0.5889153",
"0.57894564",
"0.57214046",
"0.56897634",
"0.5670261",
"0.56692517",
"0.5658906",
"0.56425905",
"0.5625305",
"0.5608881",
"0.55810225",
"0.5578149",
"0.55724096",
"0.55723095",
"0.5565918",
"0.5522184",
"0.5478985",
"0.5474956",
"0.54734933",
"0.54666036"
]
| 0.6311414 | 0 |
Testing M6 remeshing formula in 3D, 2 kernel, single precision, o2_FullHalf splitting. | def test_3D_m6_2k_sFH():
scal, velo = setup_3D()
advec = Advection(velo, scal, discretization=d3d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: 'gpu_2k',
Splitting: 'o2_FullHalf'}
)
advec_py = Advection(velo, scal, discretization=d3d,
method={TimeIntegrator: RK2,
Interpolation: Linear,
Remesh: L4_2,
Support: '',
Splitting: 'o2_FullHalf'}
)
assertion_3D_withPython(scal, velo, advec, advec_py) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_2_2_3D_cube_splits(self):\n check = [(0, 0, 0), (1, 1, 1), (1, 0, 0), (1, 1, 0), (1, 0, 1),\n (0, 1, 0),\n (0, 1, 1), (0, 0, 1), (0.5, 0.5, 0.5), (0.0, 0.5, 0.5),\n (0.0, 0.0, 0.5), (0.0, 0.5, 0.0), (0.5, 0.0, 0.5),\n (0.5, 0.0, 0.0),\n (0.5, 0.5, 0.0), (0.25, 0.25, 0.25), (1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5),\n (1.0, 0.5, 1.0), (0.5, 1.0, 0.5), (0.5, 1.0, 1.0),\n (0.5, 0.5, 1.0),\n (0.75, 0.75, 0.75), (1.0, 0.0, 0.5), (1.0, 0.5, 0.0),\n (0.75, 0.25, 0.25), (0.5, 1.0, 0.0), (0.75, 0.75, 0.25),\n (0.5, 0.0, 1.0), (0.75, 0.25, 0.75), (0.0, 1.0, 0.5),\n (0.25, 0.75, 0.25), (0.0, 0.5, 1.0), (0.25, 0.75, 0.75),\n (0.25, 0.25, 0.75), (0.5, 0.25, 0.25), (0.5, 0.5, 0.25),\n (0.5, 0.25, 0.5), (0.25, 0.5, 0.25), (0.25, 0.5, 0.5),\n (0.25, 0.25, 0.5), (0.375, 0.375, 0.375), (0.0, 0.25, 0.25),\n (0.0, 0.0, 0.25), (0.0, 0.25, 0.0), (0.25, 0.0, 0.25),\n (0.25, 0.0, 0.0), (0.25, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.5, 0.25), (0.0, 0.25, 0.5), (0.125, 0.375, 0.375),\n (0.25, 0.0, 0.5), (0.125, 0.125, 0.375), (0.25, 0.5, 0.0),\n (0.125, 0.375, 0.125), (0.5, 0.0, 0.25), (0.375, 0.125, 0.375),\n (0.5, 0.25, 0.0), (0.375, 0.125, 0.125), (0.375, 0.375, 0.125),\n (0.5, 0.75, 0.75), (0.5, 0.5, 0.75), (0.5, 0.75, 0.5),\n (0.75, 0.5, 0.75), (0.75, 0.5, 0.5), (0.75, 0.75, 0.5),\n (0.625, 0.625, 0.625), (1.0, 0.75, 0.75), (1.0, 1.0, 0.75),\n (1.0, 0.75, 1.0), (0.75, 1.0, 0.75), (0.75, 1.0, 1.0),\n (0.75, 0.75, 1.0), (0.875, 0.875, 0.875), (1.0, 0.5, 0.75),\n (1.0, 0.75, 0.5), (0.875, 0.625, 0.625), (0.75, 1.0, 0.5),\n (0.875, 0.875, 0.625), (0.75, 0.5, 1.0), (0.875, 0.625, 0.875),\n (0.5, 1.0, 0.75), (0.625, 0.875, 0.625), (0.5, 0.75, 1.0),\n (0.625, 0.875, 0.875), (0.625, 0.625, 0.875),\n (0.75, 0.5, 0.25),\n (0.75, 0.25, 0.5), (0.625, 0.375, 0.375), (1.0, 0.25, 0.25),\n (1.0, 0.0, 0.25), (1.0, 0.25, 0.0), (0.75, 0.0, 0.25),\n (0.75, 0.0, 0.0), (0.75, 0.25, 0.0), (0.875, 0.125, 0.125),\n (1.0, 0.5, 0.25), (1.0, 0.25, 0.5), (0.875, 0.375, 0.375),\n (0.75, 0.0, 0.5), (0.875, 0.125, 0.375), (0.75, 0.5, 0.0),\n (0.875, 0.375, 0.125), (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.625, 0.375, 0.125), (0.5, 0.75, 0.25),\n (0.625, 0.625, 0.375),\n (1.0, 0.75, 0.25), (1.0, 1.0, 0.25), (1.0, 0.75, 0.0),\n (0.75, 1.0, 0.25), (0.75, 1.0, 0.0), (0.75, 0.75, 0.0),\n (0.875, 0.875, 0.125), (0.875, 0.625, 0.375),\n (0.875, 0.875, 0.375),\n (0.875, 0.625, 0.125), (0.5, 1.0, 0.25), (0.625, 0.875, 0.375),\n (0.5, 0.75, 0.0), (0.625, 0.875, 0.125), (0.625, 0.625, 0.125),\n (0.5, 0.25, 0.75), (0.625, 0.375, 0.625), (1.0, 0.25, 0.75),\n (1.0, 0.0, 0.75), (1.0, 0.25, 1.0), (0.75, 0.0, 0.75),\n (0.75, 0.0, 1.0), (0.75, 0.25, 1.0), (0.875, 0.125, 0.875),\n (0.875, 0.375, 0.625), (0.875, 0.125, 0.625),\n (0.875, 0.375, 0.875),\n (0.5, 0.0, 0.75), (0.625, 0.125, 0.625), (0.5, 0.25, 1.0),\n (0.625, 0.125, 0.875), (0.625, 0.375, 0.875),\n (0.25, 0.75, 0.5),\n (0.375, 0.625, 0.375), (0.0, 0.75, 0.25), (0.0, 1.0, 0.25),\n (0.0, 0.75, 0.0), (0.25, 1.0, 0.25), (0.25, 1.0, 0.0),\n (0.25, 0.75, 0.0), (0.125, 0.875, 0.125), (0.0, 0.75, 0.5),\n (0.125, 0.625, 0.375), (0.25, 1.0, 0.5), (0.125, 0.875, 0.375),\n (0.125, 0.625, 0.125), (0.375, 0.875, 0.375),\n (0.375, 0.875, 0.125),\n (0.375, 0.625, 0.125), (0.25, 0.5, 0.75),\n (0.375, 0.625, 0.625),\n (0.0, 0.75, 0.75), (0.0, 1.0, 0.75), (0.0, 0.75, 1.0),\n (0.25, 1.0, 0.75), (0.25, 1.0, 1.0), (0.25, 0.75, 1.0),\n (0.125, 0.875, 0.875), (0.0, 0.5, 0.75), (0.125, 0.625, 0.625),\n (0.125, 0.875, 0.625), (0.25, 0.5, 1.0), (0.125, 0.625, 0.875),\n (0.375, 0.875, 0.625), (0.375, 0.875, 
0.875),\n (0.375, 0.625, 0.875),\n (0.375, 0.375, 0.625), (0.0, 0.25, 0.75), (0.0, 0.0, 0.75),\n (0.0, 0.25, 1.0), (0.25, 0.0, 0.75), (0.25, 0.0, 1.0),\n (0.25, 0.25, 1.0), (0.125, 0.125, 0.875),\n (0.125, 0.375, 0.625),\n (0.125, 0.125, 0.625), (0.125, 0.375, 0.875),\n (0.375, 0.125, 0.625),\n (0.375, 0.125, 0.875), (0.375, 0.375, 0.875)]\n\n nn_checks = {(0.5, 0.25, 0.25): [(0.375, 0.375, 0.125), (0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25),\n (0.625, 0.375, 0.375),\n (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.5, 0.5, 0.25), (0.25, 0.25, 0.25),\n (0.375, 0.375, 0.375),\n (0.5, 0.25, 0.5), (0.5, 0.5, 0.5),\n (0.5, 0.0, 0.25),\n (0.375, 0.125, 0.375), (0.5, 0.0, 0.5),\n (0.5, 0.25, 0.0),\n (0.375, 0.125, 0.125), (0.5, 0.0, 0.0),\n (0.625, 0.375, 0.125)],\n (0.625, 0.625, 0.875): [(0.75, 0.5, 1.0),\n (0.75, 0.75, 1.0),\n (0.5, 0.75, 1.0), (0.5, 0.5, 1.0),\n (0.5, 0.5, 0.75),\n (0.5, 0.75, 0.75),\n (0.75, 0.5, 0.75),\n (0.75, 0.75, 0.75)],\n (0, 0, 0): [(0.0, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.0, 0.25), (0.25, 0.0, 0.0),\n (0.0, 0.25, 0.25), (0.25, 0.25, 0.0),\n (0.25, 0.0, 0.25)]}\n\n init_triangulation(3, 2, check, nn_checks)",
"def test_3D_m6_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_multiple_case(self):\r\n\r\n shp = (3, 3)\r\n fx, fy, fz, fw = fmatrices('xyzw')\r\n dx, dy, dz, dw = dmatrices('xyzw')\r\n fv = fvector('r').dimshuffle('x', 0)\r\n dv = dvector('s').dimshuffle('x', 0)\r\n fxv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fyv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fzv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fwv = theano._asarray(numpy.random.rand(*shp), dtype='float32')\r\n fvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float32').reshape(1, shp[0])\r\n dxv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dyv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dzv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dwv = theano._asarray(numpy.random.rand(*shp), dtype='float64')\r\n dvv = theano._asarray(numpy.random.rand(shp[0]), dtype='float64').reshape(1, shp[0])\r\n\r\n #We must be sure that the Canonizer is working, but that we don't have other\r\n # optimisation that could hide bug in the Canonizer as local_elemwise_fusion\r\n mode = compile.mode.get_default_mode()\r\n old_optimizer = mode._optimizer\r\n try:\r\n mode._optimizer = gof.Query([\"canonicalize\"])\r\n mode._optimizer = mode._optimizer.including('ShapeOpt')\r\n mode._optimizer = mode._optimizer.excluding(\r\n 'local_elemwise_fusion')\r\n\r\n #test x / x -> 1\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([(fx/fx,[fx],[fxv],'float32'),\r\n (dx/dx,[dx],[dxv],'float64'),\r\n (fv/fv,[fv],[fvv],'float32'),\r\n (dv/dv,[dv],[dvv],'float64'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert (out == numpy.ones(shp, dtype=out_dtype)).all()\r\n topo = f.maker.fgraph.toposort()\r\n if sym_inputs[0].broadcastable[0]:\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, tensor.Alloc)\r\n else:\r\n assert len(topo) == 3\r\n assert isinstance(topo[0].op, Shape_i)\r\n assert isinstance(topo[1].op, Shape_i)\r\n assert isinstance(topo[2].op, tensor.Alloc)\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (x * y) / x -> y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx*dy)/dx,[dx,dy],[dxv,dyv],0,'float64'),\r\n ((fx*fy)/fx,[fx,fy],[fxv,fyv],0,'float32'),\r\n ((dv*dy)/dv,[dv,dy],[dvv,dyv],0,'float64'),\r\n ((fv*fy)/fv,[fv,fy],[fvv,fyv],0,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n ((dx*dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float64, row)>)]\r\n ((fx*fv)/fx,[fx,fv],[fxv,fvv],1,'float32')\r\n #topo: [Elemwise{second,no_inplace}(x, <TensorType(float32, row)>)]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert(out_dtype == out.dtype)\r\n assert numpy.allclose(out, val_inputs[1])\r\n topo = f.maker.fgraph.toposort()\r\n print \"ID TOPO\", id, topo, sym_inputs\r\n for r, t in f.maker.fgraph.shape_feature.shape_of.items():\r\n print ' ', r, t\r\n if topo and not(len(topo)==1 and topo[0].op==deep_copy_op):\r\n for node in topo[:-1]:\r\n assert isinstance(node.op, Shape_i)\r\n assert isinstance(topo[-1].op, tensor.Alloc)\r\n\r\n #test x / y / x -> 1 / y\r\n for id,(g, sym_inputs, val_inputs, nb_elemwise, out_dtype) in enumerate([\r\n ((dx/dy)/dx,[dx,dy],[dxv,dyv],1,'float64'),\r\n ((fx/fy)/fx,[fx,fy],[fxv,fyv],1,'float32'),\r\n ((dv/dy)/dv,[dv,dy],[dvv,dyv],1,'float64'),\r\n 
((fv/fy)/fv,[fv,fy],[fvv,fyv],1,'float32'),\r\n #must broadcast as their is a dimshuffle in the computation\r\n\r\n ((dx/dv)/dx,[dx,dv],[dxv,dvv],1,'float64'),\r\n #topo: [Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float64, row)>), Alloc]\r\n ((fx/fv)/fx,[fx,fv],[fxv,fvv],1,'float32'),\r\n #topo:[Shape_i, Shape_i, Elemwise{inv,no_inplace}(<TensorType(float32, row)>), Alloc]\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (1 / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n print topo\r\n elem = [t for t in topo if isinstance(t.op, T.Elemwise)]\r\n assert len(elem) == nb_elemwise\r\n assert isinstance(elem[0].op, (T.Elemwise, ))\r\n assert isinstance(elem[0].op.scalar_op, (\r\n theano.scalar.basic.Inv, theano.scalar.basic.TrueDiv))\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (a / b) * (b / c) * (c / d) -> a / d\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((dx / dy) * (dy / dz) * (dz / dw),[dx,dy,dz,dw],[dxv,dyv,dzv,dwv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fw),[fx,fy,fz,fw],[fxv,fyv,fzv,fwv],'float32'),\r\n ((dv / dy) * (dy / dz) * (dz / dw),[dv,dy,dz,dw],[dvv,dyv,dzv,dwv],'float64'),\r\n ((fv / fy) * (fy / fz) * (fz / fw),[fv,fy,fz,fw],[fvv,fyv,fzv,fwv],'float32'),\r\n ((dx / dv) * (dv / dz) * (dz / dw),[dx,dv,dz,dw],[dxv,dvv,dzv,dwv],'float64'),\r\n ((fx / fv) * (fv / fz) * (fz / fw),[fx,fv,fz,fw],[fxv,fvv,fzv,fwv],'float32'),\r\n ((dx / dy) * (dy / dv) * (dv / dw),[dx,dy,dv,dw],[dxv,dyv,dvv,dwv],'float64'),\r\n ((fx / fy) * (fy / fv) * (fv / fw),[fx,fy,fv,fw],[fxv,fyv,fvv,fwv],'float32'),\r\n ((dx / dy) * (dy / dz) * (dz / dv),[dx,dy,dz,dv],[dxv,dyv,dzv,dvv],'float64'),\r\n ((fx / fy) * (fy / fz) * (fz / fv),[fx,fy,fz,fv],[fxv,fyv,fzv,fvv],'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (val_inputs[0] / val_inputs[3]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[0].inputs) == 2\r\n assert(out_dtype == out.dtype)\r\n\r\n #test (2.0 * x) / (4.0 * y) -> (0.5 * x) / y\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (((2.0*dx)/(4.0*dy)),[dx,dy],[dxv,dyv],'float64'),\r\n (((2.0*fx)/(4.0*fy)),[fx,fy],[fxv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dv)/(4.0*dy)),[dv,dy],[dvv,dyv],'float64'),\r\n (((2.0*fv)/(4.0*fy)),[fv,fy],[fvv,fyv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n (((2.0*dx)/(4.0*dv)),[dx,dv],[dxv,dvv],'float64'),\r\n (((2.0*fx)/(4.0*fv)),[fx,fv],[fxv,fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, (0.5 *\r\n val_inputs[0] / val_inputs[1]))\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 2\r\n assert isinstance(topo[0].op, (T.Elemwise, ))\r\n assert isinstance(topo[0].op.scalar_op,\r\n theano.scalar.basic.Mul)\r\n assert len(topo[0].inputs) == 2\r\n assert isinstance(topo[1].op, (T.Elemwise, ))\r\n assert isinstance(topo[1].op.scalar_op,\r\n theano.scalar.basic.TrueDiv)\r\n assert len(topo[1].inputs) == 2\r\n assert(out_dtype == 
out.dtype)\r\n\r\n #test 2 * x / 2 -> x\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2*dx)/2,[dx],[dxv],'float64'),\r\n ((2*fx)/2,[fx],[fxv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2*dv)/2,[dv],[dvv],'float64'),\r\n ((2*fv)/2,[fv],[fvv], {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.allclose(out, val_inputs[0])\r\n topo = f.maker.fgraph.toposort()\r\n assert len(topo) == 1\r\n topo[0].op == deep_copy_op\r\n assert(out_dtype == out.dtype)\r\n\r\n #test x / abs(x) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n (dx/abs(dx),[dx],[0.5-dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.5-fxv], 'float32'),\r\n (dx/abs(dx),[dx],[0.1*dxv],'float64'),\r\n (fx/abs(fx),[fx],[0.1*fxv], 'float32'),\r\n (dv/abs(dv),[dv],[0.5-dvv],'float64'),\r\n (fv/abs(fv),[fv],[0.5-fvv], 'float32'),\r\n ]):\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]))\r\n assert(out_dtype == out.dtype)\r\n assert len(f.maker.fgraph.toposort()) == 1\r\n\r\n #test (2*x) / (3*abs(x)) -> sign(x)\r\n for id, (g, sym_inputs, val_inputs, out_dtype) in enumerate([\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.5 - dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.5 - fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dx) / (3 * abs(dx)), [dx], [0.1 * dxv], 'float64'),\r\n ((2 * fx) / (3 * abs(fx)), [fx], [0.1 * fxv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ((2 * dv) / (3 * abs(dv)), [dv], [0.5 - dvv], 'float64'),\r\n ((2 * fv) / (3 * abs(fv)), [fv], [0.5 - fvv],\r\n {'custom': 'float32', 'numpy+floatX': config.floatX, 'numpy': 'float64'}),\r\n ]):\r\n\r\n if isinstance(out_dtype, dict):\r\n out_dtype = out_dtype[config.cast_policy]\r\n f = compile.function(list(sym_inputs), g,\r\n mode=mode)\r\n topo = f.maker.fgraph.toposort()\r\n out = f(*val_inputs)\r\n assert numpy.all(numpy.isfinite(out))\r\n assert numpy.allclose(out, numpy.sign(val_inputs[0]) * 2 / 3)\r\n assert(out_dtype == out.dtype)\r\n finally:\r\n mode._optimizer = old_optimizer",
"def test_1_2_2D_cube_splits(self):\n check = [(0, 0), (1, 1), (1, 0), (0, 1), (0.5, 0.5), (0.0, 0.5),\n (0.5, 0.0),\n (0.25, 0.25), (1.0, 0.5), (0.5, 1.0), (0.75, 0.75),\n (0.75, 0.25),\n (0.25, 0.75), (0.5, 0.25), (0.25, 0.5), (0.375, 0.375),\n (0.0, 0.25),\n (0.25, 0.0), (0.125, 0.125), (0.125, 0.375), (0.375, 0.125),\n (0.5, 0.75), (0.75, 0.5), (0.625, 0.625), (1.0, 0.75),\n (0.75, 1.0),\n (0.875, 0.875), (0.875, 0.625), (0.625, 0.875), (0.625, 0.375),\n (1.0, 0.25), (0.75, 0.0), (0.875, 0.125), (0.875, 0.375),\n (0.625, 0.125), (0.375, 0.625), (0.0, 0.75), (0.25, 1.0),\n (0.125, 0.875), (0.125, 0.625), (0.375, 0.875)]\n\n nn_checks = {(0, 0): [(0.25, 0.0), (0.0, 0.25), (0.125, 0.125)],\n (0.625, 0.375): [(0.5, 0.5), (0.75, 0.25), (0.75, 0.5),\n (0.5, 0.25)],\n (0, 1): [(0.25, 1.0), (0.125, 0.875),(0.0, 0.75)],\n (0.625, 0.125): [(0.5, 0.0), (0.75, 0.25), (0.75, 0.0),\n (0.5, 0.25)]}\n\n\n init_triangulation(2, 2, check, nn_checks)",
"def test_3_2_4D_cube_splits(self):\n check = [(0, 0, 0, 0), (1, 1, 1, 1), (1, 0, 0, 0), (1, 1, 0, 0),\n (1, 1, 1, 0),\n (1, 1, 0, 1), (1, 0, 1, 0), (1, 0, 1, 1), (1, 0, 0, 1),\n (0, 1, 0, 0),\n (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 0, 1), (0, 0, 1, 0),\n (0, 0, 1, 1),\n (0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5), (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5), (0.0, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0), (0.25, 0.25, 0.25, 0.25),\n (1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0),\n (0.5, 1.0, 0.5, 1.0), (0.5, 0.5, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0),\n (1.0, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25),\n (1.0, 1.0, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.0, 0.5),\n (0.5, 1.0, 0.0, 0.0), (0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25),\n (1.0, 0.5, 1.0, 0.0), (0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.25), (1.0, 0.5, 0.0, 1.0),\n (0.5, 1.0, 0.0, 1.0),\n (0.5, 0.5, 0.0, 1.0), (0.75, 0.75, 0.25, 0.75),\n (1.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 1.0, 0.5), (0.5, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.25),\n (1.0, 0.0, 0.5, 1.0), (0.5, 0.0, 1.0, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0), (0.25, 0.75, 0.25, 0.25),\n (0.0, 1.0, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5), (0.0, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.25),\n (0.0, 1.0, 0.5, 1.0), (0.0, 0.5, 1.0, 1.0),\n (0.0, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75), (0.0, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(0, 0, 0, 0): [(0.0, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.25, 0.25, 0.25, 0.25),\n (0.5, 0.0, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0),\n (0.5, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0)],\n (1.0, 1.0, 0.5, 0.5): [(1.0, 1.0, 0.5, 1.0), (1, 1, 0, 1),\n (1.0, 1.0, 1.0, 0.5),\n (1.0, 0.5, 0.5, 0.5), (1, 1, 1, 0),\n (1.0, 1.0, 0.5, 0.0),\n (1.0, 1.0, 0.0, 0.5), (1, 1, 0, 0),\n (1, 1, 1, 1), (0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.75, 0.75, 0.75, 0.75),\n (0.75, 0.75, 0.25, 0.25),\n (0.75, 0.75, 0.75, 0.25),\n (0.75, 0.75, 0.25, 0.75)],\n (0.25, 0.25, 0.25, 0.75): [(0.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 0.0, 1.0),\n (0.5, 0.5, 0.5, 1.0),\n (0, 0, 0, 1),\n (0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0),\n (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5),\n (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5)]}\n\n init_triangulation(4, 1, check, nn_checks)",
"def test_2D_m6_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2_2_3D_rec_splits(self):\n check = [(-3.0, -2.0, 0.0), (4.0, 10.0, 1.0), (4.0, -2.0, 0.0),\n (4.0, 10.0, 0.0), (4.0, -2.0, 1.0), (-3.0, 10.0, 0.0),\n (-3.0, 10.0, 1.0), (-3.0, -2.0, 1.0), (0.5, 4.0, 0.5),\n (-3.0, 4.0, 0.5), (-3.0, -2.0, 0.5), (-3.0, 4.0, 0.0),\n (0.5, -2.0, 0.5), (0.5, -2.0, 0.0), (0.5, 4.0, 0.0),\n (-1.25, 1.0, 0.25), (4.0, 4.0, 0.5), (4.0, 10.0, 0.5),\n (4.0, 4.0, 1.0), (0.5, 10.0, 0.5), (0.5, 10.0, 1.0),\n (0.5, 4.0, 1.0), (2.25, 7.0, 0.75), (4.0, -2.0, 0.5),\n (4.0, 4.0, 0.0), (2.25, 1.0, 0.25), (0.5, 10.0, 0.0),\n (2.25, 7.0, 0.25), (0.5, -2.0, 1.0), (2.25, 1.0, 0.75),\n (-3.0, 10.0, 0.5), (-1.25, 7.0, 0.25), (-3.0, 4.0, 1.0),\n (-1.25, 7.0, 0.75), (-1.25, 1.0, 0.75), (0.5, 1.0, 0.25),\n (0.5, 4.0, 0.25), (0.5, 1.0, 0.5), (-1.25, 4.0, 0.25),\n (-1.25, 4.0, 0.5), (-1.25, 1.0, 0.5), (-0.375, 2.5, 0.375),\n (-3.0, 1.0, 0.25), (-3.0, -2.0, 0.25), (-3.0, 1.0, 0.0),\n (-1.25, -2.0, 0.25), (-1.25, -2.0, 0.0), (-1.25, 1.0, 0.0),\n (-2.125, -0.5, 0.125), (-3.0, 4.0, 0.25), (-3.0, 1.0, 0.5),\n (-2.125, 2.5, 0.375), (-1.25, -2.0, 0.5),\n (-2.125, -0.5, 0.375), (-1.25, 4.0, 0.0), (-2.125, 2.5, 0.125),\n (0.5, -2.0, 0.25), (-0.375, -0.5, 0.375), (0.5, 1.0, 0.0),\n (-0.375, -0.5, 0.125), (-0.375, 2.5, 0.125), (0.5, 7.0, 0.75),\n (0.5, 4.0, 0.75), (0.5, 7.0, 0.5), (2.25, 4.0, 0.75),\n (2.25, 4.0, 0.5), (2.25, 7.0, 0.5), (1.375, 5.5, 0.625),\n (4.0, 7.0, 0.75), (4.0, 10.0, 0.75), (4.0, 7.0, 1.0),\n (2.25, 10.0, 0.75), (2.25, 10.0, 1.0), (2.25, 7.0, 1.0),\n (3.125, 8.5, 0.875), (4.0, 4.0, 0.75), (4.0, 7.0, 0.5),\n (3.125, 5.5, 0.625), (2.25, 10.0, 0.5), (3.125, 8.5, 0.625),\n (2.25, 4.0, 1.0), (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (1.375, 8.5, 0.625), (0.5, 7.0, 1.0), (1.375, 8.5, 0.875),\n (1.375, 5.5, 0.875), (2.25, 4.0, 0.25), (2.25, 1.0, 0.5),\n (1.375, 2.5, 0.375), (4.0, 1.0, 0.25), (4.0, -2.0, 0.25),\n (4.0, 1.0, 0.0), (2.25, -2.0, 0.25), (2.25, -2.0, 0.0),\n (2.25, 1.0, 0.0), (3.125, -0.5, 0.125), (4.0, 4.0, 0.25),\n (4.0, 1.0, 0.5), (3.125, 2.5, 0.375), (2.25, -2.0, 0.5),\n (3.125, -0.5, 0.375), (2.25, 4.0, 0.0), (3.125, 2.5, 0.125),\n (1.375, -0.5, 0.375), (1.375, -0.5, 0.125),\n (1.375, 2.5, 0.125), (0.5, 7.0, 0.25), (1.375, 5.5, 0.375),\n (4.0, 7.0, 0.25), (4.0, 10.0, 0.25), (4.0, 7.0, 0.0),\n (2.25, 10.0, 0.25), (2.25, 10.0, 0.0), (2.25, 7.0, 0.0),\n (3.125, 8.5, 0.125), (3.125, 5.5, 0.375), (3.125, 8.5, 0.375),\n (3.125, 5.5, 0.125), (0.5, 10.0, 0.25), (1.375, 8.5, 0.375),\n (0.5, 7.0, 0.0), (1.375, 8.5, 0.125), (1.375, 5.5, 0.125),\n (0.5, 1.0, 0.75), (1.375, 2.5, 0.625), (4.0, 1.0, 0.75),\n (4.0, -2.0, 0.75), (4.0, 1.0, 1.0), (2.25, -2.0, 0.75),\n (2.25, -2.0, 1.0), (2.25, 1.0, 1.0), (3.125, -0.5, 0.875),\n (3.125, 2.5, 0.625), (3.125, -0.5, 0.625), (3.125, 2.5, 0.875),\n (0.5, -2.0, 0.75), (1.375, -0.5, 0.625), (0.5, 1.0, 1.0),\n (1.375, -0.5, 0.875), (1.375, 2.5, 0.875), (-1.25, 7.0, 0.5),\n (-0.375, 5.5, 0.375), (-3.0, 7.0, 0.25), (-3.0, 10.0, 0.25),\n (-3.0, 7.0, 0.0), (-1.25, 10.0, 0.25), (-1.25, 10.0, 0.0),\n (-1.25, 7.0, 0.0), (-2.125, 8.5, 0.125), (-3.0, 7.0, 0.5),\n (-2.125, 5.5, 0.375), (-1.25, 10.0, 0.5), (-2.125, 8.5, 0.375),\n (-2.125, 5.5, 0.125), (-0.375, 8.5, 0.375),\n (-0.375, 8.5, 0.125), (-0.375, 5.5, 0.125), (-1.25, 4.0, 0.75),\n (-0.375, 5.5, 0.625), (-3.0, 7.0, 0.75), (-3.0, 10.0, 0.75),\n (-3.0, 7.0, 1.0), (-1.25, 10.0, 0.75), (-1.25, 10.0, 1.0),\n (-1.25, 7.0, 1.0), (-2.125, 8.5, 0.875), (-3.0, 4.0, 0.75),\n (-2.125, 5.5, 0.625), (-2.125, 8.5, 0.625), (-1.25, 4.0, 1.0),\n (-2.125, 5.5, 0.875), (-0.375, 8.5, 0.625),\n (-0.375, 8.5, 
0.875), (-0.375, 5.5, 0.875),\n (-0.375, 2.5, 0.625), (-3.0, 1.0, 0.75), (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-1.25, -2.0, 0.75), (-1.25, -2.0, 1.0),\n (-1.25, 1.0, 1.0), (-2.125, -0.5, 0.875), (-2.125, 2.5, 0.625),\n (-2.125, -0.5, 0.625), (-2.125, 2.5, 0.875),\n (-0.375, -0.5, 0.625), (-0.375, -0.5, 0.875),\n (-0.375, 2.5, 0.875)]\n nn_checks = {(2.25, 7.0, 0.75): [(4.0, 7.0, 0.75), (2.25, 7.0, 1.0),\n (4.0, 7.0, 0.5), (4.0, 7.0, 1.0),\n (4.0, 4.0, 0.75), (1.375, 5.5, 0.875),\n (2.25, 4.0, 1.0), (2.25, 4.0, 0.5),\n (2.25, 4.0, 0.75), (3.125, 8.5, 0.875),\n (3.125, 8.5, 0.625), (4.0, 10.0, 0.75),\n (2.25, 10.0, 1.0), (2.25, 10.0, 0.75),\n (2.25, 10.0, 0.5), (1.375, 8.5, 0.625),\n (1.375, 8.5, 0.875), (0.5, 7.0, 0.75),\n (0.5, 7.0, 0.5), (3.125, 5.5, 0.625),\n (3.125, 5.5, 0.875), (0.5, 10.0, 0.75),\n (0.5, 7.0, 1.0), (0.5, 4.0, 0.75),\n (2.25, 7.0, 0.5), (1.375, 5.5, 0.625)],\n (4.0, -2.0, 0.5): [(4.0, -2.0, 0.75), (4.0, -2.0, 0.25),\n (2.25, 1.0, 0.5), (2.25, -2.0, 0.75),\n (2.25, -2.0, 0.5), (2.25, -2.0, 0.25),\n (4.0, 1.0, 0.25), (4.0, 1.0, 0.75),\n (4.0, 1.0, 0.5), (3.125, -0.5, 0.375),\n (3.125, -0.5, 0.625)],\n (-2.125, -0.5, 0.875): [(-1.25, 1.0, 1.0),\n (-1.25, 1.0, 0.75),\n (-1.25, -2.0, 0.75),\n (-1.25, -2.0, 1.0),\n (-3.0, -2.0, 0.75),\n (-3.0, 1.0, 1.0), (-3, -2, 1),\n (-3.0, 1.0, 0.75)]}\n\n init_triangulation(3, 2, check, nn_checks, bounds=[(-3, 4), (-2, 10), (0, 1)])",
"def test_3D_m6_2k():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_2k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_4_2_5D_cube_splits(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0), (1, 1, 1, 0, 0), (1, 1, 1, 1, 0),\n (1, 1, 1, 0, 1), (1, 1, 0, 1, 0), (1, 1, 0, 1, 1),\n (1, 1, 0, 0, 1), (1, 0, 1, 0, 0), (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1), (1, 0, 0, 0, 1), (0, 1, 0, 0, 0),\n (0, 1, 1, 0, 0), (0, 1, 1, 1, 0), (0, 1, 1, 1, 1),\n (0, 1, 1, 0, 1), (0, 1, 0, 1, 0), (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1), (0, 0, 1, 0, 1), (0, 0, 0, 1, 0),\n (0, 0, 0, 1, 1), (0, 0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.0), (0.0, 0.0, 0.5, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5, 0.5), (0.0, 0.5, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.0, 0.0, 0.0), (0.0, 0.5, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.5, 0.0, 0.5), (0.0, 0.5, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0, 0.0), (0.5, 0.0, 0.0, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.0, 0.5), (0.5, 0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.0, 0.0), (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.25, 0.25, 0.25, 0.25, 0.25), (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5), (1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 0.5), (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 1.0, 0.5, 0.5, 1.0), (1.0, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0), (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 0.5, 1.0, 1.0), (1.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5), (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0), (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 1.0), (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5), (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0), (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5), (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5, 0.5), (1.0, 0.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.0, 0.5, 0.0),\n (1.0, 0.0, 0.5, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0, 0.0),\n (1.0, 0.0, 0.5, 0.5, 0.0), (1.0, 0.5, 0.0, 0.5, 0.5),\n (1.0, 0.5, 0.0, 0.0, 0.5), (1.0, 0.5, 0.0, 0.0, 0.0),\n (1.0, 0.5, 0.0, 0.5, 0.0), (1.0, 0.5, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25, 0.25), (1.0, 1.0, 0.0, 0.5, 0.5),\n (1.0, 1.0, 0.0, 0.0, 0.5), (1.0, 1.0, 0.0, 0.5, 0.0),\n (1.0, 1.0, 0.5, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0, 0.0),\n (1.0, 1.0, 0.5, 0.5, 0.0), (0.5, 1.0, 0.0, 0.5, 0.5),\n (0.5, 1.0, 0.0, 0.0, 0.5), (0.5, 1.0, 0.0, 0.0, 0.0),\n (0.5, 1.0, 0.0, 0.5, 0.0), (0.5, 1.0, 0.5, 0.0, 0.5),\n (0.5, 1.0, 0.5, 0.0, 0.0), (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25, 0.25), (1.0, 1.0, 1.0, 0.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.0), (1.0, 0.5, 1.0, 0.0, 0.5),\n (1.0, 0.5, 1.0, 0.0, 0.0), (1.0, 0.5, 1.0, 0.5, 0.0),\n (0.5, 1.0, 1.0, 0.0, 0.5), (0.5, 1.0, 1.0, 0.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0), (0.5, 0.5, 1.0, 0.0, 0.5),\n (0.5, 0.5, 1.0, 0.0, 0.0), (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.75, 0.25, 0.25), (1.0, 
1.0, 0.5, 1.0, 0.0),\n (1.0, 0.5, 1.0, 1.0, 0.0), (1.0, 0.5, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 1.0, 0.0), (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 0.5, 1.0, 1.0, 0.0), (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.75, 0.25), (1.0, 1.0, 0.5, 0.0, 1.0),\n (1.0, 0.5, 1.0, 0.0, 1.0), (1.0, 0.5, 0.5, 0.0, 1.0),\n (0.5, 1.0, 1.0, 0.0, 1.0), (0.5, 1.0, 0.5, 0.0, 1.0),\n (0.5, 0.5, 1.0, 0.0, 1.0), (0.5, 0.5, 0.5, 0.0, 1.0),\n (0.75, 0.75, 0.75, 0.25, 0.75), (1.0, 1.0, 0.0, 1.0, 0.5),\n (1.0, 0.5, 0.0, 1.0, 0.5), (1.0, 0.5, 0.0, 1.0, 0.0),\n (0.5, 1.0, 0.0, 1.0, 0.5), (0.5, 1.0, 0.0, 1.0, 0.0),\n (0.5, 0.5, 0.0, 1.0, 0.5), (0.5, 0.5, 0.0, 1.0, 0.0),\n (0.75, 0.75, 0.25, 0.75, 0.25), (1.0, 1.0, 0.0, 0.5, 1.0),\n (1.0, 0.5, 0.0, 1.0, 1.0), (1.0, 0.5, 0.0, 0.5, 1.0),\n (0.5, 1.0, 0.0, 1.0, 1.0), (0.5, 1.0, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0, 1.0), (0.5, 0.5, 0.0, 0.5, 1.0),\n (0.75, 0.75, 0.25, 0.75, 0.75), (1.0, 0.5, 0.0, 0.0, 1.0),\n (0.5, 1.0, 0.0, 0.0, 1.0), (0.5, 0.5, 0.0, 0.0, 1.0),\n (0.75, 0.75, 0.25, 0.25, 0.75), (1.0, 0.0, 1.0, 0.5, 0.5),\n (1.0, 0.0, 1.0, 0.0, 0.5), (1.0, 0.0, 1.0, 0.5, 0.0),\n (0.5, 0.0, 1.0, 0.5, 0.5), (0.5, 0.0, 1.0, 0.0, 0.5),\n (0.5, 0.0, 1.0, 0.0, 0.0), (0.5, 0.0, 1.0, 0.5, 0.0),\n (0.75, 0.25, 0.75, 0.25, 0.25), (1.0, 0.0, 1.0, 1.0, 0.5),\n (1.0, 0.0, 0.5, 1.0, 0.5), (1.0, 0.0, 0.5, 1.0, 0.0),\n (0.5, 0.0, 1.0, 1.0, 0.5), (0.5, 0.0, 1.0, 1.0, 0.0),\n (0.5, 0.0, 0.5, 1.0, 0.5), (0.5, 0.0, 0.5, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.75, 0.25), (1.0, 0.0, 1.0, 0.5, 1.0),\n (1.0, 0.0, 0.5, 1.0, 1.0), (1.0, 0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 1.0, 1.0, 1.0), (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0, 1.0), (0.5, 0.0, 0.5, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75, 0.75), (1.0, 0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 1.0, 0.0, 1.0), (0.5, 0.0, 0.5, 0.0, 1.0),\n (0.75, 0.25, 0.75, 0.25, 0.75), (1.0, 0.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 0.0, 1.0, 0.5), (0.5, 0.0, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.25, 0.75, 0.25), (1.0, 0.0, 0.0, 0.5, 1.0),\n (0.5, 0.0, 0.0, 1.0, 1.0), (0.5, 0.0, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.5), (0.0, 1.0, 0.0, 0.0, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.0), (0.0, 1.0, 0.5, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0, 0.0), (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.25, 0.75, 0.25, 0.25, 0.25), (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.0, 0.5), (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5), (0.0, 0.5, 1.0, 0.0, 0.5),\n (0.0, 0.5, 1.0, 0.0, 0.0), (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.25, 0.75, 0.75, 0.25, 0.25), (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5), (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5), (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.5), (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.75, 0.25), (0.0, 1.0, 1.0, 0.5, 1.0),\n (0.0, 1.0, 0.5, 1.0, 1.0), (0.0, 1.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 1.0, 1.0, 1.0), (0.0, 0.5, 1.0, 0.5, 1.0),\n (0.0, 0.5, 0.5, 1.0, 1.0), (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75, 0.75), (0.0, 1.0, 0.5, 0.0, 1.0),\n (0.0, 0.5, 1.0, 0.0, 1.0), (0.0, 0.5, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.75, 0.25, 0.75), (0.0, 1.0, 0.0, 1.0, 0.5),\n (0.0, 0.5, 0.0, 1.0, 0.5), (0.0, 0.5, 0.0, 1.0, 0.0),\n (0.25, 0.75, 0.25, 0.75, 0.25), (0.0, 1.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0, 1.0), (0.0, 0.5, 0.0, 0.5, 1.0),\n (0.25, 0.75, 0.25, 0.75, 0.75), (0.0, 0.5, 0.0, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.0, 0.5), (0.0, 0.0, 1.0, 0.5, 0.0),\n (0.25, 0.25, 
0.75, 0.25, 0.25), (0.0, 0.0, 1.0, 1.0, 0.5),\n (0.0, 0.0, 0.5, 1.0, 0.5), (0.0, 0.0, 0.5, 1.0, 0.0),\n (0.25, 0.25, 0.75, 0.75, 0.25), (0.0, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.0, 0.5, 1.0, 1.0), (0.0, 0.0, 0.5, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75, 0.75), (0.0, 0.0, 0.5, 0.0, 1.0),\n (0.25, 0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(1, 1, 1, 1, 1): [(1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0),\n (1.0, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0.75, 0.75, 0.75, 0.75, 0.75),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0),\n (0.5, 1.0, 0.5, 1.0, 1.0)],\n (0.25, 0.75, 0.75, 0.75, 0.25): [(0.5, 1.0, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0, 1, 1, 1, 0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (\n 0.5, 0.5, 1.0, 0.5, 0.5)],\n (0.0, 0.0, 1.0, 0.5, 1.0): [(0.5, 0.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.0, 0.0, 0.5, 0.5, 1.0),\n (0, 0, 1, 1, 1),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.5, 1.0, 0.5, 1.0),\n (0, 0, 1, 0, 1),\n (0.5, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.25, 0.25, 0.75, 0.75, 0.75),\n (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (\n 0.25, 0.25, 0.75, 0.25, 0.75)]}\n\n init_triangulation(5, 1, check, nn_checks)",
"def test_MeshMat_1group(self):\n\n MS_grp = self.meshsol.get_group(\"stator\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[0, 1, 2], [1, 2, 3]])\n result_tgl = cells_grp[\"triangle\"]\n testA = np.sum(abs(solution - result_tgl))\n msg = (\n \"Wrong output: returned \" + str(result_tgl) + \", expected: \" + str(solution)\n )\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)\n\n MS_grp = self.meshsol.get_group(\"rotor\")\n cells_grp, nb_cell, indices = MS_grp.get_mesh().get_cell()\n solution = np.array([[3, 3], [1, 2], [2, 3]])\n results = cells_grp[\"triangle\"] # The point indices have changed !\n points = MS_grp.get_mesh().get_point(results)\n testA = np.sum(abs(solution - points))\n msg = \"Wrong output: returned \" + str(results) + \", expected: \" + str(solution)\n self.assertAlmostEqual(testA, 0, msg=msg, delta=self.DELTA)",
"def test_3D_m6_1k():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_1k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def init_stitch(N):\n\tif N is None:\n\t\tN = np.int32(HII_DIM) #prepare for stitching\n\tMETA_GRID_SIZE = DIM/N\n\tM = np.int32(HII_DIM/META_GRID_SIZE)\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN/DIM)\n\tprint 'scale', scale\n\tHII_scale = np.float32(BOX_LEN/HII_DIM)\n\tshape = (DIM,DIM,N)\n\tstitch_grid_size = (DIM/(block_size[0]),\n\t\t\t\t\t\tDIM/(block_size[0]),\n\t\t\t\t\t\tN/(block_size[0]))\n\tHII_stitch_grid_size = (HII_DIM/(block_size[0]),\n\t\t\t\t\t\tHII_DIM/(block_size[0]),\n\t\t\t\t\t\tM/(block_size[0]))\n\t#ratio of large box to small size\n\tkernel_source = open(cmd_folder+\"/initialize_stitch.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'DIM': DIM, \n\t\t'VOLUME': VOLUME,\n\t\t'META_BLOCKDIM': N\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_stitch = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\tplan2d = Plan((np.int64(DIM), np.int64(DIM)), dtype=np.complex64)\n\tplan1d = Plan((np.int64(DIM)), dtype=np.complex64)\n\tprint \"init pspec\"\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\t#hbox_large = pyfftw.empty_aligned((DIM, DIM, DIM), dtype='complex64')\n\thbox_large = np.zeros((DIM, DIM, DIM), dtype=np.complex64)\n\t#hbox_small = np.zeros(HII_shape, dtype=np.float32)\n\t#hbox_large = n\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\n\t# Set up pinned memory for transfer\n\t#largebox_hs = cuda.aligned_empty(shape=shape, dtype=np.float32, alignment=resource.getpagesize())\n\tlargebox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.float32)\n\tlargecbox_pin = cuda.pagelocked_empty(shape=shape, dtype=np.complex64)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tprint \"init boxes\"\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t# MRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=meta_x*N**3)\n\t\tinit_stitch(largebox_d, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tinit_stitch(largebox_d_imag, DIM, np.int32(meta_z),block=block_size, grid=stitch_grid_size)\n\t\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\t\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largecbox_pin.copy()\n\t#if want to get velocity need to use this\n\tif True:\n\t\tprint \"saving kbox\"\n\t\tnp.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\n\tprint \"Executing FFT on device\"\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint hbox_large.dtype\n\tprint \"Finished FFT on device\"\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN), hbox_large)\n\t\n\tif True:\n\t\tprint \"loading kbox\"\n\t\thbox_large = 
np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\t#cuda.memcpy_htod_async(largebox_d, largebox_pin)\n\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tHII_filter(largebox_d, DIM, np.int32(meta_z), ZERO, smoothR, block=block_size, grid=stitch_grid_size);\n\t\thbox_large[:, :, meta_z*N:(meta_z+1)*N] = largebox_d.get_async()\n\t#import IPython; IPython.embed()\n\tprint \"Executing FFT on host\"\n\t#hbox_large = hifft(hbox_large).astype(np.complex64).real\n\t#hbox_large = pyfftw.interfaces.numpy_fft.ifftn(hbox_large).real\n\thbox_large = fft_stitch(N, plan2d, plan1d, hbox_large, largebox_d).real\n\tprint \"Finished FFT on host\"\n\t#import IPython; IPython.embed()\n\n\t# for meta_x in xrange(META_GRID_SIZE):\n\t# \tfor meta_y in xrange(META_GRID_SIZE):\n\t# \t\tfor meta_z in xrange(META_GRID_SIZE):\n\t# \t\t\tlargebox_d = gpuarray.to_gpu(hbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N])\n\t# \t\t\tHII_filter(largebox_d, N, np.int32(meta_x), np.int32(meta_y), np.int32(meta_z), ZERO, smoothR, block=block_size, grid=grid_size);\n\t# \t\t\thbox_large[meta_x*N:(meta_x+1)*N, meta_y*N:(meta_y+1)*N, meta_z*N:(meta_z+1)*N] = largebox_d.get()\n\t#plan = Plan(shape, dtype=np.complex64)\n\t#plan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\n\n\t# This saves a large resolution deltax\n\n\t\n\tprint \"downsampling\"\n\tsmallbox_d = gpuarray.zeros((HII_DIM,HII_DIM,M), dtype=np.float32)\n\tfor meta_z in xrange(META_GRID_SIZE):\n\t\tlargebox_pin = hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy()\n\t\tcuda.memcpy_dtoh_async(largecbox_pin, largebox_d)\n\t\t#largebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\tlargebox_d /= scale**3 #\n\t\tsubsample_kernel(largebox_d, smallbox_d, DIM, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size) #subsample in real space\n\t\thbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallbox_d.get_async()\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), hbox_small)\n\t#import IPython; IPython.embed()\n\n\n\t# To get velocities: reload the k-space box\n\thbox_large = np.load(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc.npy\".format(DIM, BOX_LEN))\n\thvbox_large = np.zeros((DIM, DIM, DIM), dtype=np.float32)\n\thvbox_small = np.zeros(HII_shape, dtype=np.float32)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,N), dtype=np.complex64)\n\tsmallvbox_d = gpuarray.zeros((HII_DIM, HII_DIM, M), dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargebox_d = gpuarray.to_gpu_async(hbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\t\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(meta_z), np.int32(num), block=block_size, grid=stitch_grid_size)\n\t\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=stitch_grid_size)\n\t\t\tprint hvbox_large.shape, largevbox_d.shape\n\t\t\thvbox_large[:, :, meta_z*N:(meta_z+1)*N] = largevbox_d.get_async()\n\t\thvbox_large = fft_stitch(N, plan2d, plan1d, hvbox_large, largevbox_d).real\n\t\tfor meta_z in xrange(META_GRID_SIZE):\n\t\t\tlargevbox_d = 
gpuarray.to_gpu_async(hvbox_large[:, :, meta_z*N:(meta_z+1)*N].copy())\n\t\t\tsubsample_kernel(largevbox_d.real, smallvbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_stitch_grid_size)\n\t\t\thvbox_small[:, :, meta_z*M:(meta_z+1)*M] = smallvbox_d.get_async()\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallvbox_d.get())\n\n\treturn",
"def init():\n\tN = np.int32(DIM) #prepare for stitching\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN)/DIM\n\tHII_scale = np.float32(BOX_LEN)/HII_DIM\n\tshape = (N,N,N)\n\t\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\n\tkernel_source = open(cmd_folder+\"/initialize.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'VOLUME': VOLUME,\n\t\t'DIM': DIM\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_kernel = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tadj_complex_conj = main_module.get_function(\"adj_complex_conj\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d, np.int32(DIM), block=block_size, grid=grid_size)\n\n\t#import IPython; IPython.embed()\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d_imag, np.int32(DIM), block=block_size, grid=grid_size)\n\n\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\n\t#adj_complex_conj(largebox_d, DIM, block=block_size, grid=grid_size)\n\tlargebox = largebox_d.get()\n\t#np.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox)\n\n\t#save real space box before smoothing\n\tplan = Plan(shape, dtype=np.complex64)\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox_d.real.get_async())\n\n\t#save real space box after smoothing and subsampling\n\t# host largebox is still in k space, no need to reload from disk\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tHII_filter(largebox_d, N, ZERO, smoothR, block=block_size, grid=grid_size);\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tsubsample_kernel(largebox_d.real, smallbox_d, N, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_grid_size) #subsample in real space\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), smallbox_d.get_async())\n\n\t# reload the k-space box for velocity boxes\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\t\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,DIM), dtype=np.complex64)\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(num), block=block_size, grid=grid_size)\n\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=grid_size)\n\t\tplan.execute(largevbox_d, inverse=True)\n\t\tlargevbox_d /= scale**3\n\t\t#import IPython; IPython.embed()\n\t\tsubsample_kernel(largevbox_d.real, 
smallbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_grid_size)\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallbox_d.get())\n\n\treturn",
"def test_3D_m4_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def magnetic_reynolds(uu, param, grid, aa=list(), bb=list(), jj=list(),\n nghost=3, lmix=True):\n if len(bb) ==0 and len(aa) ==0 and len(jj) ==0:\n print('magnetic_reynolds WARNING: no aa, bb nor jj provided\\n'+\n 'aa or bb must be provided or aa for only hyper resistivity') \n #resistive force\n lres, lhyper3 = False, False\n for iresi in param.iresistivity:\n iresi = str.strip(iresi,'\\n')\n if 'hyper' not in iresi and len(iresi) > 0:\n lres = True\n if 'hyper3' in iresi:\n lhyper3 = True\n fresi = np.zeros_like(uu)\n if lres:\n if lhyper3:\n lhyper3 = lhyper3==lmix\n if len(jj) == 0:\n if len(aa) == 0:\n print('magnetic_reynolds WARNING: calculating jj without aa\\n',\n 'provide aa or jj directly for accurate boundary values')\n jj = curl(bb,grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y, \n coordinate_system=param.coord_system)\n else:\n jj = curl2(aa,grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y, \n coordinate_system=param.coord_system)\n for j in range(0,3):\n jj[j, :nghost,:,:] = jj[j,-2*nghost:-nghost,:,:]\n jj[j,-nghost:,:,:] = jj[j, nghost: 2*nghost,:,:]\n jj[j,:, :nghost,:] = jj[j,:,-2*nghost:-nghost,:]\n jj[j,:,-nghost:,:] = jj[j,:, nghost: 2*nghost,:]\n jj[j,:,:, :nghost] = jj[j,:,:,-2*nghost:-nghost]\n jj[j,:,:,-nghost:] = jj[j,:,:, nghost: 2*nghost]\n fresi = fresi + param.eta*param.mu0*jj\n for iresi in param.iresistivity:\n iresi = str.strip(iresi,'\\n')\n if 'eta-const' not in iresi and 'hyper' not in iresi\\\n and len(iresi) > 0:\n print('magnetic_reynolds WARNING: '+iresi+' not implemented\\n'+\n 'terms may be missing from the standard resistive forces')\n if lhyper3:\n if len(aa) == 0:\n print('magnetic_reynolds WARNING: no aa provided\\n'+\n 'aa must be provided for hyper resistivity')\n return 1\n else:\n del6a = np.zeros_like(aa)\n for j in range(0,3):\n del6a[j] = del6(aa[j],grid.dx,grid.dy,grid.dz)\n del6a[j, :nghost,:,:] = del6a[j,-2*nghost:-nghost,:,:]\n del6a[j,-nghost:,:,:] = del6a[j, nghost: 2*nghost,:,:]\n del6a[j,:, :nghost,:] = del6a[j,:,-2*nghost:-nghost,:]\n del6a[j,:,-nghost:,:] = del6a[j,:, nghost: 2*nghost,:]\n del6a[j,:,:, :nghost] = del6a[j,:,:,-2*nghost:-nghost]\n del6a[j,:,:,-nghost:] = del6a[j,:,:, nghost: 2*nghost]\n #del6 for non-cartesian tba\n #del6a[j] = del6(aa[j],grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y,\n # coordinate_system=param.coord_system)\n #effective at l > 5 grid.dx? \n fresi = fresi + param.eta_hyper3*del6a\n del(del6a)\n fresi2 = np.sqrt(dot2(fresi))\n del(fresi)\n #advective force\n if len(bb) == 0:\n if len(aa) == 0:\n print('magnetic_reynolds WARNING: calculating uu x bb without bb\\n',\n 'provide aa or bb directly to proceed')\n return 1\n else:\n bb = curl(aa,grid.dx,grid.dy,grid.dz,x=grid.x,y=grid.y, \n coordinate_system=param.coord_system)\n for j in range(0,3):\n bb[j, :nghost,:,:] = bb[j,-2*nghost:-nghost,:,:]\n bb[j,-nghost:,:,:] = bb[j, nghost: 2*nghost,:,:]\n bb[j,:, :nghost,:] = bb[j,:,-2*nghost:-nghost,:]\n bb[j,:,-nghost:,:] = bb[j,:, nghost: 2*nghost,:]\n bb[j,:,:, :nghost] = bb[j,:,:,-2*nghost:-nghost]\n bb[j,:,:,-nghost:] = bb[j,:,:, nghost: 2*nghost]\n advec = cross(uu,bb)\n advec2 = np.sqrt(dot2(advec))\n del(advec)\n #avoid division by zero\n if fresi2.max() > 0:\n fresi2[np.where(fresi2==0)] = fresi2[np.where(fresi2>0)].min()\n Rm = advec2/fresi2\n #set minimum floor to exclude zero-valued Rm \n if Rm.max() > 0:\n Rm[np.where(Rm==0)] = Rm[np.where(Rm>0)].min()\n else:\n print('Rm undefined')\n else:\n Rm = advec2\n print('Rm undefined')\n return Rm",
"def test_3D_m8_2k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_2k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_3D_m4_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_2k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2D_m6_1k():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: 'gpu_2k',\n Splitting: 'o2'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L4_2,\n Support: '',\n Splitting: 'o2'},\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)",
"def test_2_layer():\r\n # angular frequency in radians * THz\r\n w = 100 * nu.THz\r\n # Relative permittivity of metal and dielectric\r\n em = -4.56 + 0.12j\r\n ed = 1.23 + 0.01j\r\n ex_list = ez_list = [ed, em]\r\n # Relative permeabilities\r\n mu_list = [1,1]\r\n # Dictionary of input parameters\r\n input_params = {'w': w, 'd_list': [inf,inf], 'ex_list': ex_list,\r\n 'ez_list': ez_list, 'mu_list': mu_list}\r\n \r\n # Calculate the theoretical kx\r\n theo_kx = (w / nu.c0) * cmath.sqrt((em * ed) / (em + ed))\r\n if theo_kx.imag < 0:\r\n theo_kx *= -1\r\n print('Theoretical kx:',\r\n '(%.7g+%.7gj) rad/um' % (theo_kx.real / nu.um**-1, theo_kx.imag / nu.um**-1))\r\n \r\n # If I use the theoretical kx value, the mode should be correct and\r\n # all my tests should pass.\r\n params = deepcopy(input_params)\r\n params['kx'] = theo_kx\r\n params = find_all_params_from_kx(params)\r\n kzd, kzm = params['kz_list']\r\n # check that kz_list is correct\r\n assert_floats_are_equal(kzd**2, (w**2 / nu.c0**2) * ed**2 / (em + ed))\r\n assert_floats_are_equal(kzm**2, (w**2 / nu.c0**2) * em**2 / (em + ed))\r\n # check that layer_bottom_list is correct\r\n assert params['layer_bottom_list'][0] == -inf\r\n assert params['layer_bottom_list'][1] == 0\r\n # Check that the boundary condition matrix agrees with hand-calculation\r\n bc_mat = bc_matrix(params)\r\n # ...top-left is Ex0down / H0down\r\n assert_floats_are_equal(bc_mat[0,0], -kzd / (w * ed * nu.eps0))\r\n # ...top-right is -Ex1up / H1up\r\n assert_floats_are_equal(bc_mat[0,1], -kzm / (w * em * nu.eps0))\r\n # ...bottom-left is eps0 * Ez0down / H0down\r\n assert_floats_are_equal(bc_mat[1,0], ed * -theo_kx / (w * ed * nu.eps0))\r\n # ...bottom-right is -eps1 * Ez1up / H1up\r\n assert_floats_are_equal(bc_mat[1,1], -em * -theo_kx / (w * em * nu.eps0))\r\n # Check that one of the eigenvalues is almost zero (compared to the size\r\n # of the matrix elements).\r\n eigenvalues = np.linalg.eig(bc_mat)[0]\r\n assert abs(eigenvalues).min() / abs(bc_mat).max() < 1e-6\r\n # Check that the mode passes all tests.\r\n assert check_mode(params, thorough=True) is True\r\n # Check that I can scale the fields and it still passes all tests.\r\n params_scaled = rescale_fields(1.23+4.56j, params)\r\n assert check_mode(params_scaled, thorough=True) is True\r\n \r\n # Now try my kx-finding algorithm, to see if it finds the right value.\r\n kx_list = find_kx(input_params)\r\n print('kx_list:',\r\n ['(%.7g+%.7gj) rad/um' % (kx.real / nu.um**-1, kx.imag / nu.um**-1)\r\n for kx in kx_list])\r\n kx = kx_list[0]\r\n assert_floats_are_equal(theo_kx, kx)\r\n \r\n plot_mode(params)\r\n \r\n print('If you see this message, all the tests succeeded!!')",
"def test_3D_m8_1k_sFH():\n scal, velo = setup_3D()\n\n advec = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d3d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: M8Prime,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_3D_withPython(scal, velo, advec, advec_py)",
"def subdivision(mesh):\n\t\n\t\n\t# 1. generate new nodes in the centre of quad\n\t# 1/4 o-------o 1/4 o: existing vertices\n\t# | | *: newly-generated vertices\n\t# | * |\n\t# | |\n\t# 1/4 o-------o 1/4\n\n\tnew_coor = mesh.give_nodes().give_coor()\n\t\n\tfor face_index in range(mesh.give_model_inf()[2]): \n\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\tfor vertex_index in range(4):\n\t\t\tmesh.give_faces()\n\t\t\tnode_index = mesh.give_faces().give_node_list(face_index)[vertex_index]\n\n\t\t\tnew_x += 0.25*mesh.give_nodes().give_coor(node_index)[0]\n\t\t\tnew_y += 0.25*mesh.give_nodes().give_coor(node_index)[1]\n\t\t\tnew_z += 0.25*mesh.give_nodes().give_coor(node_index)[2]\n\t\t\t\n\t\tnew_coor.append((new_x, new_y, new_z))\n\t\t\n\t# generating new nodes on the edge\n\t# figure out one edge is shared by how many surfaces\n\tedge_shared_by_faces_list = helper.find_edge_shared_by_which_faces(mesh.give_edges(), mesh.give_faces())\n\t\n\tfor edge_index in range(mesh.give_model_inf()[1]):\n\n\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\n\t# 2. generate new node on boundary edge\n\t# o: existing vertices\n\t# 1/2 o---*---o 1/2 *: newly-generated vertices\n\t# \n\n\t\tnew_coor = mesh.give_nodes().give_coor()\n\t\tif len(edge_shared_by_faces_list[edge_index]) == 1:\t\n\t\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\tfor vertex_index in range(2):\n\t\t\t\tthis_node = mesh.give_edges().give_node(edge_index)[vertex_index]\n\t\t\t\tnew_x += 0.5*mesh.give_nodes().give_coor()[this_node][0]\n\t\t\t\tnew_y += 0.5*mesh.give_nodes().give_coor()[this_node][1]\n\t\t\t\tnew_z += 0.5*mesh.give_nodes().give_coor()[this_node][2]\n\t\t\t\t\n\t\t\tnew_coor.append((new_x, new_y, new_z))\n\t\t\t\t\n\t# 3. generate new node on interior edge\n\t# 1/16 o-------o 1/16 o: existing vertices\n\t# | | *: newly-generated vertices\n\t# 3/8 o---*---o 3/8\n\t# | |\n\t# 1/16 o-------o 1/16\n\n\t\telse:\n\t\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\tconsidered_node = []\n\t\t\tfor vertex_index in range(2):\n\t\t\t\tthis_node = mesh.give_edges().give_node(edge_index)[vertex_index]\n\t\t\t\tconsidered_node.append(this_node)\n\t\t\t\tnew_x += 3./8.*mesh.give_nodes().give_coor()[this_node][0]\n\t\t\t\tnew_y += 3./8.*mesh.give_nodes().give_coor()[this_node][1]\n\t\t\t\tnew_z += 3./8.*mesh.give_nodes().give_coor()[this_node][2]\n\t\t\t\n\t\t\t# faces contain this node\n\t\t\tpotential_node = []\n\t\t\tfor face_index in edge_shared_by_faces_list[edge_index]:\t\t\n\t\t\t\tfor vertex_index in range(4):\n\t\t\t\t\t\tpotential_node.append(mesh.give_faces().give_node_list(face_index)[vertex_index])\n\t\t\t\n\t\t\touter_node = []\n\t\t\tfor node in potential_node:\n\t\t\t\tif (node not in considered_node) & (node not in outer_node):\n\t\t\t\t\touter_node.append(node)\n\t\t\t\t\t\n\t\t\tfor vertex_index in outer_node:\n\t\t\t\tnew_x += 1./16.*mesh.give_nodes().give_coor()[vertex_index][0]\n\t\t\t\tnew_y += 1./16.*mesh.give_nodes().give_coor()[vertex_index][1]\n\t\t\t\tnew_z += 1./16.*mesh.give_nodes().give_coor()[vertex_index][2]\n\t\t\t\n\t\t\tnew_coor.append((new_x, new_y, new_z))\n\n\t# update the links of edges and surfaces\n\tnew_edge_list = []\n\tnew_face_list = []\n\tfor face_index in range(mesh.give_model_inf()[2]):\n\t\told_node0 = mesh.give_faces().give_node_list(face_index)[0]\n\t\told_node1 = mesh.give_faces().give_node_list(face_index)[1]\n\t\told_node2 = mesh.give_faces().give_node_list(face_index)[2]\n\t\told_node3 = mesh.give_faces().give_node_list(face_index)[3]\n\t\t\n\t\told_edge0 = 
mesh.give_faces().give_edge_list(face_index)[0]\n\t\told_edge1 = mesh.give_faces().give_edge_list(face_index)[1]\n\t\told_edge2 = mesh.give_faces().give_edge_list(face_index)[2]\n\t\told_edge3 = mesh.give_faces().give_edge_list(face_index)[3]\n\t\t\n\t\tnew_node4 = old_edge0 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2] \n\t\tnew_node5 = old_edge1 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\n\t\tnew_node6 = old_edge2 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\n\t\tnew_node7 = old_edge3 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\t\n\t\tnew_node8 = mesh.give_model_inf()[0] + face_index\n\t\t\n\t\tif helper.in_list((old_node0, new_node4), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node0, new_node4))\n\t\tif helper.in_list((new_node4, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node4, new_node8))\n\t\tif helper.in_list((new_node8, new_node7), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node8, new_node7))\n\t\tif helper.in_list((new_node7, old_node0), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node7, old_node0))\n\t\tif helper.in_list((new_node4, old_node1), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node4, old_node1))\n\t\tif helper.in_list((old_node1, new_node5), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node1, new_node5))\n\t\tif helper.in_list((new_node5, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node5, new_node8))\n\t\tif helper.in_list((new_node7, old_node3), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node7, old_node3))\n\t\tif helper.in_list((old_node3, new_node6), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node3, new_node6))\n\t\tif helper.in_list((new_node6, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node6, new_node8))\n\t\tif helper.in_list((new_node6, old_node2), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node6, old_node2))\n\t\tif helper.in_list((old_node2, new_node5), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node2, new_node5))\n\t\n\t\tnew_face_list.append((old_node0, new_node4, new_node8, new_node7))\n\t\tnew_face_list.append((new_node4, old_node1, new_node5, new_node8))\n\t\tnew_face_list.append((new_node7, new_node8, new_node6, old_node3))\n\t\tnew_face_list.append((new_node8, new_node5, old_node2, new_node6))\n\t\t\n\tnew_edges = geo.Edge(new_edge_list)\n\t\n\tnew_faces = geo.Face(new_face_list, new_edges)\n\t\t\n\t# update existing nodes\t\n\tfor node_index in range(mesh.give_model_inf()[0]):\n\t\t\n\t\tring1, ring2 = helper.find_neighbour_node(new_edges, new_faces, node_index)\n\t\tvalence = helper.find_valence(node_index, new_faces) \n\t\t#: valence: the number of faces sharing on specific edge\n\n\t# 4. 
update existing corner vertex\n\t# 2/4 @---* 1/4 *: newly-generated vertices\n\t# | | @: existing vertices to be updated\n\t# 1/4 *---* 0 The higher mask values on neighbouring vertices, \n\t# the more likely a square mesh will be refined into a sphere.\n\t \n\t\tif valence == 1:\n\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tprint\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tnew_x += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\tnew_y += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\tnew_z += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\n\t\t\tfor node_in_ring2 in ring2:\n\t\t\t\tnew_x += 0.*mesh.give_nodes().give_coor()[node_in_ring2][0]\n\t\t\t\tnew_y += 0.*mesh.give_nodes().give_coor()[node_in_ring2][1]\n\t\t\t\tnew_z += 0.*mesh.give_nodes().give_coor()[node_in_ring2][2]\n\t\t\t\t\n\t\t\tnew_x += 2./4.*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += 2./4.*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += 2./4.*mesh.give_nodes().give_coor()[node_index][2]\n\n\t# 5. update existing boundary joint vertex\n\t# 3/4\n\t# 1/8 *---*---* 1/8 *: newly-generated vertices\n\t# | | | @: existing vertices to be updated\n\t# 0 *---*---* 0\n\n\t\telif valence == 2:\n\t\t\t\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tif helper.find_valence(node_in_ring1, new_faces) <= 2: \n\t\t\t\t\tnew_x += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\t\tnew_y += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\t\tnew_z += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\t\t\t\t\t\n\t\t\tnew_x += 3./4.*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += 3./4.*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += 3./4.*mesh.give_nodes().give_coor()[node_index][2]\n\t\n\t# 6. update new node on interior edge\n\t# * r/k\n\t# /\\ b/k*\n\t# *__/ \\___ r/k\n\t# \\ \\ /¬¬/ *: newly-generated vertices: \n\t# \\ \\/ / b = 3/2/valence, r = 1/4/valence\n\t# *--@--* b/k\t @: existing vertices to be updated: 1-b-r\t\t\n\t# / /\\ \\\n\t# /__/ \\__\\\n\t# * \\ / * r/k\n\t# \\/\n\t\t\n\t\telse:\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tbeta = 3./2./valence\n\t\t\tgamma = 1./4./valence\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tnew_x += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\tnew_y += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\tnew_z += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\t\t\t\n\t\t\tfor node_in_ring2 in ring2:\n\t\t\t\tnew_x += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][0]\n\t\t\t\tnew_y += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][1]\n\t\t\t\tnew_z += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][2]\n\t\t\t\n\t\t\tnew_x += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][2]\n\t\t\n\t\tnew_coor[node_index] = (new_x, new_y, new_z)\n\t\n\tnew_nodes = geo.Node(new_coor)\n\t\n\tmesh.update(new_nodes, new_edges, new_faces)\n\t\n\t# return new_mesh\n\treturn mesh",
"def test_reconstruction_against_simulation(subarray_and_event_gamma_off_axis_500_gev):\n\n # 4-LST bright event already calibrated\n # we'll clean it and parametrize it again in the TelescopeFrame\n subarray, event = subarray_and_event_gamma_off_axis_500_gev\n\n # define reconstructor\n reconstructor = HillasReconstructor(subarray)\n\n hillas_dict = {}\n telescope_pointings = {}\n\n for tel_id, dl1 in event.dl1.tel.items():\n\n telescope_pointings[tel_id] = SkyCoord(\n alt=event.pointing.tel[tel_id].altitude,\n az=event.pointing.tel[tel_id].azimuth,\n frame=AltAz(),\n )\n\n geom_CameraFrame = subarray.tel[tel_id].camera.geometry\n\n # this could be done also out of this loop,\n # but in case of real data each telescope would have a\n # different telescope_pointing\n geom_TelescopeFrame = geom_CameraFrame.transform_to(\n TelescopeFrame(telescope_pointing=telescope_pointings[tel_id])\n )\n\n mask = tailcuts_clean(\n geom_TelescopeFrame,\n dl1.image,\n picture_thresh=5.0,\n boundary_thresh=2.5,\n keep_isolated_pixels=False,\n min_number_picture_neighbors=2,\n )\n\n try:\n hillas_dict[tel_id] = hillas_parameters(\n geom_TelescopeFrame[mask], dl1.image[mask]\n )\n\n # the original event is created from a\n # pytest fixture with \"session\" scope, so it's always the same\n # and if we used the same event we would overwrite the image\n # parameters for the next tests, thus causing their failure\n test_event = deepcopy(event)\n test_event.dl1.tel[tel_id].parameters = ImageParametersContainer()\n test_event.dl1.tel[tel_id].parameters.hillas = hillas_dict[tel_id]\n\n except HillasParameterizationError as e:\n print(e)\n continue\n\n # Get shower geometry\n reconstructor(event)\n # get the result from the correct DL2 container\n result = event.dl2.stereo.geometry[\"HillasReconstructor\"]\n\n # get the reconstructed coordinates in the sky\n reco_coord = SkyCoord(alt=result.alt, az=result.az, frame=AltAz())\n # get the simulated coordinates in the sky\n true_coord = SkyCoord(\n alt=event.simulation.shower.alt, az=event.simulation.shower.az, frame=AltAz()\n )\n\n # check that we are not more far than 0.1 degrees\n assert reco_coord.separation(true_coord) < 0.1 * u.deg",
"def test_2():\n\n n1 = 10\n n2 = 100\n ndim = 3\n\n semi_axes = np.random.random((n1,ndim))\n coords = np.array([sample_ellipsoidal_volume(n2, semi_axes[i]) for i in range(0,n1)])\n\n Is = iterative_inertia_tensors_3D(coords)\n\n assert np.shape(Is)==(n1,ndim,ndim)",
"def test_power_spectral_density_from_spatially_resolved_magnetisation_confined_to_mesh_region(tmpdir, debug=False):\n os.chdir(str(tmpdir))\n RTOL = 1e-10\n\n H1 = 1e6 # external field in A/m\n alpha1 = 0.5 # some sort of damping constant\n omega1 = gamma * H1 # precession frequency\n\n H2 = 2.8e4 # external field in A/m\n alpha2 = 0.3 # some sort of damping constant\n omega2 = gamma * H2 # precession frequency\n\n ##\n # Step 1: Construct a time series of artificial magnetisation\n # data and save it to a bunch of .npy files.\n ##\n t_step = 1e-11\n t_ini = 0\n t_end = 10e-9\n\n N1 = 42 # in a real application this would be the number of mesh vertices\n N2 = 23 # in a real application this would be the number of mesh vertices\n fft_test_helpers.create_test_npy_files_with_two_regions(\n str(tmpdir), t_step, t_ini, t_end, omega1, alpha1, N1, omega2, alpha2, N2)\n\n ##\n # Step 2: compute the FFT of a resampled time series, both by\n # hand and using FFT_m.\n ##\n # XXX TODO: Resampling timesteps is not supported when using .npy\n # files. Either simplify the code below, or implement saving to\n # .h5 files so that it's easier to implement resampling for\n # spatially resolved data, too.\n ##\n t_step_res = t_step\n t_ini_res = t_ini\n t_end_res = t_end\n ts_resampled = np.arange(t_ini_res, t_end_res, t_step_res)\n\n # Compute time series based on resampled timesteps\n mx_res = exp(-ts_resampled * 1e8 / alpha1) * sin(omega1 * ts_resampled)\n my_res = exp(-ts_resampled * 1e8 / alpha1) * cos(omega1 * ts_resampled)\n mz_res = 1 - sqrt(mx_res ** 2 + my_res ** 2)\n\n # Compute 'analytical' Fourier transform of resampled time series and\n # determine the power of the spectrum for each component. We also need\n # to multiply by the number of mesh nodes because the numerical algorithm\n # sums up all contributions at the individual nodes (but we can just\n # multiply because they are all identical by construction).\n psd_mx_expected = N1 * np.absolute(np.fft.rfft(mx_res)) ** 2\n psd_my_expected = N1 * np.absolute(np.fft.rfft(my_res)) ** 2\n psd_mz_expected = N1 * np.absolute(np.fft.rfft(mz_res)) ** 2\n\n # Compute Fourier transform of resampled time series using FFT_m\n freqs_computed, psd_mx_computed, psd_my_computed, psd_mz_computed = \\\n compute_power_spectral_density('m_ringdown*.npy', t_step_res, t_ini=t_ini_res,\n t_end=t_end_res, subtract_values=None, restrict_to_vertices=xrange(N1))\n\n # Check that the analytically determined power spectra are the same as the\n # computed ones.\n assert(np.allclose(psd_mx_expected, psd_mx_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_my_expected, psd_my_computed, atol=0, rtol=RTOL))\n assert(np.allclose(psd_mz_expected, psd_mz_computed, atol=0, rtol=RTOL))\n\n if debug:\n # Plot the spectra for debugging\n fig = plt.figure(figsize=(20, 5))\n ax = fig.gca()\n ax.plot(freqs_computed, psd_mx_expected, label='psd_mx_expected')\n ax.plot(freqs_computed, psd_my_expected, label='psd_my_expected')\n ax.plot(freqs_computed, psd_mz_expected, label='psd_mz_expected')\n ax.plot(freqs_computed, psd_mx_computed, label='psd_mx_computed')\n ax.plot(freqs_computed, psd_my_computed, label='psd_my_computed')\n ax.plot(freqs_computed, psd_mz_computed, label='psd_mz_computed')\n ax.legend(loc='best')\n fig.savefig('psd_m_McMichaelStiles.png')",
"def test_spatial_smoothing_xesmf_reduce_spatial_dims_MPI_curv(\r\n PM_ds_control_3d_full,\r\n):\r\n da = PM_ds_control_3d_full\r\n step = 5\r\n actual = spatial_smoothing_xesmf(\r\n da,\r\n d_lon_lat_kws={\"lon\": step},\r\n )\r\n expected_lat_size = 180 // step\r\n assert actual[\"lon\"].size < da.lon.size\r\n assert actual[\"lat\"].size == expected_lat_size",
"def _apply_array_spatial12_halffilling(self, h1e: 'Nparray',\n h2e: 'Nparray') -> 'Nparray':\n if fqe.settings.use_accelerated_code:\n return self._apply_array_spatial12_lm(h1e, h2e)\n else:\n h1e = copy.deepcopy(h1e)\n h2e = numpy.moveaxis(copy.deepcopy(h2e), 1, 2) * (-1.0)\n norb = self.norb()\n for k in range(norb):\n h1e[:, :] -= h2e[:, k, k, :]\n\n if numpy.iscomplex(h1e).any() or numpy.iscomplex(h2e).any():\n dvec = self.calculate_dvec_spatial()\n out = numpy.einsum(\"ij,ijkl->kl\", h1e, dvec)\n dvec = numpy.einsum(\"ijkl,klmn->ijmn\", h2e, dvec)\n out += self._calculate_coeff_spatial_with_dvec(dvec)\n else:\n nij = norb * (norb + 1) // 2\n h1ec = numpy.zeros((nij), dtype=self._dtype)\n h2ec = numpy.zeros((nij, nij), dtype=self._dtype)\n for i in range(norb):\n for j in range(i + 1):\n ijn = j + i * (i + 1) // 2\n h1ec[ijn] = h1e[i, j]\n for k in range(norb):\n for l in range(k + 1):\n kln = l + k * (k + 1) // 2\n h2ec[ijn, kln] = h2e[i, j, k, l]\n dvec = self._calculate_dvec_spatial_compressed()\n out = numpy.einsum(\"i,ikl->kl\", h1ec, dvec)\n dvec = numpy.einsum(\"ik,kmn->imn\", h2ec, dvec)\n for i in range(self.norb()):\n for j in range(self.norb()):\n ijn = min(i, j) + max(i, j) * (max(i, j) + 1) // 2\n work = self._core.alpha_map(j, i)\n for source, target, parity in work:\n out[source, :] += dvec[ijn, target, :] * parity\n work = self._core.beta_map(j, i)\n for source, target, parity in work:\n out[:, source] += dvec[ijn, :, target] * parity\n\n return out",
"def test_dmi_uses_unit_length_2dmesh():\n A = 8.78e-12 # J/m\n D = 1.58e-3 # J/m^2\n Ms = 3.84e5 # A/m\n\n energies = []\n\n # unit_lengths 1e-9 and 1 are common, let's throw in an intermediate length\n # just to challenge the system a little:\n for unit_length in (1, 1e-4, 1e-9):\n radius = 200e-9 / unit_length\n maxh = 5e-9 / unit_length\n helical_period = (4 * pi * A / D) / unit_length\n k = 2 * pi / helical_period\n # HF 27 April 2014: The next command fails in dolfin 1.3\n # mesh = df.CircleMesh(df.Point(0, 0), radius, maxh)\n # The actual shape of the domain shouldn't matter for the test,\n # so let's use a Rectangular mesh which should work the same:\n\n nx = ny = int(round(radius / maxh))\n mesh = df.RectangleMesh(df.Point(0, 0), df.Point(radius, radius), nx, ny)\n\n S3 = df.VectorFunctionSpace(mesh, \"CG\", 1, dim=3)\n m_expr = df.Expression((\"0\", \"cos(k * x[0])\", \"sin(k * x[0])\"), k=k, degree=1)\n m = Field(S3, m_expr, name='m')\n dmi = DMI(D)\n Ms_dg = Field(df.FunctionSpace(mesh, 'DG', 0), Ms)\n dmi.setup(m, Ms_dg, unit_length=unit_length)\n energies.append(dmi.compute_energy())\n\n H = df.Function(S3)\n H.vector()[:] = dmi.compute_field()\n print H(0.0, 0.0)\n\n print \"Using unit_length = {}.\".format(unit_length)\n print \"Helical period {}.\".format(helical_period)\n print \"Energy {}.\".format(dmi.compute_energy())\n\n rel_diff_energies = abs(energies[0] - energies[1]) / abs(energies[1])\n print \"Relative difference of energy {}.\".format(rel_diff_energies)\n assert rel_diff_energies < 1e-13\n\n rel_diff_energies2 = abs(energies[0] - energies[2]) / abs(energies[2])\n print \"Relative difference2 of energy {}.\".format(rel_diff_energies2)\n assert rel_diff_energies2 < 1e-13",
"def test_2D_m4_1k_sFH():\n scal, velo = setup_2D()\n\n advec = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: 'gpu_1k',\n Splitting: 'o2_FullHalf'}\n )\n advec_py = Advection(velo, scal, discretization=d2d,\n method={TimeIntegrator: RK2,\n Interpolation: Linear,\n Remesh: L2_1,\n Support: '',\n Splitting: 'o2_FullHalf'}\n )\n assertion_2D_withPython(scal, velo, advec, advec_py)"
]
| negative_scores: [
"0.62774265",
"0.6248158",
"0.615843",
"0.61519164",
"0.61302435",
"0.5975631",
"0.5969894",
"0.59696215",
"0.59506017",
"0.5889328",
"0.5815332",
"0.58082706",
"0.5774815",
"0.57495856",
"0.57449704",
"0.57132375",
"0.56907326",
"0.5676495",
"0.56660473",
"0.5663885",
"0.5656594",
"0.56406915",
"0.5604522",
"0.55667853",
"0.5563626",
"0.55628735",
"0.5544974",
"0.553212",
"0.55262494",
"0.55244297"
]
| document_score: 0.6285738 | document_rank: 0 |