code (string, lengths 22 to 1.05M) | apis (list, lengths 1 to 3.31k) | extract_api (string, lengths 75 to 3.25M)
---|---|---|
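Each row pairs a Python source snippet (`code`) with the fully qualified APIs it calls (`apis`) and a positional extraction record (`extract_api`). As a minimal sketch of how a row might be consumed, the snippet below parses one `extract_api` string with `ast.literal_eval`; the field layout (call span, qualified name, call text, arguments, argument source, name span, alias flag, import statement) is only inferred from the rows shown here, not taken from an official schema.

```python
import ast

def iter_api_calls(extract_api_str):
    # Hypothetical helper: assumes each record starts with a (start, end) span
    # followed by the fully qualified API name, as the rows below suggest.
    for record in ast.literal_eval(extract_api_str):
        span, qualified_name = record[0], record[1]
        yield qualified_name, span

sample = ("[((113, 128), 'tensorflow.round', 'tf.round', (['value'], {}), "
          "'(value)\\n', (121, 128), True, 'import tensorflow as tf\\n')]")
for name, (start, end) in iter_api_calls(sample):
    print(name, start, end)  # tensorflow.round 113 128
```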
import numpy as np
import tensorflow as tf

def discretize(value, action_dim, n_outputs):
    discretization = tf.round(value)
    discretization = tf.minimum(tf.constant(n_outputs - 1, dtype=tf.float32, shape=[1, action_dim]),
                                tf.maximum(tf.constant(0, dtype=tf.float32, shape=[1, action_dim]), tf.to_float(discretization)))
    return tf.to_int32(discretization)

if __name__ == '__main__':
    value = np.array((0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9))
    a = discretize(value, value.shape[0], 2)
    with tf.Session() as sess:
        print(a.eval())
|
[
"tensorflow.Session",
"tensorflow.constant",
"tensorflow.round",
"tensorflow.to_int32",
"numpy.array",
"tensorflow.to_float"
] |
[((113, 128), 'tensorflow.round', 'tf.round', (['value'], {}), '(value)\n', (121, 128), True, 'import tensorflow as tf\n'), ((359, 386), 'tensorflow.to_int32', 'tf.to_int32', (['discretization'], {}), '(discretization)\n', (370, 386), True, 'import tensorflow as tf\n'), ((426, 484), 'numpy.array', 'np.array', (['(0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9)'], {}), '((0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9))\n', (434, 484), True, 'import numpy as np\n'), ((162, 229), 'tensorflow.constant', 'tf.constant', (['(n_outputs - 1)'], {'dtype': 'tf.float32', 'shape': '[1, action_dim]'}), '(n_outputs - 1, dtype=tf.float32, shape=[1, action_dim])\n', (173, 229), True, 'import tensorflow as tf\n'), ((534, 546), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (544, 546), True, 'import tensorflow as tf\n'), ((262, 317), 'tensorflow.constant', 'tf.constant', (['(0)'], {'dtype': 'tf.float32', 'shape': '[1, action_dim]'}), '(0, dtype=tf.float32, shape=[1, action_dim])\n', (273, 317), True, 'import tensorflow as tf\n'), ((317, 344), 'tensorflow.to_float', 'tf.to_float', (['discretization'], {}), '(discretization)\n', (328, 344), True, 'import tensorflow as tf\n')]
|
import copy
import sys
import ChessAI.GameController.game_figures as Figures
from ChessBoard.chess_board import Board
from ChessBoard.chess_figure import FigureType, Side
from Vector2d.Vector2d import Vector2d, Move
class GameBoard:
default_white_king_pos = Vector2d(4, 7)
default_black_king_pos = Vector2d(4, 0)
default_white_pawn_row = 6
default_black_pawn_row = 1
default_white_rook_right_pos = Vector2d(7, 7)
default_white_rook_left_pos = Vector2d(0, 7)
default_black_rook_right_pos = Vector2d(7, 0)
default_black_rook_left_pos = Vector2d(0, 0)
def __init__(self, chess_board):
self.board = [[None for j in range(0, Board.ROW_SIZE)]
for i in range(0, Board.COLUMN_SIZE)]
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
if chess_board.board[j][i] is None:
continue
figure_type = chess_board.board[j][i].figure_type
side = chess_board.board[j][i].side
cur_pos = Vector2d(j, i)
if figure_type == FigureType.KING:
was_moved = True
if side == Side.WHITE:
if cur_pos == GameBoard.default_white_king_pos:
was_moved = False
elif side == Side.BLACK:
if cur_pos == GameBoard.default_black_king_pos:
was_moved = False
self.board[j][i] = Figures.King(side, cur_pos, was_moved)
elif figure_type == FigureType.QUEEN:
self.board[j][i] = Figures.Queen(side, cur_pos)
elif figure_type == FigureType.ROOK:
was_moved = True
if side == Side.WHITE:
if cur_pos == GameBoard.default_white_rook_left_pos or cur_pos == GameBoard.default_white_rook_right_pos:
was_moved = False
elif side == Side.BLACK:
if cur_pos == GameBoard.default_black_rook_left_pos or cur_pos == GameBoard.default_black_rook_right_pos:
was_moved = False
self.board[j][i] = Figures.Rook(side, cur_pos, was_moved)
elif figure_type == FigureType.KNIGHT:
self.board[j][i] = Figures.Knight(side, cur_pos)
elif figure_type == FigureType.BISHOP:
self.board[j][i] = Figures.Bishop(side, cur_pos)
elif figure_type == FigureType.PAWN:
was_moved = True
if side == Side.WHITE:
if i == GameBoard.default_white_pawn_row:
was_moved = False
elif side == Side.BLACK:
if i == GameBoard.default_black_pawn_row:
was_moved = False
self.board[j][i] = Figures.Pawn(side, cur_pos, was_moved)
else:
continue
def serialize_to_str(self):
str_board = ['.' for j in range(0, Board.ROW_SIZE)
for i in range(0, Board.COLUMN_SIZE)]
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
if self.board[j][i] is None:
continue
str_board[i * Board.ROW_SIZE + j] = self.board[j][i].serialized_letter()
res = ""
for i in range(0, Board.COLUMN_SIZE * Board.ROW_SIZE):
res += str_board[i]
return res
def deserialize_from_str(self, board_as_str):
self.board = [[None for j in range(0, Board.ROW_SIZE)]
for i in range(0, Board.COLUMN_SIZE)]
str_board = ['.' for j in range(0, Board.ROW_SIZE)
for i in range(0, Board.COLUMN_SIZE)]
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
str_board[i * Board.ROW_SIZE + j] = str(board_as_str).__getitem__(i * Board.ROW_SIZE + j)
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
letter = str_board[i * Board.ROW_SIZE + j]
if letter.isupper():
side = Side.WHITE
else:
side = Side.BLACK
letter = letter.lower()
cur_pos = Vector2d(j, i)
if letter == 'k':
self.board[j][i] = Figures.King(side, cur_pos, False)
elif letter == 'i':
self.board[j][i] = Figures.King(side, cur_pos, True)
elif letter == 'b':
self.board[j][i] = Figures.Bishop(side, cur_pos)
elif letter == 'r':
self.board[j][i] = Figures.Rook(side, cur_pos, False)
elif letter == 'o':
self.board[j][i] = Figures.Rook(side, cur_pos, True)
elif letter == 'n':
self.board[j][i] = Figures.Knight(side, cur_pos)
elif letter == 'q':
self.board[j][i] = Figures.Queen(side, cur_pos)
elif letter == 'p':
self.board[j][i] = Figures.Pawn(side, cur_pos)
elif letter == 'a':
self.board[j][i] = Figures.Pawn(side, cur_pos, True)
elif letter == 'w':
self.board[j][i] = Figures.Pawn(side, cur_pos, False, True)
def export_chess_board(self):
export_board = ['.' for j in range(0, Board.ROW_SIZE)
for i in range(0, Board.COLUMN_SIZE)]
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
if self.board[j][i] is None:
continue
figure_type = self.board[j][i].figure_type
side = self.board[j][i].side
if figure_type == FigureType.KING:
latter = 'k'
elif figure_type == FigureType.QUEEN:
latter = 'q'
elif figure_type == FigureType.ROOK:
latter = 'r'
elif figure_type == FigureType.KNIGHT:
latter = 'n'
elif figure_type == FigureType.BISHOP:
latter = 'b'
elif figure_type == FigureType.PAWN:
latter = 'p'
if side == Side.WHITE:
latter = latter.upper()
export_board[i * Board.ROW_SIZE + j] = latter
return export_board
def print(self):
sys.stdout.write(" ")
sys.stdout.write(" ")
sys.stdout.write(" ")
for i in range(0, Board.ROW_SIZE):
sys.stdout.write(i.__str__())
sys.stdout.write(" ")
print()
print()
for i in range(0, Board.COLUMN_SIZE):
sys.stdout.write(i.__str__())
sys.stdout.write(" ")
sys.stdout.write(" ")
for j in range(0, Board.ROW_SIZE):
if self.board[j][i] is not None:
self.board[j][i].print()
sys.stdout.write(" ")
else:
sys.stdout.write("*")
sys.stdout.write(" ")
print()
def print_attacked_cells(self):
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
if self.board[j][i] is not None:
attack_cells = self.board[j][i].generate_moves(self)
self.board[j][i].print()
sys.stdout.write(": ")
for k in range(len(attack_cells)):
sys.stdout.write(attack_cells[k].x.__str__())
sys.stdout.write(" ")
sys.stdout.write(attack_cells[k].y.__str__())
sys.stdout.write("; ")
print()
def get_by_pos(self, x, y):
return self.board[x][y]
def get(self, position):
return self.board[position.x][position.y]
def set(self, position, game_object):
self.board[position.x][position.y] = game_object
def get_king_cell(self, side):
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
if self.board[j][i] is not None:
if isinstance(self.board[j][i], Figures.King) and self.board[j][i].side == side:
return Vector2d(j, i)
def get_figures_list(self, side):
figures = []
for i in range(0, Board.COLUMN_SIZE):
for j in range(0, Board.ROW_SIZE):
if self.board[j][i] is not None:
if self.board[j][i].side == side:
figures.append(self.board[j][i])
return figures
def make_move(self, move):
self.get(move.point_from).make_move(self, move.point_to)
def summary_attacked_cells(self, side):
attacked_cells = []
for j in range(Board.ROW_SIZE):
for i in range(Board.COLUMN_SIZE):
figure = self.get_by_pos(j, i)
if figure is not None and figure.side == side:
if isinstance(figure, Figures.King):
attacked_cells = attacked_cells + figure.generate_moves(self, False)
elif isinstance(figure, Figures.Pawn):
attacked_cells = attacked_cells + figure.generate_moves(self, True)
else:
attacked_cells = attacked_cells + figure.generate_moves(self)
return attacked_cells
def summary_moves(self, side, my_turn=True):
summary_moves = []
attacked_cells = []
for j in range(Board.ROW_SIZE):
for i in range(Board.COLUMN_SIZE):
attacked_cells.clear()
figure = self.get_by_pos(j, i)
if figure is not None and figure.side == side:
if isinstance(figure, Figures.King):
attacked_cells = attacked_cells + figure.generate_moves(self, my_turn)
else:
attacked_cells = attacked_cells + figure.generate_moves(self)
for k in range(len(attacked_cells)):
summary_moves.append(Move(Vector2d(j, i), attacked_cells[k]))
return summary_moves
def is_that_check(self, my_side):
attacked_cells = self.summary_attacked_cells(my_side)
enemy_king_cell = self.get_king_cell(Side.get_oposite(my_side))
return enemy_king_cell in attacked_cells
def is_that_mate(self, my_side):
enemy_figures = self.get_figures_list(Side.get_oposite(my_side))
for i in range(len(enemy_figures)):
cur_figure = enemy_figures[i]
available_moves = cur_figure.generate_moves(self)
for j in range(len(available_moves)):
new_chess_board = copy.deepcopy(self)
if new_chess_board.get(cur_figure.position) is None:
print(cur_figure.position.x)
print(cur_figure.position.y)
new_chess_board.make_move(Move(cur_figure.position, available_moves[j]))
if new_chess_board.is_that_check(my_side) is False:
return False
return True
def is_that_stalemate(self, my_side):
enemy_figures = self.get_figures_list(Side.get_oposite(my_side))
for i in range(len(enemy_figures)):
cur_figure = enemy_figures[i]
if isinstance(cur_figure, Figures.King) is not True:
available_moves = cur_figure.generate_moves(self)
if len(available_moves) != 0:
return False
else:
available_moves = cur_figure.generate_moves(self)
for j in range(len(available_moves)):
new_chess_board = copy.deepcopy(self)
if new_chess_board.get(cur_figure.position) is None:
print(cur_figure.position.x)
print(cur_figure.position.y)
new_chess_board.make_move(Move(cur_figure.position, available_moves[j]))
if new_chess_board.is_that_check(my_side) is False:
return False
return True
def evaluate(self, side):
total = 0
for j in range(Board.ROW_SIZE):
for i in range(Board.COLUMN_SIZE):
pos = Vector2d(j, i)
figure = self.get(pos)
if figure is not None:
if figure.side is side:
sign = 1
else:
sign = -1
total = total + (figure.evaluate(j, i) * sign)
return total
def delete_double_move(self, side_to_del):
for j in range(Board.ROW_SIZE):
for i in range(Board.COLUMN_SIZE):
figure = self.get_by_pos(j, i)
if figure is not None and figure.side == side_to_del:
if isinstance(figure, Figures.Pawn):
figure.double_move = False
def swap_pawn(self, position, figure_lat):
side = self.board[position.x][position.y].side
lower = figure_lat.lower()
if lower == 'q':
self.board[position.x][position.y] = Figures.Queen(side, position)
if lower == 'b':
self.board[position.x][position.y] = Figures.Bishop(side, position)
if lower == 'n':
self.board[position.x][position.y] = Figures.Knight(side, position)
if lower == 'r':
self.board[position.x][position.y] = Figures.Rook(side, position, True)
|
[
"sys.stdout.write",
"Vector2d.Vector2d.Move",
"copy.deepcopy",
"ChessBoard.chess_figure.Side.get_oposite",
"ChessAI.GameController.game_figures.Bishop",
"ChessAI.GameController.game_figures.King",
"ChessAI.GameController.game_figures.Rook",
"ChessAI.GameController.game_figures.Knight",
"Vector2d.Vector2d.Vector2d",
"ChessAI.GameController.game_figures.Queen",
"ChessAI.GameController.game_figures.Pawn"
] |
[((265, 279), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['(4)', '(7)'], {}), '(4, 7)\n', (273, 279), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((309, 323), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['(4)', '(0)'], {}), '(4, 0)\n', (317, 323), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((423, 437), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['(7)', '(7)'], {}), '(7, 7)\n', (431, 437), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((472, 486), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['(0)', '(7)'], {}), '(0, 7)\n', (480, 486), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((523, 537), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['(7)', '(0)'], {}), '(7, 0)\n', (531, 537), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((572, 586), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (580, 586), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((6731, 6752), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (6747, 6752), False, 'import sys\n'), ((6761, 6782), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (6777, 6782), False, 'import sys\n'), ((6791, 6812), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (6807, 6812), False, 'import sys\n'), ((6910, 6931), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (6926, 6931), False, 'import sys\n'), ((7065, 7086), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (7081, 7086), False, 'import sys\n'), ((7099, 7120), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (7115, 7120), False, 'import sys\n'), ((10725, 10750), 'ChessBoard.chess_figure.Side.get_oposite', 'Side.get_oposite', (['my_side'], {}), '(my_side)\n', (10741, 10750), False, 'from ChessBoard.chess_figure import FigureType, Side\n'), ((10885, 10910), 'ChessBoard.chess_figure.Side.get_oposite', 'Side.get_oposite', (['my_side'], {}), '(my_side)\n', (10901, 10910), False, 'from ChessBoard.chess_figure import FigureType, Side\n'), ((11630, 11655), 'ChessBoard.chess_figure.Side.get_oposite', 'Side.get_oposite', (['my_side'], {}), '(my_side)\n', (11646, 11655), False, 'from ChessBoard.chess_figure import FigureType, Side\n'), ((13599, 13628), 'ChessAI.GameController.game_figures.Queen', 'Figures.Queen', (['side', 'position'], {}), '(side, position)\n', (13612, 13628), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((13703, 13733), 'ChessAI.GameController.game_figures.Bishop', 'Figures.Bishop', (['side', 'position'], {}), '(side, position)\n', (13717, 13733), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((13808, 13838), 'ChessAI.GameController.game_figures.Knight', 'Figures.Knight', (['side', 'position'], {}), '(side, position)\n', (13822, 13838), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((13913, 13947), 'ChessAI.GameController.game_figures.Rook', 'Figures.Rook', (['side', 'position', '(True)'], {}), '(side, position, True)\n', (13925, 13947), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((1068, 1082), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['j', 'i'], {}), '(j, i)\n', (1076, 1082), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((4481, 4495), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['j', 'i'], {}), '(j, i)\n', (4489, 4495), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((11144, 11163), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (11157, 
11163), False, 'import copy\n'), ((12709, 12723), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['j', 'i'], {}), '(j, i)\n', (12717, 12723), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((1535, 1573), 'ChessAI.GameController.game_figures.King', 'Figures.King', (['side', 'cur_pos', 'was_moved'], {}), '(side, cur_pos, was_moved)\n', (1547, 1573), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((4570, 4604), 'ChessAI.GameController.game_figures.King', 'Figures.King', (['side', 'cur_pos', '(False)'], {}), '(side, cur_pos, False)\n', (4582, 4604), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((7282, 7303), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (7298, 7303), False, 'import sys\n'), ((7346, 7367), 'sys.stdout.write', 'sys.stdout.write', (['"""*"""'], {}), "('*')\n", (7362, 7367), False, 'import sys\n'), ((7388, 7409), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (7404, 7409), False, 'import sys\n'), ((7747, 7769), 'sys.stdout.write', 'sys.stdout.write', (['""": """'], {}), "(': ')\n", (7763, 7769), False, 'import sys\n'), ((11373, 11418), 'Vector2d.Vector2d.Move', 'Move', (['cur_figure.position', 'available_moves[j]'], {}), '(cur_figure.position, available_moves[j])\n', (11377, 11418), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((12129, 12148), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (12142, 12148), False, 'import copy\n'), ((1668, 1696), 'ChessAI.GameController.game_figures.Queen', 'Figures.Queen', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (1681, 1696), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((4680, 4713), 'ChessAI.GameController.game_figures.King', 'Figures.King', (['side', 'cur_pos', '(True)'], {}), '(side, cur_pos, True)\n', (4692, 4713), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((7919, 7940), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (7935, 7940), False, 'import sys\n'), ((8035, 8057), 'sys.stdout.write', 'sys.stdout.write', (['"""; """'], {}), "('; ')\n", (8051, 8057), False, 'import sys\n'), ((8641, 8655), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['j', 'i'], {}), '(j, i)\n', (8649, 8655), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((12375, 12420), 'Vector2d.Vector2d.Move', 'Move', (['cur_figure.position', 'available_moves[j]'], {}), '(cur_figure.position, available_moves[j])\n', (12379, 12420), False, 'from Vector2d.Vector2d import Vector2d, Move\n'), ((2267, 2305), 'ChessAI.GameController.game_figures.Rook', 'Figures.Rook', (['side', 'cur_pos', 'was_moved'], {}), '(side, cur_pos, was_moved)\n', (2279, 2305), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((4789, 4818), 'ChessAI.GameController.game_figures.Bishop', 'Figures.Bishop', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (4803, 4818), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((2401, 2430), 'ChessAI.GameController.game_figures.Knight', 'Figures.Knight', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (2415, 2430), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((4894, 4928), 'ChessAI.GameController.game_figures.Rook', 'Figures.Rook', (['side', 'cur_pos', '(False)'], {}), '(side, cur_pos, False)\n', (4906, 4928), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((10514, 10528), 'Vector2d.Vector2d.Vector2d', 'Vector2d', (['j', 'i'], {}), '(j, i)\n', (10522, 10528), False, 'from Vector2d.Vector2d import Vector2d, 
Move\n'), ((2526, 2555), 'ChessAI.GameController.game_figures.Bishop', 'Figures.Bishop', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (2540, 2555), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((5004, 5037), 'ChessAI.GameController.game_figures.Rook', 'Figures.Rook', (['side', 'cur_pos', '(True)'], {}), '(side, cur_pos, True)\n', (5016, 5037), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((2998, 3036), 'ChessAI.GameController.game_figures.Pawn', 'Figures.Pawn', (['side', 'cur_pos', 'was_moved'], {}), '(side, cur_pos, was_moved)\n', (3010, 3036), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((5113, 5142), 'ChessAI.GameController.game_figures.Knight', 'Figures.Knight', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (5127, 5142), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((5218, 5246), 'ChessAI.GameController.game_figures.Queen', 'Figures.Queen', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (5231, 5246), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((5322, 5349), 'ChessAI.GameController.game_figures.Pawn', 'Figures.Pawn', (['side', 'cur_pos'], {}), '(side, cur_pos)\n', (5334, 5349), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((5425, 5458), 'ChessAI.GameController.game_figures.Pawn', 'Figures.Pawn', (['side', 'cur_pos', '(True)'], {}), '(side, cur_pos, True)\n', (5437, 5458), True, 'import ChessAI.GameController.game_figures as Figures\n'), ((5534, 5574), 'ChessAI.GameController.game_figures.Pawn', 'Figures.Pawn', (['side', 'cur_pos', '(False)', '(True)'], {}), '(side, cur_pos, False, True)\n', (5546, 5574), True, 'import ChessAI.GameController.game_figures as Figures\n')]
|
import cbmpy
import numpy as np
import os
import sys
import pandas as pd
import re
modelLoc = sys.argv[1]
growthMediumLoc = sys.argv[2]
scriptLoc = sys.argv[3]
proteomicsLoc = sys.argv[4]
resultsFolder = sys.argv[5]
model = cbmpy.CBRead.readSBML3FBC(modelLoc, scan_notes_gpr = False)
growthData = pd.read_csv(growthMediumLoc)
proteomicsData = pd.read_csv(proteomicsLoc)
resultsPath = '%s/%s' %(scriptLoc, resultsFolder)
if not (os.path.isdir(resultsPath)): os.mkdir(resultsPath)
os.chdir(resultsPath)
"""
Total protein volume constraint for <NAME>i
See the supplementary material of the paper for the derivation of the constraint
"""
protSum=float(0.62/0.34)
pID = 'UP000000625'
constraint = []
UniProtIDs = pd.read_csv('proteinMasses.txt', sep = '\t')
for entry in UniProtIDs.index: constraint.append([(7.3*pow(10,-4)*float(UniProtIDs['Mass'][entry].replace(',',''))), 'P_%s_synthesis' %(UniProtIDs['Entry'][entry])])
model.addUserConstraint(pid = None, fluxes = constraint, operator = '<=', rhs = protSum)
os.chdir(resultsPath)
"""
Here, we define the multiplier for the concentrations of nutrients in the growth medium. We will use this to perform glucose (and amino acid, for the supplemented MOPS variants) limitation simulations.
"""
multiplier = 1.0  # No change in Glc abundance
for i in growthData['Reaction ID']:
    model.setReactionLowerBound(i, multiplier * growthData['Lower Bound'].loc[growthData['Reaction ID'] == i].values[0])
fbaResult = cbmpy.CBCPLEX.cplx_analyzeModel(model)
fva = cbmpy.CBCPLEX.cplx_FluxVariabilityAnalysis(model, pre_opt=True)
cbmpy.CBWrite.writeFVAdata(fva[0], fva[1], 'glcTitration_%s_%.2f.csv' % (os.path.split(growthMediumLoc)[1].replace('.csv', ''), multiplier))
|
[
"os.mkdir",
"pandas.read_csv",
"cbmpy.CBCPLEX.cplx_analyzeModel",
"os.path.isdir",
"cbmpy.CBCPLEX.cplx_FluxVariabilityAnalysis",
"cbmpy.CBRead.readSBML3FBC",
"os.path.split",
"os.chdir"
] |
[((226, 283), 'cbmpy.CBRead.readSBML3FBC', 'cbmpy.CBRead.readSBML3FBC', (['modelLoc'], {'scan_notes_gpr': '(False)'}), '(modelLoc, scan_notes_gpr=False)\n', (251, 283), False, 'import cbmpy\n'), ((299, 327), 'pandas.read_csv', 'pd.read_csv', (['growthMediumLoc'], {}), '(growthMediumLoc)\n', (310, 327), True, 'import pandas as pd\n'), ((345, 371), 'pandas.read_csv', 'pd.read_csv', (['proteomicsLoc'], {}), '(proteomicsLoc)\n', (356, 371), True, 'import pandas as pd\n'), ((481, 502), 'os.chdir', 'os.chdir', (['resultsPath'], {}), '(resultsPath)\n', (489, 502), False, 'import os\n'), ((711, 753), 'pandas.read_csv', 'pd.read_csv', (['"""proteinMasses.txt"""'], {'sep': '"""\t"""'}), "('proteinMasses.txt', sep='\\t')\n", (722, 753), True, 'import pandas as pd\n'), ((1014, 1035), 'os.chdir', 'os.chdir', (['resultsPath'], {}), '(resultsPath)\n', (1022, 1035), False, 'import os\n'), ((1458, 1496), 'cbmpy.CBCPLEX.cplx_analyzeModel', 'cbmpy.CBCPLEX.cplx_analyzeModel', (['model'], {}), '(model)\n', (1489, 1496), False, 'import cbmpy\n'), ((1504, 1567), 'cbmpy.CBCPLEX.cplx_FluxVariabilityAnalysis', 'cbmpy.CBCPLEX.cplx_FluxVariabilityAnalysis', (['model'], {'pre_opt': '(True)'}), '(model, pre_opt=True)\n', (1546, 1567), False, 'import cbmpy\n'), ((430, 456), 'os.path.isdir', 'os.path.isdir', (['resultsPath'], {}), '(resultsPath)\n', (443, 456), False, 'import os\n'), ((459, 480), 'os.mkdir', 'os.mkdir', (['resultsPath'], {}), '(resultsPath)\n', (467, 480), False, 'import os\n'), ((1640, 1670), 'os.path.split', 'os.path.split', (['growthMediumLoc'], {}), '(growthMediumLoc)\n', (1653, 1670), False, 'import os\n')]
|
import numpy as np
from scipy.interpolate import RegularGridInterpolator
from scipy.ndimage.filters import gaussian_filter
"""
Elastic deformation of images as described in
<NAME>, "Best Practices for
Convolutional Neural Networks applied to Visual
Document Analysis", in
Proc. of the International Conference on Document Analysis and
Recognition, 2003.
Modified from:
https://gist.github.com/chsasank/4d8f68caf01f041a6453e67fb30f8f5a
https://github.com/fcalvet/image_tools/blob/master/image_augmentation.py#L62
Modified to take 3D inputs
Deforms both the image and corresponding label file
Label volumes are interpolated via nearest neighbour
"""
def elastic_transform_3d(img_numpy, labels=None, alpha=1, sigma=20, c_val=0.0, method="linear"):
"""
:param img_numpy: 3D medical image modality
:param labels: 3D medical image labels
:param alpha: scaling factor of gaussian filter
:param sigma: standard deviation of random gaussian filter
:param c_val: fill value
:param method: interpolation method. supported methods : ("linear", "nearest")
:return: deformed image and/or label
"""
assert img_numpy.ndim == 3 , 'Wrong img shape, provide 3D img'
if labels is not None:
assert img_numpy.shape == labels.shape , "Shapes of img and label do not much!"
shape = img_numpy.shape
# Define 3D coordinate system
coords = np.arange(shape[0]), np.arange(shape[1]), np.arange(shape[2])
# Interpolated img
im_intrps = RegularGridInterpolator(coords, img_numpy,
method=method,
bounds_error=False,
fill_value=c_val)
# Get random elastic deformations
dx = gaussian_filter((np.random.rand(*shape) * 2 - 1), sigma,
mode="constant", cval=0.) * alpha
dy = gaussian_filter((np.random.rand(*shape) * 2 - 1), sigma,
mode="constant", cval=0.) * alpha
dz = gaussian_filter((np.random.rand(*shape) * 2 - 1), sigma,
mode="constant", cval=0.) * alpha
# Define sample points
x, y, z = np.mgrid[0:shape[0], 0:shape[1], 0:shape[2]]
indices = np.reshape(x + dx, (-1, 1)), \
np.reshape(y + dy, (-1, 1)), \
np.reshape(z + dz, (-1, 1))
# Interpolate 3D image image
img_numpy = im_intrps(indices).reshape(shape)
# Interpolate labels
if labels is not None:
lab_intrp = RegularGridInterpolator(coords, labels,
method="nearest",
bounds_error=False,
fill_value=0)
labels = lab_intrp(indices).reshape(shape).astype(labels.dtype)
return img_numpy, labels
return img_numpy
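The sample above only defines `elastic_transform_3d`; a minimal usage sketch follows, with a synthetic volume and label pair made up purely for illustration.

```python
import numpy as np

# Illustrative only: random 64^3 volume plus a thresholded "label" mask.
volume = np.random.rand(64, 64, 64).astype(np.float32)
labels = (volume > 0.5).astype(np.uint8)

warped_img, warped_lab = elastic_transform_3d(volume, labels, alpha=1, sigma=20)
print(warped_img.shape, warped_lab.shape)  # (64, 64, 64) (64, 64, 64)
```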
|
[
"numpy.random.rand",
"scipy.interpolate.RegularGridInterpolator",
"numpy.arange",
"numpy.reshape"
] |
[((1499, 1599), 'scipy.interpolate.RegularGridInterpolator', 'RegularGridInterpolator', (['coords', 'img_numpy'], {'method': 'method', 'bounds_error': '(False)', 'fill_value': 'c_val'}), '(coords, img_numpy, method=method, bounds_error=\n False, fill_value=c_val)\n', (1522, 1599), False, 'from scipy.interpolate import RegularGridInterpolator\n'), ((1397, 1416), 'numpy.arange', 'np.arange', (['shape[0]'], {}), '(shape[0])\n', (1406, 1416), True, 'import numpy as np\n'), ((1418, 1437), 'numpy.arange', 'np.arange', (['shape[1]'], {}), '(shape[1])\n', (1427, 1437), True, 'import numpy as np\n'), ((1439, 1458), 'numpy.arange', 'np.arange', (['shape[2]'], {}), '(shape[2])\n', (1448, 1458), True, 'import numpy as np\n'), ((2230, 2257), 'numpy.reshape', 'np.reshape', (['(x + dx)', '(-1, 1)'], {}), '(x + dx, (-1, 1))\n', (2240, 2257), True, 'import numpy as np\n'), ((2275, 2302), 'numpy.reshape', 'np.reshape', (['(y + dy)', '(-1, 1)'], {}), '(y + dy, (-1, 1))\n', (2285, 2302), True, 'import numpy as np\n'), ((2320, 2347), 'numpy.reshape', 'np.reshape', (['(z + dz)', '(-1, 1)'], {}), '(z + dz, (-1, 1))\n', (2330, 2347), True, 'import numpy as np\n'), ((2505, 2601), 'scipy.interpolate.RegularGridInterpolator', 'RegularGridInterpolator', (['coords', 'labels'], {'method': '"""nearest"""', 'bounds_error': '(False)', 'fill_value': '(0)'}), "(coords, labels, method='nearest', bounds_error=\n False, fill_value=0)\n", (2528, 2601), False, 'from scipy.interpolate import RegularGridInterpolator\n'), ((1780, 1802), 'numpy.random.rand', 'np.random.rand', (['*shape'], {}), '(*shape)\n', (1794, 1802), True, 'import numpy as np\n'), ((1905, 1927), 'numpy.random.rand', 'np.random.rand', (['*shape'], {}), '(*shape)\n', (1919, 1927), True, 'import numpy as np\n'), ((2030, 2052), 'numpy.random.rand', 'np.random.rand', (['*shape'], {}), '(*shape)\n', (2044, 2052), True, 'import numpy as np\n')]
|
# Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""General helper functions."""
from os import path
import numpy as np
from skimage import measure
import tensorflow.compat.v1 as tf
from tensorflow_graphics.projects.cvxnet.lib.libmise import mise
from tensorflow_graphics.projects.nasa.lib import datasets
from tensorflow_graphics.projects.nasa.lib import models
import tensorflow_probability as tfp
from tqdm import trange
import trimesh
tf.disable_eager_execution()
tfd = tfp.distributions
def define_flags():
"""Define command line flags."""
flags = tf.app.flags
# Dataset Parameters
flags.DEFINE_enum("dataset", "amass",
list(k for k in datasets.dataset_dict.keys()),
"Name of the dataset.")
flags.DEFINE_string("data_dir", None, "Directory to load data from.")
flags.mark_flag_as_required("data_dir")
flags.DEFINE_integer("sample_bbox", 1024, "Number of bbox samples.")
flags.DEFINE_integer("sample_surf", 1024, "Number of surface samples.")
flags.DEFINE_integer("batch_size", 12, "Batch size.")
flags.DEFINE_integer("motion", 0, "Index of the motion for evaluation.")
flags.DEFINE_integer("subject", 0, "Index of the subject for training.")
# Model Parameters
flags.DEFINE_enum("model", "nasa", list(k for k in models.model_dict.keys()),
"Name of the model.")
flags.DEFINE_integer("n_parts", 24, "Number of parts.")
flags.DEFINE_integer("total_dim", 960,
"Dimension of the latent vector (in total).")
flags.DEFINE_bool("shared_decoder", False, "Whether to use shared decoder.")
flags.DEFINE_float("soft_blend", 5., "The constant to blend parts.")
flags.DEFINE_bool("projection", True,
"Whether to use projected shape features.")
flags.DEFINE_float("level_set", 0.5, "The value of the level_set.")
flags.DEFINE_integer("n_dims", 3, "The dimension of the query points.")
# Training Parameters
flags.DEFINE_float("lr", 1e-4, "Learning rate")
flags.DEFINE_string("train_dir", None, "Training directory.")
flags.mark_flag_as_required("train_dir")
flags.DEFINE_integer("max_steps", 200000, "Number of optimization steps.")
flags.DEFINE_integer("save_every", 5000,
"Number of steps to save checkpoint.")
flags.DEFINE_integer("summary_every", 500,
"Number of steps to save checkpoint.")
flags.DEFINE_float("label_w", 0.5, "Weight of labed vertices loss.")
flags.DEFINE_float("minimal_w", 0.05, "Weight of minimal loss.")
flags.DEFINE_bool("use_vert", True,
"Whether to use vertices on the mesh for training.")
flags.DEFINE_bool("use_joint", True,
"Whether to use joint-based transformation.")
flags.DEFINE_integer("sample_vert", 2048, "Number of vertex samples.")
# Evaluation Parameters
flags.DEFINE_bool("gen_mesh_only", False, "Whether to generate meshes only.")
# Tracking Parameters
flags.DEFINE_float("theta_lr", 5e-4, "Learning rate")
flags.DEFINE_integer("max_steps_per_frame", 1792,
"Number of optimization steps for tracking each frame.")
flags.DEFINE_enum("gradient_type", "reparam", ["vanilla", "reparam"],
"Type of gradient to use in theta optimization.")
flags.DEFINE_integer("sample_track_vert", 1024,
"Number of vertex samples for tracking each frame.")
flags.DEFINE_integer("n_noisy_samples", 8,
"Number of noisy samples per vertex")
flags.DEFINE_float("bandwidth", 1e-2, "Bandwidth of the gaussian noises.")
flags.DEFINE_bool(
"left_trans", False,
"Whether to use left side transformation (True) or right side (False).")
flags.DEFINE_string("joint_data", None, "Path to load joint data.")
flags.DEFINE_float("glue_w", 20., "Weight of length constraint loss.")
flags.DEFINE_float("trans_range", 1., "The range of allowed translations.")
def gen_mesh(sess,
feed_dict,
latent_holder,
point_holder,
latent,
occ,
batch_val,
hparams,
idx=0):
"""Generating meshes given a trained NASA model."""
scale = 1.1 # Scale of the padded bbox regarding the tight one.
level_set = hparams.level_set
latent_val = sess.run(latent, feed_dict)
mesh_extractor = mise.MISE(32, 3, level_set)
points = mesh_extractor.query()
gt_verts = batch_val["vert"].reshape([-1, 3])
gt_bbox = np.stack([gt_verts.min(axis=0), gt_verts.max(axis=0)], axis=0)
gt_center = (gt_bbox[0] + gt_bbox[1]) * 0.5
gt_scale = (gt_bbox[1] - gt_bbox[0]).max()
while points.shape[0] != 0:
orig_points = points
points = points.astype(np.float32)
points = (np.expand_dims(points, axis=0) / mesh_extractor.resolution -
0.5) * scale
points = points * gt_scale + gt_center
n_points = points.shape[1]
values = []
for i in range(0, n_points,
100000): # Add this to prevent OOM due to points overload.
feed_dict[latent_holder] = latent_val
feed_dict[point_holder] = np.expand_dims(points[:, i:i + 100000], axis=1)
value = sess.run(occ[:, idx], feed_dict)
values.append(value)
values = np.concatenate(values, axis=1)
values = values[0, :, 0].astype(np.float64)
mesh_extractor.update(orig_points, values)
points = mesh_extractor.query()
value_grid = mesh_extractor.to_dense()
try:
value_grid = np.pad(value_grid, 1, "constant", constant_values=-1e6)
verts, faces, normals, unused_var = measure.marching_cubes_lewiner(
value_grid, min(level_set, value_grid.max()))
del normals
verts -= 1
verts /= np.array([
value_grid.shape[0] - 3, value_grid.shape[1] - 3,
value_grid.shape[2] - 3
],
dtype=np.float32)
verts = scale * (verts - 0.5)
verts = verts * gt_scale + gt_center
faces = np.stack([faces[..., 1], faces[..., 0], faces[..., 2]], axis=-1)
mesh = trimesh.Trimesh(vertices=verts, faces=faces)
return mesh
except: # pylint: disable=bare-except
return None
def save_mesh(sess,
feed_dict,
latent_holder,
point_holder,
latent,
occ,
batch_val,
hparams,
pth="meshes"):
"""Generate and save meshes to disk given a trained NASA model."""
name = batch_val["name"][0].decode("utf-8")
subject, motion, frame = amass_name_helper(name)
pth = path.join(hparams.train_dir, pth, frame)
if not tf.io.gfile.isdir(pth):
tf.io.gfile.makedirs(pth)
start = hparams.n_parts
for i in range(start, hparams.n_parts + 1):
mesh_model = gen_mesh(
sess,
feed_dict,
latent_holder,
point_holder,
latent,
occ,
batch_val,
hparams,
idx=i)
mesh_name = "full_pred.obj"
if mesh_model is not None:
with tf.io.gfile.GFile(path.join(pth, mesh_name), "w") as fout:
mesh_model.export(fout, file_type="obj")
return subject, motion, frame, mesh_model
def save_pointcloud(data, hparams, pth="pointcloud"):
"""Save pointcloud to disk."""
name = data["name"][0].decode("utf-8")
unused_subject, unused_motion, frame = amass_name_helper(name)
pth = path.join(hparams.train_dir, pth, frame)
if not tf.io.gfile.isdir(pth):
tf.io.gfile.makedirs(pth)
mesh_name = "pointcloud.obj"
with tf.io.gfile.GFile(path.join(pth, mesh_name), "w") as fout:
pointcloud = data["vert"].reshape([-1, 3])
for v in pointcloud:
fout.write("v {0} {1} {2}\n".format(*v.tolist()))
def amass_name_helper(name):
name, frame = name.split("-")
subject = name[:5]
motion = name[6:]
return subject, motion, frame
def make_summary_feed_dict(
iou_hook,
iou,
best_hook,
best_iou,
):
feed_dict = {}
feed_dict[iou_hook] = iou
feed_dict[best_hook] = best_iou
return feed_dict
def parse_global_step(ckpt):
basename = path.basename(ckpt)
return int(basename.split("-")[-1])
def compute_iou(sess, feed_dict, latent_holder, point_holder, latent, occ,
point, label, hparams):
"""Compute IoU."""
iou = 0.
eps = 1e-9
latent_val = sess.run(latent, feed_dict)
n_points = point.shape[2]
preds = []
for start in range(0, n_points, 100000):
feed_dict[point_holder] = point[:, :, start:start + 100000]
feed_dict[latent_holder] = latent_val
pred = sess.run(occ, feed_dict)
preds.append(pred)
pred = np.concatenate(preds, axis=2)
pred = (pred >= hparams.level_set).astype(np.float32)
label = (label[:, :1] >= 0.5).astype(np.float32).squeeze(axis=1)
iou += np.sum(pred * label) / np.maximum(np.sum(np.maximum(pred, label)), eps)
return iou
def compute_glue_loss(connect, end_pts, inv_transforms, inv_first_frame_trans,
joints, hparams):
"""Compute the prior term as a glue loss."""
n_dims = hparams.n_dims
# Invert the transformation
r_inv = inv_transforms[..., :n_dims, :n_dims]
t_inv = inv_transforms[..., :n_dims, -1:]
r = tf.transpose(r_inv, [0, 2, 1])
t = -tf.matmul(r, t_inv)
transforms = tf.concat(
[tf.concat([r, t], axis=-1), inv_transforms[..., -1:, :]], axis=-2)
transforms = tf.matmul(transforms, inv_first_frame_trans)
# Compute transformations of father joints and apply it to vectors from frame0
father_transforms = tf.reduce_sum(
tf.expand_dims(transforms, axis=1) *
connect.reshape([hparams.n_parts, hparams.n_parts, 1, 1]),
axis=0)
end_pts_homo = tf.expand_dims(
tf.concat([end_pts, tf.ones_like(end_pts[..., :1])], axis=-1), axis=-1)
end_pts_transformed = tf.matmul(father_transforms, end_pts_homo)
end_pts_transformed = tf.squeeze(end_pts_transformed, axis=-1)[..., :n_dims]
# Compute vectors in current configuration
pred_links = tf.reshape(joints, [hparams.n_parts, n_dims])
# Compute distance between links and transformed vectors
return tf.reduce_sum(tf.square(pred_links - end_pts_transformed))
def vanilla_theta_gradient(model_fn, batch_holder, hparams):
"""A vanilla gradient estimator for the pose, theta."""
latent_holder, latent, occ_eval = model_fn(batch_holder, None, None,
"gen_mesh")
if hparams.sample_vert > 0:
points = batch_holder["point"]
weights = batch_holder["weight"]
n_vert = tf.shape(points)[2]
sample_indices = tf.random.uniform([1, 1, hparams.sample_vert],
minval=0,
maxval=n_vert,
dtype=tf.int32)
points = tf.gather(points, sample_indices, axis=2, batch_dims=2)
weights = tf.gather(weights, sample_indices, axis=2, batch_dims=2)
batch_holder["point"] = points
batch_holder["weight"] = weights
unused_var0, unused_var1, occ = model_fn(batch_holder, None, None, "gen_mesh")
return latent_holder, latent, occ_eval, tf.reduce_mean(
tf.square(occ - hparams.level_set))
def reparam_theta_gradient(model_fn, batch_holder, hparams):
"""A gradient estimaor for the pose, theta, using the reparam trick."""
sigma = hparams.bandwidth
n_samples = hparams.n_noisy_samples
latent_holder, latent, occ_eval = model_fn(batch_holder, None, None,
"gen_mesh")
if hparams.sample_vert > 0:
points = batch_holder["point"]
weights = batch_holder["weight"]
n_vert = tf.shape(points)[2]
sample_indices = tf.random.uniform([1, 1, hparams.sample_vert],
minval=0,
maxval=n_vert,
dtype=tf.int32)
points = tf.gather(points, sample_indices, axis=2, batch_dims=2)
weights = tf.gather(weights, sample_indices, axis=2, batch_dims=2)
batch_holder["point"] = points
batch_holder["weight"] = weights
dist = tfd.Normal(loc=0., scale=sigma)
n_pts = hparams.sample_vert if hparams.sample_vert > 0 else hparams.n_vert
noises = dist.sample((1, hparams.n_parts, n_pts, n_samples, hparams.n_dims))
unused_var0, unused_var1, occ = model_fn(batch_holder, noises, None,
"gen_mesh")
occ = tf.reshape(occ, [1, hparams.n_parts + 1, -1, n_samples, 1])
occ = tf.reduce_mean(occ[:, hparams.n_parts:], axis=3)
return latent_holder, latent, occ_eval, tf.reduce_mean(
tf.square(occ - hparams.level_set))
def optimize_theta(feed_dict, loss, reset_op, train_op, rec_loss, glue_loss,
sess, k, hparams):
"""Optimize the pose, theta, during tracking."""
sess.run(reset_op)
loss_val = 0
glue_val = 0
with trange(hparams.max_steps_per_frame) as t:
for unused_i in t:
loss_val, unused_var, rec_val, glue_val = sess.run(
[loss, train_op, rec_loss, glue_loss], feed_dict)
t.set_description("Frame_{0} {1:.4f}|{2:.4f}".format(
k, rec_val, glue_val))
return loss_val, glue_val
|
[
"numpy.sum",
"numpy.maximum",
"tensorflow.compat.v1.reduce_mean",
"tensorflow.compat.v1.transpose",
"tensorflow.compat.v1.matmul",
"tensorflow.compat.v1.gather",
"tensorflow.compat.v1.disable_eager_execution",
"os.path.join",
"numpy.pad",
"tensorflow.compat.v1.square",
"tensorflow.compat.v1.squeeze",
"tensorflow.compat.v1.expand_dims",
"tensorflow.compat.v1.io.gfile.makedirs",
"numpy.stack",
"trimesh.Trimesh",
"os.path.basename",
"tqdm.trange",
"tensorflow.compat.v1.shape",
"tensorflow.compat.v1.reshape",
"numpy.concatenate",
"tensorflow.compat.v1.io.gfile.isdir",
"tensorflow_graphics.projects.nasa.lib.datasets.dataset_dict.keys",
"tensorflow_graphics.projects.nasa.lib.models.model_dict.keys",
"tensorflow.compat.v1.concat",
"numpy.expand_dims",
"tensorflow.compat.v1.random.uniform",
"tensorflow.compat.v1.ones_like",
"tensorflow_graphics.projects.cvxnet.lib.libmise.mise.MISE",
"numpy.array"
] |
[((980, 1008), 'tensorflow.compat.v1.disable_eager_execution', 'tf.disable_eager_execution', ([], {}), '()\n', (1006, 1008), True, 'import tensorflow.compat.v1 as tf\n'), ((4902, 4929), 'tensorflow_graphics.projects.cvxnet.lib.libmise.mise.MISE', 'mise.MISE', (['(32)', '(3)', 'level_set'], {}), '(32, 3, level_set)\n', (4911, 4929), False, 'from tensorflow_graphics.projects.cvxnet.lib.libmise import mise\n'), ((7066, 7106), 'os.path.join', 'path.join', (['hparams.train_dir', 'pth', 'frame'], {}), '(hparams.train_dir, pth, frame)\n', (7075, 7106), False, 'from os import path\n'), ((7858, 7898), 'os.path.join', 'path.join', (['hparams.train_dir', 'pth', 'frame'], {}), '(hparams.train_dir, pth, frame)\n', (7867, 7898), False, 'from os import path\n'), ((8551, 8570), 'os.path.basename', 'path.basename', (['ckpt'], {}), '(ckpt)\n', (8564, 8570), False, 'from os import path\n'), ((9072, 9101), 'numpy.concatenate', 'np.concatenate', (['preds'], {'axis': '(2)'}), '(preds, axis=2)\n', (9086, 9101), True, 'import numpy as np\n'), ((9642, 9672), 'tensorflow.compat.v1.transpose', 'tf.transpose', (['r_inv', '[0, 2, 1]'], {}), '(r_inv, [0, 2, 1])\n', (9654, 9672), True, 'import tensorflow.compat.v1 as tf\n'), ((9815, 9859), 'tensorflow.compat.v1.matmul', 'tf.matmul', (['transforms', 'inv_first_frame_trans'], {}), '(transforms, inv_first_frame_trans)\n', (9824, 9859), True, 'import tensorflow.compat.v1 as tf\n'), ((10236, 10278), 'tensorflow.compat.v1.matmul', 'tf.matmul', (['father_transforms', 'end_pts_homo'], {}), '(father_transforms, end_pts_homo)\n', (10245, 10278), True, 'import tensorflow.compat.v1 as tf\n'), ((10419, 10464), 'tensorflow.compat.v1.reshape', 'tf.reshape', (['joints', '[hparams.n_parts, n_dims]'], {}), '(joints, [hparams.n_parts, n_dims])\n', (10429, 10464), True, 'import tensorflow.compat.v1 as tf\n'), ((12832, 12891), 'tensorflow.compat.v1.reshape', 'tf.reshape', (['occ', '[1, hparams.n_parts + 1, -1, n_samples, 1]'], {}), '(occ, [1, hparams.n_parts + 1, -1, n_samples, 1])\n', (12842, 12891), True, 'import tensorflow.compat.v1 as tf\n'), ((12901, 12949), 'tensorflow.compat.v1.reduce_mean', 'tf.reduce_mean', (['occ[:, hparams.n_parts:]'], {'axis': '(3)'}), '(occ[:, hparams.n_parts:], axis=3)\n', (12915, 12949), True, 'import tensorflow.compat.v1 as tf\n'), ((5787, 5817), 'numpy.concatenate', 'np.concatenate', (['values'], {'axis': '(1)'}), '(values, axis=1)\n', (5801, 5817), True, 'import numpy as np\n'), ((6015, 6076), 'numpy.pad', 'np.pad', (['value_grid', '(1)', '"""constant"""'], {'constant_values': '(-1000000.0)'}), "(value_grid, 1, 'constant', constant_values=-1000000.0)\n", (6021, 6076), True, 'import numpy as np\n'), ((6241, 6349), 'numpy.array', 'np.array', (['[value_grid.shape[0] - 3, value_grid.shape[1] - 3, value_grid.shape[2] - 3]'], {'dtype': 'np.float32'}), '([value_grid.shape[0] - 3, value_grid.shape[1] - 3, value_grid.\n shape[2] - 3], dtype=np.float32)\n', (6249, 6349), True, 'import numpy as np\n'), ((6476, 6540), 'numpy.stack', 'np.stack', (['[faces[..., 1], faces[..., 0], faces[..., 2]]'], {'axis': '(-1)'}), '([faces[..., 1], faces[..., 0], faces[..., 2]], axis=-1)\n', (6484, 6540), True, 'import numpy as np\n'), ((6552, 6596), 'trimesh.Trimesh', 'trimesh.Trimesh', ([], {'vertices': 'verts', 'faces': 'faces'}), '(vertices=verts, faces=faces)\n', (6567, 6596), False, 'import trimesh\n'), ((7116, 7138), 'tensorflow.compat.v1.io.gfile.isdir', 'tf.io.gfile.isdir', (['pth'], {}), '(pth)\n', (7133, 7138), True, 'import tensorflow.compat.v1 as tf\n'), ((7144, 7169), 
'tensorflow.compat.v1.io.gfile.makedirs', 'tf.io.gfile.makedirs', (['pth'], {}), '(pth)\n', (7164, 7169), True, 'import tensorflow.compat.v1 as tf\n'), ((7908, 7930), 'tensorflow.compat.v1.io.gfile.isdir', 'tf.io.gfile.isdir', (['pth'], {}), '(pth)\n', (7925, 7930), True, 'import tensorflow.compat.v1 as tf\n'), ((7936, 7961), 'tensorflow.compat.v1.io.gfile.makedirs', 'tf.io.gfile.makedirs', (['pth'], {}), '(pth)\n', (7956, 7961), True, 'import tensorflow.compat.v1 as tf\n'), ((9234, 9254), 'numpy.sum', 'np.sum', (['(pred * label)'], {}), '(pred * label)\n', (9240, 9254), True, 'import numpy as np\n'), ((9680, 9699), 'tensorflow.compat.v1.matmul', 'tf.matmul', (['r', 't_inv'], {}), '(r, t_inv)\n', (9689, 9699), True, 'import tensorflow.compat.v1 as tf\n'), ((10303, 10343), 'tensorflow.compat.v1.squeeze', 'tf.squeeze', (['end_pts_transformed'], {'axis': '(-1)'}), '(end_pts_transformed, axis=-1)\n', (10313, 10343), True, 'import tensorflow.compat.v1 as tf\n'), ((10548, 10591), 'tensorflow.compat.v1.square', 'tf.square', (['(pred_links - end_pts_transformed)'], {}), '(pred_links - end_pts_transformed)\n', (10557, 10591), True, 'import tensorflow.compat.v1 as tf\n'), ((10998, 11089), 'tensorflow.compat.v1.random.uniform', 'tf.random.uniform', (['[1, 1, hparams.sample_vert]'], {'minval': '(0)', 'maxval': 'n_vert', 'dtype': 'tf.int32'}), '([1, 1, hparams.sample_vert], minval=0, maxval=n_vert,\n dtype=tf.int32)\n', (11015, 11089), True, 'import tensorflow.compat.v1 as tf\n'), ((11216, 11271), 'tensorflow.compat.v1.gather', 'tf.gather', (['points', 'sample_indices'], {'axis': '(2)', 'batch_dims': '(2)'}), '(points, sample_indices, axis=2, batch_dims=2)\n', (11225, 11271), True, 'import tensorflow.compat.v1 as tf\n'), ((11286, 11342), 'tensorflow.compat.v1.gather', 'tf.gather', (['weights', 'sample_indices'], {'axis': '(2)', 'batch_dims': '(2)'}), '(weights, sample_indices, axis=2, batch_dims=2)\n', (11295, 11342), True, 'import tensorflow.compat.v1 as tf\n'), ((12084, 12175), 'tensorflow.compat.v1.random.uniform', 'tf.random.uniform', (['[1, 1, hparams.sample_vert]'], {'minval': '(0)', 'maxval': 'n_vert', 'dtype': 'tf.int32'}), '([1, 1, hparams.sample_vert], minval=0, maxval=n_vert,\n dtype=tf.int32)\n', (12101, 12175), True, 'import tensorflow.compat.v1 as tf\n'), ((12302, 12357), 'tensorflow.compat.v1.gather', 'tf.gather', (['points', 'sample_indices'], {'axis': '(2)', 'batch_dims': '(2)'}), '(points, sample_indices, axis=2, batch_dims=2)\n', (12311, 12357), True, 'import tensorflow.compat.v1 as tf\n'), ((12372, 12428), 'tensorflow.compat.v1.gather', 'tf.gather', (['weights', 'sample_indices'], {'axis': '(2)', 'batch_dims': '(2)'}), '(weights, sample_indices, axis=2, batch_dims=2)\n', (12381, 12428), True, 'import tensorflow.compat.v1 as tf\n'), ((13276, 13311), 'tqdm.trange', 'trange', (['hparams.max_steps_per_frame'], {}), '(hparams.max_steps_per_frame)\n', (13282, 13311), False, 'from tqdm import trange\n'), ((5652, 5699), 'numpy.expand_dims', 'np.expand_dims', (['points[:, i:i + 100000]'], {'axis': '(1)'}), '(points[:, i:i + 100000], axis=1)\n', (5666, 5699), True, 'import numpy as np\n'), ((8019, 8044), 'os.path.join', 'path.join', (['pth', 'mesh_name'], {}), '(pth, mesh_name)\n', (8028, 8044), False, 'from os import path\n'), ((9733, 9759), 'tensorflow.compat.v1.concat', 'tf.concat', (['[r, t]'], {'axis': '(-1)'}), '([r, t], axis=-1)\n', (9742, 9759), True, 'import tensorflow.compat.v1 as tf\n'), ((9985, 10019), 'tensorflow.compat.v1.expand_dims', 'tf.expand_dims', (['transforms'], {'axis': 
'(1)'}), '(transforms, axis=1)\n', (9999, 10019), True, 'import tensorflow.compat.v1 as tf\n'), ((10957, 10973), 'tensorflow.compat.v1.shape', 'tf.shape', (['points'], {}), '(points)\n', (10965, 10973), True, 'import tensorflow.compat.v1 as tf\n'), ((11560, 11594), 'tensorflow.compat.v1.square', 'tf.square', (['(occ - hparams.level_set)'], {}), '(occ - hparams.level_set)\n', (11569, 11594), True, 'import tensorflow.compat.v1 as tf\n'), ((12043, 12059), 'tensorflow.compat.v1.shape', 'tf.shape', (['points'], {}), '(points)\n', (12051, 12059), True, 'import tensorflow.compat.v1 as tf\n'), ((13014, 13048), 'tensorflow.compat.v1.square', 'tf.square', (['(occ - hparams.level_set)'], {}), '(occ - hparams.level_set)\n', (13023, 13048), True, 'import tensorflow.compat.v1 as tf\n'), ((9275, 9298), 'numpy.maximum', 'np.maximum', (['pred', 'label'], {}), '(pred, label)\n', (9285, 9298), True, 'import numpy as np\n'), ((10160, 10190), 'tensorflow.compat.v1.ones_like', 'tf.ones_like', (['end_pts[..., :1]'], {}), '(end_pts[..., :1])\n', (10172, 10190), True, 'import tensorflow.compat.v1 as tf\n'), ((1213, 1241), 'tensorflow_graphics.projects.nasa.lib.datasets.dataset_dict.keys', 'datasets.dataset_dict.keys', ([], {}), '()\n', (1239, 1241), False, 'from tensorflow_graphics.projects.nasa.lib import datasets\n'), ((1828, 1852), 'tensorflow_graphics.projects.nasa.lib.models.model_dict.keys', 'models.model_dict.keys', ([], {}), '()\n', (1850, 1852), False, 'from tensorflow_graphics.projects.nasa.lib import models\n'), ((5287, 5317), 'numpy.expand_dims', 'np.expand_dims', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (5301, 5317), True, 'import numpy as np\n'), ((7520, 7545), 'os.path.join', 'path.join', (['pth', 'mesh_name'], {}), '(pth, mesh_name)\n', (7529, 7545), False, 'from os import path\n')]
|
import keras
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.datasets import mnist
x_train = None
y_train = None
x_test = None
y_test = None
def init():
    global x_train, y_train, x_test, y_test
    (x_train_tmp, y_train_tmp), (x_test_tmp, y_test_tmp) = mnist.load_data()
    x_train = x_train_tmp.reshape(-1, 784)
    x_test = x_test_tmp.reshape(-1, 784)
    train_size = x_train.shape[0]
    test_size = x_test.shape[0]
    # One-hot encode the integer labels
    y_train = np.zeros((train_size, 10))
    for i in range(train_size):
        y_train[i][y_train_tmp[i]] = 1
    y_test = np.zeros((test_size, 10))
    for i in range(test_size):
        y_test[i][y_test_tmp[i]] = 1

if __name__ == '__main__':
    import time
    init()
    model = Sequential()
    model.add(Dense(units=1000, activation='sigmoid', input_dim=784))
    model.add(Dense(units=500, activation='sigmoid'))
    model.add(Dense(units=10, activation='softmax'))
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    start_time = time.time()
    model.fit(x_train, y_train, epochs=10, batch_size=1000)
    loss_and_metrics = model.evaluate(x_test, y_test, batch_size=1000)
    print(loss_and_metrics)
    print('Total Time: ', (time.time() - start_time))
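For reference, the manual one-hot loops in `init()` can usually be replaced by Keras' built-in encoder; the sketch below assumes the standard `keras.utils.to_categorical` helper and is otherwise equivalent.

```python
from keras.datasets import mnist
from keras.utils import to_categorical

# Same data preparation as init(), using the built-in one-hot encoder.
(x_train_tmp, y_train_tmp), (x_test_tmp, y_test_tmp) = mnist.load_data()
x_train = x_train_tmp.reshape(-1, 784)
x_test = x_test_tmp.reshape(-1, 784)
y_train = to_categorical(y_train_tmp, num_classes=10)
y_test = to_categorical(y_test_tmp, num_classes=10)
```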
|
[
"keras.datasets.mnist.load_data",
"numpy.zeros",
"time.time",
"keras.layers.Dense",
"keras.models.Sequential"
] |
[((308, 325), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (323, 325), False, 'from keras.datasets import mnist\n'), ((490, 516), 'numpy.zeros', 'np.zeros', (['(train_size, 10)'], {}), '((train_size, 10))\n', (498, 516), True, 'import numpy as np\n'), ((601, 626), 'numpy.zeros', 'np.zeros', (['(test_size, 10)'], {}), '((test_size, 10))\n', (609, 626), True, 'import numpy as np\n'), ((773, 785), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (783, 785), False, 'from keras.models import Sequential\n'), ((1073, 1084), 'time.time', 'time.time', ([], {}), '()\n', (1082, 1084), False, 'import time\n'), ((800, 854), 'keras.layers.Dense', 'Dense', ([], {'units': '(1000)', 'activation': '"""sigmoid"""', 'input_dim': '(784)'}), "(units=1000, activation='sigmoid', input_dim=784)\n", (805, 854), False, 'from keras.layers import Dense\n'), ((870, 908), 'keras.layers.Dense', 'Dense', ([], {'units': '(500)', 'activation': '"""sigmoid"""'}), "(units=500, activation='sigmoid')\n", (875, 908), False, 'from keras.layers import Dense\n'), ((924, 961), 'keras.layers.Dense', 'Dense', ([], {'units': '(10)', 'activation': '"""softmax"""'}), "(units=10, activation='softmax')\n", (929, 961), False, 'from keras.layers import Dense\n'), ((1271, 1282), 'time.time', 'time.time', ([], {}), '()\n', (1280, 1282), False, 'import time\n')]
|
import logging
from pathlib import Path
from scrapy import Spider, Request
from scrapy.crawler import CrawlerProcess
from scrapy_playwright.page import PageCoroutine
class HandleTimeoutMiddleware:
    def process_exception(self, request, exception, spider):
        logging.info("Caught exception: %s", exception.__class__)
        return Request(
            url="https://httpbin.org/get",
            meta={
                "playwright": True,
                "playwright_page_coroutines": [
                    PageCoroutine(
                        "screenshot", path=Path(__file__).parent / "recovered.png", full_page=True
                    ),
                ],
            },
        )


class HandleExceptionSpider(Spider):
    """
    Handle exceptions in the Playwright downloader, such as TimeoutError
    """

    name = "awesome"
    custom_settings = {
        "PLAYWRIGHT_DEFAULT_NAVIGATION_TIMEOUT": 1000,
        "DOWNLOADER_MIDDLEWARES": {
            HandleTimeoutMiddleware: 100,
        },
    }

    def start_requests(self):
        yield Request(
            url="https://httpbin.org/delay/300",
            meta={"playwright": True},
        )

    def parse(self, response):
        yield {"url": response.url}


if __name__ == "__main__":
    process = CrawlerProcess(
        settings={
            "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
            "DOWNLOAD_HANDLERS": {
                "https": "scrapy_playwright.handler.ScrapyPlaywrightDownloadHandler",
                # "http": "scrapy_playwright.handler.ScrapyPlaywrightDownloadHandler",
            },
            "RETRY_TIMES": 0,
        }
    )
    process.crawl(HandleExceptionSpider)
    process.start()
|
[
"logging.info",
"pathlib.Path",
"scrapy.Request",
"scrapy.crawler.CrawlerProcess"
] |
[((1285, 1514), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', ([], {'settings': "{'TWISTED_REACTOR':\n 'twisted.internet.asyncioreactor.AsyncioSelectorReactor',\n 'DOWNLOAD_HANDLERS': {'https':\n 'scrapy_playwright.handler.ScrapyPlaywrightDownloadHandler'},\n 'RETRY_TIMES': 0}"}), "(settings={'TWISTED_REACTOR':\n 'twisted.internet.asyncioreactor.AsyncioSelectorReactor',\n 'DOWNLOAD_HANDLERS': {'https':\n 'scrapy_playwright.handler.ScrapyPlaywrightDownloadHandler'},\n 'RETRY_TIMES': 0})\n", (1299, 1514), False, 'from scrapy.crawler import CrawlerProcess\n'), ((269, 326), 'logging.info', 'logging.info', (['"""Caught exception: %s"""', 'exception.__class__'], {}), "('Caught exception: %s', exception.__class__)\n", (281, 326), False, 'import logging\n'), ((1067, 1138), 'scrapy.Request', 'Request', ([], {'url': '"""https://httpbin.org/delay/300"""', 'meta': "{'playwright': True}"}), "(url='https://httpbin.org/delay/300', meta={'playwright': True})\n", (1074, 1138), False, 'from scrapy import Spider, Request\n'), ((575, 589), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (579, 589), False, 'from pathlib import Path\n')]
|
from machine import Pin, PWM
# Initialization
pwmFan = PWM(Pin(21), duty=0)
reverseFan = Pin(22, Pin.OUT)
# Turn Fan forward 70% speed
reverseFan.value(0)
pwmFan.duty(70)
# Decrease speed
pwmFan.duty(50)
# Decrease speed further (it might stop)
pwmFan.duty(30)
# Turn Fan backwards 70% speed
reverseFan.value(1)
pwmFan.duty(30)
# Decrease speed
pwmFan.duty(50)
# Decrease speed further (it might stop)
pwmFan.duty(70)
# Clean up
reverseFan(0)
pwmFan.deinit()
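One caveat: on the common ESP8266/ESP32 MicroPython ports, `PWM.duty()` takes a raw value in the 0-1023 range rather than a percentage, so the calls above would run the fan well below the speeds the comments suggest. A small helper for percent-based control is sketched below, assuming such a port.

```python
from machine import Pin, PWM

def set_fan_percent(pwm, percent):
    # Assumes an ESP8266/ESP32-style port where duty() expects 0-1023.
    pwm.duty(int(percent / 100 * 1023))

pwmFan = PWM(Pin(21), duty=0)
set_fan_percent(pwmFan, 70)  # roughly 70% duty cycle
```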
|
[
"machine.Pin"
] |
[((89, 105), 'machine.Pin', 'Pin', (['(22)', 'Pin.OUT'], {}), '(22, Pin.OUT)\n', (92, 105), False, 'from machine import Pin, PWM\n'), ((59, 66), 'machine.Pin', 'Pin', (['(21)'], {}), '(21)\n', (62, 66), False, 'from machine import Pin, PWM\n')]
|
import sys
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)
n = int(readline())
a, b = map(int, readline().split())
p = list(map(int, readline().split()))
memo = [0, 0, 0]
for check in p:
if check <= a:
memo[0] += 1
elif a < check <= b:
memo[1] += 1
else:
memo[2] += 1
print(min(memo))
|
[
"sys.setrecursionlimit"
] |
[((116, 146), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 7)'], {}), '(10 ** 7)\n', (137, 146), False, 'import sys\n')]
|
"""
MIT License
Copyright (c) 2020 <NAME>
"""
from flask import Blueprint
user_bp = Blueprint("user", __name__)
from . import views
|
[
"flask.Blueprint"
] |
[((91, 118), 'flask.Blueprint', 'Blueprint', (['"""user"""', '__name__'], {}), "('user', __name__)\n", (100, 118), False, 'from flask import Blueprint\n')]
|
#!/usr/bin/env python3
import asyncio
import unittest
import sixtynine
class TestSixtynine(unittest.TestCase):
def setUp(self):
self.loop = asyncio.get_event_loop()
def tearDown(self):
self.loop.close()
def test_mouthful(self):
self.assertEqual(self.loop.run_until_complete(sixtynine.mouthful()), 69)
|
[
"asyncio.get_event_loop",
"sixtynine.mouthful"
] |
[((156, 180), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (178, 180), False, 'import asyncio\n'), ((316, 336), 'sixtynine.mouthful', 'sixtynine.mouthful', ([], {}), '()\n', (334, 336), False, 'import sixtynine\n')]
|
#!/usr/bin/env python3
from pyaim import CCPPasswordRESTSecure
aimccp = CCPPasswordRESTSecure('https://cyberark.dvdangelo33.dev/', "clientcert.pem", verify=True)
r = aimccp.GetPassword(appid='pyAIM',safe='D-AWS-AccessKeys',username='AnsibleAWSUser')
print(r)
|
[
"pyaim.CCPPasswordRESTSecure"
] |
[((74, 167), 'pyaim.CCPPasswordRESTSecure', 'CCPPasswordRESTSecure', (['"""https://cyberark.dvdangelo33.dev/"""', '"""clientcert.pem"""'], {'verify': '(True)'}), "('https://cyberark.dvdangelo33.dev/', 'clientcert.pem',\n verify=True)\n", (95, 167), False, 'from pyaim import CCPPasswordRESTSecure\n')]
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.plugin import priority, register_plugin, plugins
log = logging.getLogger('builtins')
def all_builtins():
"""Helper function to return an iterator over all builtin plugins."""
return (plugin for plugin in plugins.itervalues() if plugin.builtin)
class PluginDisableBuiltins(object):
"""Disables all (or specific) builtin plugins from a task."""
def __init__(self):
# cannot trust that on_task_start would have been executed
self.disabled = []
# TODO: schemas are registered to a uri at plugin load, the list of builtins will not be complete at that time
schema = {
'oneOf': [
{'type': 'boolean'},
{'type': 'array', 'items': {'type': 'string', 'enum': [p.name for p in all_builtins()]}}
]
}
def debug(self):
log.debug('Builtin plugins: %s' % ', '.join(plugin.name for plugin in all_builtins()))
@priority(255)
def on_task_start(self, task, config):
self.disabled = []
if not config:
return
for plugin in all_builtins():
if config is True or plugin.name in config:
plugin.builtin = False
self.disabled.append(plugin.name)
log.debug('Disabled builtin plugin(s): %s' % ', '.join(self.disabled))
@priority(-255)
def on_task_exit(self, task, config):
if not self.disabled:
return
for name in self.disabled:
plugin.plugins[name].builtin = True
log.debug('Enabled builtin plugin(s): %s' % ', '.join(self.disabled))
self.disabled = []
on_task_abort = on_task_exit
register_plugin(PluginDisableBuiltins, 'disable_builtins', api_ver=2)
|
[
"flexget.plugin.priority",
"flexget.plugin.plugins.itervalues",
"flexget.plugin.register_plugin",
"logging.getLogger"
] |
[((178, 207), 'logging.getLogger', 'logging.getLogger', (['"""builtins"""'], {}), "('builtins')\n", (195, 207), False, 'import logging\n'), ((1749, 1818), 'flexget.plugin.register_plugin', 'register_plugin', (['PluginDisableBuiltins', '"""disable_builtins"""'], {'api_ver': '(2)'}), "(PluginDisableBuiltins, 'disable_builtins', api_ver=2)\n", (1764, 1818), False, 'from flexget.plugin import priority, register_plugin, plugins\n'), ((1024, 1037), 'flexget.plugin.priority', 'priority', (['(255)'], {}), '(255)\n', (1032, 1037), False, 'from flexget.plugin import priority, register_plugin, plugins\n'), ((1419, 1433), 'flexget.plugin.priority', 'priority', (['(-255)'], {}), '(-255)\n', (1427, 1433), False, 'from flexget.plugin import priority, register_plugin, plugins\n'), ((337, 357), 'flexget.plugin.plugins.itervalues', 'plugins.itervalues', ([], {}), '()\n', (355, 357), False, 'from flexget.plugin import priority, register_plugin, plugins\n')]
|
from dotenv import load_dotenv
from os import environ, path
from pathlib import Path
load_dotenv(verbose=True)
parent_path = Path(__file__).parent
dotenv_path = path.join(parent_path, ".env")
load_dotenv(dotenv_path)
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
SECRET_KEY = environ.get("SECRET_KEY")
SPOTIFY_OAUTH_CLIENT_ID = environ.get("SPOTIFY_OAUTH_CLIENT_ID")
SPOTIFY_OAUTH_CLIENT_SECRET = environ.get("SPOTIFY_OAUTH_CLIENT_SECRET")
CLOUD_STORAGE_BUCKET = environ.get("CLOUD_STORAGE_BUCKET")
FLASK_DEBUG = environ.get("FLASK_DEBUG")
TESTING = environ.get("TESTING")
if FLASK_DEBUG or TESTING:
CALLBACK_URL = environ.get("CALLBACK_URL_DEV")
SQLALCHEMY_DATABASE_URI = environ.get("DATABASE_URL_DEV")
else:
CALLBACK_URL = environ.get("CALLBACK_URL")
SQLALCHEMY_DATABASE_URI = environ.get("DATABASE_URL")
SQLALCHEMY_TRACK_MODIFICATIONS = True
|
[
"dotenv.load_dotenv",
"os.environ.get",
"pathlib.Path",
"os.path.join"
] |
[((87, 112), 'dotenv.load_dotenv', 'load_dotenv', ([], {'verbose': '(True)'}), '(verbose=True)\n', (98, 112), False, 'from dotenv import load_dotenv\n'), ((164, 194), 'os.path.join', 'path.join', (['parent_path', '""".env"""'], {}), "(parent_path, '.env')\n", (173, 194), False, 'from os import environ, path\n'), ((195, 219), 'dotenv.load_dotenv', 'load_dotenv', (['dotenv_path'], {}), '(dotenv_path)\n', (206, 219), False, 'from dotenv import load_dotenv\n'), ((289, 314), 'os.environ.get', 'environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (300, 314), False, 'from os import environ, path\n'), ((342, 380), 'os.environ.get', 'environ.get', (['"""SPOTIFY_OAUTH_CLIENT_ID"""'], {}), "('SPOTIFY_OAUTH_CLIENT_ID')\n", (353, 380), False, 'from os import environ, path\n'), ((411, 453), 'os.environ.get', 'environ.get', (['"""SPOTIFY_OAUTH_CLIENT_SECRET"""'], {}), "('SPOTIFY_OAUTH_CLIENT_SECRET')\n", (422, 453), False, 'from os import environ, path\n'), ((478, 513), 'os.environ.get', 'environ.get', (['"""CLOUD_STORAGE_BUCKET"""'], {}), "('CLOUD_STORAGE_BUCKET')\n", (489, 513), False, 'from os import environ, path\n'), ((529, 555), 'os.environ.get', 'environ.get', (['"""FLASK_DEBUG"""'], {}), "('FLASK_DEBUG')\n", (540, 555), False, 'from os import environ, path\n'), ((566, 588), 'os.environ.get', 'environ.get', (['"""TESTING"""'], {}), "('TESTING')\n", (577, 588), False, 'from os import environ, path\n'), ((128, 142), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (132, 142), False, 'from pathlib import Path\n'), ((636, 667), 'os.environ.get', 'environ.get', (['"""CALLBACK_URL_DEV"""'], {}), "('CALLBACK_URL_DEV')\n", (647, 667), False, 'from os import environ, path\n'), ((698, 729), 'os.environ.get', 'environ.get', (['"""DATABASE_URL_DEV"""'], {}), "('DATABASE_URL_DEV')\n", (709, 729), False, 'from os import environ, path\n'), ((755, 782), 'os.environ.get', 'environ.get', (['"""CALLBACK_URL"""'], {}), "('CALLBACK_URL')\n", (766, 782), False, 'from os import environ, path\n'), ((813, 840), 'os.environ.get', 'environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (824, 840), False, 'from os import environ, path\n')]
|
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, PasswordField
from wtforms.validators import Email,DataRequired,Length, ValidationError
from SIS.models import Info
import email_validator
class sisForm(FlaskForm):
rollNo = StringField('Roll No',
validators=[DataRequired()])
prn = StringField('Roll No',
validators=[DataRequired(),Length(min=9,max=10)])
name = StringField('Name',
validators=[DataRequired(),Length(min=2,max=40)])
mobNo = StringField('Mobile No',
validators=[DataRequired(),Length(min=9,max=10)])
email = StringField('Email',
validators=[DataRequired(), Email()])
city = StringField('Name',
validators=[DataRequired(),Length(min=2,max=40)])
state = StringField('Name',
validators=[DataRequired(),Length(min=2,max=40)])
submit = SubmitField('Submit')
def validate_rollNo(self,rollNo):
info = Info.query.filter_by(rollNo=rollNo.data).first()
if info:
raise ValidationError('This Roll No is already there in the database.')
def validate_prn(self,prn):
info = Info.query.filter_by(prn=prn.data).first()
if info:
raise ValidationError('This PRN is already there in the database.')
def validate_mobNo(self,mobNo):
info = Info.query.filter_by(mobNo=mobNo.data).first()
if info:
raise ValidationError('This Mobile Number is already there in the database.')
def validate_email(self,email):
info = Info.query.filter_by(email=email.data).first()
if info:
raise ValidationError('This Email is already there in the database.')
class adminForm(FlaskForm):
email = StringField('Email',
validators=[DataRequired(), Email()])
password = PasswordField('Password',
validators=[DataRequired(),Length(min=2,max=10)])
submit = SubmitField('Submit')
|
[
"wtforms.validators.Email",
"wtforms.validators.Length",
"SIS.models.Info.query.filter_by",
"wtforms.SubmitField",
"wtforms.validators.DataRequired",
"wtforms.validators.ValidationError"
] |
[((975, 996), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (986, 996), False, 'from wtforms import StringField, SubmitField, PasswordField\n'), ((2053, 2074), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (2064, 2074), False, 'from wtforms import StringField, SubmitField, PasswordField\n'), ((1135, 1200), 'wtforms.validators.ValidationError', 'ValidationError', (['"""This Roll No is already there in the database."""'], {}), "('This Roll No is already there in the database.')\n", (1150, 1200), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((1331, 1392), 'wtforms.validators.ValidationError', 'ValidationError', (['"""This PRN is already there in the database."""'], {}), "('This PRN is already there in the database.')\n", (1346, 1392), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((1531, 1602), 'wtforms.validators.ValidationError', 'ValidationError', (['"""This Mobile Number is already there in the database."""'], {}), "('This Mobile Number is already there in the database.')\n", (1546, 1602), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((1737, 1800), 'wtforms.validators.ValidationError', 'ValidationError', (['"""This Email is already there in the database."""'], {}), "('This Email is already there in the database.')\n", (1752, 1800), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((316, 330), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (328, 330), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((402, 416), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (414, 416), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((417, 438), 'wtforms.validators.Length', 'Length', ([], {'min': '(9)', 'max': '(10)'}), '(min=9, max=10)\n', (423, 438), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((507, 521), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (519, 521), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((522, 543), 'wtforms.validators.Length', 'Length', ([], {'min': '(2)', 'max': '(40)'}), '(min=2, max=40)\n', (528, 543), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((618, 632), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (630, 632), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((633, 654), 'wtforms.validators.Length', 'Length', ([], {'min': '(9)', 'max': '(10)'}), '(min=9, max=10)\n', (639, 654), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((725, 739), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (737, 739), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((741, 748), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (746, 748), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((818, 832), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (830, 832), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((833, 854), 'wtforms.validators.Length', 'Length', ([], {'min': '(2)', 'max': '(40)'}), '(min=2, max=40)\n', (839, 
854), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((924, 938), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (936, 938), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((939, 960), 'wtforms.validators.Length', 'Length', ([], {'min': '(2)', 'max': '(40)'}), '(min=2, max=40)\n', (945, 960), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((1051, 1091), 'SIS.models.Info.query.filter_by', 'Info.query.filter_by', ([], {'rollNo': 'rollNo.data'}), '(rollNo=rollNo.data)\n', (1071, 1091), False, 'from SIS.models import Info\n'), ((1253, 1287), 'SIS.models.Info.query.filter_by', 'Info.query.filter_by', ([], {'prn': 'prn.data'}), '(prn=prn.data)\n', (1273, 1287), False, 'from SIS.models import Info\n'), ((1449, 1487), 'SIS.models.Info.query.filter_by', 'Info.query.filter_by', ([], {'mobNo': 'mobNo.data'}), '(mobNo=mobNo.data)\n', (1469, 1487), False, 'from SIS.models import Info\n'), ((1655, 1693), 'SIS.models.Info.query.filter_by', 'Info.query.filter_by', ([], {'email': 'email.data'}), '(email=email.data)\n', (1675, 1693), False, 'from SIS.models import Info\n'), ((1899, 1913), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1911, 1913), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((1915, 1922), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (1920, 1922), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((2002, 2016), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (2014, 2016), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n'), ((2017, 2038), 'wtforms.validators.Length', 'Length', ([], {'min': '(2)', 'max': '(10)'}), '(min=2, max=10)\n', (2023, 2038), False, 'from wtforms.validators import Email, DataRequired, Length, ValidationError\n')]
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from django import forms
class JsonMixinForm(forms.Form):
boolean = forms.BooleanField()
char = forms.CharField(
min_length=3,
max_length=6)
integer = forms.IntegerField(
min_value=3,
max_value=6)
|
[
"django.forms.BooleanField",
"django.forms.CharField",
"django.forms.IntegerField"
] |
[((140, 160), 'django.forms.BooleanField', 'forms.BooleanField', ([], {}), '()\n', (158, 160), False, 'from django import forms\n'), ((172, 215), 'django.forms.CharField', 'forms.CharField', ([], {'min_length': '(3)', 'max_length': '(6)'}), '(min_length=3, max_length=6)\n', (187, 215), False, 'from django import forms\n'), ((255, 299), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(3)', 'max_value': '(6)'}), '(min_value=3, max_value=6)\n', (273, 299), False, 'from django import forms\n')]
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
# Examples:
url(r'^service/.*/(?P<service_id>[0-9]+)/edit/?$', 'r_pass.views.edit'),
url(r'^service/.*/(?P<service_id>[0-9]+)/?$', 'r_pass.views.service'),
url(r'^create/?$', 'r_pass.views.create'),
url(r'', 'r_pass.views.home'),
)
|
[
"django.conf.urls.url"
] |
[((100, 170), 'django.conf.urls.url', 'url', (['"""^service/.*/(?P<service_id>[0-9]+)/edit/?$"""', '"""r_pass.views.edit"""'], {}), "('^service/.*/(?P<service_id>[0-9]+)/edit/?$', 'r_pass.views.edit')\n", (103, 170), False, 'from django.conf.urls import patterns, include, url\n'), ((177, 245), 'django.conf.urls.url', 'url', (['"""^service/.*/(?P<service_id>[0-9]+)/?$"""', '"""r_pass.views.service"""'], {}), "('^service/.*/(?P<service_id>[0-9]+)/?$', 'r_pass.views.service')\n", (180, 245), False, 'from django.conf.urls import patterns, include, url\n'), ((252, 292), 'django.conf.urls.url', 'url', (['"""^create/?$"""', '"""r_pass.views.create"""'], {}), "('^create/?$', 'r_pass.views.create')\n", (255, 292), False, 'from django.conf.urls import patterns, include, url\n'), ((299, 327), 'django.conf.urls.url', 'url', (['""""""', '"""r_pass.views.home"""'], {}), "('', 'r_pass.views.home')\n", (302, 327), False, 'from django.conf.urls import patterns, include, url\n')]
|
from regex_matchers import retrieve_exceptions
from utils import chunks
import threading
import glob
from pathlib import Path
import os
def extract_exceptions(files):
for path in files:
fileName = Path(path).stem
outputFile = f"ignored_data/exceptions/{fileName}.txt"
if os.path.isfile(outputFile):
continue
with open(path, "r+", encoding="utf-8", errors='ignore') as file:
lines = "\n".join(file.readlines())
excs = retrieve_exceptions(lines)
if len(excs) == 0:
continue
print(path)
with open(f"ignored_data/exceptions/{fileName}.txt", "a", encoding="utf-8", errors="ignore") as output:
for exception in excs:
output.write(exception.__str__() + "\n")
def orchestrate_extraction(threads=8):
files = glob.glob("ignored_data/downloads/*.xml")
files.sort()
chunked_files = chunks(files, threads)
threads = []
for chunk in chunked_files:
t = threading.Thread(target=extract_exceptions, args=(chunk,))
threads.append(t)
t.start()
for t in threads:
t.join()
files = glob.glob("ignored_data/exceptions/*.txt")
for path in files:
with open(f"ignored_data/exceptions.txt", "a", encoding="utf-8", errors="ignore") as output:
with open(path, "r+", encoding="utf-8", errors='ignore') as file:
lines = "\n".join(file.readlines())
output.write(lines)
def load_exceptions(filename):
with open(f"ignored_data/{filename}", "r+", encoding="utf-8", errors='ignore') as file:
lines = "\n".join(file.readlines())
return retrieve_exceptions(lines)
def retrieve_exception_dictionary(filename):
exceptions = load_exceptions(filename)
ex_dict = {}
for exception in exceptions:
if exception.exception not in ex_dict:
ex_dict[exception.exception] = []
ex_dict[exception.exception].append(exception)
return ex_dict
def debug_print(filename):
ex_dict = retrieve_exception_dictionary(filename)
ex_dict_keys = list(ex_dict.keys())
ex_dict_keys.sort()
for key in ex_dict_keys:
values = ex_dict[key]
if len(values) < 2:
continue
print(key)
for value in values:
print(f"\t{value}")
# debug_print("exceptions_minimized.txt")
|
[
"regex_matchers.retrieve_exceptions",
"threading.Thread",
"os.path.isfile",
"pathlib.Path",
"utils.chunks",
"glob.glob"
] |
[((867, 908), 'glob.glob', 'glob.glob', (['"""ignored_data/downloads/*.xml"""'], {}), "('ignored_data/downloads/*.xml')\n", (876, 908), False, 'import glob\n'), ((946, 968), 'utils.chunks', 'chunks', (['files', 'threads'], {}), '(files, threads)\n', (952, 968), False, 'from utils import chunks\n'), ((1191, 1233), 'glob.glob', 'glob.glob', (['"""ignored_data/exceptions/*.txt"""'], {}), "('ignored_data/exceptions/*.txt')\n", (1200, 1233), False, 'import glob\n'), ((300, 326), 'os.path.isfile', 'os.path.isfile', (['outputFile'], {}), '(outputFile)\n', (314, 326), False, 'import os\n'), ((1031, 1089), 'threading.Thread', 'threading.Thread', ([], {'target': 'extract_exceptions', 'args': '(chunk,)'}), '(target=extract_exceptions, args=(chunk,))\n', (1047, 1089), False, 'import threading\n'), ((1707, 1733), 'regex_matchers.retrieve_exceptions', 'retrieve_exceptions', (['lines'], {}), '(lines)\n', (1726, 1733), False, 'from regex_matchers import retrieve_exceptions\n'), ((210, 220), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (214, 220), False, 'from pathlib import Path\n'), ((491, 517), 'regex_matchers.retrieve_exceptions', 'retrieve_exceptions', (['lines'], {}), '(lines)\n', (510, 517), False, 'from regex_matchers import retrieve_exceptions\n')]
|
from __future__ import print_function
import sys, h5py as h5, numpy as np, yt, csv
from time import time, sleep
from PreFRBLE.file_system import *
from PreFRBLE.parameter import *
from time import time
def TimeElapsed( func, *args, **kwargs ):
""" measure time taken to compute function """
def MeasureTime():
t0 = time()
res = func( *args, **kwargs)
print( "{} took {} s".format( func.__name__, time()-t0 ) )
return res
return MeasureTime()
from time import sleep
## wrapper to write hdf5 files consistently
def Write2h5( filename='', datas=[], keys=[] ):
""" conveniently write datas to keys in filename. overwrite existing entries """
if type(keys) is str:
sys.exit( 'Write2h5 needs list of datas and keys' )
### small workaround to allow for parallel computation. Use with caution, might corrupt nodes in your h5 file. in that case, visit:
### https://stackoverflow.com/questions/47979751/recover-data-from-corrupted-file/61147632?noredirect=1#comment108190378_61147632
tries = 0
while tries < 30:
#try:
with h5.File( filename, 'a' ) as f:
for data, key in zip( datas, keys ):
try:
f[key][()]
f.__delitem__( key )
except:
pass
f.create_dataset( key, data=data )
break
#except:
sleep(3e-2)
tries += 1
pass
else:
print( "couldn't write ", keys )
sys.exit(1)
## Read FRBcat
#FRB_dtype = [('ID','S'),('DM','f'),('DM_gal','f'), ('RM','f'),('tau','f'),('host_redshift','S'), ('tele','S')]
#FRB_dtype = [('ID','U9'),('DM','f'),('DM_gal','f'), ('RM','U10'),('tau','U10'),('host_redshift','U4'), ('tele','U10')]
FRB_dtype = [('ID','U9'),('DM','f'),('DM_gal','f'), ('RM','f'),('tau','f'),('host_redshift','f'), ('tele','U10')]
def GetFRBcat( telescopes=None, RM=None, tau=None, print_number=False ):
"""
read all FRBs in FRBcat, downloaded to frbcat_file
Parameters
----------
telescopes : list
list of considered telescopes, FRBs of other telescopes are ignored
RM : boolean
if True, only return FRBs observed with RM
tau : boolean
if True, only return FRBs observed with temproal broadening
print_number : boolean
if True, print number of extractet FRBs
Returns
-------
FRBs : array
structured numpy.array containing values listed in FRBcat
"""
### read all FRBs from FRBcat
### optional: read only those FRBs observed by telescope with RM and tau
### print_number:True print number of extracted FRBs
FRBs = []
with open( frbcat_file, 'r') as f:
reader = csv.reader( f )
header = np.array(next(reader))
# header = np.array(reader.next())
i_ID = 0
i_DM = np.where( header == 'rmp_dm' )[0][0]
i_DM_gal = np.where( header == 'rop_mw_dm_limit' )[0][0]
i_RM = np.where( header == 'rmp_rm' )[0][0]
i_tau = np.where( header == 'rmp_scattering' )[0][0]
i_zs = np.where( header == 'rmp_redshift_host' )[0][0]
i_tele = np.where( header == 'telescope' )[0][0]
i_s = [i_ID, i_DM, i_DM_gal, i_RM, i_tau, i_zs, i_tele] ## order must fit order of FRB_dtype
for row in reader:
if telescopes and ( row[i_tele] not in [telescopes_FRBcat[tele] for tele in telescopes] ) :
continue
if tau and ( row[i_tau] == 'null' ) :
continue
if RM and ( row[i_RM] == 'null' ) :
continue
FRBs.append( tuple( [ decode(row[i].split('&')[0], dtype) for i, dtype in zip( i_s, np.array(FRB_dtype)[:,1] ) ] ) )
return np.array( FRBs, dtype=FRB_dtype )
def decode( string, dtype='U' ):
""" short wrapper to decode byte-strings read from FRBcat """
if 'f' in dtype:
if 'null' in string:
return float('NaN')
return float(string)
return string
def GetFRBsMeasures( measure='DM', FRBs=None ):
""" returns measures of FRBs in FRBcat read with GetFRBcat() """
if measure == 'DM':
return FRBs['DM']-FRBs['DM_gal']
elif measure == 'RM':
return FRBs['RM']
## flocker to keep parallel processes from writing to same file simultaneously
## provided by derpston, https://github.com/derpston/python-simpleflock/blob/master/src/simpleflock.py#L14
import os, fcntl, errno
class SimpleFlock:
"""Provides the simplest possible interface to flock-based file locking. Intended for use with the `with` syntax. It will create/truncate/delete the lock file as necessary."""
def __init__(self, path, timeout = None):
self._path = path
self._timeout = timeout
self._fd = None
def __enter__(self):
self._fd = os.open(self._path, os.O_CREAT)
start_lock_search = time()
while True:
try:
fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
# Lock acquired!
return
except (OSError, IOError) as ex:
if ex.errno != errno.EAGAIN: # Resource temporarily unavailable
raise
elif self._timeout is not None and time() > (start_lock_search + self._timeout):
# Exceeded the user-specified timeout.
print( "timeout exceeded" )
raise
# TODO It would be nice to avoid an arbitrary sleep here, but spinning
# without a delay is also undesirable.
sleep(0.1)
def __exit__(self, *args):
fcntl.flock(self._fd, fcntl.LOCK_UN)
os.close(self._fd)
self._fd = None
# Try to remove the lock file, but don't try too hard because it is
# unnecessary. This is mostly to help the user see whether a lock
# exists by examining the filesystem.
try:
os.unlink(self._path)
except:
pass
''' USAGE
with SimpleFlock("locktest", 2): ## "locktest" is a temporary file that tells whether the lock is active
## perform action on the locked file(s)
## file is locked when with starts until its left
## if file is locked, code is paused until lock is released, then with is performed
'''
def first(iterable, condition = lambda x: True):
"""
Returns the first item in the `iterable` that satisfies the `condition`.
If the condition is not given, returns the first item of the iterable.
Returns -1 if no item satysfing the condition is found.
>>> first( (1,2,3), condition=lambda x: x % 2 == 0)
2
>>> first(range(3, 100))
3
>>> first( (1,2,3), condition=lambda x: x > 9)
-1
THANKS TO Caridorc
https://stackoverflow.com/questions/2361426/get-the-first-item-from-an-iterable-that-matches-a-condition
"""
try:
return next(x for x in iterable if condition(x))
except:
return -1
## wrapper to show time needed for some function
'''
def HowLong( f, *args, print_additional='', **kwargs ):
""" wrapper to print the time needed to call function f """
t0 = time()
ret = f( *args, **kwargs )
t = time() - t0
print( "Running %s took %i minutes and %.1f seconds %s" % (f.__name__, t//60, t%60, print_additional ) )
return ret
'''
|
[
"os.open",
"h5py.File",
"csv.reader",
"os.unlink",
"fcntl.flock",
"time.sleep",
"time.time",
"numpy.where",
"numpy.array",
"os.close",
"sys.exit"
] |
[((3834, 3865), 'numpy.array', 'np.array', (['FRBs'], {'dtype': 'FRB_dtype'}), '(FRBs, dtype=FRB_dtype)\n', (3842, 3865), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((333, 339), 'time.time', 'time', ([], {}), '()\n', (337, 339), False, 'from time import time\n'), ((735, 784), 'sys.exit', 'sys.exit', (['"""Write2h5 needs list of datas and keys"""'], {}), "('Write2h5 needs list of datas and keys')\n", (743, 784), False, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((1473, 1484), 'time.sleep', 'sleep', (['(0.03)'], {}), '(0.03)\n', (1478, 1484), False, 'from time import sleep\n'), ((1585, 1596), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1593, 1596), False, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((2814, 2827), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (2824, 2827), False, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((4911, 4942), 'os.open', 'os.open', (['self._path', 'os.O_CREAT'], {}), '(self._path, os.O_CREAT)\n', (4918, 4942), False, 'import os, fcntl, errno\n'), ((4969, 4975), 'time.time', 'time', ([], {}), '()\n', (4973, 4975), False, 'from time import time\n'), ((5667, 5703), 'fcntl.flock', 'fcntl.flock', (['self._fd', 'fcntl.LOCK_UN'], {}), '(self._fd, fcntl.LOCK_UN)\n', (5678, 5703), False, 'import os, fcntl, errno\n'), ((5710, 5728), 'os.close', 'os.close', (['self._fd'], {}), '(self._fd)\n', (5718, 5728), False, 'import os, fcntl, errno\n'), ((1124, 1146), 'h5py.File', 'h5.File', (['filename', '"""a"""'], {}), "(filename, 'a')\n", (1131, 1146), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((5619, 5629), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (5624, 5629), False, 'from time import sleep\n'), ((5962, 5983), 'os.unlink', 'os.unlink', (['self._path'], {}), '(self._path)\n', (5971, 5983), False, 'import os, fcntl, errno\n'), ((2953, 2981), 'numpy.where', 'np.where', (["(header == 'rmp_dm')"], {}), "(header == 'rmp_dm')\n", (2961, 2981), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((3009, 3046), 'numpy.where', 'np.where', (["(header == 'rop_mw_dm_limit')"], {}), "(header == 'rop_mw_dm_limit')\n", (3017, 3046), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((3070, 3098), 'numpy.where', 'np.where', (["(header == 'rmp_rm')"], {}), "(header == 'rmp_rm')\n", (3078, 3098), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((3123, 3159), 'numpy.where', 'np.where', (["(header == 'rmp_scattering')"], {}), "(header == 'rmp_scattering')\n", (3131, 3159), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((3183, 3222), 'numpy.where', 'np.where', (["(header == 'rmp_redshift_host')"], {}), "(header == 'rmp_redshift_host')\n", (3191, 3222), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((3248, 3279), 'numpy.where', 'np.where', (["(header == 'telescope')"], {}), "(header == 'telescope')\n", (3256, 3279), True, 'import sys, h5py as h5, numpy as np, yt, csv\n'), ((5020, 5072), 'fcntl.flock', 'fcntl.flock', (['self._fd', '(fcntl.LOCK_EX | fcntl.LOCK_NB)'], {}), '(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\n', (5031, 5072), False, 'import os, fcntl, errno\n'), ((430, 436), 'time.time', 'time', ([], {}), '()\n', (434, 436), False, 'from time import time\n'), ((5307, 5313), 'time.time', 'time', ([], {}), '()\n', (5311, 5313), False, 'from time import time\n'), ((3790, 3809), 'numpy.array', 'np.array', (['FRB_dtype'], {}), '(FRB_dtype)\n', (3798, 3809), True, 'import sys, h5py as h5, numpy as np, yt, csv\n')]
|
import logging
import os
import shutil
import sys
import tempfile
from pyflink.dataset import ExecutionEnvironment
from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes
from pyflink.table import expressions as expr
from pyflink.table.descriptors import OldCsv, FileSystem, Schema
from pyflink.table.expressions import lit
def demo01():
exec_env = ExecutionEnvironment.get_execution_environment()
exec_env.set_parallelism(1)
t_config = TableConfig()
t_env = BatchTableEnvironment.create(exec_env, t_config)
# StreamExecutionEnvironment
t_env.connect(FileSystem().path(r'F:\github\openjw\penter\bigdata_study\pyflink1.x\batch\demo01\input')) \
.with_format(OldCsv()
.field('word', DataTypes.STRING())) \
.with_schema(Schema()
.field('word', DataTypes.STRING())) \
.create_temporary_table('mySource')
    # Errors out if the output file already exists
t_env.connect(FileSystem().path(r'F:\github\openjw\penter\bigdata_study\pyflink1.x\batch\demo01\output')) \
.with_format(OldCsv()
.field_delimiter('\t')
.field('word', DataTypes.STRING())
.field('count', DataTypes.BIGINT())) \
.with_schema(Schema()
.field('word', DataTypes.STRING())
.field('count', DataTypes.BIGINT())) \
.create_temporary_table('mySink')
tab = t_env.from_path('mySource')
tab.group_by(tab.word) \
.select(tab.word, lit(1).count) \
.execute_insert('mySink').wait()
def demo02():
exec_env = ExecutionEnvironment.get_execution_environment()
exec_env.set_parallelism(1)
t_config = TableConfig()
t_env = BatchTableEnvironment.create(exec_env, t_config)
# StreamExecutionEnvironment
my_source_ddl = """
create table mySource (
word VARCHAR
) with (
'connector' = 'filesystem',
'format.type' = 'csv',
'connector.path' = 'F:/github/openjw/penter/bigdata_study/pyflink1.x/batch/demo01/input'
)
"""
my_sink_ddl = """
create table mySink (
word VARCHAR,
`count` BIGINT
) with (
'connector' = 'filesystem',
'format.type' = 'csv',
'connector.path' = 'F:/github/openjw/penter/bigdata_study/pyflink1.x/batch/demo01/output'
)
"""
t_env.execute_sql(my_source_ddl)
t_env.execute_sql(my_sink_ddl)
tab = t_env.from_path('mySource')
tab.group_by(tab.word) \
.select(tab.word, lit(1).count) \
.execute_insert('mySink').wait()
if __name__ == '__main__':
# demo01()
    demo02() # doesn't run
|
[
"pyflink.table.expressions.lit",
"pyflink.dataset.ExecutionEnvironment.get_execution_environment",
"pyflink.table.descriptors.OldCsv",
"pyflink.table.DataTypes.STRING",
"pyflink.table.TableConfig",
"pyflink.table.BatchTableEnvironment.create",
"pyflink.table.DataTypes.BIGINT",
"pyflink.table.descriptors.Schema",
"pyflink.table.descriptors.FileSystem"
] |
[((372, 420), 'pyflink.dataset.ExecutionEnvironment.get_execution_environment', 'ExecutionEnvironment.get_execution_environment', ([], {}), '()\n', (418, 420), False, 'from pyflink.dataset import ExecutionEnvironment\n'), ((468, 481), 'pyflink.table.TableConfig', 'TableConfig', ([], {}), '()\n', (479, 481), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((494, 542), 'pyflink.table.BatchTableEnvironment.create', 'BatchTableEnvironment.create', (['exec_env', 't_config'], {}), '(exec_env, t_config)\n', (522, 542), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((1595, 1643), 'pyflink.dataset.ExecutionEnvironment.get_execution_environment', 'ExecutionEnvironment.get_execution_environment', ([], {}), '()\n', (1641, 1643), False, 'from pyflink.dataset import ExecutionEnvironment\n'), ((1691, 1704), 'pyflink.table.TableConfig', 'TableConfig', ([], {}), '()\n', (1702, 1704), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((1717, 1765), 'pyflink.table.BatchTableEnvironment.create', 'BatchTableEnvironment.create', (['exec_env', 't_config'], {}), '(exec_env, t_config)\n', (1745, 1765), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((843, 861), 'pyflink.table.DataTypes.STRING', 'DataTypes.STRING', ([], {}), '()\n', (859, 861), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((1349, 1367), 'pyflink.table.DataTypes.BIGINT', 'DataTypes.BIGINT', ([], {}), '()\n', (1365, 1367), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((798, 806), 'pyflink.table.descriptors.Schema', 'Schema', ([], {}), '()\n', (804, 806), False, 'from pyflink.table.descriptors import OldCsv, FileSystem, Schema\n'), ((754, 772), 'pyflink.table.DataTypes.STRING', 'DataTypes.STRING', ([], {}), '()\n', (770, 772), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((1203, 1221), 'pyflink.table.DataTypes.BIGINT', 'DataTypes.BIGINT', ([], {}), '()\n', (1219, 1221), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((1292, 1310), 'pyflink.table.DataTypes.STRING', 'DataTypes.STRING', ([], {}), '()\n', (1308, 1310), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((1508, 1514), 'pyflink.table.expressions.lit', 'lit', (['(1)'], {}), '(1)\n', (1511, 1514), False, 'from pyflink.table.expressions import lit\n'), ((2577, 2583), 'pyflink.table.expressions.lit', 'lit', (['(1)'], {}), '(1)\n', (2580, 2583), False, 'from pyflink.table.expressions import lit\n'), ((709, 717), 'pyflink.table.descriptors.OldCsv', 'OldCsv', ([], {}), '()\n', (715, 717), False, 'from pyflink.table.descriptors import OldCsv, FileSystem, Schema\n'), ((1247, 1255), 'pyflink.table.descriptors.Schema', 'Schema', ([], {}), '()\n', (1253, 1255), False, 'from pyflink.table.descriptors import OldCsv, FileSystem, Schema\n'), ((1146, 1164), 'pyflink.table.DataTypes.STRING', 'DataTypes.STRING', ([], {}), '()\n', (1162, 1164), False, 'from pyflink.table import BatchTableEnvironment, TableConfig, DataTypes\n'), ((595, 607), 'pyflink.table.descriptors.FileSystem', 'FileSystem', ([], {}), '()\n', (605, 607), False, 'from pyflink.table.descriptors import OldCsv, FileSystem, Schema\n'), ((942, 954), 'pyflink.table.descriptors.FileSystem', 'FileSystem', ([], {}), '()\n', (952, 954), False, 'from pyflink.table.descriptors import OldCsv, FileSystem, Schema\n'), 
((1057, 1065), 'pyflink.table.descriptors.OldCsv', 'OldCsv', ([], {}), '()\n', (1063, 1065), False, 'from pyflink.table.descriptors import OldCsv, FileSystem, Schema\n')]
|
import git
import ray
from ray import tune
from ray.tune import CLIReporter
from agent0.common.utils import parse_arguments
from agent0.nips_encoder.trainer import Trainer, Config
if __name__ == '__main__':
repo = git.Repo(search_parent_directories=True)
sha = repo.git.rev_parse(repo.head.object.hexsha, short=True)
sha_long = repo.head.object.hexsha
cfg = Config(sha=sha_long)
args = parse_arguments(cfg)
cfg = Config(**vars(args))
ray.init(memory=20 * 2 ** 30, object_store_memory=80 * 2 ** 30)
reporter = CLIReporter(
metric_columns=["game", "speed", "loss", "adam_lr", "time_remain", "time_past"]
)
analysis = tune.run(
Trainer,
name='nips_encoder_tune',
verbose=1,
stop=lambda trial_id, result: result['epoch'] > cfg.epochs,
checkpoint_at_end=True,
progress_reporter=reporter,
checkpoint_freq=cfg.replay_size // cfg.batch_size,
resources_per_trial={"gpu": 1},
config=vars(cfg),
fail_fast=True,
reuse_actors=True,
restore=cfg.restore_checkpoint,
)
|
[
"ray.init",
"ray.tune.CLIReporter",
"git.Repo",
"agent0.nips_encoder.trainer.Config",
"agent0.common.utils.parse_arguments"
] |
[((220, 260), 'git.Repo', 'git.Repo', ([], {'search_parent_directories': '(True)'}), '(search_parent_directories=True)\n', (228, 260), False, 'import git\n'), ((377, 397), 'agent0.nips_encoder.trainer.Config', 'Config', ([], {'sha': 'sha_long'}), '(sha=sha_long)\n', (383, 397), False, 'from agent0.nips_encoder.trainer import Trainer, Config\n'), ((409, 429), 'agent0.common.utils.parse_arguments', 'parse_arguments', (['cfg'], {}), '(cfg)\n', (424, 429), False, 'from agent0.common.utils import parse_arguments\n'), ((466, 529), 'ray.init', 'ray.init', ([], {'memory': '(20 * 2 ** 30)', 'object_store_memory': '(80 * 2 ** 30)'}), '(memory=20 * 2 ** 30, object_store_memory=80 * 2 ** 30)\n', (474, 529), False, 'import ray\n'), ((545, 641), 'ray.tune.CLIReporter', 'CLIReporter', ([], {'metric_columns': "['game', 'speed', 'loss', 'adam_lr', 'time_remain', 'time_past']"}), "(metric_columns=['game', 'speed', 'loss', 'adam_lr',\n 'time_remain', 'time_past'])\n", (556, 641), False, 'from ray.tune import CLIReporter\n')]
|
from __future__ import annotations
import typing as t
from dataclasses import dataclass
from pathlib import Path
from loguru import logger
from rich.console import Console
from rich.console import ConsoleOptions
from rich.console import Group
from rich.console import group
from rich.console import RenderResult
from rich.markdown import Markdown
from rich.panel import Panel
from rich.table import Table
from rich.tree import Tree
from dagos.core.components import SoftwareComponent
class SoftwareEnvironmentRegistry(type):
"""A metaclass responsible for registering software environments."""
environments: t.List[SoftwareEnvironment] = []
def __call__(cls, *args: t.Any, **kwds: t.Any) -> t.Any:
"""The registry hooks into the object construction lifecycle to register
software environments.
"""
environment = super().__call__(*args, **kwds)
if cls not in cls.environments:
cls.environments.append(environment)
return environment
@classmethod
def find_environment(cls, name: str) -> t.Optional[SoftwareEnvironment]:
for environment in cls.environments:
if environment.name == name:
return environment
return None
@dataclass
class Platform:
env: t.List[EnvironmentVariable]
packages: t.List[Packages]
images: t.List[Image]
def __rich_console__(
self, console: Console, options: ConsoleOptions
) -> t.Generator[RenderResult]:
parent_table = Table(box=None)
parent_table.add_column()
parent_table.add_column()
common_package_table = Table(title="Common Packages", show_header=False)
common_package_table.add_column("")
common_package_tree = Tree("packages")
for packages in self.packages:
common_package_tree.add(packages.__rich__())
common_package_table.add_row(common_package_tree)
image_table = Table(title=f"Targeted Container Images ({len(self.images)})")
image_table.add_column("ID")
image_table.add_column("Packages")
for image in self.images:
package_tree = Tree("packages")
for packages in image.packages:
package_tree.add(packages.__rich__())
image_table.add_row(image.id, package_tree)
parent_table.add_row(common_package_table, image_table)
yield parent_table
@dataclass
class EnvironmentVariable:
name: str
value: str
@dataclass
class Packages:
package_list: t.List[str]
manager: str = "system"
dependency: t.Optional[str] = None
def __rich__(self) -> Tree:
title = (
self.manager
if self.dependency is None
else f"{self.manager} ({self.dependency})"
)
tree = Tree(title)
for package in self.package_list:
tree.add(package)
return tree
@dataclass
class Image:
id: str
packages: t.List[Packages]
@dataclass
class Component:
name: str
purpose: t.Optional[str]
version: t.Optional[str]
software_component: t.Optional[SoftwareComponent]
class SoftwareEnvironment(metaclass=SoftwareEnvironmentRegistry):
"""Base class for software environments."""
path: Path
name: str
description: t.Optional[str]
platform: Platform
components: t.List[Component]
def __init__(
self,
path: Path,
name: str,
description: t.Optional[str],
platform: Platform,
components: t.List[Component],
) -> None:
""""""
self.path = path
self.name = name
self.description = description
self.platform = platform
self.components = components
def collect_components(self) -> t.List[SoftwareComponent]:
collected_components: t.List[SoftwareComponent] = []
unknown_components: t.List[str] = []
for component in self.components:
if component.software_component:
logger.trace("Requested component '{}' is known!", component.name)
# TODO: Check if selected platform supports component?
collected_components.append(component.software_component)
else:
unknown_components.append(component.name)
if len(unknown_components) > 0:
logger.error(
"{} of the {} requested components are unknown, specifically: {}",
len(unknown_components),
len(self.components),
", ".join(unknown_components),
)
return collected_components
def __rich__(self) -> Panel:
@group()
def get_renderables():
yield Markdown(f"{self.description}\n")
yield self.platform
table = Table(
title=f"Software Components ({len(self.components)})",
title_justify="left",
show_lines=True,
expand=True,
)
table.add_column("Name")
table.add_column("Purpose", ratio=1)
table.add_column("Version", justify="right")
table.add_column("Found?", justify="center")
table.add_column("Valid?", justify="center")
for component in self.components:
table.add_row(
component.name,
component.purpose,
component.version,
":white_check_mark:"
if component.software_component
else ":cross_mark:",
":white_check_mark:"
if component.software_component.is_valid()
else ":cross_mark:",
)
yield table
return Panel(
Group(get_renderables()),
title=f"Environment: {self.name}",
title_align="left",
subtitle=f"Path: {self.path}",
subtitle_align="right",
)
|
[
"loguru.logger.trace",
"rich.tree.Tree",
"rich.markdown.Markdown",
"rich.console.group",
"rich.table.Table"
] |
[((1514, 1529), 'rich.table.Table', 'Table', ([], {'box': 'None'}), '(box=None)\n', (1519, 1529), False, 'from rich.table import Table\n'), ((1630, 1679), 'rich.table.Table', 'Table', ([], {'title': '"""Common Packages"""', 'show_header': '(False)'}), "(title='Common Packages', show_header=False)\n", (1635, 1679), False, 'from rich.table import Table\n'), ((1755, 1771), 'rich.tree.Tree', 'Tree', (['"""packages"""'], {}), "('packages')\n", (1759, 1771), False, 'from rich.tree import Tree\n'), ((2807, 2818), 'rich.tree.Tree', 'Tree', (['title'], {}), '(title)\n', (2811, 2818), False, 'from rich.tree import Tree\n'), ((4669, 4676), 'rich.console.group', 'group', ([], {}), '()\n', (4674, 4676), False, 'from rich.console import group\n'), ((2154, 2170), 'rich.tree.Tree', 'Tree', (['"""packages"""'], {}), "('packages')\n", (2158, 2170), False, 'from rich.tree import Tree\n'), ((4011, 4077), 'loguru.logger.trace', 'logger.trace', (['"""Requested component \'{}\' is known!"""', 'component.name'], {}), '("Requested component \'{}\' is known!", component.name)\n', (4023, 4077), False, 'from loguru import logger\n'), ((4726, 4759), 'rich.markdown.Markdown', 'Markdown', (['f"""{self.description}\n"""'], {}), "(f'{self.description}\\n')\n", (4734, 4759), False, 'from rich.markdown import Markdown\n')]
|
from gpiozero import Robot, Motor, MotionSensor
from signal import pause
robot = Robot(left=Motor(4, 14), right=Motor(17, 18))
pir = MotionSensor(5)
pir.when_motion = robot.forward
pir.when_no_motion = robot.stop
pause()
|
[
"gpiozero.Motor",
"signal.pause",
"gpiozero.MotionSensor"
] |
[((134, 149), 'gpiozero.MotionSensor', 'MotionSensor', (['(5)'], {}), '(5)\n', (146, 149), False, 'from gpiozero import Robot, Motor, MotionSensor\n'), ((216, 223), 'signal.pause', 'pause', ([], {}), '()\n', (221, 223), False, 'from signal import pause\n'), ((93, 105), 'gpiozero.Motor', 'Motor', (['(4)', '(14)'], {}), '(4, 14)\n', (98, 105), False, 'from gpiozero import Robot, Motor, MotionSensor\n'), ((113, 126), 'gpiozero.Motor', 'Motor', (['(17)', '(18)'], {}), '(17, 18)\n', (118, 126), False, 'from gpiozero import Robot, Motor, MotionSensor\n')]
|
#!/usr/bin/env python
"""SequenceMotifDecomposer is a motif finder algorithm.
@author: <NAME>
@email: <EMAIL>
"""
import logging
import multiprocessing as mp
import os
from collections import defaultdict
from eden import apply_async
import numpy as np
from scipy.sparse import vstack
from eden.util.iterated_maximum_subarray import compute_max_subarrays_sequence
from itertools import izip
import time
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.linear_model import SGDClassifier
from sklearn.cluster import MiniBatchKMeans
from eden.sequence import Vectorizer
from StringIO import StringIO
from Bio import SeqIO
from Bio.Align.Applications import MuscleCommandline
from Bio.Alphabet import IUPAC
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from corebio.seq import Alphabet, SeqList
import weblogolib as wbl
from scipy.cluster.hierarchy import linkage
import regex as re
from collections import Counter
from sklearn import metrics
from eden.util.NeedlemanWunsh import edit_distance
import random
import pylab as plt
import joblib
from scipy.optimize import curve_fit
import multiprocessing
logger = logging.getLogger(__name__)
def sigmoid(x, a, b):
"""sigmoid."""
return 1 / (1 + np.exp(-(x - a) / b))
class PValueEvaluator(object):
"""Fit a parametrized sigmoid on the empirical cumulative distribution."""
def __init__(self, random_state=1):
"""Constructor."""
self.random_state = random_state
self.a = -4
self.b = 1
def ecdf(self, x):
"""Empirical cumulative distribution function."""
xs = np.sort(x)
ys = np.arange(1, len(xs) + 1) / float(len(xs))
return xs, ys
def fit(self, scores):
"""fit."""
if scores:
xs, ys = self.ecdf(scores)
popt, pcov = curve_fit(sigmoid, xs, ys)
self.a, self.b = popt
else:
logger.debug('Warning: reverting to default values')
logger.debug('ECDF fit on %d values' % (len(scores)))
logger.debug('Optimal params: a:%.2f b:%.2f' % (self.a, self.b))
def predict(self, value):
"""pvalue."""
y = sigmoid(value, self.a, self.b)
p_val = 1 - y
return p_val
def ecdf(x):
"""Empirical cumulative distribution function."""
xs = np.sort(x)
ys = np.arange(1, len(xs) + 1) / float(len(xs))
return xs, ys
def letter_regex(k, size, regex_th=0.3):
"""letter_regex."""
code = []
for letter, count in k:
if count / float(size) > regex_th:
if letter != '-':
code.append(letter)
if len(code) == 0:
code_str = None
elif len(code) == 1:
code_str = code[0]
else:
code_str = '(' + '|'.join(code) + ')'
return code_str
def consensus_regex(trimmed_align_seqs, regex_th):
"""consensus_regex."""
cluster = []
for h, align_seq in trimmed_align_seqs:
str_list = [c for c in align_seq]
concat_str = np.array(str_list, dtype=np.dtype('a'))
cluster.append(concat_str)
cluster = np.vstack(cluster)
size = len(trimmed_align_seqs)
for i, row in enumerate(cluster.T):
c = Counter(row)
k = c.most_common()
code = ''
for i, row in enumerate(cluster.T):
c = Counter(row)
k = c.most_common()
l = letter_regex(k, size, regex_th=regex_th)
if l:
code += l
return code
def find_occurrences(needle, haystack):
"""find_occurrences."""
for h, s in haystack:
matches = re.findall(needle, s, overlapped=True)
if len(matches):
yield 1
else:
yield 0
def occurrences(needle, haystack):
"""occurrences."""
counts = sum(find_occurrences(needle, haystack))
size = len(haystack)
return counts, float(counts) / size
def extract_consensus(seqs, motives, regex_th):
"""extract_consensus."""
for id in motives:
c_regex = consensus_regex(motives[id]['trimmed_align_seqs'], regex_th)
counts, freq = occurrences(c_regex, seqs)
yield freq, id, c_regex, counts, motives[id]['consensus_seq']
def plot_location(needle, haystack,
cluster_id=None, nbins=20, size=(17, 2), fname=None):
"""plot_location."""
locs = []
for h, s in haystack:
for match in re.finditer(needle, s):
s = match.start()
e = match.end()
m = s + (e - s) / 2
locs.append(m)
plt.figure(figsize=size)
n, bins, patches = plt.hist(
locs, nbins, normed=0, facecolor='blue', alpha=0.3)
plt.grid()
plt.title(needle)
plt.xlabel('Position')
plt.ylabel('Num occurrences')
if fname:
plt.draw()
figname = '%s_loc_%d.png' % (fname, cluster_id)
plt.savefig(
figname, bbox_inches='tight', transparent=True, pad_inches=0)
else:
figname = None
plt.show()
plt.close()
return figname
def extract_location(needle, haystack):
"""extract_location."""
locs = []
for h, s in haystack:
for match in re.finditer(needle, s):
s = match.start()
e = match.end()
m = s + (e - s) / 2
locs.append(m)
if locs:
avg_loc = np.percentile(locs, 50)
std_loc = np.percentile(locs, 70) - np.percentile(locs, 30)
else:
avg_loc = -1
std_loc = 0
return avg_loc, std_loc
def hits(motives, ids=None):
"""hits."""
for i in ids:
for h, s in motives[i]['seqs']:
tokens = h.split('<loc>')
seq_id = tokens[0]
begin, end = tokens[1].split(':')
yield (seq_id, int(begin), int(end), i)
def compute_cooccurence(motives, ids=None):
"""compute_cooccurence."""
if ids is None:
ids = [id for id in motives]
seqs_summary = defaultdict(list)
for seq_id, begin, end, i in hits(motives, ids=ids):
seqs_summary[seq_id].append((begin, end, i))
distances = defaultdict(list)
size = max(id for id in motives) + 1
cooccurence_mtx = np.zeros((size, size))
for seq_id in sorted(seqs_summary):
cluster_ids = [cluster_id
for begin, end, cluster_id in seqs_summary[seq_id]]
centers = defaultdict(list)
for begin, end, cluster_id in seqs_summary[seq_id]:
centers[cluster_id].append(begin + (end - begin) / 2)
cluster_ids = set(cluster_ids)
for i in cluster_ids:
for j in cluster_ids:
cooccurence_mtx[i, j] += 1
if i != j:
# find closest instance j from any instance in i
d_ij = []
for c_i in centers[i]:
for c_j in centers[j]:
d_ij.append(abs(c_i - c_j))
selected_abs = min(d_ij)
for c_i in centers[i]:
for c_j in centers[j]:
if selected_abs == abs(c_i - c_j):
selected = c_i - c_j
distances[(i, j)].append(selected)
cooccurence_mtx = np.nan_to_num(cooccurence_mtx)
orig_cooccurence_mtx = cooccurence_mtx.copy()
cooccurence_list = []
for i, row in enumerate(cooccurence_mtx):
norm = row[i]
if norm != 0:
row /= norm
else:
row = np.zeros(row.shape)
row[i] = 0
cooccurence_list.append(row)
norm_cooccurence_mtx = np.vstack(cooccurence_list)
return orig_cooccurence_mtx, norm_cooccurence_mtx, distances
def plot_distance(cluster_id_i,
cluster_id_j,
regex_i,
regex_j,
distances,
nbins=5,
size=(6, 2),
fname=None):
"""plot_distance."""
ds = distances[(cluster_id_i, cluster_id_j)]
plt.figure(figsize=size)
n, bins, patches = plt.hist(
ds, nbins, normed=0, facecolor='green', alpha=0.3)
plt.grid()
plt.title('%s vs %s' % (regex_i, regex_j))
plt.xlabel('Relative position')
plt.ylabel('Num occurrences')
if fname:
plt.draw()
figname = '%s_dist_%d_vs_%d.png' % (fname, cluster_id_i, cluster_id_j)
plt.savefig(
figname, bbox_inches='tight', transparent=True, pad_inches=0)
else:
figname = None
plt.show()
plt.close()
return figname
def mean_shift_decomposition(sig, half_windw_size=5):
"""mean_shift_decomposition."""
sig_len = len(sig)
for i in range(half_windw_size, sig_len - half_windw_size):
min_sig = np.min(sig[i - half_windw_size:i + half_windw_size])
if min_sig == sig[i]:
yield i
def box_decomposition(sig, half_windw_size=5):
"""box_decomposition."""
ids = list(mean_shift_decomposition(sig, half_windw_size))
for i in range(len(ids) - 1):
start = ids[i]
end = ids[i + 1]
width = end - start
val = sum(sig[start:end])
yield val, start, end, width
def cumulative_score(seqs, smod):
"""cumulative_score."""
median_len = np.median([len(s) for h, s in seqs])
sigs = None
for scores in smod.score(seqs):
sig = np.array(scores)
if len(sig) != median_len:
logger.debug('Length mismatch: %d != %d' % (len(sig), median_len))
if sigs is None:
if len(sig) >= median_len:
sigs = sig[:median_len]
else:
if len(sig) >= median_len:
sigs = sigs + sig[:median_len]
sig = np.array(sigs) / float(len(seqs))
return sig
def trim_seqs(seqs, smod, half_windw_size=7):
"""trim_seqs."""
sig = cumulative_score(seqs, smod)
val, start, end, width = max(box_decomposition(sig, half_windw_size))
logger.debug('val:%.1f beg:%s end:%s width:%s' % (val, start, end, width))
for h, s in seqs:
if s[start:end]:
yield (h, s[start:end])
def plot_cumulative_score(smod,
seqs,
size=(6, 2),
fname=None):
"""plot_cumulative_score."""
sig = cumulative_score(seqs, smod)
plt.figure(figsize=size)
sigp = np.copy(sig)
sigp[sigp < 0] = 0
plt.bar(range(len(sigp)), sigp, alpha=0.3, color='g')
sign = np.copy(sig)
sign[sign >= 0] = 0
plt.bar(range(len(sign)), sign, alpha=0.3, color='r')
plt.grid()
plt.xlabel('Position')
plt.ylabel('Importance score')
if fname:
plt.draw()
figname = '%s_importance.png' % (fname)
plt.savefig(
figname, bbox_inches='tight', transparent=True, pad_inches=0)
else:
figname = None
plt.show()
plt.close()
return figname
# ------------------------------------------------------------------------------
def serial_pre_process(iterable, vectorizer=None):
"""serial_pre_process."""
data_matrix = vectorizer.transform(iterable)
return data_matrix
def chunks(iterable, n):
"""chunks."""
iterable = iter(iterable)
while True:
items = []
for i in range(n):
it = iterable.next()
items.append(it)
yield items
def multiprocess_vectorize(iterable,
vectorizer=None,
pos_block_size=100,
n_jobs=-1):
"""multiprocess_vectorize."""
start_time = time.time()
if n_jobs == -1:
pool = mp.Pool()
else:
pool = mp.Pool(n_jobs)
results = [apply_async(
pool, serial_pre_process,
args=(seqs, vectorizer))
for seqs in chunks(iterable, pos_block_size)]
logger.debug('Setup %.2f secs' % (time.time() - start_time))
logger.debug('Vectorizing')
start_time = time.time()
matrices = []
for i, p in enumerate(results):
loc_start_time = time.time()
pos_data_matrix = p.get()
matrices += pos_data_matrix
d_time = time.time() - start_time
d_loc_time = time.time() - loc_start_time
size = pos_data_matrix.shape
logger.debug('%d %s (%.2f secs) (delta: %.2f)' %
(i, size, d_time, d_loc_time))
pool.close()
pool.join()
data_matrix = vstack(matrices)
return data_matrix
def multiprocess_fit(pos_iterable, neg_iterable,
vectorizer=None,
estimator=None,
pos_block_size=100,
neg_block_size=100,
n_jobs=-1):
"""multiprocess_fit."""
start_time = time.time()
classes = np.array([1, -1])
if n_jobs == -1:
pool = mp.Pool()
else:
pool = mp.Pool(n_jobs)
pos_results = [apply_async(
pool, serial_pre_process,
args=(seqs, vectorizer))
for seqs in chunks(pos_iterable, pos_block_size)]
neg_results = [apply_async(
pool, serial_pre_process,
args=(seqs, vectorizer))
for seqs in chunks(neg_iterable, neg_block_size)]
logger.debug('Setup %.2f secs' % (time.time() - start_time))
logger.debug('Fitting')
start_time = time.time()
for i, (p, n) in enumerate(izip(pos_results, neg_results)):
loc_start_time = time.time()
pos_data_matrix = p.get()
y = [1] * pos_data_matrix.shape[0]
neg_data_matrix = n.get()
y += [-1] * neg_data_matrix.shape[0]
y = np.array(y)
data_matrix = vstack([pos_data_matrix, neg_data_matrix])
estimator.partial_fit(data_matrix, y, classes=classes)
d_time = time.time() - start_time
d_loc_time = time.time() - loc_start_time
size = pos_data_matrix.shape
logger.debug('%d %s (%.2f secs) (delta: %.2f)' %
(i, size, d_time, d_loc_time))
pool.close()
pool.join()
return estimator
def multiprocess_performance(pos_iterable, neg_iterable,
vectorizer=None,
estimator=None,
pos_block_size=100,
neg_block_size=100,
n_jobs=-1):
"""multiprocess_performance."""
start_time = time.time()
if n_jobs == -1:
pool = mp.Pool()
else:
pool = mp.Pool(n_jobs)
pos_results = [apply_async(
pool, serial_pre_process,
args=(seqs, vectorizer))
for seqs in chunks(pos_iterable, pos_block_size)]
neg_results = [apply_async(
pool, serial_pre_process,
args=(seqs, vectorizer))
for seqs in chunks(neg_iterable, neg_block_size)]
logger.debug('Setup %.2f secs' % (time.time() - start_time))
logger.debug('Performance evaluation')
start_time = time.time()
preds = []
binary_preds = []
true_targets = []
for i, (p, n) in enumerate(izip(pos_results, neg_results)):
loc_start_time = time.time()
pos_data_matrix = p.get()
y = [1] * pos_data_matrix.shape[0]
neg_data_matrix = n.get()
y += [-1] * neg_data_matrix.shape[0]
y = np.array(y)
true_targets.append(y)
data_matrix = vstack([pos_data_matrix, neg_data_matrix])
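        # accumulate decision-function margins and binary predictions for this chunk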
pred = estimator.decision_function(data_matrix)
preds.append(pred)
binary_pred = estimator.predict(data_matrix)
binary_preds.append(binary_pred)
d_time = time.time() - start_time
d_loc_time = time.time() - loc_start_time
size = pos_data_matrix.shape
logger.debug('%d %s (%.2f secs) (delta: %.2f)' %
(i, size, d_time, d_loc_time))
pool.close()
pool.join()
preds = np.hstack(preds)
binary_preds = np.hstack(binary_preds)
true_targets = np.hstack(true_targets)
return preds, binary_preds, true_targets
def serial_subarray(iterable,
vectorizer=None,
estimator=None,
min_subarray_size=5,
max_subarray_size=10):
"""serial_subarray."""
annotated_seqs = vectorizer.annotate(iterable, estimator=estimator)
subarrays_items = []
for (orig_header, orig_seq), (seq, score) in zip(iterable, annotated_seqs):
subarrays = compute_max_subarrays_sequence(
seq=seq, score=score,
min_subarray_size=min_subarray_size,
max_subarray_size=max_subarray_size,
margin=1,
output='all')
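        # each subarray dict provides the subsequence string, its begin/end positions and its score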
subseqs = []
for subarray in subarrays:
subseq_seq = subarray['subarray_string']
begin = subarray['begin']
end = subarray['end']
score = subarray['score']
header = orig_header
header += '<loc>%d:%d<loc>' % (begin, end)
header += '<score>%.4f<score>' % (score)
header += '<subseq>%s<subseq>' % (subseq_seq)
subseq = (header, seq)
subseqs.append(subseq)
subarrays_items += subseqs
return subarrays_items
def multiprocess_subarray(iterable,
vectorizer=None,
estimator=None,
min_subarray_size=5,
max_subarray_size=10,
block_size=100,
n_jobs=-1):
"""multiprocess_subarray."""
start_time = time.time()
if n_jobs == -1:
pool = mp.Pool()
else:
pool = mp.Pool(n_jobs)
results = [apply_async(
pool, serial_subarray,
args=(seqs,
vectorizer,
estimator,
min_subarray_size,
max_subarray_size))
for seqs in chunks(iterable, block_size)]
logger.debug('Setup %.2f secs' % (time.time() - start_time))
logger.debug('Annotating')
start_time = time.time()
subarrays_items = []
for i, p in enumerate(results):
loc_start_time = time.time()
subarrays_item = p.get()
subarrays_items += subarrays_item
d_time = time.time() - start_time
d_loc_time = time.time() - loc_start_time
logger.debug('%d (%.2f secs) (delta: %.2f)' %
(i, d_time, d_loc_time))
pool.close()
pool.join()
return subarrays_items
def serial_score(iterable,
vectorizer=None,
estimator=None):
"""serial_score."""
annotated_seqs = vectorizer.annotate(iterable, estimator=estimator)
scores = [score for seq, score in annotated_seqs]
return scores
def multiprocess_score(iterable,
vectorizer=None,
estimator=None,
block_size=100,
n_jobs=-1):
"""multiprocess_score."""
start_time = time.time()
if n_jobs == -1:
pool = mp.Pool()
else:
pool = mp.Pool(n_jobs)
results = [apply_async(
pool, serial_score,
args=(seqs,
vectorizer,
estimator))
for seqs in chunks(iterable, block_size)]
logger.debug('Setup %.2f secs' % (time.time() - start_time))
logger.debug('Predicting')
start_time = time.time()
scores_items = []
for i, p in enumerate(results):
loc_start_time = time.time()
scores = p.get()
scores_items += scores
d_time = time.time() - start_time
d_loc_time = time.time() - loc_start_time
logger.debug('%d (%.2f secs) (delta: %.2f)' %
(i, d_time, d_loc_time))
pool.close()
pool.join()
return scores_items
# ------------------------------------------------------------------------------
def _fasta_to_fasta(lines):
seq = ""
for line in lines:
if line:
if line[0] == '>':
if seq:
yield seq
seq = ""
line_str = str(line)
yield line_str.strip()
else:
line_str = line.split()
if line_str:
seq += str(line_str[0]).strip()
if seq:
yield seq
# ------------------------------------------------------------------------------
class MuscleAlignWrapper(object):
"""A wrapper to perform Muscle Alignment on sequences."""
def __init__(self,
diags=False,
maxiters=16,
maxhours=None,
# TODO: check if this alphabet is required
# it over-rides tool.alphabet
alphabet='dna', # ['dna', 'rna', 'protein']
):
"""Initialize an instance."""
self.diags = diags
self.maxiters = maxiters
self.maxhours = maxhours
if alphabet == 'protein':
self.alphabet = IUPAC.protein
elif alphabet == 'rna':
self.alphabet = IUPAC.unambiguous_rna
else:
self.alphabet = IUPAC.unambiguous_dna
def _seq_to_stdin_fasta(self, seqs):
# seperating headers
headers, instances = [list(x) for x in zip(*seqs)]
instances_seqrecord = []
for i, j in enumerate(instances):
instances_seqrecord.append(
SeqRecord(Seq(j, self.alphabet), id=str(i)))
handle = StringIO()
SeqIO.write(instances_seqrecord, handle, "fasta")
data = handle.getvalue()
return headers, data
def _perform_ma(self, data):
        # honour the maxiters value configured in __init__ rather than a hard-coded constant
        params = {'maxiters': self.maxiters}
if self.diags is True:
params['diags'] = True
if self.maxhours is not None:
params['maxhours'] = self.maxhours
muscle_cline = MuscleCommandline(**params)
stdout, stderr = muscle_cline(stdin=data)
return stdout
def _fasta_to_seqs(self, headers, stdout):
out = list(_fasta_to_fasta(stdout.split('\n')))
motif_seqs = [''] * len(headers)
for i in range(len(out[:-1]))[::2]:
id = int(out[i].split(' ')[0].split('>')[1])
motif_seqs[id] = out[i + 1]
return zip(headers, motif_seqs)
def transform(self, seqs=[]):
"""Carry out alignment."""
headers, data = self._seq_to_stdin_fasta(seqs)
stdout = self._perform_ma(data)
aligned_seqs = self._fasta_to_seqs(headers, stdout)
return aligned_seqs
# ------------------------------------------------------------------------------
class Weblogo(object):
"""A wrapper of weblogolib for creating sequence."""
def __init__(self,
output_format='png', # ['eps','png','png_print','jpeg']
stacks_per_line=40,
sequence_type='dna', # ['protein','dna','rna']
ignore_lower_case=False,
# ['bits','nats','digits','kT','kJ/mol','kcal/mol','probability']
units='bits',
first_position=1,
logo_range=list(),
# composition = 'auto',
scale_stack_widths=True,
error_bars=True,
title='',
figure_label='',
show_x_axis=True,
x_label='',
show_y_axis=True,
y_label='',
y_axis_tic_spacing=1.0,
show_ends=False,
# ['auto','base','pairing','charge','chemistry','classic','monochrome']
color_scheme='classic',
resolution=96,
fineprint='',
):
"""Initialize an instance."""
options = wbl.LogoOptions()
options.stacks_per_line = stacks_per_line
options.sequence_type = sequence_type
options.ignore_lower_case = ignore_lower_case
options.unit_name = units
options.first_index = first_position
if logo_range:
options.logo_start = logo_range[0]
options.logo_end = logo_range[1]
options.scale_width = scale_stack_widths
options.show_errorbars = error_bars
if title:
options.title = title
if figure_label:
options.logo_label = figure_label
options.show_xaxis = show_x_axis
if x_label:
options.xaxis_label = x_label
options.show_yaxis = show_y_axis
if y_label:
options.yaxis_label = y_label
options.yaxis_tic_interval = y_axis_tic_spacing
options.show_ends = show_ends
options.color_scheme = wbl.std_color_schemes[color_scheme]
options.resolution = resolution
if fineprint:
options.fineprint = fineprint
self.options = options
self.output_format = output_format
def create_logo(self, seqs=[]):
"""Create sequence logo for input sequences."""
# seperate headers
headers, instances = [list(x)
for x in zip(*seqs)]
        if self.options.sequence_type == 'rna':
alphabet = Alphabet('ACGU')
        elif self.options.sequence_type == 'protein':
alphabet = Alphabet('ACDEFGHIKLMNPQRSTVWY')
else:
alphabet = Alphabet('AGCT')
motif_corebio = SeqList(alist=instances, alphabet=alphabet)
data = wbl.LogoData().from_seqs(motif_corebio)
format = wbl.LogoFormat(data, self.options)
if self.output_format == 'png':
return wbl.png_formatter(data, format)
elif self.output_format == 'png_print':
return wbl.png_print_formatter(data, format)
elif self.output_format == 'jpeg':
return wbl.jpeg_formatter(data, format)
else:
return wbl.eps_formatter(data, format)
# ------------------------------------------------------------------------------
class SequenceMotifDecomposer(BaseEstimator, ClassifierMixin):
"""SequenceMotifDecomposer."""
def __init__(self,
complexity=5,
n_clusters=10,
min_subarray_size=4,
max_subarray_size=10,
estimator=SGDClassifier(warm_start=True),
class_estimator=SGDClassifier(),
clusterer=MiniBatchKMeans(),
pos_block_size=300,
neg_block_size=300,
n_jobs=-1):
"""Construct."""
self.complexity = complexity
self.n_clusters = n_clusters
self.min_subarray_size = min_subarray_size
self.max_subarray_size = max_subarray_size
self.pos_block_size = pos_block_size
self.neg_block_size = neg_block_size
self.n_jobs = n_jobs
self.vectorizer = Vectorizer(complexity=complexity,
auto_weights=True,
nbits=15)
self.estimator = estimator
self.class_estimator = class_estimator
self.clusterer = clusterer
self.clusterer_is_fit = False
def save(self, model_name):
"""save."""
joblib.dump(self, model_name, compress=1)
def load(self, obj):
"""load."""
self.__dict__.update(joblib.load(obj).__dict__)
def fit(self, pos_seqs=None, neg_seqs=None):
"""fit."""
try:
self.estimator = multiprocess_fit(
pos_seqs, neg_seqs,
vectorizer=self.vectorizer,
estimator=self.estimator,
pos_block_size=self.pos_block_size,
neg_block_size=self.neg_block_size,
n_jobs=self.n_jobs)
self.fit_decomposition(neg_seqs)
return self
except Exception as e:
logger.debug('Failed iteration. Reason: %s' % e)
logger.debug('Exception', exc_info=True)
def performance(self, pos_seqs=None, neg_seqs=None):
"""performance."""
try:
y_pred, y_binary, y_test = multiprocess_performance(
pos_seqs, neg_seqs,
vectorizer=self.vectorizer,
estimator=self.estimator,
pos_block_size=self.pos_block_size,
neg_block_size=self.neg_block_size,
n_jobs=self.n_jobs)
# confusion matrix
cm = metrics.confusion_matrix(y_test, y_binary)
np.set_printoptions(precision=2)
logger.info('Confusion matrix:')
logger.info(cm)
# classification
logger.info('Classification:')
logger.info(metrics.classification_report(y_test, y_binary))
# roc
logger.info('ROC: %.3f' % (metrics.roc_auc_score(y_test, y_pred)))
except Exception as e:
logger.debug('Failed iteration. Reason: %s' % e)
logger.debug('Exception', exc_info=True)
def _decompose_header(self, header):
score = header.split('<score>')[1]
score = float(score)
loc = header.split('<loc>')[1]
begin, end = loc.split(':')
begin = int(begin)
end = int(end)
subseq = header.split('<subseq>')[1]
orig_header = header.split('<loc>')[0]
return orig_header, score, begin, end, subseq
def decompose(self, seqs=None, p_value=0.05):
"""decomposition_scores."""
try:
subarrays_items = multiprocess_subarray(
seqs,
vectorizer=self.vectorizer,
estimator=self.estimator,
min_subarray_size=self.min_subarray_size,
max_subarray_size=self.max_subarray_size,
block_size=self.pos_block_size,
n_jobs=self.n_jobs)
for header, seq in subarrays_items:
components = self._decompose_header(header)
orig_header, score, begin, end, subseq = components
p = self.compute_p_value(score)
if p <= p_value:
yield orig_header, begin, end, p, subseq
except Exception as e:
logger.debug('Failed iteration. Reason: %s' % e)
logger.debug('Exception', exc_info=True)
def decomposition_scores(self, seqs=None):
"""decomposition_scores."""
try:
subarrays_items = multiprocess_subarray(
seqs,
vectorizer=self.vectorizer,
estimator=self.estimator,
min_subarray_size=self.min_subarray_size,
max_subarray_size=self.max_subarray_size,
block_size=self.pos_block_size,
n_jobs=self.n_jobs)
for header, seq in subarrays_items:
yield self._decompose_header(header)
except Exception as e:
logger.debug('Failed iteration. Reason: %s' % e)
logger.debug('Exception', exc_info=True)
def fit_decomposition(self, seqs=None):
"""fit_decomposition."""
self.a, self.b = -4, 1
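        # defaults for the sigmoid parameters, used when no decomposition scores are available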
scores = [score for header, score, begin, end, subseq in
self.decomposition_scores(seqs)]
if scores:
xs, ys = ecdf(scores)
popt, pcov = curve_fit(sigmoid, xs, ys)
self.a, self.b = popt
else:
logger.debug('Warning: reverting to default values')
logger.debug('ECDF fit on %d values' % (len(scores)))
logger.debug('Optimal params: a:%.2f b:%.2f' % (self.a, self.b))
def compute_p_value(self, value):
"""p_value."""
y = sigmoid(value, self.a, self.b)
p_val = 1 - y
return p_val
def compute_clusters(self, seqs=None, p_value=0.05):
"""compute_clusters."""
try:
subsequences = []
iterable = self.decompose(seqs, p_value=p_value)
for header, begin, end, p, subseq in iterable:
new_header = header
new_header += '<loc>' + str(begin) + ':'
new_header += str(end) + '<loc>'
subsequences.append((new_header, subseq))
if not subsequences:
raise Exception('No subarray was selected. Increase p_value.')
logger.debug('Working on: %d fragments' % len(subsequences))
n = multiprocessing.cpu_count()
            # guard against a zero block size when there are fewer fragments than CPU cores
            pos_block_size = max(1, len(subsequences) / n)
data_matrix = multiprocess_vectorize(
subsequences,
vectorizer=self.vectorizer,
pos_block_size=pos_block_size,
n_jobs=self.n_jobs)
logger.debug('Clustering')
logger.debug('working on %d instances' % data_matrix.shape[0])
start_time = time.time()
self.clusterer.set_params(n_clusters=self.n_clusters)
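            # after the first call, cluster labels are assigned by the class estimator instead of re-running the clusterer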
if self.clusterer_is_fit:
preds = self.class_estimator.predict(data_matrix)
else:
preds = self.clusterer.fit_predict(data_matrix)
self.class_estimator.fit(data_matrix, preds)
self.clusterer_is_fit = True
dtime = time.time() - start_time
logger.debug('...done in %.2f secs' % (dtime))
self.clusters = defaultdict(list)
for pred, seq in zip(preds, subsequences):
self.clusters[pred].append(seq)
logger.debug('After clustering, %d motives' % len(self.clusters))
return self.clusters
except Exception as e:
logger.debug('Failed iteration. Reason: %s' % e)
logger.debug('Exception', exc_info=True)
def score(self, seqs=None):
"""fit."""
try:
for score in multiprocess_score(seqs,
vectorizer=self.vectorizer,
estimator=self.estimator,
block_size=self.pos_block_size,
n_jobs=self.n_jobs):
yield score
except Exception as e:
logger.debug('Failed iteration. Reason: %s' % e)
logger.debug('Exception', exc_info=True)
def _order_clusters(self, clusters, complexity=3):
sep = ' ' * (complexity * 2)
# join all sequences in a cluster with enough space that
# kmers dont interfere
cluster_seqs = []
for cluster_id in clusters:
if len(clusters[cluster_id]) > 0:
seqs = [s for h, s in clusters[cluster_id]]
seq = sep.join(seqs)
cluster_seqs.append(seq)
# vectorize the seqs and compute their gram matrix K
cluster_vecs = Vectorizer(complexity).transform(cluster_seqs)
gram_matrix = metrics.pairwise.pairwise_kernels(
cluster_vecs, metric='linear')
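        # single-linkage clustering over the kernel matrix gives a merge order for the motif clusters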
c = linkage(gram_matrix, method='single')
orders = []
for id1, id2 in c[:, 0:2]:
if id1 < len(cluster_seqs):
orders.append(int(id1))
if id2 < len(cluster_seqs):
orders.append(int(id2))
return orders
def _compute_consensus_seq(self, align_seqs):
cluster = []
for h, align_seq in align_seqs:
str_list = [c for c in align_seq]
concat_str = np.array(str_list, dtype=np.dtype('a'))
cluster.append(concat_str)
cluster = np.vstack(cluster)
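        # consensus: pick the most frequent character in each alignment column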
seq = ''
for i, row in enumerate(cluster.T):
c = Counter(row)
k = c.most_common()
seq += k[0][0]
return seq
def _compute_score(self, align_seqs, min_freq=0.8):
dim = len(align_seqs)
cluster = []
for h, align_seq in align_seqs:
str_list = [c for c in align_seq]
concat_str = np.array(str_list, dtype=np.dtype('a'))
cluster.append(concat_str)
cluster = np.vstack(cluster)
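        # a column adds to the score when its dominant character occurs in at least min_freq of the sequences; gap-dominated columns are marked for removal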
score = 0
to_be_removed = []
for i, row in enumerate(cluster.T):
c = Counter(row)
k = c.most_common()
if k[0][0] == '-':
to_be_removed.append(i)
val = k[1][1]
else:
val = k[0][1]
if float(val) / dim >= min_freq:
score += 1
trimmed_align_seqs = []
for h, align_seq in align_seqs:
trimmed_align_seq = [a for i, a in enumerate(align_seq)
if i not in to_be_removed]
trimmed_align_seqs.append((h, ''.join(trimmed_align_seq)))
return score, trimmed_align_seqs
def _is_high_quality(self,
seqs,
min_score=4,
min_freq=0.6,
min_cluster_size=10,
sample_size=200):
ma = MuscleAlignWrapper(alphabet='rna')
if len(seqs) > sample_size:
sample_seqs = random.sample(seqs, sample_size)
else:
sample_seqs = seqs
align_seqs = ma.transform(seqs=sample_seqs)
score, trimmed_align_seqs = self._compute_score(align_seqs,
min_freq=min_freq)
if score >= min_score and len(align_seqs) > min_cluster_size:
return True
else:
return False
def compute_motif(self,
seqs=None,
min_score=4,
min_freq=0.6,
min_cluster_size=10,
regex_th=.3,
sample_size=200):
"""compute_motif."""
ma = MuscleAlignWrapper(alphabet='rna')
if len(seqs) > sample_size:
sample_seqs = random.sample(seqs, sample_size)
else:
sample_seqs = seqs
align_seqs = ma.transform(seqs=sample_seqs)
score, trimmed_align_seqs = self._compute_score(align_seqs,
min_freq=min_freq)
if score >= min_score and len(align_seqs) > min_cluster_size:
consensus_seq = self._compute_consensus_seq(trimmed_align_seqs)
regex_seq = consensus_regex(trimmed_align_seqs, regex_th)
motif = {'consensus_seq': consensus_seq,
'regex_seq': regex_seq,
'trimmed_align_seqs': trimmed_align_seqs,
'align_seqs': align_seqs,
'seqs': seqs}
return True, motif
else:
return False, None
def compute_motives(self,
clusters,
min_score=4,
min_freq=0.6,
min_cluster_size=10,
regex_th=.3,
sample_size=200):
"""compute_motives."""
if not clusters:
raise Exception('Error: No clusters.')
mcs = min_cluster_size
logger.debug('Alignment')
motives = dict()
for cluster_id in clusters:
start_time = time.time()
# align with muscle
is_high_quality, motif = self.compute_motif(
seqs=clusters[cluster_id],
min_score=min_score,
min_freq=min_freq,
min_cluster_size=mcs,
regex_th=regex_th,
sample_size=sample_size)
if is_high_quality:
motives[cluster_id] = motif
dtime = time.time() - start_time
logger.debug(
'Cluster %d (#%d) (%.2f secs)' %
(cluster_id, len(clusters[cluster_id]), dtime))
logger.debug('After motives computation, %d motives' % len(motives))
return motives
def _identify_mergeable_clusters(self, motives, similarity_th=0.8):
for i in motives:
for j in motives:
if j > i:
seq_i = motives[i]['consensus_seq']
seq_j = motives[j]['consensus_seq']
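                    # similarity = alignment score normalized by the average consensus length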
nw_score = edit_distance(seq_i, seq_j, gap_penalty=-1)
rel_nw_score = 2 * nw_score / (len(seq_i) + len(seq_j))
if rel_nw_score > similarity_th:
yield rel_nw_score, i, j
def merge(self,
motives,
similarity_th=0.5,
min_score=4,
min_freq=0.5,
min_cluster_size=10,
regex_th=.3,
sample_size=200):
"""merge."""
while True:
ms = sorted([m for m in self._identify_mergeable_clusters(
motives, similarity_th=similarity_th)], reverse=True)
success = False
for rel_nw_score, i, j in ms:
if motives.get(i, None) and motives.get(j, None):
n_i = len(motives[i]['seqs'])
n_j = len(motives[j]['seqs'])
seqs = motives[i]['seqs'] + motives[j]['seqs']
is_high_quality, motif = self.compute_motif(
seqs=seqs,
min_score=min_score,
min_freq=min_freq,
min_cluster_size=min_cluster_size,
regex_th=regex_th,
sample_size=sample_size)
if is_high_quality:
info1 = 'Joining: %d (#%d), %d (#%d) score: %.2f' % \
(i, n_i, j, n_j, rel_nw_score)
info2 = ' deleting: %d [%d is now #%d]' % \
(j, i, n_i + n_j)
logger.debug(info1 + info2)
# update motives
motives[i] = motif
del motives[j]
success = True
if success is False:
break
# TODO: run the predictor to learn the new class definition
logger.debug('After merge, %d motives' % len(motives))
return motives
def quality_filter(self,
seqs=None,
motives=None,
freq_th=None,
std_th=None):
"""quality_filter."""
_motives = dict()
for cluster_id in motives:
regex_seq = motives[cluster_id]['regex_seq']
counts, freq = occurrences(regex_seq, seqs)
motives[cluster_id]['freq'] = freq
motives[cluster_id]['counts'] = counts
avg, std = extract_location(regex_seq, seqs)
motives[cluster_id]['avg_pos'] = avg
motives[cluster_id]['std_pos'] = std
if freq_th is None or freq >= freq_th:
if std_th is None or std <= std_th:
_motives[cluster_id] = motives[cluster_id]
if len(_motives) == 0:
logger.warning('Quality filter is too strict. Ignoring filter.')
return motives
else:
logger.debug('After quality filter, %d motives' % len(_motives))
return _motives
def select_motives(self,
seqs=None,
p_value=0.05,
similarity_th=0.5,
min_score=4,
min_freq=0.5,
min_cluster_size=10,
regex_th=.3,
sample_size=200,
freq_th=None,
std_th=None):
"""select_motives."""
orig_clusters = self.compute_clusters(seqs, p_value=p_value)
motives = self.compute_motives(
orig_clusters,
min_score=min_score,
min_freq=min_freq,
min_cluster_size=min_cluster_size,
regex_th=regex_th,
sample_size=sample_size)
motives = self.merge(
motives,
similarity_th=similarity_th,
min_score=min_score,
min_freq=min_freq,
min_cluster_size=min_cluster_size,
regex_th=regex_th,
sample_size=sample_size)
motives = self.quality_filter(
seqs,
motives,
freq_th=freq_th,
std_th=std_th)
return motives
def compute_logo(self,
cluster_id=None,
motif=None):
"""compute_logo."""
alphabet = 'rna'
color_scheme = 'classic'
wb = Weblogo(output_format='png',
sequence_type=alphabet,
resolution=200,
stacks_per_line=60,
units='bits',
color_scheme=color_scheme)
logo_image = wb.create_logo(seqs=motif['trimmed_align_seqs'])
logo_txt = []
info = ' - num subarrays: %d' % len(motif['seqs'])
logo_txt.append(info)
info = ' - consensus sequence: %s' % motif['consensus_seq']
logo_txt.append(info)
info = ' - consensus regex: %s' % motif['regex_seq']
logo_txt.append(info)
return logo_image, logo_txt
def compute_logos(self,
motives,
ids=None):
"""compute_logos."""
if motives:
if ids is None:
ids = [cluster_id for cluster_id in motives]
logos = dict()
for cluster_id in ids:
logo_image, logo_txt = self.compute_logo(
cluster_id=cluster_id,
motif=motives[cluster_id])
logos[cluster_id] = (logo_image, logo_txt)
return logos
else:
logger.warning(
'No logo to compute. Try more permissive parameters.')
def _save_logo(self, logo, cluster_id, fname):
imagename = '%s_logo_cl_%d.png' % (fname, cluster_id)
with open(imagename, 'wb') as f:
f.write(logo)
return imagename
def _wrap_image(self, fname, fill_width=True, output_type='screen'):
pwd = os.getcwd()
url = pwd + '/' + fname
txt = []
if fill_width:
if output_type == 'pdf':
txt.append('<p align="left"><img src="file://' + url +
'" style="width: 100%"></p>')
else:
txt.append('<p align="left"><img src="' + fname +
'" style="width: 100%"></p>')
else:
if output_type == 'pdf':
txt.append('<p align="left"><img src="file://' + url +
'"></p>')
else:
txt.append('<p align="left"><img src="' + fname +
'"></p>')
return '\n'.join(txt)
def report(self,
pos_seqs,
all_seqs,
motives,
nbins=40,
size=(17, 2),
output_type='screen',
fname=None):
"""Report in markdown format."""
txt = []
if motives:
_, norm_cooccurence_mtx, distances = compute_cooccurence(motives)
info = '### Summary: %d motives' % len(motives)
txt.append(info)
figname = plot_cumulative_score(
self, pos_seqs, size=size, fname=fname)
txt.append(self._wrap_image(figname, output_type=output_type))
for freq, cluster_id in sorted([(motives[i]['freq'], i)
for i in motives], reverse=True):
info = ' - %.2s %s' % \
(cluster_id, motives[cluster_id]['consensus_seq'])
txt.append(info)
for freq, cluster_id in sorted([(motives[i]['freq'], i)
for i in motives], reverse=True):
info = '#### Motif id: %d' % cluster_id
txt.append(info)
logo_image, logo_txts = self.compute_logo(
cluster_id, motif=motives[cluster_id])
figname = self._save_logo(logo_image, cluster_id, fname)
for logo_txt in logo_txts:
txt.append(logo_txt)
co = motives[cluster_id]['counts']
fr = motives[cluster_id]['freq']
info = ' - num occurrences of regex: %d' % (co)
txt.append(info)
info = ' - freq of occurrences of regex: %.2f' % (fr)
txt.append(info)
av = motives[cluster_id]['avg_pos']
st = motives[cluster_id]['std_pos']
info = ' - average location: %.1f +- %.1f' % (av, st)
txt.append(info)
txt.append(self._wrap_image(figname,
fill_width=False,
output_type=output_type))
regex_i = motives[cluster_id]['regex_seq']
figname = plot_location(
regex_i, all_seqs, cluster_id=cluster_id,
nbins=nbins, size=size, fname=fname)
txt.append(self._wrap_image(figname, output_type=output_type))
for j in motives:
                    regex_i = motives[cluster_id]['regex_seq']
if j != cluster_id:
regex_j = motives[j]['regex_seq']
ds = distances[(cluster_id, j)]
info = ' - num co-occurences %d %s vs %d %s: %d' % \
(cluster_id, regex_i, j, regex_j, len(ds))
txt.append(info)
if len(ds):
figname = plot_distance(
cluster_id, j,
regex_i, regex_j,
distances,
nbins=nbins, size=size, fname=fname)
txt.append(self._wrap_image(
figname,
output_type=output_type))
txt.append('_' * 100)
else:
logger.warning(
'No motives to report. Try more permissive parameters.')
txt = '\n'.join(txt)
return txt
|
[
"pylab.close",
"sklearn.cluster.MiniBatchKMeans",
"weblogolib.jpeg_formatter",
"Bio.Seq.Seq",
"Bio.SeqIO.write",
"numpy.nan_to_num",
"weblogolib.eps_formatter",
"random.sample",
"scipy.cluster.hierarchy.linkage",
"joblib.dump",
"eden.apply_async",
"sklearn.metrics.classification_report",
"collections.defaultdict",
"pylab.figure",
"numpy.exp",
"StringIO.StringIO",
"multiprocessing.cpu_count",
"pylab.title",
"numpy.set_printoptions",
"numpy.copy",
"sklearn.linear_model.SGDClassifier",
"eden.util.iterated_maximum_subarray.compute_max_subarrays_sequence",
"pylab.draw",
"pylab.ylabel",
"weblogolib.LogoData",
"pylab.xlabel",
"collections.Counter",
"pylab.hist",
"sklearn.metrics.roc_auc_score",
"corebio.seq.SeqList",
"pylab.grid",
"numpy.hstack",
"numpy.percentile",
"numpy.sort",
"pylab.savefig",
"numpy.min",
"Bio.Align.Applications.MuscleCommandline",
"sklearn.metrics.pairwise.pairwise_kernels",
"scipy.optimize.curve_fit",
"multiprocessing.Pool",
"weblogolib.png_formatter",
"eden.util.NeedlemanWunsh.edit_distance",
"numpy.vstack",
"weblogolib.LogoFormat",
"weblogolib.LogoOptions",
"pylab.show",
"scipy.sparse.vstack",
"regex.findall",
"os.getcwd",
"numpy.dtype",
"numpy.zeros",
"eden.sequence.Vectorizer",
"time.time",
"corebio.seq.Alphabet",
"numpy.array",
"itertools.izip",
"sklearn.metrics.confusion_matrix",
"joblib.load",
"weblogolib.png_print_formatter",
"logging.getLogger",
"regex.finditer"
] |
[((1145, 1172), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1162, 1172), False, 'import logging\n'), ((2325, 2335), 'numpy.sort', 'np.sort', (['x'], {}), '(x)\n', (2332, 2335), True, 'import numpy as np\n'), ((3092, 3110), 'numpy.vstack', 'np.vstack', (['cluster'], {}), '(cluster)\n', (3101, 3110), True, 'import numpy as np\n'), ((4503, 4527), 'pylab.figure', 'plt.figure', ([], {'figsize': 'size'}), '(figsize=size)\n', (4513, 4527), True, 'import pylab as plt\n'), ((4551, 4611), 'pylab.hist', 'plt.hist', (['locs', 'nbins'], {'normed': '(0)', 'facecolor': '"""blue"""', 'alpha': '(0.3)'}), "(locs, nbins, normed=0, facecolor='blue', alpha=0.3)\n", (4559, 4611), True, 'import pylab as plt\n'), ((4625, 4635), 'pylab.grid', 'plt.grid', ([], {}), '()\n', (4633, 4635), True, 'import pylab as plt\n'), ((4640, 4657), 'pylab.title', 'plt.title', (['needle'], {}), '(needle)\n', (4649, 4657), True, 'import pylab as plt\n'), ((4662, 4684), 'pylab.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (4672, 4684), True, 'import pylab as plt\n'), ((4689, 4718), 'pylab.ylabel', 'plt.ylabel', (['"""Num occurrences"""'], {}), "('Num occurrences')\n", (4699, 4718), True, 'import pylab as plt\n'), ((4959, 4970), 'pylab.close', 'plt.close', ([], {}), '()\n', (4968, 4970), True, 'import pylab as plt\n'), ((5889, 5906), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5900, 5906), False, 'from collections import defaultdict\n'), ((6034, 6051), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6045, 6051), False, 'from collections import defaultdict\n'), ((6115, 6137), 'numpy.zeros', 'np.zeros', (['(size, size)'], {}), '((size, size))\n', (6123, 6137), True, 'import numpy as np\n'), ((7196, 7226), 'numpy.nan_to_num', 'np.nan_to_num', (['cooccurence_mtx'], {}), '(cooccurence_mtx)\n', (7209, 7226), True, 'import numpy as np\n'), ((7552, 7579), 'numpy.vstack', 'np.vstack', (['cooccurence_list'], {}), '(cooccurence_list)\n', (7561, 7579), True, 'import numpy as np\n'), ((7961, 7985), 'pylab.figure', 'plt.figure', ([], {'figsize': 'size'}), '(figsize=size)\n', (7971, 7985), True, 'import pylab as plt\n'), ((8009, 8068), 'pylab.hist', 'plt.hist', (['ds', 'nbins'], {'normed': '(0)', 'facecolor': '"""green"""', 'alpha': '(0.3)'}), "(ds, nbins, normed=0, facecolor='green', alpha=0.3)\n", (8017, 8068), True, 'import pylab as plt\n'), ((8082, 8092), 'pylab.grid', 'plt.grid', ([], {}), '()\n', (8090, 8092), True, 'import pylab as plt\n'), ((8097, 8139), 'pylab.title', 'plt.title', (["('%s vs %s' % (regex_i, regex_j))"], {}), "('%s vs %s' % (regex_i, regex_j))\n", (8106, 8139), True, 'import pylab as plt\n'), ((8144, 8175), 'pylab.xlabel', 'plt.xlabel', (['"""Relative position"""'], {}), "('Relative position')\n", (8154, 8175), True, 'import pylab as plt\n'), ((8180, 8209), 'pylab.ylabel', 'plt.ylabel', (['"""Num occurrences"""'], {}), "('Num occurrences')\n", (8190, 8209), True, 'import pylab as plt\n'), ((8473, 8484), 'pylab.close', 'plt.close', ([], {}), '()\n', (8482, 8484), True, 'import pylab as plt\n'), ((10268, 10292), 'pylab.figure', 'plt.figure', ([], {'figsize': 'size'}), '(figsize=size)\n', (10278, 10292), True, 'import pylab as plt\n'), ((10304, 10316), 'numpy.copy', 'np.copy', (['sig'], {}), '(sig)\n', (10311, 10316), True, 'import numpy as np\n'), ((10409, 10421), 'numpy.copy', 'np.copy', (['sig'], {}), '(sig)\n', (10416, 10421), True, 'import numpy as np\n'), ((10508, 10518), 'pylab.grid', 'plt.grid', ([], {}), '()\n', 
(10516, 10518), True, 'import pylab as plt\n'), ((10523, 10545), 'pylab.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (10533, 10545), True, 'import pylab as plt\n'), ((10550, 10580), 'pylab.ylabel', 'plt.ylabel', (['"""Importance score"""'], {}), "('Importance score')\n", (10560, 10580), True, 'import pylab as plt\n'), ((10813, 10824), 'pylab.close', 'plt.close', ([], {}), '()\n', (10822, 10824), True, 'import pylab as plt\n'), ((11519, 11530), 'time.time', 'time.time', ([], {}), '()\n', (11528, 11530), False, 'import time\n'), ((11883, 11894), 'time.time', 'time.time', ([], {}), '()\n', (11892, 11894), False, 'import time\n'), ((12346, 12362), 'scipy.sparse.vstack', 'vstack', (['matrices'], {}), '(matrices)\n', (12352, 12362), False, 'from scipy.sparse import vstack\n'), ((12672, 12683), 'time.time', 'time.time', ([], {}), '()\n', (12681, 12683), False, 'import time\n'), ((12698, 12715), 'numpy.array', 'np.array', (['[1, -1]'], {}), '([1, -1])\n', (12706, 12715), True, 'import numpy as np\n'), ((13229, 13240), 'time.time', 'time.time', ([], {}), '()\n', (13238, 13240), False, 'import time\n'), ((14286, 14297), 'time.time', 'time.time', ([], {}), '()\n', (14295, 14297), False, 'import time\n'), ((14826, 14837), 'time.time', 'time.time', ([], {}), '()\n', (14835, 14837), False, 'import time\n'), ((15735, 15751), 'numpy.hstack', 'np.hstack', (['preds'], {}), '(preds)\n', (15744, 15751), True, 'import numpy as np\n'), ((15771, 15794), 'numpy.hstack', 'np.hstack', (['binary_preds'], {}), '(binary_preds)\n', (15780, 15794), True, 'import numpy as np\n'), ((15814, 15837), 'numpy.hstack', 'np.hstack', (['true_targets'], {}), '(true_targets)\n', (15823, 15837), True, 'import numpy as np\n'), ((17406, 17417), 'time.time', 'time.time', ([], {}), '()\n', (17415, 17417), False, 'import time\n'), ((17867, 17878), 'time.time', 'time.time', ([], {}), '()\n', (17876, 17878), False, 'import time\n'), ((18805, 18816), 'time.time', 'time.time', ([], {}), '()\n', (18814, 18816), False, 'import time\n'), ((19197, 19208), 'time.time', 'time.time', ([], {}), '()\n', (19206, 19208), False, 'import time\n'), ((1613, 1623), 'numpy.sort', 'np.sort', (['x'], {}), '(x)\n', (1620, 1623), True, 'import numpy as np\n'), ((3198, 3210), 'collections.Counter', 'Counter', (['row'], {}), '(row)\n', (3205, 3210), False, 'from collections import Counter\n'), ((3305, 3317), 'collections.Counter', 'Counter', (['row'], {}), '(row)\n', (3312, 3317), False, 'from collections import Counter\n'), ((3565, 3603), 'regex.findall', 're.findall', (['needle', 's'], {'overlapped': '(True)'}), '(needle, s, overlapped=True)\n', (3575, 3603), True, 'import regex as re\n'), ((4358, 4380), 'regex.finditer', 're.finditer', (['needle', 's'], {}), '(needle, s)\n', (4369, 4380), True, 'import regex as re\n'), ((4741, 4751), 'pylab.draw', 'plt.draw', ([], {}), '()\n', (4749, 4751), True, 'import pylab as plt\n'), ((4816, 4889), 'pylab.savefig', 'plt.savefig', (['figname'], {'bbox_inches': '"""tight"""', 'transparent': '(True)', 'pad_inches': '(0)'}), "(figname, bbox_inches='tight', transparent=True, pad_inches=0)\n", (4827, 4889), True, 'import pylab as plt\n'), ((4944, 4954), 'pylab.show', 'plt.show', ([], {}), '()\n', (4952, 4954), True, 'import pylab as plt\n'), ((5121, 5143), 'regex.finditer', 're.finditer', (['needle', 's'], {}), '(needle, s)\n', (5132, 5143), True, 'import regex as re\n'), ((5293, 5316), 'numpy.percentile', 'np.percentile', (['locs', '(50)'], {}), '(locs, 50)\n', (5306, 5316), True, 'import numpy as np\n'), 
((6305, 6322), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6316, 6322), False, 'from collections import defaultdict\n'), ((8232, 8242), 'pylab.draw', 'plt.draw', ([], {}), '()\n', (8240, 8242), True, 'import pylab as plt\n'), ((8330, 8403), 'pylab.savefig', 'plt.savefig', (['figname'], {'bbox_inches': '"""tight"""', 'transparent': '(True)', 'pad_inches': '(0)'}), "(figname, bbox_inches='tight', transparent=True, pad_inches=0)\n", (8341, 8403), True, 'import pylab as plt\n'), ((8458, 8468), 'pylab.show', 'plt.show', ([], {}), '()\n', (8466, 8468), True, 'import pylab as plt\n'), ((8701, 8753), 'numpy.min', 'np.min', (['sig[i - half_windw_size:i + half_windw_size]'], {}), '(sig[i - half_windw_size:i + half_windw_size])\n', (8707, 8753), True, 'import numpy as np\n'), ((9310, 9326), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (9318, 9326), True, 'import numpy as np\n'), ((9655, 9669), 'numpy.array', 'np.array', (['sigs'], {}), '(sigs)\n', (9663, 9669), True, 'import numpy as np\n'), ((10603, 10613), 'pylab.draw', 'plt.draw', ([], {}), '()\n', (10611, 10613), True, 'import pylab as plt\n'), ((10670, 10743), 'pylab.savefig', 'plt.savefig', (['figname'], {'bbox_inches': '"""tight"""', 'transparent': '(True)', 'pad_inches': '(0)'}), "(figname, bbox_inches='tight', transparent=True, pad_inches=0)\n", (10681, 10743), True, 'import pylab as plt\n'), ((10798, 10808), 'pylab.show', 'plt.show', ([], {}), '()\n', (10806, 10808), True, 'import pylab as plt\n'), ((11567, 11576), 'multiprocessing.Pool', 'mp.Pool', ([], {}), '()\n', (11574, 11576), True, 'import multiprocessing as mp\n'), ((11602, 11617), 'multiprocessing.Pool', 'mp.Pool', (['n_jobs'], {}), '(n_jobs)\n', (11609, 11617), True, 'import multiprocessing as mp\n'), ((11634, 11696), 'eden.apply_async', 'apply_async', (['pool', 'serial_pre_process'], {'args': '(seqs, vectorizer)'}), '(pool, serial_pre_process, args=(seqs, vectorizer))\n', (11645, 11696), False, 'from eden import apply_async\n'), ((11974, 11985), 'time.time', 'time.time', ([], {}), '()\n', (11983, 11985), False, 'import time\n'), ((12752, 12761), 'multiprocessing.Pool', 'mp.Pool', ([], {}), '()\n', (12759, 12761), True, 'import multiprocessing as mp\n'), ((12787, 12802), 'multiprocessing.Pool', 'mp.Pool', (['n_jobs'], {}), '(n_jobs)\n', (12794, 12802), True, 'import multiprocessing as mp\n'), ((12823, 12885), 'eden.apply_async', 'apply_async', (['pool', 'serial_pre_process'], {'args': '(seqs, vectorizer)'}), '(pool, serial_pre_process, args=(seqs, vectorizer))\n', (12834, 12885), False, 'from eden import apply_async\n'), ((12980, 13042), 'eden.apply_async', 'apply_async', (['pool', 'serial_pre_process'], {'args': '(seqs, vectorizer)'}), '(pool, serial_pre_process, args=(seqs, vectorizer))\n', (12991, 13042), False, 'from eden import apply_async\n'), ((13272, 13302), 'itertools.izip', 'izip', (['pos_results', 'neg_results'], {}), '(pos_results, neg_results)\n', (13276, 13302), False, 'from itertools import izip\n'), ((13330, 13341), 'time.time', 'time.time', ([], {}), '()\n', (13339, 13341), False, 'import time\n'), ((13510, 13521), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (13518, 13521), True, 'import numpy as np\n'), ((13544, 13586), 'scipy.sparse.vstack', 'vstack', (['[pos_data_matrix, neg_data_matrix]'], {}), '([pos_data_matrix, neg_data_matrix])\n', (13550, 13586), False, 'from scipy.sparse import vstack\n'), ((14334, 14343), 'multiprocessing.Pool', 'mp.Pool', ([], {}), '()\n', (14341, 14343), True, 'import multiprocessing as 
mp\n'), ((14369, 14384), 'multiprocessing.Pool', 'mp.Pool', (['n_jobs'], {}), '(n_jobs)\n', (14376, 14384), True, 'import multiprocessing as mp\n'), ((14405, 14467), 'eden.apply_async', 'apply_async', (['pool', 'serial_pre_process'], {'args': '(seqs, vectorizer)'}), '(pool, serial_pre_process, args=(seqs, vectorizer))\n', (14416, 14467), False, 'from eden import apply_async\n'), ((14562, 14624), 'eden.apply_async', 'apply_async', (['pool', 'serial_pre_process'], {'args': '(seqs, vectorizer)'}), '(pool, serial_pre_process, args=(seqs, vectorizer))\n', (14573, 14624), False, 'from eden import apply_async\n'), ((14928, 14958), 'itertools.izip', 'izip', (['pos_results', 'neg_results'], {}), '(pos_results, neg_results)\n', (14932, 14958), False, 'from itertools import izip\n'), ((14986, 14997), 'time.time', 'time.time', ([], {}), '()\n', (14995, 14997), False, 'import time\n'), ((15166, 15177), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (15174, 15177), True, 'import numpy as np\n'), ((15231, 15273), 'scipy.sparse.vstack', 'vstack', (['[pos_data_matrix, neg_data_matrix]'], {}), '([pos_data_matrix, neg_data_matrix])\n', (15237, 15273), False, 'from scipy.sparse import vstack\n'), ((16296, 16455), 'eden.util.iterated_maximum_subarray.compute_max_subarrays_sequence', 'compute_max_subarrays_sequence', ([], {'seq': 'seq', 'score': 'score', 'min_subarray_size': 'min_subarray_size', 'max_subarray_size': 'max_subarray_size', 'margin': '(1)', 'output': '"""all"""'}), "(seq=seq, score=score, min_subarray_size=\n min_subarray_size, max_subarray_size=max_subarray_size, margin=1,\n output='all')\n", (16326, 16455), False, 'from eden.util.iterated_maximum_subarray import compute_max_subarrays_sequence\n'), ((17454, 17463), 'multiprocessing.Pool', 'mp.Pool', ([], {}), '()\n', (17461, 17463), True, 'import multiprocessing as mp\n'), ((17489, 17504), 'multiprocessing.Pool', 'mp.Pool', (['n_jobs'], {}), '(n_jobs)\n', (17496, 17504), True, 'import multiprocessing as mp\n'), ((17521, 17633), 'eden.apply_async', 'apply_async', (['pool', 'serial_subarray'], {'args': '(seqs, vectorizer, estimator, min_subarray_size, max_subarray_size)'}), '(pool, serial_subarray, args=(seqs, vectorizer, estimator,\n min_subarray_size, max_subarray_size))\n', (17532, 17633), False, 'from eden import apply_async\n'), ((17965, 17976), 'time.time', 'time.time', ([], {}), '()\n', (17974, 17976), False, 'import time\n'), ((18853, 18862), 'multiprocessing.Pool', 'mp.Pool', ([], {}), '()\n', (18860, 18862), True, 'import multiprocessing as mp\n'), ((18888, 18903), 'multiprocessing.Pool', 'mp.Pool', (['n_jobs'], {}), '(n_jobs)\n', (18895, 18903), True, 'import multiprocessing as mp\n'), ((18920, 18987), 'eden.apply_async', 'apply_async', (['pool', 'serial_score'], {'args': '(seqs, vectorizer, estimator)'}), '(pool, serial_score, args=(seqs, vectorizer, estimator))\n', (18931, 18987), False, 'from eden import apply_async\n'), ((19292, 19303), 'time.time', 'time.time', ([], {}), '()\n', (19301, 19303), False, 'import time\n'), ((21299, 21309), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (21307, 21309), False, 'from StringIO import StringIO\n'), ((21318, 21367), 'Bio.SeqIO.write', 'SeqIO.write', (['instances_seqrecord', 'handle', '"""fasta"""'], {}), "(instances_seqrecord, handle, 'fasta')\n", (21329, 21367), False, 'from Bio import SeqIO\n'), ((21672, 21699), 'Bio.Align.Applications.MuscleCommandline', 'MuscleCommandline', ([], {}), '(**params)\n', (21689, 21699), False, 'from Bio.Align.Applications import MuscleCommandline\n'), 
((23594, 23611), 'weblogolib.LogoOptions', 'wbl.LogoOptions', ([], {}), '()\n', (23609, 23611), True, 'import weblogolib as wbl\n'), ((25205, 25248), 'corebio.seq.SeqList', 'SeqList', ([], {'alist': 'instances', 'alphabet': 'alphabet'}), '(alist=instances, alphabet=alphabet)\n', (25212, 25248), False, 'from corebio.seq import Alphabet, SeqList\n'), ((25322, 25356), 'weblogolib.LogoFormat', 'wbl.LogoFormat', (['data', 'self.options'], {}), '(data, self.options)\n', (25336, 25356), True, 'import weblogolib as wbl\n'), ((26088, 26118), 'sklearn.linear_model.SGDClassifier', 'SGDClassifier', ([], {'warm_start': '(True)'}), '(warm_start=True)\n', (26101, 26118), False, 'from sklearn.linear_model import SGDClassifier\n'), ((26153, 26168), 'sklearn.linear_model.SGDClassifier', 'SGDClassifier', ([], {}), '()\n', (26166, 26168), False, 'from sklearn.linear_model import SGDClassifier\n'), ((26197, 26214), 'sklearn.cluster.MiniBatchKMeans', 'MiniBatchKMeans', ([], {}), '()\n', (26212, 26214), False, 'from sklearn.cluster import MiniBatchKMeans\n'), ((26665, 26727), 'eden.sequence.Vectorizer', 'Vectorizer', ([], {'complexity': 'complexity', 'auto_weights': '(True)', 'nbits': '(15)'}), '(complexity=complexity, auto_weights=True, nbits=15)\n', (26675, 26727), False, 'from eden.sequence import Vectorizer\n'), ((27018, 27059), 'joblib.dump', 'joblib.dump', (['self', 'model_name'], {'compress': '(1)'}), '(self, model_name, compress=1)\n', (27029, 27059), False, 'import joblib\n'), ((34655, 34719), 'sklearn.metrics.pairwise.pairwise_kernels', 'metrics.pairwise.pairwise_kernels', (['cluster_vecs'], {'metric': '"""linear"""'}), "(cluster_vecs, metric='linear')\n", (34688, 34719), False, 'from sklearn import metrics\n'), ((34745, 34782), 'scipy.cluster.hierarchy.linkage', 'linkage', (['gram_matrix'], {'method': '"""single"""'}), "(gram_matrix, method='single')\n", (34752, 34782), False, 'from scipy.cluster.hierarchy import linkage\n'), ((35300, 35318), 'numpy.vstack', 'np.vstack', (['cluster'], {}), '(cluster)\n', (35309, 35318), True, 'import numpy as np\n'), ((35803, 35821), 'numpy.vstack', 'np.vstack', (['cluster'], {}), '(cluster)\n', (35812, 35821), True, 'import numpy as np\n'), ((45971, 45982), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (45980, 45982), False, 'import os\n'), ((1236, 1256), 'numpy.exp', 'np.exp', (['(-(x - a) / b)'], {}), '(-(x - a) / b)\n', (1242, 1256), True, 'import numpy as np\n'), ((1832, 1858), 'scipy.optimize.curve_fit', 'curve_fit', (['sigmoid', 'xs', 'ys'], {}), '(sigmoid, xs, ys)\n', (1841, 1858), False, 'from scipy.optimize import curve_fit\n'), ((5335, 5358), 'numpy.percentile', 'np.percentile', (['locs', '(70)'], {}), '(locs, 70)\n', (5348, 5358), True, 'import numpy as np\n'), ((5361, 5384), 'numpy.percentile', 'np.percentile', (['locs', '(30)'], {}), '(locs, 30)\n', (5374, 5384), True, 'import numpy as np\n'), ((7449, 7468), 'numpy.zeros', 'np.zeros', (['row.shape'], {}), '(row.shape)\n', (7457, 7468), True, 'import numpy as np\n'), ((12073, 12084), 'time.time', 'time.time', ([], {}), '()\n', (12082, 12084), False, 'import time\n'), ((12119, 12130), 'time.time', 'time.time', ([], {}), '()\n', (12128, 12130), False, 'import time\n'), ((13667, 13678), 'time.time', 'time.time', ([], {}), '()\n', (13676, 13678), False, 'import time\n'), ((13713, 13724), 'time.time', 'time.time', ([], {}), '()\n', (13722, 13724), False, 'import time\n'), ((15468, 15479), 'time.time', 'time.time', ([], {}), '()\n', (15477, 15479), False, 'import time\n'), ((15514, 15525), 'time.time', 
'time.time', ([], {}), '()\n', (15523, 15525), False, 'import time\n'), ((18069, 18080), 'time.time', 'time.time', ([], {}), '()\n', (18078, 18080), False, 'import time\n'), ((18115, 18126), 'time.time', 'time.time', ([], {}), '()\n', (18124, 18126), False, 'import time\n'), ((19377, 19388), 'time.time', 'time.time', ([], {}), '()\n', (19386, 19388), False, 'import time\n'), ((19423, 19434), 'time.time', 'time.time', ([], {}), '()\n', (19432, 19434), False, 'import time\n'), ((25000, 25016), 'corebio.seq.Alphabet', 'Alphabet', (['"""ACGU"""'], {}), "('ACGU')\n", (25008, 25016), False, 'from corebio.seq import Alphabet, SeqList\n'), ((25417, 25448), 'weblogolib.png_formatter', 'wbl.png_formatter', (['data', 'format'], {}), '(data, format)\n', (25434, 25448), True, 'import weblogolib as wbl\n'), ((28240, 28282), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_test', 'y_binary'], {}), '(y_test, y_binary)\n', (28264, 28282), False, 'from sklearn import metrics\n'), ((28295, 28327), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(2)'}), '(precision=2)\n', (28314, 28327), True, 'import numpy as np\n'), ((31111, 31137), 'scipy.optimize.curve_fit', 'curve_fit', (['sigmoid', 'xs', 'ys'], {}), '(sigmoid, xs, ys)\n', (31120, 31137), False, 'from scipy.optimize import curve_fit\n'), ((32189, 32216), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (32214, 32216), False, 'import multiprocessing\n'), ((32614, 32625), 'time.time', 'time.time', ([], {}), '()\n', (32623, 32625), False, 'import time\n'), ((33118, 33135), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (33129, 33135), False, 'from collections import defaultdict\n'), ((35396, 35408), 'collections.Counter', 'Counter', (['row'], {}), '(row)\n', (35403, 35408), False, 'from collections import Counter\n'), ((35927, 35939), 'collections.Counter', 'Counter', (['row'], {}), '(row)\n', (35934, 35939), False, 'from collections import Counter\n'), ((36844, 36876), 'random.sample', 'random.sample', (['seqs', 'sample_size'], {}), '(seqs, sample_size)\n', (36857, 36876), False, 'import random\n'), ((37640, 37672), 'random.sample', 'random.sample', (['seqs', 'sample_size'], {}), '(seqs, sample_size)\n', (37653, 37672), False, 'import random\n'), ((38970, 38981), 'time.time', 'time.time', ([], {}), '()\n', (38979, 38981), False, 'import time\n'), ((3028, 3041), 'numpy.dtype', 'np.dtype', (['"""a"""'], {}), "('a')\n", (3036, 3041), True, 'import numpy as np\n'), ((11806, 11817), 'time.time', 'time.time', ([], {}), '()\n', (11815, 11817), False, 'import time\n'), ((13156, 13167), 'time.time', 'time.time', ([], {}), '()\n', (13165, 13167), False, 'import time\n'), ((14738, 14749), 'time.time', 'time.time', ([], {}), '()\n', (14747, 14749), False, 'import time\n'), ((17791, 17802), 'time.time', 'time.time', ([], {}), '()\n', (17800, 17802), False, 'import time\n'), ((19121, 19132), 'time.time', 'time.time', ([], {}), '()\n', (19130, 19132), False, 'import time\n'), ((25094, 25126), 'corebio.seq.Alphabet', 'Alphabet', (['"""ACDEFGHIKLMNPQRSTVWY"""'], {}), "('ACDEFGHIKLMNPQRSTVWY')\n", (25102, 25126), False, 'from corebio.seq import Alphabet, SeqList\n'), ((25164, 25180), 'corebio.seq.Alphabet', 'Alphabet', (['"""AGCT"""'], {}), "('AGCT')\n", (25172, 25180), False, 'from corebio.seq import Alphabet, SeqList\n'), ((25264, 25278), 'weblogolib.LogoData', 'wbl.LogoData', ([], {}), '()\n', (25276, 25278), True, 'import weblogolib as wbl\n'), ((25516, 25553), 
'weblogolib.png_print_formatter', 'wbl.png_print_formatter', (['data', 'format'], {}), '(data, format)\n', (25539, 25553), True, 'import weblogolib as wbl\n'), ((27135, 27151), 'joblib.load', 'joblib.load', (['obj'], {}), '(obj)\n', (27146, 27151), False, 'import joblib\n'), ((28498, 28545), 'sklearn.metrics.classification_report', 'metrics.classification_report', (['y_test', 'y_binary'], {}), '(y_test, y_binary)\n', (28527, 28545), False, 'from sklearn import metrics\n'), ((33004, 33015), 'time.time', 'time.time', ([], {}), '()\n', (33013, 33015), False, 'import time\n'), ((34586, 34608), 'eden.sequence.Vectorizer', 'Vectorizer', (['complexity'], {}), '(complexity)\n', (34596, 34608), False, 'from eden.sequence import Vectorizer\n'), ((21246, 21267), 'Bio.Seq.Seq', 'Seq', (['j', 'self.alphabet'], {}), '(j, self.alphabet)\n', (21249, 21267), False, 'from Bio.Seq import Seq\n'), ((25616, 25648), 'weblogolib.jpeg_formatter', 'wbl.jpeg_formatter', (['data', 'format'], {}), '(data, format)\n', (25634, 25648), True, 'import weblogolib as wbl\n'), ((25682, 25713), 'weblogolib.eps_formatter', 'wbl.eps_formatter', (['data', 'format'], {}), '(data, format)\n', (25699, 25713), True, 'import weblogolib as wbl\n'), ((28605, 28642), 'sklearn.metrics.roc_auc_score', 'metrics.roc_auc_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (28626, 28642), False, 'from sklearn import metrics\n'), ((35228, 35241), 'numpy.dtype', 'np.dtype', (['"""a"""'], {}), "('a')\n", (35236, 35241), True, 'import numpy as np\n'), ((35731, 35744), 'numpy.dtype', 'np.dtype', (['"""a"""'], {}), "('a')\n", (35739, 35744), True, 'import numpy as np\n'), ((39400, 39411), 'time.time', 'time.time', ([], {}), '()\n', (39409, 39411), False, 'import time\n'), ((39974, 40017), 'eden.util.NeedlemanWunsh.edit_distance', 'edit_distance', (['seq_i', 'seq_j'], {'gap_penalty': '(-1)'}), '(seq_i, seq_j, gap_penalty=-1)\n', (39987, 40017), False, 'from eden.util.NeedlemanWunsh import edit_distance\n')]
|
from __future__ import print_function
import logging
import os
import re
import socket
import sys
import time
from process_tests import TestProcess
from process_tests import TestSocket
from process_tests import dump_on_error
from process_tests import wait_for_strings
from remote_pdb import set_trace
TIMEOUT = int(os.getenv('REMOTE_PDB_TEST_TIMEOUT', 10))
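# Each test launches this same file as a child process in 'daemon' mode and drives the resulting RemotePdb session over a TCP socket.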
def test_simple():
with TestProcess(sys.executable, __file__, 'daemon', 'test_simple') as proc:
with dump_on_error(proc.read):
wait_for_strings(proc.read, TIMEOUT,
'{a1}',
'{b1}',
'RemotePdb session open at ')
host, port = re.findall("RemotePdb session open at (.+):(.+),", proc.read())[0]
with TestSocket(socket.create_connection((host, int(port)), timeout=TIMEOUT)) as client:
with dump_on_error(client.read):
wait_for_strings(proc.read, TIMEOUT, 'accepted connection from')
wait_for_strings(client.read, TIMEOUT, "-> print('{b2}')")
client.fh.write(b'quit\r\n')
wait_for_strings(proc.read, TIMEOUT, 'DIED.')
def test_redirect():
with TestProcess(sys.executable, __file__, 'daemon', 'test_redirect') as proc:
with dump_on_error(proc.read):
wait_for_strings(proc.read, TIMEOUT,
'{a1}',
'{b1}',
'RemotePdb session open at ')
host, port = re.findall("RemotePdb session open at (.+):(.+),", proc.read())[0]
with TestSocket(socket.create_connection((host, int(port)), timeout=TIMEOUT)) as client:
with dump_on_error(client.read):
wait_for_strings(proc.read, TIMEOUT, 'accepted connection from')
wait_for_strings(client.read, TIMEOUT, "-> print('{b2}')")
client.fh.write(b'break func_a\r\n')
client.fh.write(b'continue\r\n')
wait_for_strings(client.read, TIMEOUT, 'Breakpoint', '{b2}')
wait_for_strings(client.read, TIMEOUT, "-> print('{a2}')")
client.fh.write(b'continue\r\n')
wait_for_strings(client.read, TIMEOUT, "{=>")
wait_for_strings(proc.read, TIMEOUT, 'DIED.')
assert 'Restoring streams' not in proc.read()
def test_simple_break():
with TestProcess(sys.executable, __file__, 'daemon', 'test_simple') as proc:
with dump_on_error(proc.read):
wait_for_strings(proc.read, TIMEOUT,
'{a1}',
'{b1}',
'RemotePdb session open at ')
host, port = re.findall("RemotePdb session open at (.+):(.+),", proc.read())[0]
with TestSocket(socket.create_connection((host, int(port)), timeout=TIMEOUT)) as client:
with dump_on_error(client.read):
wait_for_strings(proc.read, TIMEOUT, 'accepted connection from')
wait_for_strings(client.read, TIMEOUT, "-> print('{b2}')")
client.fh.write(b'break func_a\r\n')
client.fh.write(b'continue\r\n')
wait_for_strings(client.read, TIMEOUT, "-> print('{a2}')")
client.fh.write(b'continue\r\n')
wait_for_strings(proc.read, TIMEOUT, 'DIED.')
assert 'Restoring streams' not in proc.read()
def func_b(patch_stdstreams):
print('{b1}')
set_trace(patch_stdstreams=patch_stdstreams)
print('{b2}')
def func_a(block=lambda _: None, patch_stdstreams=False):
print('{a1}')
func_b(patch_stdstreams)
print('{a2}')
x = block('{a3} ?')
print('{=> %s}' % x)
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG,
format='%(process)d %(asctime)s,%(msecs)05d %(name)s %(levelname)s %(message)s',
datefmt="%x~%X"
)
test_name = sys.argv[2]
if test_name == 'test_simple':
func_a()
elif test_name == 'test_redirect':
func_a(patch_stdstreams=True)
time.sleep(TIMEOUT)
else:
raise RuntimeError('Invalid test spec %r.' % test_name)
logging.info('DIED.')
|
[
"remote_pdb.set_trace",
"logging.basicConfig",
"time.sleep",
"logging.info",
"process_tests.wait_for_strings",
"process_tests.dump_on_error",
"process_tests.TestProcess",
"os.getenv"
] |
[((319, 359), 'os.getenv', 'os.getenv', (['"""REMOTE_PDB_TEST_TIMEOUT"""', '(10)'], {}), "('REMOTE_PDB_TEST_TIMEOUT', 10)\n", (328, 359), False, 'import os\n'), ((3582, 3626), 'remote_pdb.set_trace', 'set_trace', ([], {'patch_stdstreams': 'patch_stdstreams'}), '(patch_stdstreams=patch_stdstreams)\n', (3591, 3626), False, 'from remote_pdb import set_trace\n'), ((3852, 3999), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(process)d %(asctime)s,%(msecs)05d %(name)s %(levelname)s %(message)s"""', 'datefmt': '"""%x~%X"""'}), "(level=logging.DEBUG, format=\n '%(process)d %(asctime)s,%(msecs)05d %(name)s %(levelname)s %(message)s',\n datefmt='%x~%X')\n", (3871, 3999), False, 'import logging\n'), ((4285, 4306), 'logging.info', 'logging.info', (['"""DIED."""'], {}), "('DIED.')\n", (4297, 4306), False, 'import logging\n'), ((391, 453), 'process_tests.TestProcess', 'TestProcess', (['sys.executable', '__file__', '"""daemon"""', '"""test_simple"""'], {}), "(sys.executable, __file__, 'daemon', 'test_simple')\n", (402, 453), False, 'from process_tests import TestProcess\n'), ((1229, 1293), 'process_tests.TestProcess', 'TestProcess', (['sys.executable', '__file__', '"""daemon"""', '"""test_redirect"""'], {}), "(sys.executable, __file__, 'daemon', 'test_redirect')\n", (1240, 1293), False, 'from process_tests import TestProcess\n'), ((2471, 2533), 'process_tests.TestProcess', 'TestProcess', (['sys.executable', '__file__', '"""daemon"""', '"""test_simple"""'], {}), "(sys.executable, __file__, 'daemon', 'test_simple')\n", (2482, 2533), False, 'from process_tests import TestProcess\n'), ((476, 500), 'process_tests.dump_on_error', 'dump_on_error', (['proc.read'], {}), '(proc.read)\n', (489, 500), False, 'from process_tests import dump_on_error\n'), ((514, 600), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""{a1}"""', '"""{b1}"""', '"""RemotePdb session open at """'], {}), "(proc.read, TIMEOUT, '{a1}', '{b1}',\n 'RemotePdb session open at ')\n", (530, 600), False, 'from process_tests import wait_for_strings\n'), ((1151, 1196), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""DIED."""'], {}), "(proc.read, TIMEOUT, 'DIED.')\n", (1167, 1196), False, 'from process_tests import wait_for_strings\n'), ((1316, 1340), 'process_tests.dump_on_error', 'dump_on_error', (['proc.read'], {}), '(proc.read)\n', (1329, 1340), False, 'from process_tests import dump_on_error\n'), ((1354, 1440), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""{a1}"""', '"""{b1}"""', '"""RemotePdb session open at """'], {}), "(proc.read, TIMEOUT, '{a1}', '{b1}',\n 'RemotePdb session open at ')\n", (1370, 1440), False, 'from process_tests import wait_for_strings\n'), ((2331, 2376), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""DIED."""'], {}), "(proc.read, TIMEOUT, 'DIED.')\n", (2347, 2376), False, 'from process_tests import wait_for_strings\n'), ((2556, 2580), 'process_tests.dump_on_error', 'dump_on_error', (['proc.read'], {}), '(proc.read)\n', (2569, 2580), False, 'from process_tests import dump_on_error\n'), ((2594, 2680), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""{a1}"""', '"""{b1}"""', '"""RemotePdb session open at """'], {}), "(proc.read, TIMEOUT, '{a1}', '{b1}',\n 'RemotePdb session open at ')\n", (2610, 2680), False, 'from process_tests import wait_for_strings\n'), ((3424, 3469), 'process_tests.wait_for_strings', 
'wait_for_strings', (['proc.read', 'TIMEOUT', '"""DIED."""'], {}), "(proc.read, TIMEOUT, 'DIED.')\n", (3440, 3469), False, 'from process_tests import wait_for_strings\n'), ((4187, 4206), 'time.sleep', 'time.sleep', (['TIMEOUT'], {}), '(TIMEOUT)\n', (4197, 4206), False, 'import time\n'), ((898, 924), 'process_tests.dump_on_error', 'dump_on_error', (['client.read'], {}), '(client.read)\n', (911, 924), False, 'from process_tests import dump_on_error\n'), ((946, 1010), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""accepted connection from"""'], {}), "(proc.read, TIMEOUT, 'accepted connection from')\n", (962, 1010), False, 'from process_tests import wait_for_strings\n'), ((1031, 1089), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""-> print(\'{b2}\')"""'], {}), '(client.read, TIMEOUT, "-> print(\'{b2}\')")\n', (1047, 1089), False, 'from process_tests import wait_for_strings\n'), ((1738, 1764), 'process_tests.dump_on_error', 'dump_on_error', (['client.read'], {}), '(client.read)\n', (1751, 1764), False, 'from process_tests import dump_on_error\n'), ((1786, 1850), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""accepted connection from"""'], {}), "(proc.read, TIMEOUT, 'accepted connection from')\n", (1802, 1850), False, 'from process_tests import wait_for_strings\n'), ((1871, 1929), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""-> print(\'{b2}\')"""'], {}), '(client.read, TIMEOUT, "-> print(\'{b2}\')")\n', (1887, 1929), False, 'from process_tests import wait_for_strings\n'), ((2060, 2120), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""Breakpoint"""', '"""{b2}"""'], {}), "(client.read, TIMEOUT, 'Breakpoint', '{b2}')\n", (2076, 2120), False, 'from process_tests import wait_for_strings\n'), ((2141, 2199), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""-> print(\'{a2}\')"""'], {}), '(client.read, TIMEOUT, "-> print(\'{a2}\')")\n', (2157, 2199), False, 'from process_tests import wait_for_strings\n'), ((2273, 2318), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""{=>"""'], {}), "(client.read, TIMEOUT, '{=>')\n", (2289, 2318), False, 'from process_tests import wait_for_strings\n'), ((2978, 3004), 'process_tests.dump_on_error', 'dump_on_error', (['client.read'], {}), '(client.read)\n', (2991, 3004), False, 'from process_tests import dump_on_error\n'), ((3026, 3090), 'process_tests.wait_for_strings', 'wait_for_strings', (['proc.read', 'TIMEOUT', '"""accepted connection from"""'], {}), "(proc.read, TIMEOUT, 'accepted connection from')\n", (3042, 3090), False, 'from process_tests import wait_for_strings\n'), ((3111, 3169), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""-> print(\'{b2}\')"""'], {}), '(client.read, TIMEOUT, "-> print(\'{b2}\')")\n', (3127, 3169), False, 'from process_tests import wait_for_strings\n'), ((3300, 3358), 'process_tests.wait_for_strings', 'wait_for_strings', (['client.read', 'TIMEOUT', '"""-> print(\'{a2}\')"""'], {}), '(client.read, TIMEOUT, "-> print(\'{a2}\')")\n', (3316, 3358), False, 'from process_tests import wait_for_strings\n')]
|
import yaml
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import os
import torch
import torchvision
import matplotlib.pyplot as plt
import seaborn as sns
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
import torchvision.transforms as transforms
from torchvision import datasets,models
import math
import torch.optim as optim
from torch.optim import lr_scheduler
import copy
import time
from PIL import Image
from datetime import datetime
from utils import *
data_dir = '.'
test_path = os.path.join(data_dir, 'test')
sample_sub = pd.read_csv(os.path.join(data_dir, 'sample_submission.csv'))
sample_sub['path'] = sample_sub['file_name'].apply(lambda x: os.path.join(test_path, x))
# Get configs from config file
with open("config.yaml", 'r') as stream:
    config_dict = yaml.safe_load(stream)
batch_size = config_dict['batch_size']
learning_rate = config_dict['lr']
model_pth = config_dict['model_pth']
train_data = config_dict['train_data']
valid_data = config_dict['valid_data']
test_data = config_dict['test_data']
# Apply transforms
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
data_transforms = {
'train': transforms.Compose([
transforms.Resize((230, 230)),
transforms.RandomRotation(30,),
transforms.RandomCrop(224),
transforms.RandomHorizontalFlip(),
transforms.RandomVerticalFlip(),
transforms.ToTensor(),
normalize
]),
'valid': transforms.Compose([
transforms.Resize((400, 400)),
transforms.CenterCrop((224, 224)),
transforms.ToTensor(),
normalize
]),
'test': transforms.Compose([
transforms.Resize((224, 224)),
transforms.ToTensor(),
normalize
]),
}
# Load dataloaders
image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x),
data_transforms[x])
for x in ['train', 'valid']}
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=batch_size, shuffle= True, num_workers=0)
for x in ['train', 'valid']}
dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'valid']}
class_names = image_datasets['train'].classes
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Trains Model
def train_model2(model, criterion, optimizer,
num_epochs=3, dataloaders= dataloaders, print_progress=False):
"""
:param model: Model type object
:param criterion: Loss function
:param optimizer: Optimizer
:param num_epochs: Number of epochs
:param dataloaders: Dataloaders, must be a dictionary having train and val as keys
:param print_progress: prints progress if true
:return: trained model object
"""
min_val_loss = np.Inf
best_model_wts = copy.deepcopy(model.state_dict())
since = time.time()
best_epoch = -1
for epoch in range(num_epochs):
valid_loss = 0.0
train_loss = 0.0
model.train()
running_corrects = 0
for iter1, (inputs, labels) in enumerate(dataloaders['train']):
inputs = inputs.to(device)
inputs = inputs.type(torch.float)
labels = labels.to(device)
labels = labels.type(torch.long)
optimizer.zero_grad()
out = model(inputs)
_, preds = torch.max(out, 1)
# out = torch.mul(out,100)
loss = criterion(out, labels)
loss.backward()
optimizer.step()
train_loss += loss.item() * inputs.size(0)
# running_corrects += torch.sum(preds == labels.data)
if print_progress:
print(
f"Epoch: {epoch}\t{100 * (iter1 + 1) / len(dataloaders['train']):.2f}" + '%',
end='\r')
else:
print()
with torch.no_grad():
model.eval()
for iter2, (inputs, labels) in enumerate(dataloaders['valid']):
inputs = inputs.to(device)
inputs = inputs.type(torch.float)
labels = labels.to(device)
labels = labels.type(torch.long)
output1 = model(inputs)
_, preds1 = torch.max(output1, 1)
# output1 = torch.mul(output1,100).to(device)
loss = criterion(output1, labels)
valid_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds1 == labels.data)
print(
f'Epoch: {epoch}\t{100 * (iter2 + 1) / len(dataloaders["valid"]):.2f} %',
end='\r')
        len_train1 = len(dataloaders['train'].dataset)
len_val1 = len(dataloaders['valid'].dataset)
train_loss = train_loss / len_train1
valid_loss = valid_loss / len_val1
if print_progress:
print(
f'\nEpoch: {epoch + 1} \tTraining Loss: {math.sqrt(train_loss):.4f} \tValidation Loss: {math.sqrt(valid_loss):.4f}')
time_elapsed = time.time() - since
print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
print(f'Accuracy : {100 * running_corrects / len_val1} %')
if valid_loss < min_val_loss:
min_val_loss = valid_loss
best_epoch = epoch
best_model_wts = copy.deepcopy(model.state_dict())
print('Best val Loss: {:4f}'.format(math.sqrt(min_val_loss)))
print(f'Epoch completed: {epoch+1}')
print(f'Best Epoch: {best_epoch+1}')
model.load_state_dict(best_model_wts)
return model
def process_image(img_path):
"""
:param img_path: Path of image to be processed
:returns processed numpy array
Scales, crops, and normalizes a PIL image for a PyTorch model,
returns a Numpy array
"""
img = Image.open(img_path)
# Resize
if img.size[0] > img.size[1]:
img.thumbnail((10000, 256))
else:
img.thumbnail((256, 10000))
# Crop Image
left_margin = (img.width - 224) / 2
bottom_margin = (img.height - 224) / 2
right_margin = left_margin + 224
top_margin = bottom_margin + 224
img = img.crop((left_margin, bottom_margin, right_margin,
top_margin))
# Normalize
img = np.array(img) / 255
mean = np.array([0.485, 0.456, 0.406]) # provided mean
std = np.array([0.229, 0.224, 0.225]) # provided std
img = (img - mean) / std
return img
# Load test dataset from class defined in utils
test_dataset = TestDataset(test_path, sample_sub, data_transforms['test'])
test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size, shuffle=False)
# Load Class to idx dictionary
class_to_idx = image_datasets['valid'].class_to_idx
idx_to_class = {val: key for key, val in class_to_idx.items()}
def predict(model_path, dataloader, print_progress=False):
"""
:param model_path: Path of Model used for prediction
:param dataloader: Test DataLoader
:param print_progress: Prints progress if True
:return: Prediction(as a list) on test folder defined by config file
"""
model = torch.load(model_path)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
model.eval()
predictions = {}
with torch.no_grad():
for ii, (images, _, img_names) in enumerate(dataloader, start=1):
if print_progress:
if ii % 5 == 0:
print('Batch {}/{}'.format(ii, len(dataloader)))
images = images.to(device)
logps = model(images)
ps = torch.exp(logps)
# Top indices
_, top_indices = ps.topk(1)
top_indices = top_indices.detach().cpu().numpy().tolist()
# Convert indices to classes
top_classes = [idx_to_class[idx[0]] for idx in top_indices]
# print("Img:" ,img_names)
for i, img_name in enumerate(img_names):
predictions[img_name] = top_classes[i]
print('\nPrediction Generation Completed')
return predictions
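# Minimal usage sketch, assuming TestDataset yields bare file names and that a
# single prediction column keyed on 'file_name' is wanted; the column name
# 'label' and the CSV name below are illustrative, not taken from the config.
if __name__ == '__main__':
    predictions = predict(model_pth, test_loader, print_progress=True)
    submission = sample_sub.copy()
    # map each test file name to its predicted class; names missing from the
    # prediction dict stay NaN and can be inspected before submitting
    submission['label'] = submission['file_name'].map(predictions)
    submission.drop(columns=['path']).to_csv('submission.csv', index=False)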
|
[
"yaml.safe_load",
"torchvision.transforms.Normalize",
"torch.no_grad",
"os.path.join",
"torch.utils.data.DataLoader",
"torchvision.transforms.RandomRotation",
"torch.load",
"torch.exp",
"torchvision.transforms.CenterCrop",
"torchvision.transforms.RandomHorizontalFlip",
"math.sqrt",
"torch.cuda.is_available",
"torch.max",
"torch.sum",
"torchvision.transforms.RandomCrop",
"torchvision.transforms.Resize",
"torchvision.transforms.RandomVerticalFlip",
"time.time",
"PIL.Image.open",
"numpy.array",
"torchvision.transforms.ToTensor"
] |
[((596, 626), 'os.path.join', 'os.path.join', (['data_dir', '"""test"""'], {}), "(data_dir, 'test')\n", (608, 626), False, 'import os\n'), ((876, 898), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (890, 898), False, 'import yaml\n'), ((1167, 1242), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (1187, 1242), True, 'import torchvision.transforms as transforms\n'), ((6924, 7003), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_dataset'], {'batch_size': 'batch_size', 'shuffle': '(False)'}), '(test_dataset, batch_size=batch_size, shuffle=False)\n', (6951, 7003), False, 'import torch\n'), ((653, 700), 'os.path.join', 'os.path.join', (['data_dir', '"""sample_submission.csv"""'], {}), "(data_dir, 'sample_submission.csv')\n", (665, 700), False, 'import os\n'), ((2101, 2203), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['image_datasets[x]'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': '(0)'}), '(image_datasets[x], batch_size=batch_size,\n shuffle=True, num_workers=0)\n', (2128, 2203), False, 'import torch\n'), ((3033, 3044), 'time.time', 'time.time', ([], {}), '()\n', (3042, 3044), False, 'import time\n'), ((6118, 6138), 'PIL.Image.open', 'Image.open', (['img_path'], {}), '(img_path)\n', (6128, 6138), False, 'from PIL import Image\n'), ((6617, 6648), 'numpy.array', 'np.array', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (6625, 6648), True, 'import numpy as np\n'), ((6677, 6708), 'numpy.array', 'np.array', (['[0.229, 0.224, 0.225]'], {}), '([0.229, 0.224, 0.225])\n', (6685, 6708), True, 'import numpy as np\n'), ((7482, 7504), 'torch.load', 'torch.load', (['model_path'], {}), '(model_path)\n', (7492, 7504), False, 'import torch\n'), ((764, 790), 'os.path.join', 'os.path.join', (['test_path', 'x'], {}), '(test_path, x)\n', (776, 790), False, 'import os\n'), ((1944, 1969), 'os.path.join', 'os.path.join', (['data_dir', 'x'], {}), '(data_dir, x)\n', (1956, 1969), False, 'import os\n'), ((2402, 2427), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2425, 2427), False, 'import torch\n'), ((6585, 6598), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (6593, 6598), True, 'import numpy as np\n'), ((7656, 7671), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7669, 7671), False, 'import torch\n'), ((1308, 1337), 'torchvision.transforms.Resize', 'transforms.Resize', (['(230, 230)'], {}), '((230, 230))\n', (1325, 1337), True, 'import torchvision.transforms as transforms\n'), ((1348, 1377), 'torchvision.transforms.RandomRotation', 'transforms.RandomRotation', (['(30)'], {}), '(30)\n', (1373, 1377), True, 'import torchvision.transforms as transforms\n'), ((1389, 1415), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['(224)'], {}), '(224)\n', (1410, 1415), True, 'import torchvision.transforms as transforms\n'), ((1426, 1459), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (1457, 1459), True, 'import torchvision.transforms as transforms\n'), ((1470, 1501), 'torchvision.transforms.RandomVerticalFlip', 'transforms.RandomVerticalFlip', ([], {}), '()\n', (1499, 1501), True, 'import torchvision.transforms as transforms\n'), ((1512, 1533), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1531, 1533), True, 'import torchvision.transforms as 
transforms\n'), ((1607, 1636), 'torchvision.transforms.Resize', 'transforms.Resize', (['(400, 400)'], {}), '((400, 400))\n', (1624, 1636), True, 'import torchvision.transforms as transforms\n'), ((1647, 1680), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224, 224)'], {}), '((224, 224))\n', (1668, 1680), True, 'import torchvision.transforms as transforms\n'), ((1691, 1712), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1710, 1712), True, 'import torchvision.transforms as transforms\n'), ((1785, 1814), 'torchvision.transforms.Resize', 'transforms.Resize', (['(224, 224)'], {}), '((224, 224))\n', (1802, 1814), True, 'import torchvision.transforms as transforms\n'), ((1825, 1846), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1844, 1846), True, 'import torchvision.transforms as transforms\n'), ((3554, 3571), 'torch.max', 'torch.max', (['out', '(1)'], {}), '(out, 1)\n', (3563, 3571), False, 'import torch\n'), ((5261, 5272), 'time.time', 'time.time', ([], {}), '()\n', (5270, 5272), False, 'import time\n'), ((7542, 7567), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7565, 7567), False, 'import torch\n'), ((7978, 7994), 'torch.exp', 'torch.exp', (['logps'], {}), '(logps)\n', (7987, 7994), False, 'import torch\n'), ((4081, 4096), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4094, 4096), False, 'import torch\n'), ((4468, 4489), 'torch.max', 'torch.max', (['output1', '(1)'], {}), '(output1, 1)\n', (4477, 4489), False, 'import torch\n'), ((4701, 4733), 'torch.sum', 'torch.sum', (['(preds1 == labels.data)'], {}), '(preds1 == labels.data)\n', (4710, 4733), False, 'import torch\n'), ((5669, 5692), 'math.sqrt', 'math.sqrt', (['min_val_loss'], {}), '(min_val_loss)\n', (5678, 5692), False, 'import math\n'), ((5161, 5182), 'math.sqrt', 'math.sqrt', (['train_loss'], {}), '(train_loss)\n', (5170, 5182), False, 'import math\n'), ((5208, 5229), 'math.sqrt', 'math.sqrt', (['valid_loss'], {}), '(valid_loss)\n', (5217, 5229), False, 'import math\n')]
|
import asyncio
import sys
import binascii
import bitcoin.core
import pylibbitcoin.client
def block_header(client):
index = sys.argv[2]
return client.block_header(int(index))
def last_height(client):
return client.last_height()
def block_height(client):
hash = sys.argv[2]
return client.block_height(hash)
def transaction(client):
hash = sys.argv[2]
return client.transaction(hash)
def transaction_index(client):
hash = sys.argv[2]
return client.transaction_index(hash)
def block_transaction_hashes(client):
height = int(sys.argv[2])
return client.block_transaction_hashes(height)
def spend(client):
hash = sys.argv[2]
index = int(sys.argv[3])
return client.spend(hash, index)
async def subscribe_address(client):
address = sys.argv[2]
return await client.subscribe_address(address)
async def _read_from(queue):
while True:
print(await queue.get())
def unsubscribe_address(client):
address = sys.argv[2]
return client.unsubscribe_address(address)
def broadcast(client):
# Grab a raw block from https://blockchain.info/block/000000000000000000a7b4999c723ed9f308425708577c76827ade51062e135a?format=hex # noqa: E501
# This might seem odd but this is a sanity check a client should probably do. # noqa: E501
block = bitcoin.core.CBlock.deserialize(binascii.unhexlify(sys.argv[2]))
return client.broadcast(binascii.hexlify(block.serialize()))
async def history3(client):
address = sys.argv[2]
start_height = 10_000
return await client.history3(address, start_height)
commands = {
"last_height": last_height,
"block_header": block_header,
"block_height": block_height,
"transaction": transaction,
"transaction_index": transaction_index,
"spend": spend,
"subscribe_address": subscribe_address,
"unsubscribe_address": unsubscribe_address,
"broadcast": broadcast,
"history3": history3,
"block_transaction_hashes": block_transaction_hashes,
}
def main():
if len(sys.argv) < 2:
sys.exit("Usage: %s last_height|block_header|<cmd>" % sys.argv[0])
command = sys.argv[1]
if command not in commands:
sys.exit("Command can be %s" % str.join(", ", iter(commands)))
# client = pylibbitcoin.client.Client("tcp://127.0.0.1:9999", settings=pylibbitcoin.client.ClientSettings(timeout=5))
# client = pylibbitcoin.client.Client("tcp://mainnet.libbitcoin.net:9091")
client = pylibbitcoin.client.Client("mainnet.libbitcoin.net", {"query": 9091,
"heartbeat": 9092, "block": 9093, "tx": 9094})
loop = asyncio.get_event_loop()
error_code, result = loop.run_until_complete(commands[sys.argv[1]](client))
print("Error code: {}".format(error_code))
print("Result: {}".format(result))
if type(result) == asyncio.queues.Queue:
loop.run_until_complete(_read_from(result))
number_of_pending_responses = loop.run_until_complete(client.stop())
print("Number of pending responses lost: {}".format(number_of_pending_responses))
loop.close()
if __name__ == '__main__':
main()
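# Example invocations, assuming this file is saved as client.py (illustrative):
#   python client.py last_height
#   python client.py block_header 400000
#   python client.py block_transaction_hashes 400000
#   python client.py spend <txid-hex> 0
# subscribe_address returns an asyncio queue, so main() keeps printing updates
# from _read_from() until the process is interrupted.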
|
[
"binascii.unhexlify",
"asyncio.get_event_loop",
"sys.exit"
] |
[((2612, 2636), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2634, 2636), False, 'import asyncio\n'), ((1363, 1394), 'binascii.unhexlify', 'binascii.unhexlify', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (1381, 1394), False, 'import binascii\n'), ((2064, 2130), 'sys.exit', 'sys.exit', (["('Usage: %s last_height|block_header|<cmd>' % sys.argv[0])"], {}), "('Usage: %s last_height|block_header|<cmd>' % sys.argv[0])\n", (2072, 2130), False, 'import sys\n')]
|
import pyrosetta
import pandas as pd
from typing import Tuple, List, Dict, Set, Any, Optional, Sequence
from .base import BaseDocumentarian
class AttributeDocumentarian(BaseDocumentarian):
"""
Analyses a Pyrosetta object and determines what is different from default.
For example. Give a working XML script:
>>> xml_obj = pyrosetta.rosetta.protocols.rosetta_scripts.RosettaScriptsParser()
>>> protocol = xml_obj.generate_mover_and_apply_to_pose(pose, 'script.xml')
>>> protocol.apply(pose)
    One can reverse engineer it as follows:
>>> pm = protocol.get_mover(1)
>>> print(pm.mover_name()) # mover called in script!
    >>> AttributeDocumentarian(pm).compare() # -> pd.DataFrame
---------------------------
Attributes:
* target: instance
* target_cls: class
* base: The tuple of classes inherited (``__mro__``)
* uninherited: The set of attributes that are absent in the parent class
* citation: string of citation
Methods:
* describe(): describe attributes
* test(): calls the methods
* compare(): compares the results of a ``test()`` to that of a blank instance
"""
@property
def uninherited(self) -> Set[str]:
"""
The set of attributes that are absent in the parent class.
Has no idea if other were overwritten though!
:rtype: Set[str]
"""
        if len(self.base) > 1:
            return set(dir(self.base[0])) - set(dir(self.base[1]))
        return set()
def describe(self, iterable: Optional[Sequence[str]] = None) -> None:
"""
Describe attributes by calling help.
If ``iterable`` is provided, it will print only those.
"""
if iterable is None:
iterable = dir(self.target)
for methodname in iterable:
print(f'## {methodname}')
method = getattr(self.target, methodname)
help(method)
def test(self,
iterable: Optional[Sequence[str]] = None,
silent: bool = True) -> Dict[str, Any]:
"""
Calls without arguments the methods.
If ``iterable`` is provided, it will call only those.
Returns a dictionary of the results.
"""
if iterable is None:
iterable = dir(self.target)
results = {}
for methodname in iterable:
method = getattr(self.target, methodname)
try:
result = method()
results[methodname] = result
if silent is False:
print(f'Calling worked for {methodname}: {result}')
except TypeError as error:
results[methodname] = 'N/A'
if silent is False:
                    print(f'Calling failed for {methodname}: {error}')
return results
def test_uninherited(self, silent: bool = True) -> dict:
"""
Calls without arguments the methods that where not inherited.
"""
return self.test(self.uninherited, silent)
def compare(self, reference: Optional[pyrosetta.rosetta.protocols.moves.Mover] = None) -> pd.DataFrame:
"""
Tests the methods (see ``test()`` and compares them to a generic instance
or to ``reference`` if provided.
"""
c = self.test()
if reference is None:
reference = self.target_cls()
refexplorer = self.__class__(reference)
r = refexplorer.test()
return self._make_table(c, r)
def compare_uninherited(self, reference: Optional[pyrosetta.rosetta.protocols.moves.Mover] = None) -> pd.DataFrame:
"""
Tests the uninherited methods (see ``test()`` and compares them to a generic instance
or to ``reference`` if provided.
"""
c = self.test_uninherited()
if reference is None:
reference = self.target_cls()
refexplorer = self.__class__(reference)
r = refexplorer.test_uninherited()
return self._make_table(c, r)
def _make_table(self, case: Dict[str, Any], ref: Dict[str, Any]) -> pd.DataFrame:
assert case, f'make_table cannot make a table without data (case={case}, ref={ref})'
proto = [{'attribute': k,
'target': ref[k],
'reference': case[k],
'equal': str(ref[k]) == str(case[k])} for k in case.keys()]
comparison = pd.DataFrame(proto)
return comparison.set_index(['attribute'])
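# Minimal usage sketch, assuming an initialised PyRosetta session; FastRelax is
# only an example target, any default-constructible mover works the same way.
# >>> pyrosetta.init()
# >>> mover = pyrosetta.rosetta.protocols.relax.FastRelax()
# >>> doc = AttributeDocumentarian(mover)
# >>> table = doc.compare_uninherited()   # DataFrame indexed by attribute name
# >>> table[~table['equal']]              # attributes differing from a fresh instance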
|
[
"pandas.DataFrame"
] |
[((4398, 4417), 'pandas.DataFrame', 'pd.DataFrame', (['proto'], {}), '(proto)\n', (4410, 4417), True, 'import pandas as pd\n')]
|
import more_itertools as mit
import numpy as np
# Methods to do dynamic error thresholding on timeseries data
# Implementation inspired by: https://arxiv.org/pdf/1802.04431.pdf
def get_forecast_errors(y_hat,
y_true,
window_size=5,
batch_size=30,
smoothing_percent=0.05,
smoothed=True):
"""
    Calculates the forecasting error for two arrays of data. If smoothed errors are
    requested, a centred moving average (the smoothing step of the reference paper) is applied.
    Args:
        y_hat (list): forecasted values. len(y_hat)==len(y_true).
        y_true (list): true values. len(y_hat)==len(y_true).
        window_size (int): length of the error window, in batches.
        batch_size (int): number of values per batch.
        smoothing_percent (float): fraction of window_size * batch_size used as the
            moving-average half-width.
        smoothed (bool): whether the returned errors should be smoothed.
Returns:
(list): error residuals. Smoothed if specified by user.
"""
errors = [abs(y_h - y_t) for y_h, y_t in zip(y_hat, y_true)]
if not smoothed:
return errors
historical_error_window = int(window_size * batch_size * smoothing_percent)
moving_avg = []
for i in range(len(errors)):
left_window = i - historical_error_window
right_window = i + historical_error_window + 1
if left_window < 0:
left_window = 0
if right_window > len(errors):
right_window = len(errors)
moving_avg.append(np.mean(errors[left_window:right_window]))
return moving_avg
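# Worked example of the smoothing window above: with window_size=5, batch_size=30
# and smoothing_percent=0.05, historical_error_window = int(5 * 30 * 0.05) = 7,
# so each smoothed value averages up to 15 raw errors centred on the current index.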
def extract_anomalies(y_true, smoothed_errors, window_size, batch_size, error_buffer):
"""
Extracts anomalies from the errors.
    Args:
        y_true (list): observed values.
        smoothed_errors (list): smoothed forecast errors, e.g. from get_forecast_errors().
        window_size (int): length of each error window, in batches.
        batch_size (int): number of values per batch.
        error_buffer (int): number of neighbouring indices flagged around each anomaly.
    Returns:
        (tuple): list of (start, end) index pairs for anomalous sequences, and their scores.
    """
if len(y_true) <= batch_size * window_size:
raise ValueError("Window size (%s) larger than y_true (len=%s)."
% (batch_size, len(y_true)))
num_windows = int((len(y_true) - (batch_size * window_size)) / batch_size)
anomalies_indices = []
for i in range(num_windows + 1):
prev_index = i * batch_size
curr_index = (window_size * batch_size) + (i * batch_size)
        if i == num_windows:
curr_index = len(y_true)
window_smoothed_errors = smoothed_errors[prev_index:curr_index]
window_y_true = y_true[prev_index:curr_index]
epsilon, sd_threshold = compute_threshold(window_smoothed_errors, error_buffer)
window_anom_indices = get_anomalies(
window_smoothed_errors,
window_y_true,
sd_threshold,
i,
anomalies_indices,
error_buffer
)
# get anomalies from inverse of smoothed errors
# This was done in the implementation of NASA paper but
# wasn't referenced in the paper
# we get the inverse by flipping around the mean
mu = np.mean(window_smoothed_errors)
smoothed_errors_inv = [mu + (mu - e) for e in window_smoothed_errors]
epsilon_inv, sd_inv = compute_threshold(smoothed_errors_inv, error_buffer)
inv_anom_indices = get_anomalies(
smoothed_errors_inv,
window_y_true,
sd_inv,
i,
anomalies_indices,
len(y_true)
)
anomalies_indices = list(set(anomalies_indices + inv_anom_indices))
anomalies_indices.extend([i_a + i * batch_size for i_a in window_anom_indices])
# group anomalies
anomalies_indices = sorted(list(set(anomalies_indices)))
anomalies_groups = [list(group) for group in mit.consecutive_groups(anomalies_indices)]
anomaly_sequences = [(g[0], g[-1]) for g in anomalies_groups if not g[0] == g[-1]]
# generate "scores" for anomalies based on the max distance from epsilon for each sequence
anomalies_scores = []
for e_seq in anomaly_sequences:
denominator = np.mean(smoothed_errors) + np.std(smoothed_errors)
score = max([
abs(smoothed_errors[x] - epsilon) / denominator
for x in range(e_seq[0], e_seq[1])
])
anomalies_scores.append(score)
return anomaly_sequences, anomalies_scores
def compute_threshold(smoothed_errors, error_buffer, sd_limit=12.0):
"""Helper method for `extract_anomalies` method.
Calculates the epsilon (threshold) for anomalies.
"""
mu = np.mean(smoothed_errors)
sigma = np.std(smoothed_errors)
max_epsilon = 0
sd_threshold = sd_limit
# The treshold is determined dynamically by testing multiple Zs.
# z is drawn from an ordered set of positive values representing the
# number of standard deviations above mean(smoothed_errors)
# here we iterate in increments of 0.5 on the range that the NASA paper found to be good
for z in np.arange(2.5, sd_limit, 0.5):
epsilon = mu + (sigma * z)
below_epsilon, below_indices, above_epsilon = [], [], []
for i in range(len(smoothed_errors)):
e = smoothed_errors[i]
if e < epsilon:
# save to compute delta mean and delta std
# these are important for epsilon calculation
below_epsilon.append(e)
below_indices.append(i)
if e > epsilon:
# above_epsilon values are anomalies
for j in range(0, error_buffer):
if (i + j) not in above_epsilon and (i + j) < len(smoothed_errors):
above_epsilon.append(i + j)
if (i - j) not in above_epsilon and (i - j) >= 0:
above_epsilon.append(i - j)
if len(above_epsilon) == 0:
continue
# generate sequences
above_epsilon = sorted(list(set(above_epsilon)))
groups = [list(group) for group in mit.consecutive_groups(above_epsilon)]
above_sequences = [(g[0], g[-1]) for g in groups if not g[0] == g[-1]]
mean_perc_decrease = (mu - np.mean(below_epsilon)) / mu
sd_perc_decrease = (sigma - np.std(below_epsilon)) / sigma
epsilon = (mean_perc_decrease + sd_perc_decrease) /\
(len(above_sequences)**2 + len(above_epsilon))
# update the largest epsilon we've seen so far
if epsilon > max_epsilon:
sd_threshold = z
max_epsilon = epsilon
# sd_threshold can be multiplied by sigma to get epsilon
return max_epsilon, sd_threshold
def get_anomalies(smoothed_errors, y_true, z, window, all_anomalies, error_buffer):
"""
Helper method to get anomalies.
"""
mu = np.mean(smoothed_errors)
sigma = np.std(smoothed_errors)
epsilon = mu + (z * sigma)
# compare to epsilon
errors_seq, anomaly_indices, max_error_below_e = group_consecutive_anomalies(
smoothed_errors,
epsilon,
y_true,
error_buffer,
window,
all_anomalies
)
if len(errors_seq) > 0:
anomaly_indices = prune_anomalies(
errors_seq,
smoothed_errors,
max_error_below_e,
anomaly_indices
)
return anomaly_indices
def group_consecutive_anomalies(smoothed_errors,
epsilon,
y_true,
error_buffer,
window,
all_anomalies,
batch_size=30):
upper_percentile, lower_percentile = np.percentile(y_true, [95, 5])
accepted_range = upper_percentile - lower_percentile
minimum_index = 100 # have a cutoff value for anomalies until model is trained enough
anomaly_indices = []
max_error_below_e = 0
for i in range(len(smoothed_errors)):
if smoothed_errors[i] <= epsilon or smoothed_errors[i] <= 0.05 * accepted_range:
# not an anomaly
continue
for j in range(error_buffer):
if (i + j) < len(smoothed_errors) and (i + j) not in anomaly_indices:
if (i + j) > minimum_index:
anomaly_indices.append(i + j)
if (i - j) < len(smoothed_errors) and (i - j) not in anomaly_indices:
if (i - j) > minimum_index:
anomaly_indices.append(i - j)
# get all the errors that are below epsilon and which
# weren't identified as anomalies to process them
for i in range(len(smoothed_errors)):
adjusted_index = i + (window - 1) * batch_size
if smoothed_errors[i] > max_error_below_e and adjusted_index not in all_anomalies:
if i not in anomaly_indices:
max_error_below_e = smoothed_errors[i]
# group anomalies into continuous sequences
anomaly_indices = sorted(list(set(anomaly_indices)))
groups = [list(group) for group in mit.consecutive_groups(anomaly_indices)]
e_seq = [(g[0], g[-1]) for g in groups if g[0] != g[-1]]
return e_seq, anomaly_indices, max_error_below_e
def prune_anomalies(e_seq, smoothed_errors, max_error_below_e, anomaly_indices):
""" Helper method that removes anomalies which don't meet
a minimum separation from next anomaly.
"""
# min accepted perc decrease btwn max errors in anomalous sequences
MIN_PERCENT_DECREASE = 0.05
e_seq_max, smoothed_errors_max = [], []
for error_seq in e_seq:
if len(smoothed_errors[error_seq[0]:error_seq[1]]) > 0:
sliced_errors = smoothed_errors[error_seq[0]:error_seq[1]]
e_seq_max.append(max(sliced_errors))
smoothed_errors_max.append(max(sliced_errors))
smoothed_errors_max.sort(reverse=True)
if max_error_below_e > 0:
smoothed_errors_max.append(max_error_below_e)
indices_remove = []
for i in range(len(smoothed_errors_max)):
if i < len(smoothed_errors_max) - 1:
delta = smoothed_errors_max[i] - smoothed_errors_max[i + 1]
perc_change = delta / smoothed_errors_max[i]
if perc_change < MIN_PERCENT_DECREASE:
indices_remove.append(e_seq_max.index(smoothed_errors_max[i]))
for index in sorted(indices_remove, reverse=True):
del e_seq[index]
pruned_indices = []
for i in anomaly_indices:
for error_seq in e_seq:
if i >= error_seq[0] and i <= error_seq[1]:
pruned_indices.append(i)
return pruned_indices
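# Minimal end-to-end sketch under assumed parameters; the synthetic series and
# the window/batch/error_buffer values below are illustrative only.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    y_true = list(np.sin(np.linspace(0, 20, 400)) + rng.normal(0, 0.05, 400))
    y_hat = list(y_true)
    for k in range(250, 255):
        y_hat[k] += 2.0                      # inject an obvious forecast error
    smoothed = get_forecast_errors(y_hat, y_true, window_size=5, batch_size=30)
    sequences, scores = extract_anomalies(y_true, smoothed, window_size=5,
                                          batch_size=30, error_buffer=5)
    print(sequences, scores)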
|
[
"numpy.std",
"more_itertools.consecutive_groups",
"numpy.percentile",
"numpy.mean",
"numpy.arange"
] |
[((4460, 4484), 'numpy.mean', 'np.mean', (['smoothed_errors'], {}), '(smoothed_errors)\n', (4467, 4484), True, 'import numpy as np\n'), ((4497, 4520), 'numpy.std', 'np.std', (['smoothed_errors'], {}), '(smoothed_errors)\n', (4503, 4520), True, 'import numpy as np\n'), ((4884, 4913), 'numpy.arange', 'np.arange', (['(2.5)', 'sd_limit', '(0.5)'], {}), '(2.5, sd_limit, 0.5)\n', (4893, 4913), True, 'import numpy as np\n'), ((6688, 6712), 'numpy.mean', 'np.mean', (['smoothed_errors'], {}), '(smoothed_errors)\n', (6695, 6712), True, 'import numpy as np\n'), ((6725, 6748), 'numpy.std', 'np.std', (['smoothed_errors'], {}), '(smoothed_errors)\n', (6731, 6748), True, 'import numpy as np\n'), ((7589, 7619), 'numpy.percentile', 'np.percentile', (['y_true', '[95, 5]'], {}), '(y_true, [95, 5])\n', (7602, 7619), True, 'import numpy as np\n'), ((2982, 3013), 'numpy.mean', 'np.mean', (['window_smoothed_errors'], {}), '(window_smoothed_errors)\n', (2989, 3013), True, 'import numpy as np\n'), ((1428, 1469), 'numpy.mean', 'np.mean', (['errors[left_window:right_window]'], {}), '(errors[left_window:right_window])\n', (1435, 1469), True, 'import numpy as np\n'), ((3676, 3717), 'more_itertools.consecutive_groups', 'mit.consecutive_groups', (['anomalies_indices'], {}), '(anomalies_indices)\n', (3698, 3717), True, 'import more_itertools as mit\n'), ((3986, 4010), 'numpy.mean', 'np.mean', (['smoothed_errors'], {}), '(smoothed_errors)\n', (3993, 4010), True, 'import numpy as np\n'), ((4013, 4036), 'numpy.std', 'np.std', (['smoothed_errors'], {}), '(smoothed_errors)\n', (4019, 4036), True, 'import numpy as np\n'), ((8938, 8977), 'more_itertools.consecutive_groups', 'mit.consecutive_groups', (['anomaly_indices'], {}), '(anomaly_indices)\n', (8960, 8977), True, 'import more_itertools as mit\n'), ((5908, 5945), 'more_itertools.consecutive_groups', 'mit.consecutive_groups', (['above_epsilon'], {}), '(above_epsilon)\n', (5930, 5945), True, 'import more_itertools as mit\n'), ((6062, 6084), 'numpy.mean', 'np.mean', (['below_epsilon'], {}), '(below_epsilon)\n', (6069, 6084), True, 'import numpy as np\n'), ((6127, 6148), 'numpy.std', 'np.std', (['below_epsilon'], {}), '(below_epsilon)\n', (6133, 6148), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import commonware.log
from amo.utils import find_language
import mkt
log = commonware.log.getLogger('z.webapps')
def get_locale_properties(manifest, property, default_locale=None):
locale_dict = {}
for locale in manifest.get('locales', {}):
if property in manifest['locales'][locale]:
locale_dict[locale] = manifest['locales'][locale][property]
# Add in the default locale name.
default = manifest.get('default_locale') or default_locale
root_property = manifest.get(property)
if default and root_property:
locale_dict[default] = root_property
return locale_dict
def get_supported_locales(manifest):
"""
Returns a list of locales found in the "locales" property of the manifest.
This will convert locales found in the SHORTER_LANGUAGES setting to their
full locale. It will also remove locales not found in AMO_LANGUAGES.
Note: The default_locale is not included.
"""
return sorted(filter(None, map(find_language, set(
manifest.get('locales', {}).keys()))))
def dehydrate_content_rating(rating):
"""
{body.id, rating.id} to translated rating.label.
"""
try:
body = mkt.ratingsbodies.dehydrate_ratings_body(
mkt.ratingsbodies.RATINGS_BODIES[int(rating['body'])])
except TypeError:
# Legacy ES format (bug 943371).
return {}
rating = mkt.ratingsbodies.dehydrate_rating(
body.ratings[int(rating['rating'])])
return rating.label
def dehydrate_content_ratings(content_ratings):
"""Dehydrate an object of content ratings from rating IDs to dict."""
for body in content_ratings or {}:
# Dehydrate all content ratings.
content_ratings[body] = dehydrate_content_rating(content_ratings[body])
return content_ratings
def dehydrate_descriptors(keys, body=None):
"""
List of keys to lists of descriptor slugs by body.
['ESRB_BLOOD, ...] to {'esrb': ['blood'], ...}.
"""
results = defaultdict(list)
for key in keys:
obj = mkt.ratingdescriptors.RATING_DESCS.get(key)
if obj:
# Slugify and remove body prefix.
body, label = key.lower().replace('_', '-').split('-', 1)
if label != 'no-descs':
results[body].append(label)
return dict(results)
def dehydrate_interactives(keys):
"""
List of keys to list of interactive slugs.
['SOCIAL_NETWORKING', ...] to ['social-networking', ...].
"""
results = []
for key in keys:
obj = mkt.ratinginteractives.RATING_INTERACTIVES.get(key)
if obj:
results.append(key.lower().replace('_', '-'))
return results
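# Doctest-style sketch of get_locale_properties; the manifest content is made up
# for illustration.
# >>> manifest = {'name': 'My App', 'default_locale': 'en-US',
# ...             'locales': {'es': {'name': 'Mi App'}}}
# >>> get_locale_properties(manifest, 'name')
# {'es': 'Mi App', 'en-US': 'My App'}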
|
[
"collections.defaultdict",
"mkt.ratinginteractives.RATING_INTERACTIVES.get",
"mkt.ratingdescriptors.RATING_DESCS.get"
] |
[((2061, 2078), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2072, 2078), False, 'from collections import defaultdict\n'), ((2114, 2157), 'mkt.ratingdescriptors.RATING_DESCS.get', 'mkt.ratingdescriptors.RATING_DESCS.get', (['key'], {}), '(key)\n', (2152, 2157), False, 'import mkt\n'), ((2609, 2660), 'mkt.ratinginteractives.RATING_INTERACTIVES.get', 'mkt.ratinginteractives.RATING_INTERACTIVES.get', (['key'], {}), '(key)\n', (2655, 2660), False, 'import mkt\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 7 21:27:18 2019
@author: biyef
"""
from PIL import Image, ImageFilter
import tensorflow as tf
import matplotlib.pyplot as plt
import mnist_lenet5_backward
import mnist_lenet5_forward
import numpy as np
def imageprepare():
im = Image.open('D:/workspace/machine-learning/mnist/img/origin-9.png')
plt.imshow(im)
plt.show()
#print(type(im.getdata()))
tv = list(im.getdata())
tva = [(255-x)*1.0/255.0 for x in tv]
#return np.asarray(im)
return tva
result=imageprepare()
#x = tf.placeholder(tf.float32, [None, 784])
#x = result
with tf.Graph().as_default() as g:
x = tf.placeholder(tf.float32,[1,
mnist_lenet5_forward.IMAGE_SIZE,
mnist_lenet5_forward.IMAGE_SIZE,
mnist_lenet5_forward.NUM_CHANNELS])
#x = tf.placeholder(tf.float32, [None, 784])
#ipt = imageprepare()
#y_ = tf.placeholder(tf.float32, [None, mnist_lenet5_forward.OUTPUT_NODE])
#y = mnist_lenet5_forward.forward(x,False,None)
# x = tf.placeholder(tf.float32,[
# [ipt],
# mnist_lenet5_forward.IMAGE_SIZE,
# mnist_lenet5_forward.IMAGE_SIZE,
# mnist_lenet5_forward.NUM_CHANNELS])
# y_ = tf.placeholder(tf.float32, [None, mnist_lenet5_forward.OUTPUT_NODE])
# y = mnist_lenet5_forward.forward(x,False,None)
#
# ema = tf.train.ExponentialMovingAverage(mnist_lenet5_backward.MOVING_AVERAGE_DECAY)
# ema_restore = ema.variables_to_restore()
# saver = tf.train.Saver(ema_restore)
#
# correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
# accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
#image = tf.image.decode_png('D:/workspace/machine-learning/mnist/img/origin-2.png')
# image = tf.cast(image, tf.float32)
y_conv = mnist_lenet5_forward.forward(x,False,None)
#eva = mnist_lenet5_forward.forward([image],False,None)
#prediction = tf.argmax(y,1)
saver = tf.train.Saver()
with tf.Session(graph=g) as sess:
init_op = tf.global_variables_initializer()
sess.run(init_op)
ckpt = tf.train.get_checkpoint_state(mnist_lenet5_backward.MODEL_SAVE_PATH)
saver.restore(sess, ckpt.model_checkpoint_path)
reshaped_xs = np.reshape([result],(
1,
mnist_lenet5_forward.IMAGE_SIZE,
mnist_lenet5_forward.IMAGE_SIZE,
mnist_lenet5_forward.NUM_CHANNELS))
# reshaped_x = np.reshape([ipt],(
# [ipt],
# mnist_lenet5_forward.IMAGE_SIZE,
# mnist_lenet5_forward.IMAGE_SIZE,
# mnist_lenet5_forward.NUM_CHANNELS))
# accuracy_score = sess.run(accuracy, feed_dict={x:reshaped_x,y_:[2]})
prediction=tf.argmax(y_conv,1)
predint=prediction.eval(feed_dict={x: reshaped_xs}, session=sess)
print('recognize result:')
print(predint[0])
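# Note on the input contract (illustrative): imageprepare() flattens the PNG into a
# list of floats in [0, 1] with the ink inverted (255 - pixel), matching MNIST's
# white-on-black convention, and np.reshape turns it into the
# (1, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS) tensor fed to the LeNet-5 graph.
# The source image is assumed to already be IMAGE_SIZE x IMAGE_SIZE pixels,
# since no resizing happens in this script.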
|
[
"matplotlib.pyplot.show",
"tensorflow.train.Saver",
"tensorflow.global_variables_initializer",
"matplotlib.pyplot.imshow",
"tensorflow.argmax",
"tensorflow.Session",
"PIL.Image.open",
"mnist_lenet5_forward.forward",
"tensorflow.placeholder",
"numpy.reshape",
"tensorflow.Graph",
"tensorflow.train.get_checkpoint_state"
] |
[((282, 348), 'PIL.Image.open', 'Image.open', (['"""D:/workspace/machine-learning/mnist/img/origin-9.png"""'], {}), "('D:/workspace/machine-learning/mnist/img/origin-9.png')\n", (292, 348), False, 'from PIL import Image, ImageFilter\n'), ((353, 367), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (363, 367), True, 'import matplotlib.pyplot as plt\n'), ((372, 382), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (380, 382), True, 'import matplotlib.pyplot as plt\n'), ((653, 789), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[1, mnist_lenet5_forward.IMAGE_SIZE, mnist_lenet5_forward.IMAGE_SIZE,\n mnist_lenet5_forward.NUM_CHANNELS]'], {}), '(tf.float32, [1, mnist_lenet5_forward.IMAGE_SIZE,\n mnist_lenet5_forward.IMAGE_SIZE, mnist_lenet5_forward.NUM_CHANNELS])\n', (667, 789), True, 'import tensorflow as tf\n'), ((1805, 1849), 'mnist_lenet5_forward.forward', 'mnist_lenet5_forward.forward', (['x', '(False)', 'None'], {}), '(x, False, None)\n', (1833, 1849), False, 'import mnist_lenet5_forward\n'), ((1954, 1970), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (1968, 1970), True, 'import tensorflow as tf\n'), ((1981, 2000), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'g'}), '(graph=g)\n', (1991, 2000), True, 'import tensorflow as tf\n'), ((2028, 2061), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2059, 2061), True, 'import tensorflow as tf\n'), ((2104, 2172), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['mnist_lenet5_backward.MODEL_SAVE_PATH'], {}), '(mnist_lenet5_backward.MODEL_SAVE_PATH)\n', (2133, 2172), True, 'import tensorflow as tf\n'), ((2251, 2381), 'numpy.reshape', 'np.reshape', (['[result]', '(1, mnist_lenet5_forward.IMAGE_SIZE, mnist_lenet5_forward.IMAGE_SIZE,\n mnist_lenet5_forward.NUM_CHANNELS)'], {}), '([result], (1, mnist_lenet5_forward.IMAGE_SIZE,\n mnist_lenet5_forward.IMAGE_SIZE, mnist_lenet5_forward.NUM_CHANNELS))\n', (2261, 2381), True, 'import numpy as np\n'), ((2748, 2768), 'tensorflow.argmax', 'tf.argmax', (['y_conv', '(1)'], {}), '(y_conv, 1)\n', (2757, 2768), True, 'import tensorflow as tf\n'), ((614, 624), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (622, 624), True, 'import tensorflow as tf\n')]
|
import threading
import pickle
import json
import sys
import random
import uuid
import time
sys.path.append('..')
from game import Game, GameState
from utils import string_to_byte, byte_to_string
class GameController():
SPECIAL_KEYWORD = b"xaxaxayarmaW"
MAX_RECEIVE_TIME_DIFFERENCE = 0.010 # in seconds
def __init__(self):
self.active_connections = [None, None]
self.game = Game(4, 4)
self.lock = threading.Lock()
self.receive_question_ts = [None, None]
self.both_players_received = False
self.calibration_acks = [[], []]
self.calibrations = [[{} for _ in range(10)], [{} for _ in range(10)]]
self.ts_difference = 0 # Average difference between timestamps of player 0 and 1.
self.received_acks_cnt = [0, 0]
self.ping_difference = 0
self.ts_info = [{}, {}]
self.answer_ts = [None, None]
def add_connection(self, conn):
id = 1
if self.active_connections[0] == None:
id = 0
self.active_connections[id] = conn
return id
def remove_player(self, id):
with self.lock:
self.active_connections[id] = None
self.game.players_names[id] = None
self.game.reset_board()
self.calibration_acks = [[], []]
self.calibrations = [[{} for _ in range(10)], [{} for _ in range(10)]]
self.ts_difference = 0
self.received_acks_cnt = [0, 0]
self.ping_difference = 0
self.notify_players()
def restart_game(self):
with self.lock:
self.game.reset_board()
self.generate_question()
self.notify_players()
def enter_name(self, id, name):
ready = False
def calibrate_timestamps(self):
def connection_thread(self, conn, id, i):
message = json.dumps({"TYPE": "CALIBRATION", "PAYLOAD": str(i)})
self.calibrations[id][i]["server_send"] = time.time()
conn.sendall(string_to_byte(message) + self.SPECIAL_KEYWORD)
for i in range(10):
for idx, conn in enumerate(self.active_connections):
if conn:
threading.Thread(target=connection_thread, args=(self, conn, idx, i), daemon=True).start()
time.sleep(0.2)
with self.lock:
self.game.players_names[id] = name
self.send_id(id)
if self.game.players_names[1 - id] != None:
ready = True
if ready:
threading.Thread(target=calibrate_timestamps, args=(self,), daemon=True).start()
def notify_players(self):
print("Sending Game information to the all players")
def connection_thread(self, conn, id):
if self.game.state == GameState.QUESTION:
self.ts_info[id][self.game.question_uuid] = {}
self.ts_info[id][self.game.question_uuid]["server_send"] = time.time()
conn.sendall(pickle.dumps(self.game) + self.SPECIAL_KEYWORD)
for idx, conn in enumerate(self.active_connections):
if conn:
threading.Thread(target=connection_thread, args=(self, conn, idx), daemon=True).start()
def generate_question(self):
print("Generating New Question...")
operator_list = ["+", "-", "*"]
operator = random.choice(operator_list)
limit = 20 if operator == "*" else 100
number_1 = random.randint(1, limit)
number_2 = random.randint(1, limit)
question = str(number_1) + operator + str(number_2)
answer = str(eval(question))
with self.lock:
self.game.state = GameState.QUESTION
self.game.question = question
self.game.answer = answer
self.game.question_uuid = str(uuid.uuid4())
self.receive_question_ts = [None, None]
self.both_players_received = False
self.answer_ts = [None, None]
print("Generated the Question: " + question + " / UUID: " + self.game.question_uuid)
def send_id(self, id):
conn = self.active_connections[id]
message = {
"TYPE": "ID",
"PAYLOAD": id
}
print(f"Sending ID to the Player {id}")
conn.sendall(string_to_byte(json.dumps(message)) + self.SPECIAL_KEYWORD)
def close_connections(self):
for conn in self.active_connections:
if conn:
conn.close()
def calculate_score(self, id, coordinate_x, coordinate_y, character):
directions = [[-1, 0], [-1, -1], [0, -1], [1, -1]]
with self.lock:
self.game.board[coordinate_x][coordinate_y] = character
for x in range(coordinate_x - 1, coordinate_x + 2):
for y in range(coordinate_y - 1, coordinate_y + 2):
for direction in directions:
sequence = ""
sequence_coordinates = []
for i in range(3):
sequence_coordinates.append([x - (i - 1) * direction[0], y - (i - 1) * direction[1]])
if sequence_coordinates[-1][0] < 0 or sequence_coordinates[-1][1] < 0 or \
sequence_coordinates[-1][0] >= self.game.row or sequence_coordinates[-1][1] >= self.game.col:
sequence = "NOO"
break
sequence += self.game.board[sequence_coordinates[-1][0]][sequence_coordinates[-1][1]]
if sequence == "SOS" and sequence_coordinates not in self.game.complete_lines:
self.game.scores[id] += 1
self.game.complete_lines.append(sequence_coordinates)
for coordinate in sequence_coordinates:
self.game.marked_boxes.add(coordinate[0] * self.game.col + coordinate[1])
def move(self, id, move):
with self.lock:
if self.game.state != GameState.MOVE or self.game.turn != id: # or not self.both_players_received:
return
coordinate_x, coordinate_y, character = move
self.calculate_score(id, coordinate_x, coordinate_y, character)
self.generate_question()
self.notify_players()
def give_turn(self, id, question_uuid, duration):
print(f"Player {id} duration: {duration} seconds")
with self.lock:
if self.game.state != GameState.QUESTION or self.game.question_uuid != question_uuid:
return
self.answer_ts[id] = duration
if self.answer_ts[1 - id]:
return
if not self.answer_ts[1 - id]:
time.sleep(abs(2 * self.ping_difference))
with self.lock:
self.game.state = GameState.MOVE
if self.answer_ts[1-id] and self.answer_ts[1-id] < self.answer_ts[id]:
self.game.turn = 1 - id
else:
self.game.turn = id
self.notify_players()
# Returns the normalized timestamp difference between acknowledgment of two players in seconds.
def get_timestamp_diff(self):
return abs(self.receive_question_ts[0] - self.receive_question_ts[1] - self.ts_difference - self.ping_difference)
def check_question_ack(self, id, client_rec, client_send, uuid):
self.ts_info[id][uuid]["server_rec"] = time.time()
self.ts_info[id][uuid]["client_rec"] = client_rec
self.ts_info[id][uuid]["client_send"] = client_send
with self.lock:
if self.game.state != GameState.QUESTION:
return
if self.game.question_uuid == uuid:
self.receive_question_ts[id] = client_rec
if self.receive_question_ts[1 - id]:
if self.get_timestamp_diff() <= self.MAX_RECEIVE_TIME_DIFFERENCE:
print("Both player has received the question " + uuid)
self.both_players_received = True
return
else:
return
else:
return
time.sleep(0.2)
with self.lock:
if self.game.question_uuid != uuid:
return
if self.receive_question_ts[1 - id]:
if self.get_timestamp_diff() <= self.MAX_RECEIVE_TIME_DIFFERENCE:
self.both_players_received = True
print("Both player has received the question " + uuid)
self.add_new_calibration_ts(uuid)
return
else:
self.add_new_calibration_ts(uuid)
self.generate_question()
self.notify_players()
def add_new_calibration_ts(self, uuid):
self.calibrations[0].append(self.ts_info[0][uuid])
self.calibrations[0] = self.calibrations[0][1:]
self.calibrations[1].append(self.ts_info[1][uuid])
self.calibrations[1] = self.calibrations[1][1:]
self.update_time_difference()
def update_time_difference(self):
ping0 = sum([(c["client_rec"]-c["server_send"]-c["client_send"]+c["server_rec"]) / 2 for c in self.calibrations[0][-6:]]) / 6
ping1 = sum([(c["client_rec"]-c["server_send"]-c["client_send"]+c["server_rec"]) / 2 for c in self.calibrations[1][-6:]]) / 6
print("Player 0 has a ping: ", ping0 * 1000, " ms")
print("Player 1 has a ping: ", ping1 * 1000, " ms")
self.ping_difference = ping0 - ping1
self.game.wait_times = [max(0, -self.ping_difference), max(0, self.ping_difference)]
delta0 = sum([(c["client_rec"]-c["server_send"]+c["client_send"]-c["server_rec"]) / 2 for c in self.calibrations[0][-6:]]) / 6
delta1 = sum([(c["client_rec"]-c["server_send"]+c["client_send"]-c["server_rec"]) / 2 for c in self.calibrations[1][-6:]]) / 6
self.ts_difference = delta0 - delta1
print("Calculated time difference in seconds is: ", self.ts_difference)
def add_calibration_ack(self, id, client_rec_ts, client_send_ts, ack_id):
self.calibrations[id][ack_id]["server_rec"] = time.time()
self.calibrations[id][ack_id]["client_rec"] = client_rec_ts
self.calibrations[id][ack_id]["client_send"] = client_send_ts
ready_to_start = False
with self.lock:
self.received_acks_cnt[id] += 1
if self.received_acks_cnt[id] == 10 and self.received_acks_cnt[1 - id] == 10:
self.update_time_difference()
ready_to_start = True
if ready_to_start:
self.generate_question()
self.notify_players()
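# Worked example of the calibration arithmetic used above (illustrative numbers):
# for one calibration round the server stores server_send/server_rec and the client
# reports client_rec/client_send, each in its own clock.
#   ping  = ((client_rec - server_send) + (server_rec - client_send)) / 2
#   delta = ((client_rec - server_send) - (server_rec - client_send)) / 2
# e.g. server_send=10.000, client_rec=10.070, client_send=10.080, server_rec=10.110
#   ping  = (0.070 + 0.030) / 2 = 0.050 s   (one-way delay)
#   delta = (0.070 - 0.030) / 2 = 0.020 s   (client clock ahead of the server)
# update_time_difference() averages both quantities over the last six rounds per
# player; the ping difference sets game.wait_times so the faster connection waits.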
|
[
"sys.path.append",
"threading.Thread",
"uuid.uuid4",
"random.randint",
"pickle.dumps",
"random.choice",
"json.dumps",
"time.time",
"threading.Lock",
"time.sleep",
"utils.string_to_byte",
"game.Game"
] |
[((93, 114), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (108, 114), False, 'import sys\n'), ((406, 416), 'game.Game', 'Game', (['(4)', '(4)'], {}), '(4, 4)\n', (410, 416), False, 'from game import Game, GameState\n'), ((437, 453), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (451, 453), False, 'import threading\n'), ((3415, 3443), 'random.choice', 'random.choice', (['operator_list'], {}), '(operator_list)\n', (3428, 3443), False, 'import random\n'), ((3512, 3536), 'random.randint', 'random.randint', (['(1)', 'limit'], {}), '(1, limit)\n', (3526, 3536), False, 'import random\n'), ((3556, 3580), 'random.randint', 'random.randint', (['(1)', 'limit'], {}), '(1, limit)\n', (3570, 3580), False, 'import random\n'), ((7636, 7647), 'time.time', 'time.time', ([], {}), '()\n', (7645, 7647), False, 'import time\n'), ((8390, 8405), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (8400, 8405), False, 'import time\n'), ((10414, 10425), 'time.time', 'time.time', ([], {}), '()\n', (10423, 10425), False, 'import time\n'), ((1996, 2007), 'time.time', 'time.time', ([], {}), '()\n', (2005, 2007), False, 'import time\n'), ((2347, 2362), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2357, 2362), False, 'import time\n'), ((3005, 3016), 'time.time', 'time.time', ([], {}), '()\n', (3014, 3016), False, 'import time\n'), ((3875, 3887), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3885, 3887), False, 'import uuid\n'), ((2581, 2653), 'threading.Thread', 'threading.Thread', ([], {'target': 'calibrate_timestamps', 'args': '(self,)', 'daemon': '(True)'}), '(target=calibrate_timestamps, args=(self,), daemon=True)\n', (2597, 2653), False, 'import threading\n'), ((3042, 3065), 'pickle.dumps', 'pickle.dumps', (['self.game'], {}), '(self.game)\n', (3054, 3065), False, 'import pickle\n'), ((4365, 4384), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (4375, 4384), False, 'import json\n'), ((2037, 2060), 'utils.string_to_byte', 'string_to_byte', (['message'], {}), '(message)\n', (2051, 2060), False, 'from utils import string_to_byte, byte_to_string\n'), ((3189, 3268), 'threading.Thread', 'threading.Thread', ([], {'target': 'connection_thread', 'args': '(self, conn, idx)', 'daemon': '(True)'}), '(target=connection_thread, args=(self, conn, idx), daemon=True)\n', (3205, 3268), False, 'import threading\n'), ((2240, 2326), 'threading.Thread', 'threading.Thread', ([], {'target': 'connection_thread', 'args': '(self, conn, idx, i)', 'daemon': '(True)'}), '(target=connection_thread, args=(self, conn, idx, i),\n daemon=True)\n', (2256, 2326), False, 'import threading\n')]
|
from jiwer import wer
ground_truth = "কুমিল্লার খাদি সারা দেশে পরিচিত"
hypothesis = "কুমিল্লার খাদে সারা দেশে পরিচিত"
error = wer(ground_truth, hypothesis)
print(error)
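# Illustration: WER = (S + D + I) / N for S substitutions, D deletions and
# I insertions against the N reference words. Here one of the five words differs
# ("খাদি" vs "খাদে"), so wer() returns 1 / 5 = 0.2.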
|
[
"jiwer.wer"
] |
[((128, 157), 'jiwer.wer', 'wer', (['ground_truth', 'hypothesis'], {}), '(ground_truth, hypothesis)\n', (131, 157), False, 'from jiwer import wer\n')]
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Fraunhofer FKIE/US, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Fraunhofer nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
from multimaster_msgs_fkie.msg import Capability
from multimaster_msgs_fkie.srv import ListDescription, ListNodes, Task, ListDescriptionResponse, ListNodesResponse # , LoadLaunch
from rosgraph.rosenv import ROS_NAMESPACE
from roslaunch import ROSLaunchConfig, XmlLoader
import os
import rosgraph.masterapi
import rosgraph.names
import roslib.names
import roslib.network
import rospy
import shlex
import std_srvs.srv
import subprocess
import sys
import threading
from .screen_handler import ScreenHandler # , ScreenHandlerException
class LoadException(Exception):
''' The exception throwing while searching for the given launch file. '''
pass
class StartException(Exception):
''' The exception throwing while run a node containing in the loaded configuration. '''
pass
class DefaultCfg(object):
def __init__(self):
self.nodes = []
'''@ivar: the list with names of nodes with name spaces.'''
self.sensors = {}
'''@ivar: Sensor description: C{dict(node name : [(sensor type, sensor name, sensor description), ...])}'''
self.robot_descr = ('', '', '')
'''@ivar: robot description as tupel of (type, name, text) '''
self.package = ''
self.file = ''
self.__lock = threading.RLock()
# Load parameter
self.launch_file = rospy.get_param('~launch_file', '')
rospy.loginfo("launch_file: %s" % self.launch_file)
self.package = rospy.get_param('~package', '')
rospy.loginfo("package: %s" % self.package)
self.do_autostart = rospy.get_param('~autostart', False)
rospy.loginfo("do_autostart: %s" % self.do_autostart)
self.load_params_at_start = rospy.get_param('~load_params_at_start', True)
self.parameter_loaded = False
rospy.loginfo("load_params_at_start: %s" % self.load_params_at_start)
self.argv = rospy.get_param('~argv', [])
rospy.loginfo("argv: %s" % self.argv)
if not isinstance(self.argv, list):
self.argv = ["%s" % self.argv]
sys.argv.extend(self.argv)
if self.do_autostart:
rospy.set_param('~autostart', False)
# initialize the ROS services
# rospy.Service('~load', LoadLaunch, self.rosservice_load_launch)
self._reload_service = rospy.Service('~reload', std_srvs.srv.Empty, self.rosservice_reload)
rospy.Service('~description', ListDescription, self.rosservice_description)
self.runService = None
'''@ivar: The service will be created on each load of a launch file to
inform the caller about a new configuration. '''
self.listService = None
'''@ivar: The service will be created on each load of a launch file to
inform the caller about a new configuration. '''
self.description_response = ListDescriptionResponse()
# variables to print the pending autostart nodes
self._pending_starts = set()
self._pending_starts_last_printed = set()
def _filter_args(self, argv):
afilter = ['__ns:=', '__name:=', '_package:=', '_launch_file:=']
result = []
for a in argv:
in_filter = False
for f in afilter:
if a.startswith(f):
in_filter = True
break
if ':=' not in a or in_filter:
continue
result.append(a)
return result
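    # Illustration of the filter (hypothetical argv): given
    #   ['__ns:=/robot', '__name:=default_cfg', 'use_sim_time:=true', 'plain_arg']
    # only 'use_sim_time:=true' survives, because the ROS-internal remappings are
    # filtered out and entries without ':=' are skipped.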
def load(self, delay_service_creation=0.):
'''
Load the launch file configuration
'''
with self.__lock:
self._pending_starts.clear()
# shutdown the services to inform the caller about a new configuration.
if self.runService is not None:
self.runService.shutdown('reload config')
self.runService = None
if self.listService is not None:
self.listService.shutdown('reload config')
self.listService = None
self.nodes = [] # the name of nodes with namespace
self.sensors = {} # sensor descriptions
launch_path = self.getPath(self.launch_file, self.package)
rospy.loginfo("loading launch file: %s", launch_path)
self.masteruri = self._masteruri_from_ros()
self.roscfg = ROSLaunchConfig()
loader = XmlLoader()
argv = self._filter_args(sys.argv)
# remove namespace from sys.argv to avoid load the launchfile info local namespace
sys.argv = list(argv)
# set the global environment to empty namespace
os.environ[ROS_NAMESPACE] = rospy.names.SEP
rospy.set_param('~argv_used', list(set(argv)))
loader.load(launch_path, self.roscfg, verbose=False, argv=argv)
# create the list with node names
for item in self.roscfg.nodes:
if item.machine_name and not item.machine_name == 'localhost':
machine = self.roscfg.machines[item.machine_name]
if roslib.network.is_local_address(machine.address):
self.nodes.append(roslib.names.ns_join(item.namespace, item.name))
else:
self.nodes.append(roslib.names.ns_join(item.namespace, item.name))
# get the robot description
self.description_response = dr = ListDescriptionResponse()
dr.robot_name = ''
dr.robot_type = ''
dr.robot_descr = ''
for param, p in self.roscfg.params.items():
if param.endswith('robots'):
if isinstance(p.value, list):
if len(p.value) > 0 and len(p.value[0]) != 5:
print("WRONG format, expected: ['host(ROS master Name)', 'type', 'name', 'images', 'description'] -> ignore", param)
else:
for entry in p.value:
try:
print(entry[0], rospy.get_param('/mastername', ''))
if not entry[0] or entry[0] == rospy.get_param('/mastername', ''):
dr.robot_name = self._decode(entry[2])
dr.robot_type = entry[1]
dr.robot_images = entry[3].split(',')
dr.robot_descr = self._decode(entry[4])
break
except:
pass
# get the sensor description
tmp_cap_dict = self.getCapabilitiesDesrc()
for machine, ns_dict in tmp_cap_dict.items():
if machine in self.roscfg.machines:
machine = self.roscfg.machines[machine].address
if not machine or roslib.network.is_local_address(machine):
for ns, group_dict in ns_dict.items():
for group, descr_dict in group_dict.items():
if descr_dict['nodes']:
cap = Capability()
cap.namespace = ns
cap.name = group
cap.type = descr_dict['type']
cap.images = list(descr_dict['images'])
cap.description = descr_dict['description']
cap.nodes = list(descr_dict['nodes'])
dr.capabilities.append(cap)
# load parameters into the ROS parameter server
if self.load_params_at_start:
self.loadParams()
# initialize the ROS services
            # HACK to let the node_manager update the view
if delay_service_creation > 0.:
t = threading.Timer(delay_service_creation, self._timed_service_creation)
t.start()
else:
self._timed_service_creation()
# self.timer = rospy.Timer(rospy.Duration(2), self.timed_service_creation, True)
# if self.nodes:
# self.runService = rospy.Service('~run', Task, self.rosservice_start_node)
# self.listServic = rospy.Service('~list_nodes', ListNodes, self.rosservice_list_nodes)
# except:
# import traceback
# print traceback.format_exc()
if self.do_autostart:
if not self.parameter_loaded:
self.loadParams()
for n in self.nodes:
try:
self.runNode(n, self.do_autostart)
except Exception as e:
rospy.logwarn("Error while start %s: %s", n, e)
self.do_autostart = False
def _decode(self, val):
'''
        Replaces the '\\n' by LF (Line Feed) and decodes the string entry from the system
        default encoding to unicode.
@param val: the string coding as system default
@type val: str
@return: the decoded string
@rtype: C{unicode} or original on error
'''
result = val.replace("\\n ", "\n")
try:
result = result.decode(sys.getfilesystemencoding())
except:
pass
return result
def getCapabilitiesDesrc(self):
'''
Parses the launch file for C{capabilities} and C{capability_group} parameter
and creates dictionary for grouping the nodes.
@return: the capabilities description stored in this configuration
@rtype: C{dict(machine : dict(namespace: dict(group:dict('type' : str, 'description' : str, 'nodes' : [str]))))}
'''
result = dict()
capabilies_descr = dict()
if self.roscfg is not None:
# get the capabilities description
# use two separate loops, to create the description list first
for param, p in self.roscfg.params.items():
if param.endswith('capabilities'):
if isinstance(p.value, list):
if len(p.value) > 0 and len(p.value[0]) != 4:
print("WRONG format, expected: ['name', 'type', 'images', 'description'] -> ignore", param)
else:
for entry in p.value:
capabilies_descr[entry[0]] = {'type': ''.join([entry[1]]), 'images': entry[2].split(','), 'description': self._decode(entry[3])}
# get the capability nodes
for item in self.roscfg.nodes:
node_fullname = roslib.names.ns_join(item.namespace, item.name)
machine_name = item.machine_name if item.machine_name is not None and not item.machine_name == 'localhost' else ''
added = False
cap_param = roslib.names.ns_join(node_fullname, 'capability_group')
cap_ns = node_fullname
# find the capability group parameter in namespace
while cap_param not in self.roscfg.params and cap_param.count(roslib.names.SEP) > 1:
cap_ns = roslib.names.namespace(cap_ns).rstrip(roslib.names.SEP)
if not cap_ns:
cap_ns = roslib.names.SEP
cap_param = roslib.names.ns_join(cap_ns, 'capability_group')
if cap_ns == node_fullname:
cap_ns = item.namespace.rstrip(roslib.names.SEP) # if the parameter group parameter found, assign node to the group
if not cap_ns:
cap_ns = roslib.names.SEP
# if the 'capability_group' parameter found, assign node to the group
if cap_param in self.roscfg.params and self.roscfg.params[cap_param].value:
p = self.roscfg.params[cap_param]
if machine_name not in result:
result[machine_name] = dict()
for (ns, groups) in result[machine_name].items():
if ns == cap_ns and p.value in groups:
groups[p.value]['nodes'].append(node_fullname)
added = True
break
if not added:
ns = cap_ns
# add new group in the namespace of the node
if ns not in result[machine_name]:
result[machine_name][ns] = dict()
if p.value not in result[machine_name][ns]:
try:
result[machine_name][ns][p.value] = {'type': capabilies_descr[p.value]['type'],
'images': capabilies_descr[p.value]['images'],
'description': capabilies_descr[p.value]['description'],
'nodes': []}
except:
result[machine_name][ns][p.value] = {'type': '',
'images': [],
'description': '',
'nodes': []}
result[machine_name][ns][p.value]['nodes'].append(node_fullname)
return result
def _masteruri_from_ros(self):
'''
Returns the master URI depending on ROS distribution API.
@return: ROS master URI
@rtype: C{str}
'''
try:
import rospkg.distro
distro = rospkg.distro.current_distro_codename()
if distro in ['electric', 'diamondback', 'cturtle']:
return roslib.rosenv.get_master_uri()
else:
return rosgraph.rosenv.get_master_uri()
except:
return roslib.rosenv.get_master_uri()
def _timed_service_creation(self):
with self.__lock:
try:
if self.runService is None:
self.runService = rospy.Service('~run', Task, self.rosservice_start_node)
if self.listService is None:
self.listService = rospy.Service('~list_nodes', ListNodes, self.rosservice_list_nodes)
except:
import traceback
print(traceback.format_exc())
def getPath(self, path, package=''):
'''
Searches for a launch file. If package is given, try first to find the launch
        file in the given package. If more than one launch file with the same name is
        found in the package, the first one will be taken.
@param path: the file name of the launch file
@type path: C{str}
@param package: the package containing the launch file or an empty string,
if the C{file} is an absolute path
@type package: C{str}
@return: the absolute path of the launch file
@rtype: C{str}
@raise LoadException: if the given file is not found
'''
launch_file = path
# if package is set, try to find the launch file in the given package
if package:
paths = roslib.packages.find_resource(package, launch_file)
if len(paths) > 0:
                # if more than one launch file is found, take the first one
launch_file = paths[0]
if os.path.isfile(launch_file) and os.path.exists(launch_file):
return launch_file
raise LoadException('File %s in package [%s] not found!' % (path, package))
def rosservice_list_nodes(self, req):
'''
Callback for the ROS service to get the list with available nodes.
'''
return ListNodesResponse(self.nodes)
def rosservice_start_node(self, req):
'''
Callback for the ROS service to start a node.
'''
self.runNode(req.node)
return []
def rosservice_reload(self, req):
self.load(2.)
return []
# def rosservice_load_launch(self, req):
# '''
# Load the launch file
# '''
# try:
# self.__lock.acquire()
# self.load(req.package, req.file, req.argv)
# finally:
# self.__lock.release()
# return []
def rosservice_description(self, req):
'''
Returns the current description.
'''
return self.description_response
def loadParams(self):
'''
Loads all parameter into ROS parameter server.
'''
params = dict()
for param, value in self.roscfg.params.items():
params[param] = value
# rospy.loginfo("register PARAMS:\n%s", '\n'.join(params))
self._load_parameters(self.masteruri, params, self.roscfg.clear_params)
self.parameter_loaded = True
def runNode(self, node, autostart=False):
'''
        Start the node with the given name from the currently loaded configuration.
@param node: the name of the node
@type node: C{str}
        @raise StartException: if an error occurred while starting.
'''
if not self.parameter_loaded:
self.loadParams()
n = None
for item in self.roscfg.nodes:
itemname = rospy.names.ns_join(item.namespace, item.name)
if itemname == node:
n = item
break
if n is None:
raise StartException("Node '%s' not found!" % node)
if autostart and self._get_start_exclude(rospy.names.ns_join(n.namespace, n.name)):
# skip autostart
rospy.loginfo("%s is in exclude list, skip autostart", n.name)
return
# env = n.env_args
prefix = n.launch_prefix if n.launch_prefix is not None else ''
args = ['__ns:=%s' % n.namespace, '__name:=%s' % n.name]
if not (n.cwd is None):
args.append('__cwd:=%s' % n.cwd)
# add remaps
for remap in n.remap_args:
args.append('%s:=%s' % (remap[0], remap[1]))
# masteruri = self.masteruri
# if n.machine_name and not n.machine_name == 'localhost':
# machine = self.roscfg.machines[n.machine_name]
# TODO: env-loader support?
# if machine.env_args:
# env[len(env):] = machine.env_args
# nm.screen().testScreen()
cmd = self._get_node(n.package, n.type)
# determine the current working path, Default: the package of the node
cwd = self.get_ros_home()
if not (n.cwd is None):
if n.cwd == 'ROS_HOME':
cwd = self.get_ros_home()
elif n.cwd == 'node':
cwd = os.path.dirname(cmd[0])
respawn = ['']
if n.respawn:
respawn = self._get_node('node_manager_fkie', 'respawn')
# set the respawn environment variables
respawn_params = self._get_respawn_params(rospy.names.ns_join(n.namespace, n.name))
if respawn_params['max'] > 0:
n.env_args.append(('RESPAWN_MAX', '%d' % respawn_params['max']))
if respawn_params['min_runtime'] > 0:
n.env_args.append(('RESPAWN_MIN_RUNTIME', '%d' % respawn_params['min_runtime']))
if respawn_params['delay'] > 0:
n.env_args.append(('RESPAWN_DELAY', '%d' % respawn_params['delay']))
node_cmd = [respawn[0], prefix, cmd[0]]
cmd_args = [ScreenHandler.getSceenCmd(node)]
cmd_args[len(cmd_args):] = node_cmd
cmd_args.append(n.args)
cmd_args[len(cmd_args):] = args
# print 'runNode: ', cmd_args
popen_cmd = shlex.split(str(' '.join(cmd_args)))
rospy.loginfo("run node '%s as': %s", node, str(' '.join(popen_cmd)))
# remove the 'BASH_ENV' and 'ENV' from environment
new_env = dict(os.environ)
try:
for k in ['BASH_ENV', 'ENV']:
del new_env[k]
except:
pass
# add node environment parameter
for k, v in n.env_args:
new_env[k] = v
# the ROS_NAMESPACE environment is used in cpp plugins in rqt
if n.namespace:
new_env['ROS_NAMESPACE'] = n.namespace
# set delayed autostart parameter
self._run_node(popen_cmd, cwd, new_env, rospy.names.ns_join(n.namespace, n.name), autostart)
if len(cmd) > 1:
            raise StartException('Multiple executables were found! The first one was started! Executables:\n%s' % str(cmd))
def _run_node(self, cmd, cwd, env, node, autostart=False):
self._pending_starts.add(node)
start_now = True
start_delay = self._get_start_delay(node)
start_required = self._get_start_required(node)
if autostart and start_required:
start_now = False
# get published topics from ROS master
master = rosgraph.masterapi.Master(self.masteruri)
for topic, datatype in master.getPublishedTopics(''):
if start_required == topic:
start_now = True
break
if not start_now:
# Start the timer for waiting for the topic
start_timer = threading.Timer(3., self._run_node, args=(cmd, cwd, env, node, autostart))
start_timer.start()
if start_now and autostart and start_delay > 0:
start_now = False
# start timer for delayed start
start_timer = threading.Timer(start_delay, self._run_node, args=(cmd, cwd, env, node, False))
start_timer.start()
if start_now:
ps = subprocess.Popen(cmd, cwd=cwd, env=env)
# wait for process to avoid 'defunct' processes
thread = threading.Thread(target=ps.wait)
thread.setDaemon(True)
thread.start()
# remove from pending autostarts
try:
self._pending_starts.remove(node)
except:
pass
# print the current pending autostarts
if self._pending_starts_last_printed != self._pending_starts:
self._pending_starts_last_printed.clear()
self._pending_starts_last_printed.update(self._pending_starts)
rospy.loginfo("Pending autostarts %d: %s", len(self._pending_starts), self._pending_starts)
def _get_node(self, pkg, filename):
cmd = None
try:
cmd = roslib.packages.find_node(pkg, filename)
except roslib.packages.ROSPkgException as e:
# multiple nodes, invalid package
raise StartException(str(e))
except Exception as e:
raise StartException(str(e))
# handle different result types str or array of string
if sys.version_info[0] <= 2:
import types
string_types = types.StringTypes
else:
string_types = (str,)
if isinstance(cmd, string_types):
cmd = [cmd]
if cmd is None or len(cmd) == 0:
raise StartException('%s in package [%s] not found!' % (filename, pkg))
return cmd
def _get_start_exclude(self, node):
param_name = rospy.names.ns_join(node, 'default_cfg/autostart/exclude')
try:
return bool(self.roscfg.params[param_name].value)
except:
pass
return False
def _get_start_delay(self, node):
param_name = rospy.names.ns_join(node, 'default_cfg/autostart/delay')
try:
return float(self.roscfg.params[param_name].value)
except:
pass
return 0.
def _get_start_required(self, node):
param_name = rospy.names.ns_join(node, 'default_cfg/autostart/required/publisher')
topic = ''
try:
topic = self.roscfg.params[param_name].value
if rosgraph.names.is_private(topic):
rospy.logwarn('Private for autostart required topic `%s` is ignored!' % topic)
topic = ''
elif not rosgraph.names.is_global(topic):
topic = rospy.names.ns_join(rosgraph.names.namespace(node), topic)
except:
pass
return topic
def _get_respawn_params(self, node):
result = {'max': 0, 'min_runtime': 0, 'delay': 0}
respawn_max = rospy.names.ns_join(node, 'respawn/max')
respawn_min_runtime = rospy.names.ns_join(node, 'respawn/min_runtime')
respawn_delay = rospy.names.ns_join(node, 'respawn/delay')
try:
result['max'] = int(self.roscfg.params[respawn_max].value)
except:
pass
try:
result['min_runtime'] = int(self.roscfg.params[respawn_min_runtime].value)
except:
pass
try:
result['delay'] = int(self.roscfg.params[respawn_delay].value)
except:
pass
return result
def get_ros_home(self):
'''
Returns the ROS HOME path depending on ROS distribution API.
@return: ROS HOME path
@rtype: C{str}
'''
try:
import rospkg.distro
distro = rospkg.distro.current_distro_codename()
if distro in ['electric', 'diamondback', 'cturtle']:
import roslib.rosenv
return roslib.rosenv.get_ros_home()
else:
import rospkg
return rospkg.get_ros_home()
except:
import traceback
print(traceback.format_exc())
import roslib.rosenv
return roslib.rosenv.get_ros_home()
@classmethod
def _load_parameters(cls, masteruri, params, clear_params):
"""
Load parameters onto the parameter server
"""
try:
import xmlrpclib
except ImportError:
import xmlrpc.client as xmlrpclib
param_server = xmlrpclib.ServerProxy(masteruri)
p = None
try:
# multi-call style xmlrpc
param_server_multi = xmlrpclib.MultiCall(param_server)
# clear specified parameter namespaces
# #2468 unify clear params to prevent error
for p in clear_params:
param_server_multi.deleteParam(rospy.get_name(), p)
r = param_server_multi()
# for code, msg, _ in r:
# if code != 1:
# raise StartException("Failed to clear parameter: %s"%(msg))
# multi-call objects are not reusable
param_server_multi = xmlrpclib.MultiCall(param_server)
for p in params.itervalues():
# suppressing this as it causes too much spam
# printlog("setting parameter [%s]"%p.key)
param_server_multi.setParam(rospy.get_name(), p.key, p.value)
r = param_server_multi()
for code, msg, _ in r:
if code != 1:
raise StartException("Failed to set parameter: %s" % (msg))
except Exception:
raise # re-raise as this is fatal
|
[
"threading.Timer",
"roslaunch.ROSLaunchConfig",
"roslaunch.XmlLoader",
"multimaster_msgs_fkie.msg.Capability",
"os.path.isfile",
"rospy.get_name",
"rospy.logwarn",
"multimaster_msgs_fkie.srv.ListNodesResponse",
"rospy.set_param",
"os.path.exists",
"os.path.dirname",
"sys.getfilesystemencoding",
"rospy.names.ns_join",
"sys.argv.extend",
"traceback.format_exc",
"multimaster_msgs_fkie.srv.ListDescriptionResponse",
"rospkg.distro.current_distro_codename",
"xmlrpc.client.ServerProxy",
"threading.Thread",
"subprocess.Popen",
"threading.RLock",
"rospy.loginfo",
"rospy.Service",
"rospkg.get_ros_home",
"rospy.get_param",
"xmlrpc.client.MultiCall"
] |
[((2902, 2919), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (2917, 2919), False, 'import threading\n'), ((2972, 3007), 'rospy.get_param', 'rospy.get_param', (['"""~launch_file"""', '""""""'], {}), "('~launch_file', '')\n", (2987, 3007), False, 'import rospy\n'), ((3016, 3067), 'rospy.loginfo', 'rospy.loginfo', (["('launch_file: %s' % self.launch_file)"], {}), "('launch_file: %s' % self.launch_file)\n", (3029, 3067), False, 'import rospy\n'), ((3091, 3122), 'rospy.get_param', 'rospy.get_param', (['"""~package"""', '""""""'], {}), "('~package', '')\n", (3106, 3122), False, 'import rospy\n'), ((3131, 3174), 'rospy.loginfo', 'rospy.loginfo', (["('package: %s' % self.package)"], {}), "('package: %s' % self.package)\n", (3144, 3174), False, 'import rospy\n'), ((3203, 3239), 'rospy.get_param', 'rospy.get_param', (['"""~autostart"""', '(False)'], {}), "('~autostart', False)\n", (3218, 3239), False, 'import rospy\n'), ((3248, 3301), 'rospy.loginfo', 'rospy.loginfo', (["('do_autostart: %s' % self.do_autostart)"], {}), "('do_autostart: %s' % self.do_autostart)\n", (3261, 3301), False, 'import rospy\n'), ((3338, 3384), 'rospy.get_param', 'rospy.get_param', (['"""~load_params_at_start"""', '(True)'], {}), "('~load_params_at_start', True)\n", (3353, 3384), False, 'import rospy\n'), ((3431, 3500), 'rospy.loginfo', 'rospy.loginfo', (["('load_params_at_start: %s' % self.load_params_at_start)"], {}), "('load_params_at_start: %s' % self.load_params_at_start)\n", (3444, 3500), False, 'import rospy\n'), ((3521, 3549), 'rospy.get_param', 'rospy.get_param', (['"""~argv"""', '[]'], {}), "('~argv', [])\n", (3536, 3549), False, 'import rospy\n'), ((3558, 3595), 'rospy.loginfo', 'rospy.loginfo', (["('argv: %s' % self.argv)"], {}), "('argv: %s' % self.argv)\n", (3571, 3595), False, 'import rospy\n'), ((3691, 3717), 'sys.argv.extend', 'sys.argv.extend', (['self.argv'], {}), '(self.argv)\n', (3706, 3717), False, 'import sys\n'), ((3935, 4003), 'rospy.Service', 'rospy.Service', (['"""~reload"""', 'std_srvs.srv.Empty', 'self.rosservice_reload'], {}), "('~reload', std_srvs.srv.Empty, self.rosservice_reload)\n", (3948, 4003), False, 'import rospy\n'), ((4012, 4087), 'rospy.Service', 'rospy.Service', (['"""~description"""', 'ListDescription', 'self.rosservice_description'], {}), "('~description', ListDescription, self.rosservice_description)\n", (4025, 4087), False, 'import rospy\n'), ((4451, 4476), 'multimaster_msgs_fkie.srv.ListDescriptionResponse', 'ListDescriptionResponse', ([], {}), '()\n', (4474, 4476), False, 'from multimaster_msgs_fkie.srv import ListDescription, ListNodes, Task, ListDescriptionResponse, ListNodesResponse\n'), ((17592, 17621), 'multimaster_msgs_fkie.srv.ListNodesResponse', 'ListNodesResponse', (['self.nodes'], {}), '(self.nodes)\n', (17609, 17621), False, 'from multimaster_msgs_fkie.srv import ListDescription, ListNodes, Task, ListDescriptionResponse, ListNodesResponse\n'), ((24989, 25047), 'rospy.names.ns_join', 'rospy.names.ns_join', (['node', '"""default_cfg/autostart/exclude"""'], {}), "(node, 'default_cfg/autostart/exclude')\n", (25008, 25047), False, 'import rospy\n'), ((25237, 25293), 'rospy.names.ns_join', 'rospy.names.ns_join', (['node', '"""default_cfg/autostart/delay"""'], {}), "(node, 'default_cfg/autostart/delay')\n", (25256, 25293), False, 'import rospy\n'), ((25484, 25553), 'rospy.names.ns_join', 'rospy.names.ns_join', (['node', '"""default_cfg/autostart/required/publisher"""'], {}), "(node, 'default_cfg/autostart/required/publisher')\n", (25503, 25553), False, 'import 
rospy\n'), ((26127, 26167), 'rospy.names.ns_join', 'rospy.names.ns_join', (['node', '"""respawn/max"""'], {}), "(node, 'respawn/max')\n", (26146, 26167), False, 'import rospy\n'), ((26198, 26246), 'rospy.names.ns_join', 'rospy.names.ns_join', (['node', '"""respawn/min_runtime"""'], {}), "(node, 'respawn/min_runtime')\n", (26217, 26246), False, 'import rospy\n'), ((26271, 26313), 'rospy.names.ns_join', 'rospy.names.ns_join', (['node', '"""respawn/delay"""'], {}), "(node, 'respawn/delay')\n", (26290, 26313), False, 'import rospy\n'), ((27700, 27732), 'xmlrpc.client.ServerProxy', 'xmlrpclib.ServerProxy', (['masteruri'], {}), '(masteruri)\n', (27721, 27732), True, 'import xmlrpc.client as xmlrpclib\n'), ((3760, 3796), 'rospy.set_param', 'rospy.set_param', (['"""~autostart"""', '(False)'], {}), "('~autostart', False)\n", (3775, 3796), False, 'import rospy\n'), ((5793, 5846), 'rospy.loginfo', 'rospy.loginfo', (['"""loading launch file: %s"""', 'launch_path'], {}), "('loading launch file: %s', launch_path)\n", (5806, 5846), False, 'import rospy\n'), ((5929, 5946), 'roslaunch.ROSLaunchConfig', 'ROSLaunchConfig', ([], {}), '()\n', (5944, 5946), False, 'from roslaunch import ROSLaunchConfig, XmlLoader\n'), ((5968, 5979), 'roslaunch.XmlLoader', 'XmlLoader', ([], {}), '()\n', (5977, 5979), False, 'from roslaunch import ROSLaunchConfig, XmlLoader\n'), ((7003, 7028), 'multimaster_msgs_fkie.srv.ListDescriptionResponse', 'ListDescriptionResponse', ([], {}), '()\n', (7026, 7028), False, 'from multimaster_msgs_fkie.srv import ListDescription, ListNodes, Task, ListDescriptionResponse, ListNodesResponse\n'), ((15464, 15503), 'rospkg.distro.current_distro_codename', 'rospkg.distro.current_distro_codename', ([], {}), '()\n', (15501, 15503), False, 'import rospkg\n'), ((17259, 17286), 'os.path.isfile', 'os.path.isfile', (['launch_file'], {}), '(launch_file)\n', (17273, 17286), False, 'import os\n'), ((17291, 17318), 'os.path.exists', 'os.path.exists', (['launch_file'], {}), '(launch_file)\n', (17305, 17318), False, 'import os\n'), ((19088, 19134), 'rospy.names.ns_join', 'rospy.names.ns_join', (['item.namespace', 'item.name'], {}), '(item.namespace, item.name)\n', (19107, 19134), False, 'import rospy\n'), ((19435, 19497), 'rospy.loginfo', 'rospy.loginfo', (['"""%s is in exclude list, skip autostart"""', 'n.name'], {}), "('%s is in exclude list, skip autostart', n.name)\n", (19448, 19497), False, 'import rospy\n'), ((22105, 22145), 'rospy.names.ns_join', 'rospy.names.ns_join', (['n.namespace', 'n.name'], {}), '(n.namespace, n.name)\n', (22124, 22145), False, 'import rospy\n'), ((23285, 23364), 'threading.Timer', 'threading.Timer', (['start_delay', 'self._run_node'], {'args': '(cmd, cwd, env, node, False)'}), '(start_delay, self._run_node, args=(cmd, cwd, env, node, False))\n', (23300, 23364), False, 'import threading\n'), ((23436, 23475), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'cwd': 'cwd', 'env': 'env'}), '(cmd, cwd=cwd, env=env)\n', (23452, 23475), False, 'import subprocess\n'), ((23557, 23589), 'threading.Thread', 'threading.Thread', ([], {'target': 'ps.wait'}), '(target=ps.wait)\n', (23573, 23589), False, 'import threading\n'), ((26950, 26989), 'rospkg.distro.current_distro_codename', 'rospkg.distro.current_distro_codename', ([], {}), '()\n', (26987, 26989), False, 'import rospkg\n'), ((27834, 27867), 'xmlrpc.client.MultiCall', 'xmlrpclib.MultiCall', (['param_server'], {}), '(param_server)\n', (27853, 27867), True, 'import xmlrpc.client as xmlrpclib\n'), ((28324, 28357), 'xmlrpc.client.MultiCall', 
'xmlrpclib.MultiCall', (['param_server'], {}), '(param_server)\n', (28343, 28357), True, 'import xmlrpc.client as xmlrpclib\n'), ((9552, 9621), 'threading.Timer', 'threading.Timer', (['delay_service_creation', 'self._timed_service_creation'], {}), '(delay_service_creation, self._timed_service_creation)\n', (9567, 9621), False, 'import threading\n'), ((10901, 10928), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (10926, 10928), False, 'import sys\n'), ((19351, 19391), 'rospy.names.ns_join', 'rospy.names.ns_join', (['n.namespace', 'n.name'], {}), '(n.namespace, n.name)\n', (19370, 19391), False, 'import rospy\n'), ((20731, 20771), 'rospy.names.ns_join', 'rospy.names.ns_join', (['n.namespace', 'n.name'], {}), '(n.namespace, n.name)\n', (20750, 20771), False, 'import rospy\n'), ((23018, 23093), 'threading.Timer', 'threading.Timer', (['(3.0)', 'self._run_node'], {'args': '(cmd, cwd, env, node, autostart)'}), '(3.0, self._run_node, args=(cmd, cwd, env, node, autostart))\n', (23033, 23093), False, 'import threading\n'), ((25708, 25786), 'rospy.logwarn', 'rospy.logwarn', (["('Private for autostart required topic `%s` is ignored!' % topic)"], {}), "('Private for autostart required topic `%s` is ignored!' % topic)\n", (25721, 25786), False, 'import rospy\n'), ((27215, 27236), 'rospkg.get_ros_home', 'rospkg.get_ros_home', ([], {}), '()\n', (27234, 27236), False, 'import rospkg\n'), ((15928, 15983), 'rospy.Service', 'rospy.Service', (['"""~run"""', 'Task', 'self.rosservice_start_node'], {}), "('~run', Task, self.rosservice_start_node)\n", (15941, 15983), False, 'import rospy\n'), ((16068, 16135), 'rospy.Service', 'rospy.Service', (['"""~list_nodes"""', 'ListNodes', 'self.rosservice_list_nodes'], {}), "('~list_nodes', ListNodes, self.rosservice_list_nodes)\n", (16081, 16135), False, 'import rospy\n'), ((20487, 20510), 'os.path.dirname', 'os.path.dirname', (['cmd[0]'], {}), '(cmd[0])\n', (20502, 20510), False, 'import os\n'), ((27300, 27322), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (27320, 27322), False, 'import traceback\n'), ((28058, 28074), 'rospy.get_name', 'rospy.get_name', ([], {}), '()\n', (28072, 28074), False, 'import rospy\n'), ((28565, 28581), 'rospy.get_name', 'rospy.get_name', ([], {}), '()\n', (28579, 28581), False, 'import rospy\n'), ((16211, 16233), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (16231, 16233), False, 'import traceback\n'), ((10384, 10431), 'rospy.logwarn', 'rospy.logwarn', (['"""Error while start %s: %s"""', 'n', 'e'], {}), "('Error while start %s: %s', n, e)\n", (10397, 10431), False, 'import rospy\n'), ((8795, 8807), 'multimaster_msgs_fkie.msg.Capability', 'Capability', ([], {}), '()\n', (8805, 8807), False, 'from multimaster_msgs_fkie.msg import Capability\n'), ((7659, 7693), 'rospy.get_param', 'rospy.get_param', (['"""/mastername"""', '""""""'], {}), "('/mastername', '')\n", (7674, 7693), False, 'import rospy\n'), ((7762, 7796), 'rospy.get_param', 'rospy.get_param', (['"""/mastername"""', '""""""'], {}), "('/mastername', '')\n", (7777, 7796), False, 'import rospy\n')]
|
import numpy as np
from sklearn.neighbors import KNeighborsClassifier
def classify_from_embeddings(model,
train_images,
train_labels,
test_images,
test_labels,
k=5,
distance_metric='mahalanobis',
distance_weighting='distance'):
# Create training embeddings.
train_embeddings = np.array([model.inference(b) for b in train_images])
train_embeddings = train_embeddings.reshape((-1, model.embedding_size))
# Create testing embeddings.
    test_embeddings = np.array([model.inference(b) for b in test_images])
test_embeddings = test_embeddings.reshape((-1, model.embedding_size))
# Train kNN.
classifier = KNeighborsClassifier(n_neighbors=k,
weights=distance_weighting,
algorithm='auto',
metric=distance_metric,
n_jobs=-1)
classifier.fit(train_embeddings, train_labels)
# Get predictions.
test_predictions = classifier.predict(test_embeddings)
# Return accuracy of kNN.
    accuracy = np.mean(test_predictions == test_labels)
return accuracy
|
[
"sklearn.neighbors.KNeighborsClassifier"
] |
[((857, 978), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {'n_neighbors': 'k', 'weights': 'distance_weighting', 'algorithm': '"""auto"""', 'metric': 'distance_metric', 'n_jobs': '(-1)'}), "(n_neighbors=k, weights=distance_weighting, algorithm=\n 'auto', metric=distance_metric, n_jobs=-1)\n", (877, 978), False, 'from sklearn.neighbors import KNeighborsClassifier\n')]
|
import pint
import pytest
@pytest.fixture(scope="session")
def ureg():
"""Application-wide units registry."""
registry = pint.get_application_registry()
# Used by .compat.ixmp, .compat.pyam
registry.define("USD = [USD]")
registry.define("case = [case]")
yield registry
|
[
"pint.get_application_registry",
"pytest.fixture"
] |
[((29, 60), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (43, 60), False, 'import pytest\n'), ((131, 162), 'pint.get_application_registry', 'pint.get_application_registry', ([], {}), '()\n', (160, 162), False, 'import pint\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import jsonlines
import torch
import random
import numpy as np
import _pickle as cPickle
class Flickr30kRetrievalDatabase(torch.utils.data.Dataset):
def __init__(self, imdb_path, dataset_type, test_id_file_path, hard_neg_file_path):
super().__init__()
self._dataset_type = dataset_type
self._load_annotations(imdb_path, test_id_file_path, hard_neg_file_path)
self._metadata = {}
@property
def metadata(self):
return self._metadata
@metadata.setter
def metadata(self, x):
self._metadata = x
def _load_annotations(self, imdb_path, test_id_path, hard_neg_file_path):
if self._dataset_type != "train":
self.imgs = []
with jsonlines.open(imdb_path) as reader:
# Build an index which maps image id with a list of caption annotations.
entries = []
imgid2entry = {}
count = 0
remove_ids = []
if test_id_path:
remove_ids = np.load(test_id_path)
remove_ids = [int(x) for x in remove_ids]
for annotation in reader:
image_id = int(annotation["img_path"].split(".")[0])
if self._dataset_type != "train":
self.imgs.append(image_id)
if self._dataset_type == "train" and int(image_id) in remove_ids:
continue
imgid2entry[image_id] = []
for sentences in annotation["sentences"]:
entries.append({"caption": sentences, "image_id": image_id})
imgid2entry[image_id].append(count)
count += 1
self._entries = entries
self.imgid2entry = imgid2entry
self.image_id_list = [*self.imgid2entry]
if self._dataset_type == "train":
with open(hard_neg_file_path, "rb") as f:
image_info = cPickle.load(f)
for key, value in image_info.items():
setattr(self, key, value)
self.train_imgId2pool = {
imageId: i for i, imageId in enumerate(self.train_image_list)
}
self.db_size = len(self._entries)
def __len__(self):
return self.db_size
def __getitem__(self, idx):
entry = self._entries[idx]
if self._dataset_type != "train":
return entry, self.imgs
image_id = entry["image_id"]
while True:
# sample a random image:
img_id2 = random.choice(self.image_id_list)
if img_id2 != image_id:
break
entry2 = self._entries[random.choice(self.imgid2entry[img_id2])]
# random image wrong
while True:
# sample a random image:
img_id3 = random.choice(self.image_id_list)
if img_id3 != image_id:
break
entry3 = self._entries[self.imgid2entry[img_id3][0]]
if self._dataset_type == "train":
# random hard caption.
rand_img_id_pool = self.train_hard_pool[self.train_imgId2pool[image_id]]
pool_img_idx = int(
rand_img_id_pool[np.random.randint(1, len(rand_img_id_pool))]
)
img_id4 = self.train_image_list[pool_img_idx]
else:
while True:
# sample a random image:
img_id4 = random.choice(self.image_id_list)
if img_id4 != image_id:
break
entry4 = self._entries[random.choice(self.imgid2entry[img_id4])]
return [entry, entry2, entry3, entry4]
|
[
"_pickle.load",
"numpy.load",
"jsonlines.open",
"random.choice"
] |
[((902, 927), 'jsonlines.open', 'jsonlines.open', (['imdb_path'], {}), '(imdb_path)\n', (916, 927), False, 'import jsonlines\n'), ((2703, 2736), 'random.choice', 'random.choice', (['self.image_id_list'], {}), '(self.image_id_list)\n', (2716, 2736), False, 'import random\n'), ((2827, 2867), 'random.choice', 'random.choice', (['self.imgid2entry[img_id2]'], {}), '(self.imgid2entry[img_id2])\n', (2840, 2867), False, 'import random\n'), ((2978, 3011), 'random.choice', 'random.choice', (['self.image_id_list'], {}), '(self.image_id_list)\n', (2991, 3011), False, 'import random\n'), ((3714, 3754), 'random.choice', 'random.choice', (['self.imgid2entry[img_id4]'], {}), '(self.imgid2entry[img_id4])\n', (3727, 3754), False, 'import random\n'), ((1189, 1210), 'numpy.load', 'np.load', (['test_id_path'], {}), '(test_id_path)\n', (1196, 1210), True, 'import numpy as np\n'), ((2104, 2119), '_pickle.load', 'cPickle.load', (['f'], {}), '(f)\n', (2116, 2119), True, 'import _pickle as cPickle\n'), ((3582, 3615), 'random.choice', 'random.choice', (['self.image_id_list'], {}), '(self.image_id_list)\n', (3595, 3615), False, 'import random\n')]
|
""" Script for generating a reversed dictionary """
import argparse
import numpy as np
import sys
def parse_arguments(args_to_parse):
description = "Load a *.npy archive of a dictionary and swap (reverse) the dictionary keys and values around"
parser = argparse.ArgumentParser(description=description)
general = parser.add_argument_group('General options')
general.add_argument(
'-i', '--input-file', type=str, required=True,
help="The file path to the word vector dictionary into *.npy format"
)
general.add_argument(
'-o', '--output-file', type=str, required=True,
help="The target file to save the reversed dictionary"
)
args = parser.parse_args(args_to_parse)
return args
def main(args):
wordvec = np.load(args.input_file).item()
reversed_wordvec = {str(v): k for k, v in wordvec.items()}
np.save(args.output_file, reversed_wordvec)
if __name__=='__main__':
args = parse_arguments(sys.argv[1:])
main(args)
|
[
"numpy.load",
"numpy.save",
"argparse.ArgumentParser"
] |
[((264, 312), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description'}), '(description=description)\n', (287, 312), False, 'import argparse\n'), ((877, 920), 'numpy.save', 'np.save', (['args.output_file', 'reversed_wordvec'], {}), '(args.output_file, reversed_wordvec)\n', (884, 920), True, 'import numpy as np\n'), ((778, 802), 'numpy.load', 'np.load', (['args.input_file'], {}), '(args.input_file)\n', (785, 802), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Joy
toggle = False
def callback(data):
global toggle
twist = Twist()
twist.linear.x = 1.5*data.axes[1]
twist.linear.y = -1.5*data.axes[0]
twist.angular.z = 1.5*data.axes[3]
if(data.buttons[4] == 1):
toggle = True
pub.publish(twist)
elif(toggle == True):
twist.linear.x = 0
twist.linear.y = 0
twist.angular.z = 0
pub.publish(twist)
toggle = False
# Initializes everything
def start():
# publishing to "turtle1/cmd_vel" to control turtle1
global pub
pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
# subscribed to joystick inputs on topic "joy"
rospy.Subscriber("joy", Joy, callback)
# starts the node
rospy.init_node('Xbox360Joy')
rospy.spin()
if __name__ == '__main__':
start()
|
[
"rospy.Subscriber",
"rospy.Publisher",
"geometry_msgs.msg.Twist",
"rospy.init_node",
"rospy.spin"
] |
[((164, 171), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (169, 171), False, 'from geometry_msgs.msg import Twist\n'), ((579, 628), 'rospy.Publisher', 'rospy.Publisher', (['"""/cmd_vel"""', 'Twist'], {'queue_size': '(10)'}), "('/cmd_vel', Twist, queue_size=10)\n", (594, 628), False, 'import rospy\n'), ((678, 716), 'rospy.Subscriber', 'rospy.Subscriber', (['"""joy"""', 'Joy', 'callback'], {}), "('joy', Joy, callback)\n", (694, 716), False, 'import rospy\n'), ((737, 766), 'rospy.init_node', 'rospy.init_node', (['"""Xbox360Joy"""'], {}), "('Xbox360Joy')\n", (752, 766), False, 'import rospy\n'), ((768, 780), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (778, 780), False, 'import rospy\n')]
|
import time
from load_gym import load_gym
import action_helpers as ah
import dl_model_1 as m1
def append_winnings(all_states, all_winnings, winnings):
while len(all_winnings) < len(all_states):
id = len(all_winnings)
player_id = all_states[id].player_to_act
all_winnings.append(winnings[player_id])
def play_hand(gym, model0, rnd_odds0, model1, rnd_odds1, what_if_play):
state = gym.startHand()
all_states = []
all_actions = []
all_winnings = []
hand_saldo = []
while state.status != "hand_finished":
if state.player_to_act == 0:
[action, action_ind], ammount = m1.calculate_action(model0, state, rnd_odds0)
else:
[action, action_ind], ammount = m1.calculate_action(model1, state, rnd_odds1)
is_fake_action = False
# In case of fold we can continue playing...
if (action == ah.ACTION_FOLD[0]) and what_if_play:
print("Player:", state.player_to_act, "wanted to fold - randomizing action ******")
winn = [0, 0]
winn[state.player_to_act] = 0
winn[state.other_player_ind] = state.pot_size
print("Winnings:", winn)
append_winnings(all_states, all_winnings, winn)
if len(hand_saldo) == 0:
hand_saldo = [0, 0]
hand_saldo[state.player_to_act] = state.stack_size - state.start_stack_size
hand_saldo[state.other_player_ind] = -hand_saldo[state.player_to_act]
print("Hand saldo at the moment of first fold:", hand_saldo)
# randomize new action and continue playing...
[action, action_ind], ammount = ah.randomize_action(state.pot_size, state.ammount_to_call, state.stack_size, never_fold=True)
is_fake_action = True
if state.player_to_act == 0:
all_states.append(state)
all_actions.append(action_ind)
print("Calculated action:", action, ammount)
state = gym.act(action, ammount, is_fake_action)
append_winnings(all_states, all_winnings, state.winnings)
print("All winings:", all_winnings)
if len(hand_saldo) == 0:
hand_saldo = [state.saldo[0], state.saldo[1]]
print("Taking state saldo ----")
print("Final hand saldo:", [state.saldo[0], state.saldo[1]])
print("Returned hand saldo:", hand_saldo)
return all_states, all_actions, all_winnings, hand_saldo
def play_manu_hands(gym, model0, rnd_odds0, model1, rnd_odds1, num_hands, what_if_play):
all_states = []
all_actions = []
all_winnings = []
total_saldo = [0, 0]
for i in range(num_hands):
print("")
print("Hand: ", i)
states, actions, winnings, saldo = play_hand(gym, model0, rnd_odds0, model1, rnd_odds1, what_if_play)
total_saldo[0] += saldo[0]
total_saldo[1] += saldo[1]
print("Avg saldo per hand:", round(total_saldo[0] / (i + 1), 2), ",", round(total_saldo[1] / (i + 1), 2))
for st in states:
all_states.append(st)
for act in actions:
all_actions.append(act)
for winn in winnings:
all_winnings.append(winn)
total_saldo[0] /= num_hands
total_saldo[1] /= num_hands
print("")
print("Bot 0 score: ", total_saldo[0], "per hand")
print("Bot 1 score: ", total_saldo[1], "per hand")
print("")
print("Colected ", len(all_states), " data pairs for training.")
return all_states, all_actions, all_winnings, total_saldo
def load_opp_models(model_paths, rnd_odds):
models = []
opp_names = []
for i in range(len(model_paths)):
opp_model = m1.create_model_1()
opp_model.load_weights(model_paths[i])
models.append(opp_model)
if rnd_odds[i] == 100:
opp_names.append("random")
else:
opp_names.append(model_paths[i])
return models, rnd_odds, opp_names
gym = load_gym()
f = open("log.txt", "w")
training_model = m1.create_model_1()
training_model.load_weights("weights0012.h5")
training_model_rnd_odds = 5
#opp_models, rnd_odds, opp_name = load_opp_models(["model_1_lvl_00.h5", "model_1_lvl_00.h5", "model_1_lvl_01.h5", "model_1_lvl_02.h5"], [100, 0, 0, 0])
opp_models, rnd_odds, opp_name = load_opp_models(["model_1_lvl_01.h5"], [0])
num_iters = 50000
num_hands = 4000
what_if_play = True
do_training = True
training_epochs = 30
# Leveling params
saldo_limit_for_next_lvl = 200
next_level = 4
max_opp_models = 20
for i in range(num_iters):
print("\nIteration:", i, "\n", file=f)
f.flush()
states = []
actions = []
winnings = []
#saldos = []
go_to_next_level = True
# Play against opp models
for j in range(len(opp_models)):
print("Playing vs", opp_name[j], file=f)
f.flush()
start_time = time.time()
st, act, winn, saldo = play_manu_hands(gym, training_model, training_model_rnd_odds, opp_models[j], rnd_odds[j], num_hands=num_hands, what_if_play=what_if_play)
elapsed_time = time.time() - start_time
states.append(st)
actions.append(act)
winnings.append(winn)
#saldos.append(saldo)
if saldo[0] < saldo_limit_for_next_lvl:
go_to_next_level = False
print("Played", num_hands, "hands in", round(elapsed_time), "seconds", round(1000 * elapsed_time / num_hands), "ms per hand", file=f)
print("Saldo vs", opp_name[j], saldo, "\n", file=f)
f.flush()
if do_training and go_to_next_level:
file_name = "model_1_lvl_" + str(next_level).zfill(2) + ".h5"
print("Went to next level:", file_name, "\n", file=f)
f.flush()
training_model.save_weights(file_name)
next_level += 1
# Push training model to opponent models
opp_models.append(training_model)
rnd_odds.append(0)
opp_name.append(file_name)
if len(opp_models) > max_opp_models:
opp_models.pop(0)
rnd_odds.pop(0)
opp_name.pop(0)
# Make new training model. Continue where last one left off
training_model = m1.create_model_1()
training_model.load_weights(file_name)
if do_training:
print("Now training\n", file=f)
f.flush()
for j in range(len(states)):
real_epochs = training_epochs
#if (saldos[j][0] < 0):
# real_epochs *= 2
start_time = time.time()
m1.train_model(training_model, states[j], actions[j], winnings[j], batch_size=128, validation_split=0.1, epochs=real_epochs)
elapsed_time = time.time() - start_time
print("Trained", real_epochs, "epochs in", round(elapsed_time), "seconds", round(elapsed_time / real_epochs, 2), "seconds per epoch", file=f)
f.flush()
file_name = "weights" + str(i).zfill(4) + ".h5"
training_model.save_weights(file_name)
print("\nSaved weights:", file_name, file=f)
f.flush()
f.close()
|
[
"dl_model_1.create_model_1",
"load_gym.load_gym",
"time.time",
"dl_model_1.train_model",
"action_helpers.randomize_action",
"dl_model_1.calculate_action"
] |
[((3952, 3962), 'load_gym.load_gym', 'load_gym', ([], {}), '()\n', (3960, 3962), False, 'from load_gym import load_gym\n'), ((4006, 4025), 'dl_model_1.create_model_1', 'm1.create_model_1', ([], {}), '()\n', (4023, 4025), True, 'import dl_model_1 as m1\n'), ((3673, 3692), 'dl_model_1.create_model_1', 'm1.create_model_1', ([], {}), '()\n', (3690, 3692), True, 'import dl_model_1 as m1\n'), ((4852, 4863), 'time.time', 'time.time', ([], {}), '()\n', (4861, 4863), False, 'import time\n'), ((6147, 6166), 'dl_model_1.create_model_1', 'm1.create_model_1', ([], {}), '()\n', (6164, 6166), True, 'import dl_model_1 as m1\n'), ((642, 687), 'dl_model_1.calculate_action', 'm1.calculate_action', (['model0', 'state', 'rnd_odds0'], {}), '(model0, state, rnd_odds0)\n', (661, 687), True, 'import dl_model_1 as m1\n'), ((746, 791), 'dl_model_1.calculate_action', 'm1.calculate_action', (['model1', 'state', 'rnd_odds1'], {}), '(model1, state, rnd_odds1)\n', (765, 791), True, 'import dl_model_1 as m1\n'), ((1689, 1786), 'action_helpers.randomize_action', 'ah.randomize_action', (['state.pot_size', 'state.ammount_to_call', 'state.stack_size'], {'never_fold': '(True)'}), '(state.pot_size, state.ammount_to_call, state.stack_size,\n never_fold=True)\n', (1708, 1786), True, 'import action_helpers as ah\n'), ((5056, 5067), 'time.time', 'time.time', ([], {}), '()\n', (5065, 5067), False, 'import time\n'), ((6470, 6481), 'time.time', 'time.time', ([], {}), '()\n', (6479, 6481), False, 'import time\n'), ((6494, 6622), 'dl_model_1.train_model', 'm1.train_model', (['training_model', 'states[j]', 'actions[j]', 'winnings[j]'], {'batch_size': '(128)', 'validation_split': '(0.1)', 'epochs': 'real_epochs'}), '(training_model, states[j], actions[j], winnings[j],\n batch_size=128, validation_split=0.1, epochs=real_epochs)\n', (6508, 6622), True, 'import dl_model_1 as m1\n'), ((6646, 6657), 'time.time', 'time.time', ([], {}), '()\n', (6655, 6657), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-10-27 18:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('project', '0003_voting'),
]
operations = [
migrations.RemoveField(
model_name='foto',
name='category',
),
migrations.RemoveField(
model_name='foto',
name='designer',
),
migrations.RemoveField(
model_name='foto',
name='tags',
),
migrations.AddField(
model_name='foto',
name='profiles',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='project.Profile'),
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] |
[((319, 377), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""foto"""', 'name': '"""category"""'}), "(model_name='foto', name='category')\n", (341, 377), False, 'from django.db import migrations, models\n'), ((422, 480), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""foto"""', 'name': '"""designer"""'}), "(model_name='foto', name='designer')\n", (444, 480), False, 'from django.db import migrations, models\n'), ((525, 579), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""foto"""', 'name': '"""tags"""'}), "(model_name='foto', name='tags')\n", (547, 579), False, 'from django.db import migrations, models\n'), ((723, 822), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""project.Profile"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='project.Profile')\n", (740, 822), False, 'from django.db import migrations, models\n')]
|
from pathlib import Path
import os
from pysam import VariantFile
import pytest
import yaml
from vembrane import errors
from vembrane import __version__, filter_vcf
CASES = Path(__file__).parent.joinpath("testcases")
def test_version():
assert __version__ == "0.1.0"
@pytest.mark.parametrize(
"testcase", [d for d in os.listdir(CASES) if not d.startswith(".")]
)
def test_filter(testcase):
path = CASES.joinpath(testcase)
with open(path.joinpath("config.yaml")) as config_fp:
config = yaml.load(config_fp, Loader=yaml.FullLoader)
vcf = VariantFile(path.joinpath("test.vcf"))
if "raises" in config:
exception = getattr(errors, config["raises"])
from vembrane import check_filter_expression
with pytest.raises(exception):
# FIXME we have to explicitly check the filter expression here
# until we change from calling filter_vcf
# to actually invoking vembrane.main
check_filter_expression(config.get("filter_expression"))
list(
filter_vcf(
vcf,
config.get("filter_expression"),
config.get("ann_key", "ANN"),
config.get("keep_unmatched", False),
)
)
else:
expected = list(VariantFile(path.joinpath("expected.vcf")))
result = list(
filter_vcf(
vcf,
config.get("filter_expression"),
config.get("ann_key", "ANN"),
config.get("keep_unmatched", False),
)
)
assert result == expected
|
[
"pytest.raises",
"yaml.load",
"os.listdir",
"pathlib.Path"
] |
[((515, 559), 'yaml.load', 'yaml.load', (['config_fp'], {'Loader': 'yaml.FullLoader'}), '(config_fp, Loader=yaml.FullLoader)\n', (524, 559), False, 'import yaml\n'), ((174, 188), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (178, 188), False, 'from pathlib import Path\n'), ((759, 783), 'pytest.raises', 'pytest.raises', (['exception'], {}), '(exception)\n', (772, 783), False, 'import pytest\n'), ((330, 347), 'os.listdir', 'os.listdir', (['CASES'], {}), '(CASES)\n', (340, 347), False, 'import os\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import with_statement
from __future__ import unicode_literals
import pytest
import sys
from sets import Set
from dnscherry.auth.modLdap import Auth, CaFileDontExist
import cherrypy
import logging
import ldap
cfg = {
'auth.ldap.module': 'dnscherry.backend.ldap',
'auth.ldap.groupdn': 'ou=groups,dc=example,dc=org',
'auth.ldap.userdn': 'ou=People,dc=example,dc=org',
'auth.ldap.binddn': 'cn=dnscherry,dc=example,dc=org',
'auth.ldap.bindpassword': 'password',
'auth.ldap.uri': 'ldap://ldap.dnscherry.org:389',
'auth.ldap.ca': './tests/test_env/etc/dnscherry/TEST-cacert.pem',
'auth.ldap.starttls': 'off',
'auth.ldap.checkcert': 'off',
'auth.ldap.user.filter.tmpl': '(uid=%(login)s)',
'auth.ldap.group.filter.tmpl': '(member=%(userdn)s)',
'auth.ldap.dn_user_attr': 'uid',
'auth.ldap.group_attr.member': "%(dn)s",
'auth.ldap.timeout': 10,
}
def syslog_error(msg='', context='',
severity=logging.INFO, traceback=False):
pass
cherrypy.log.error = syslog_error
attr = ['shéll', 'shell', 'cn', 'uid', 'uidNumber', 'gidNumber', 'home', 'userPassword', 'givenName', 'email', 'sn']
class TestError(object):
def testNominal(self):
inv = Auth(cfg, cherrypy.log)
return True
def testConnectSSLNoCheck(self):
cfg2 = cfg.copy()
cfg2['uri'] = 'ldaps://ldap.dnscherry.org:636'
cfg2['checkcert'] = 'off'
inv = Auth(cfg2, cherrypy.log)
ldap = inv._connect()
ldap.simple_bind_s(inv.binddn, inv.bindpassword)
def testConnect(self):
inv = Auth(cfg, cherrypy.log)
ldap = inv._connect()
ldap.simple_bind_s(inv.binddn, inv.bindpassword)
return True
def testConnectSSL(self):
cfg2 = cfg.copy()
cfg2['uri'] = 'ldaps://ldap.dnscherry.org:636'
cfg2['checkcert'] = 'on'
inv = Auth(cfg2, cherrypy.log)
ldap = inv._connect()
ldap.simple_bind_s(inv.binddn, inv.bindpassword)
def testLdapUnavaible(self):
cfg2 = cfg.copy()
cfg2['uri'] = 'ldaps://notaldap:636'
cfg2['checkcert'] = 'on'
inv = Auth(cfg2, cherrypy.log)
try:
ldapc = inv._connect()
ldapc.simple_bind_s(inv.binddn, inv.bindpassword)
except ldap.SERVER_DOWN as e:
return
def testMissingCA(self):
cfg2 = cfg.copy()
cfg2['uri'] = 'ldaps://ldap.dnscherry.org:636'
cfg2['checkcert'] = 'on'
cfg2['ca'] = './test/cfg/not_a_ca.crt'
try:
inv = Auth(cfg2, cherrypy.log)
ldapc = inv._connect()
except CaFileDontExist as e:
return
def testConnectSSLWrongCA(self):
cfg2 = cfg.copy()
cfg2['uri'] = 'ldaps://ldap.dnscherry.org:636'
cfg2['checkcert'] = 'on'
inv = Auth(cfg2, cherrypy.log)
ldapc = inv._connect()
try:
ldapc.simple_bind_s(inv.binddn, inv.bindpassword)
except ldap.SERVER_DOWN as e:
assert e[0]['info'] == 'TLS: hostname does not match CN in peer certificate'
def testConnectStartTLS(self):
cfg2 = cfg.copy()
cfg2['uri'] = 'ldap://ldap.dnscherry.org:390'
cfg2['checkcert'] = 'off'
cfg2['starttls'] = 'on'
cfg2['ca'] = './test/cfg/ca.crt'
inv = Auth(cfg2, cherrypy.log)
ldapc = inv._connect()
ldapc.simple_bind_s(inv.binddn, inv.bindpassword)
def testAuthSuccess(self):
inv = Auth(cfg, cherrypy.log)
ret = inv.check_credentials('jwatson', '<PASSWORD>')
assert ret == True
def testAuthFailure(self):
inv = Auth(cfg, cherrypy.log)
res = inv.check_credentials('notauser', 'password') or inv.check_credentials('jwatson', '<PASSWORD>')
assert res == False
def testMissingParam(self):
cfg2 = {}
return True
try:
inv = Auth(cfg2, cherrypy.log)
except MissingKey:
return
|
[
"dnscherry.auth.modLdap.Auth",
"ldap.simple_bind_s"
] |
[((1219, 1242), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg', 'cherrypy.log'], {}), '(cfg, cherrypy.log)\n', (1223, 1242), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((1430, 1454), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (1434, 1454), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((1493, 1541), 'ldap.simple_bind_s', 'ldap.simple_bind_s', (['inv.binddn', 'inv.bindpassword'], {}), '(inv.binddn, inv.bindpassword)\n', (1511, 1541), False, 'import ldap\n'), ((1584, 1607), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg', 'cherrypy.log'], {}), '(cfg, cherrypy.log)\n', (1588, 1607), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((1646, 1694), 'ldap.simple_bind_s', 'ldap.simple_bind_s', (['inv.binddn', 'inv.bindpassword'], {}), '(inv.binddn, inv.bindpassword)\n', (1664, 1694), False, 'import ldap\n'), ((1874, 1898), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (1878, 1898), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((1937, 1985), 'ldap.simple_bind_s', 'ldap.simple_bind_s', (['inv.binddn', 'inv.bindpassword'], {}), '(inv.binddn, inv.bindpassword)\n', (1955, 1985), False, 'import ldap\n'), ((2138, 2162), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (2142, 2162), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((2834, 2858), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (2838, 2858), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((3329, 3353), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (3333, 3353), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((3489, 3512), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg', 'cherrypy.log'], {}), '(cfg, cherrypy.log)\n', (3493, 3512), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((3647, 3670), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg', 'cherrypy.log'], {}), '(cfg, cherrypy.log)\n', (3651, 3670), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((2552, 2576), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (2556, 2576), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n'), ((3911, 3935), 'dnscherry.auth.modLdap.Auth', 'Auth', (['cfg2', 'cherrypy.log'], {}), '(cfg2, cherrypy.log)\n', (3915, 3935), False, 'from dnscherry.auth.modLdap import Auth, CaFileDontExist\n')]
|
from typing import Any
from gevent.monkey import patch_thread # type: ignore
from doge.common.doge import Executer, Request, Response
from doge.common.utils import import_string
patch_thread()
class BaseFilter(Executer):
def __init__(self, context: Any, _next: Executer):
self.next = _next
def execute(self, req: Request) -> Response:
return self.next.execute(req)
class FilterChain:
def __init__(self, context: Any):
self.context = context
def then(self, executer: Executer) -> Executer:
filters = self.context.url.get_param("filters", [])
for cls in reversed([import_string(f) for f in filters]):
executer = cls(self.context, executer)
return executer
|
[
"doge.common.utils.import_string",
"gevent.monkey.patch_thread"
] |
[((182, 196), 'gevent.monkey.patch_thread', 'patch_thread', ([], {}), '()\n', (194, 196), False, 'from gevent.monkey import patch_thread\n'), ((628, 644), 'doge.common.utils.import_string', 'import_string', (['f'], {}), '(f)\n', (641, 644), False, 'from doge.common.utils import import_string\n')]
|
#!/usr/bin/env python3
############################################################################################
# #
# Program purpose: Adds more number of elements to a deque object from an iterable #
# object. #
# Program Author : <NAME> <<EMAIL>> #
# Creation Date : December 27, 2019 #
# #
############################################################################################
from random import randint
from collections import deque
def create_random_deque(low: int, high: int, size: int) -> deque:
if size < 0:
raise ValueError(f'Invalid size ({size}) for new deque')
return deque([randint(low, high) for _ in range(size)])
def add_nums_to_deque(source_deque: deque, max_ext: int) -> None:
if max_ext < 0:
raise ValueError(f'Invalid max size ({max_ext}) for deque')
return source_deque.extend([randint(0, max_ext) for _ in range(max_ext)])
if __name__ == "__main__":
new_deque = create_random_deque(low=0, high=20, size=5)
print(f'New deque: {new_deque}')
# Extend deque with 5 random data.
add_nums_to_deque(source_deque=new_deque, max_ext=5)
print(f'Extended deque: {new_deque}')
|
[
"random.randint"
] |
[((973, 991), 'random.randint', 'randint', (['low', 'high'], {}), '(low, high)\n', (980, 991), False, 'from random import randint\n'), ((1202, 1221), 'random.randint', 'randint', (['(0)', 'max_ext'], {}), '(0, max_ext)\n', (1209, 1221), False, 'from random import randint\n')]
|
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.urls import reverse
from parts.core.managers import AbstractUpdateViewManager
from parts.core.models import TimeStampModel
class PartsNumber(AbstractUpdateViewManager, TimeStampModel):
SOURCE_CODE = (
("01", "Nissan Japan-01"),
("02", "Nissan Taiwan-02"),
("05", "Nissan Thailand-05"),
("08", "Nissan Indonesia-08"),
)
PARTNUMBER_STATUS = (
("Active", "Active"),
("Depcreated", "Depcreated"),
("Obsolete", "Obsolete"),
("Deactivated", "Deactivated"),
)
partnumber = models.CharField(
max_length=200,
verbose_name=_("Parts Number")
)
source_code = models.CharField(
max_length=200,
verbose_name=_("Source Code"),
choices=SOURCE_CODE
)
bar_code = models.CharField(
max_length=200,
verbose_name=_("Barcode No.")
)
selling_price = models.IntegerField(
verbose_name=_("Selling Price")
)
status = models.CharField(
max_length=200,
verbose_name=_("Status"),
choices=PARTNUMBER_STATUS
)
unit_measure = models.ForeignKey(
"UnitMeasure",
verbose_name=_("Stock/UM"),
on_delete=models.CASCADE
)
class Meta:
db_table = _("partnumbers")
verbose_name = _("Part Number")
verbose_name_plural = _("Parts Number")
ordering = ["id"]
def __str__(self):
return self.partnumber
def get_absolute_url(self):
return reverse('parts_number_read_view', args=[str(self.id)])
# !Find way to handle this feat in template
@property
def add_leading_zero(self):
return str(self.selling_price) + ".00"
class UnitMeasure(AbstractUpdateViewManager, TimeStampModel):
um = models.CharField(
max_length=20,
verbose_name=_("Unit of Measure")
)
class Meta:
db_table = _("um")
verbose_name = _("Unit of Measure")
verbose_name_plural = _("Unit of Measures")
ordering = ["id"]
def __str__(self):
return self.um
class PartNumberClass(AbstractUpdateViewManager, TimeStampModel):
class_name = models.CharField(
max_length=20,
verbose_name=_("Class name")
)
charge_type = models.CharField(
max_length=20,
verbose_name=_("Charge Type")
)
class Meta:
db_table = _("partnumber_class")
verbose_name = _("Part Number Class")
verbose_name_plural = _("Part Number Classes")
ordering = ["id"]
def __str__(self):
return self.class_name.upper()
def get_absolute_url(self):
return reverse('item_class_read', args=[str(self.id)])
|
[
"django.utils.translation.gettext_lazy"
] |
[((1363, 1379), 'django.utils.translation.gettext_lazy', '_', (['"""partnumbers"""'], {}), "('partnumbers')\n", (1364, 1379), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1403, 1419), 'django.utils.translation.gettext_lazy', '_', (['"""Part Number"""'], {}), "('Part Number')\n", (1404, 1419), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1450, 1467), 'django.utils.translation.gettext_lazy', '_', (['"""Parts Number"""'], {}), "('Parts Number')\n", (1451, 1467), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1993, 2000), 'django.utils.translation.gettext_lazy', '_', (['"""um"""'], {}), "('um')\n", (1994, 2000), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2024, 2044), 'django.utils.translation.gettext_lazy', '_', (['"""Unit of Measure"""'], {}), "('Unit of Measure')\n", (2025, 2044), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2075, 2096), 'django.utils.translation.gettext_lazy', '_', (['"""Unit of Measures"""'], {}), "('Unit of Measures')\n", (2076, 2096), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2480, 2501), 'django.utils.translation.gettext_lazy', '_', (['"""partnumber_class"""'], {}), "('partnumber_class')\n", (2481, 2501), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2525, 2547), 'django.utils.translation.gettext_lazy', '_', (['"""Part Number Class"""'], {}), "('Part Number Class')\n", (2526, 2547), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2578, 2602), 'django.utils.translation.gettext_lazy', '_', (['"""Part Number Classes"""'], {}), "('Part Number Classes')\n", (2579, 2602), True, 'from django.utils.translation import gettext_lazy as _\n'), ((715, 732), 'django.utils.translation.gettext_lazy', '_', (['"""Parts Number"""'], {}), "('Parts Number')\n", (716, 732), True, 'from django.utils.translation import gettext_lazy as _\n'), ((820, 836), 'django.utils.translation.gettext_lazy', '_', (['"""Source Code"""'], {}), "('Source Code')\n", (821, 836), True, 'from django.utils.translation import gettext_lazy as _\n'), ((950, 966), 'django.utils.translation.gettext_lazy', '_', (['"""Barcode No."""'], {}), "('Barcode No.')\n", (951, 966), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1036, 1054), 'django.utils.translation.gettext_lazy', '_', (['"""Selling Price"""'], {}), "('Selling Price')\n", (1037, 1054), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1138, 1149), 'django.utils.translation.gettext_lazy', '_', (['"""Status"""'], {}), "('Status')\n", (1139, 1149), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1273, 1286), 'django.utils.translation.gettext_lazy', '_', (['"""Stock/UM"""'], {}), "('Stock/UM')\n", (1274, 1286), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1930, 1950), 'django.utils.translation.gettext_lazy', '_', (['"""Unit of Measure"""'], {}), "('Unit of Measure')\n", (1931, 1950), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2318, 2333), 'django.utils.translation.gettext_lazy', '_', (['"""Class name"""'], {}), "('Class name')\n", (2319, 2333), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2421, 2437), 'django.utils.translation.gettext_lazy', '_', (['"""Charge Type"""'], {}), "('Charge Type')\n", (2422, 2437), True, 'from django.utils.translation import gettext_lazy as _\n')]
|
# -*- encoding: utf-8 -*- #
__author__ = 'FeiZhang <EMAIL>'
__date__ = '2019-07-20'
from mysqlconn import MyConn
from settings import DB_CONFIG
from gendocx import gen_doc, doc_append_table
def main():
"""
entry point
:return:
"""
    conn = None
    try:
        my_conn = MyConn(DB_CONFIG)
        conn = my_conn.conn
with conn.cursor() as cursor:
cursor.execute("SHOW TABLES")
tb_list = cursor.fetchall()
doc = gen_doc('数据库表结构说明', 'FEIZHANG')
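            # Build one table of column metadata per database table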
for tb in tb_list:
print(tb)
tb_name = tb[0]
cursor.execute("SHOW FULL FIELDS FROM {}".format(tb_name))
# Field | Type | Collation | Null | Key | Default | Extra | Privileges | Comment
tb_rs = cursor.fetchall()
# get table comment info
cursor.execute("SELECT table_comment FROM INFORMATION_SCHEMA.TABLES WHERE table_schema='{}'\
AND table_name='{}'".format(DB_CONFIG['db'], tb_name))
tb_comment = cursor.fetchone()[0]
# print("列名", "数据类型", "Null", "Key", "Default", "栏位说明")
# for r in tb_rs:
# print(r[0], r[1], r[3], r[4], r[5], r[8])
doc_append_table(doc, tb_rs, tb_name, tb_comment)
output_file_name = "outputdoc/{}.docx".format(DB_CONFIG['db'])
            doc.save(output_file_name)
    finally:
        if conn is not None:
            conn.close()
if __name__ == '__main__':
main()
|
[
"gendocx.doc_append_table",
"mysqlconn.MyConn",
"gendocx.gen_doc"
] |
[((277, 294), 'mysqlconn.MyConn', 'MyConn', (['DB_CONFIG'], {}), '(DB_CONFIG)\n', (283, 294), False, 'from mysqlconn import MyConn\n'), ((461, 492), 'gendocx.gen_doc', 'gen_doc', (['"""数据库表结构说明"""', '"""FEIZHANG"""'], {}), "('数据库表结构说明', 'FEIZHANG')\n", (468, 492), False, 'from gendocx import gen_doc, doc_append_table\n'), ((1254, 1303), 'gendocx.doc_append_table', 'doc_append_table', (['doc', 'tb_rs', 'tb_name', 'tb_comment'], {}), '(doc, tb_rs, tb_name, tb_comment)\n', (1270, 1303), False, 'from gendocx import gen_doc, doc_append_table\n')]
|
#!/usr/bin/python3
#-*- coding: utf8 -*-
# @author : <NAME>
"""
Génère une page HTML.
"""
pass
# Imports needed depending on the context
# os is used to create a directory, here the one that will hold the HTML files
import os
# Imports needed depending on the context
# Used to generate the HTML files
if __name__ == "__main__":
from HTML_constantes import *
else:
from pyPack.HTML_constantes import *
############################################################################################################
# Generate the pagesWeb/index.html file
############################################################################################################
def main():
"""
Fonction principale qui sera appelée pour générer l'ensemble des pages HTML.
"""
pass
# On remonte d'un niveau
#os.chdir("../")
    # Create the folder that will hold the HTML files if it does not already exist
if not os.path.exists("./pagesWeb/"):
os.mkdir("./pagesWeb/")
    # Open, for writing, the HTML file that will receive the code
indexHTML = open("./pagesWeb/index.html", "w")
    # Add the doctype and the head
for elt in docTypeHeadStyle:
indexHTML.writelines(elt)
    # Open the body
indexHTML.writelines(["<body>\n"])
    # Add the navigation bar elements
for elt in barreDeNavigation:
indexHTML.writelines(elt)
    # Add a page-specific section
indexHTML.writelines([
"""
<h2>ACCUEIL</h2>\n
<p> Le projet consiste à récupérer tous les exercices des sujets DNB en partage sur le site de l'APMEP<br>
Pour le moment le test se fait sur le premier exo du sujet de polynésie 2020<br><br>
Pour générer la documentation il faut installer le paquet python <a href="https://pdoc3.github.io/pdoc/" target="_blank"> pdoc3</a>
</p>
<h3>Auteur</h3>
<p><NAME></p>
<h3> Installation et utilisation </h3>
<p>La procédure a été testé sous <b>Linux</b> uniquement.
<ul>
<li>Télécharger cette <a href="https://github.com/slozano54/projetDNB/archive/master.zip"> archive zip</a></li>
<li>Décompresser l'archive</li>
<li>Déposer un sujet au format *.tex dans le dossier <b>sujets_corrections_tex</b></li>
<li>Lancer le script python <b>programmePrincipal.py</b> à la racine du projet.</li>
<li>Sous <b>Visual Studio Code</b> lancer Live server et aller dans le dossier <b>PagesWeb</b> et lancer index.html</li>
</ul>
</p>
<h3> Notes </h3>
<p>
Les fichiers de la documentations sont générés dans le dossier <b>docs/pyPack</b><br><br>
Les fichiers HTML sont générés dans le dossier <b>pagesWeb</b><br><br>
<a class="navButton" href="../exercices_corrections_pdf/" target="_blank"><span>voir les fichiers pdf</span></a>
<a class="navButton" href="../exercices_corrections_pdf_crop/" target="_blank"><span>voir les fichiers pdf ajustés</span></a>
<a class="navButton" href="../exercices_corrections_png/" target="_blank"><span>voir les fichiers png ajustés</span></a>
<br>
<a class="navButton" href="https://www.overleaf.com/docs?snip_uri=https://mathslozano.fr/mathaleaProjetDNB/tex_a_compiler/dnb_2013_04_pondichery_1.tex&engine=latex_dvipdf" target="_blank"><span>compiler un fichier tex sur overleaf</span></a>
<a class="navButton" href="../tex_a_compiler/dnb_2013_04_pondichery_1.tex" target="_blank"><span>télécharger le fichier source tex </span></a>
</p>
<h3> License <a href="https://choosealicense.com/licenses/mit/" target="_blank">MIT</a><h3>
"""
])
    # Close the body
indexHTML.writelines([
"""
</body>\n
</html>\n
"""
])
    # Close the file
indexHTML.close()
if __name__ == "__main__":
main()
|
[
"os.mkdir",
"os.path.exists"
] |
[((964, 993), 'os.path.exists', 'os.path.exists', (['"""./pagesWeb/"""'], {}), "('./pagesWeb/')\n", (978, 993), False, 'import os\n'), ((1003, 1026), 'os.mkdir', 'os.mkdir', (['"""./pagesWeb/"""'], {}), "('./pagesWeb/')\n", (1011, 1026), False, 'import os\n')]
|
from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404
from database.models import user, restaurant, address
from helper import parse_req_body, userTypeChecker
import django.views
# Create your views here.
def home(request):
my_user = None
# makes sure user is deliverer
try:
my_user = request.user
userIs = userTypeChecker(my_user)
if userIs(user.Deliverer) != True:
response = redirect('home-nexus')
return response
except Exception as e:
print(e)
response = redirect('home-nexus')
return response
except:
response = redirect('home-nexus')
return response
my_deliverer = user.Deliverer.objects.get(user=my_user)
registered = len(user.Deliverer.objects.filter(user=my_user).exclude(restaurant__isnull=True)) > 0 and my_deliverer.status == 'H'
if registered != True: # if not registered
return redirect('deliverer-register')
if request.method == "POST": # If bidded
body = parse_req_body(request.body)
amount = body['amount']
order_id = body['orderId']
order = restaurant.Order.objects.get(id=order_id)
new_bid = restaurant.DeliveryBid(deliverer=my_deliverer, win=False, price=amount, order=order)
new_bid.save()
unchosen_orders = restaurant.Order.objects.filter(chose_bid=False).filter(restaurant = my_deliverer.restaurant)
pending_bids = restaurant.DeliveryBid.objects.filter(deliverer=my_deliverer).filter(win=False)
won_bids = restaurant.DeliveryBid.objects.filter(deliverer=my_deliverer).filter(win=True)
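    # Keep only the unchosen orders that this deliverer has not already bid on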
open_orders = []
for order in unchosen_orders:
if len(pending_bids.filter(order=order)) == 0:
open_orders.append(order)
print(open_orders)
print(pending_bids)
print(won_bids)
context = {
'warnings': my_deliverer.warnings,
'openOrders': open_orders,
'pendingBids': pending_bids,
'winningBids': won_bids
}
return render(request, 'deliverer/home.html', context=context)
def register(request):
my_user = None
try:
my_user = request.user
isType = userTypeChecker(my_user)
if isType(user.Deliverer) != True:
response = redirect('home-nexus')
return response
except:
response = redirect('home-nexus')
return response
my_deliverer = user.Deliverer.objects.get(user=my_user)
registered = len(user.Deliverer.objects.filter(user=my_user).exclude(restaurant__isnull=True)) > 0 and my_deliverer.status == 'H'
if registered:
return redirect('deliverer-home')
    registering = my_deliverer.restaurant is None and my_deliverer.status != 'H'
restaurants = restaurant.Restaurant.objects.all()
context={'restaurants': restaurants, 'registering': registering}
if request.method == "POST":
body = parse_req_body(request.body)
resturant_id = int(body['id'])
reg_resturant = restaurant.Restaurant.objects.get(id=resturant_id)
my_deliverer.restaurant = reg_resturant
my_deliverer.save()
context['registering'] = False
return render(request, 'deliverer/register.html', context=context)
def order(request, pk):
my_user = request.user
order = get_object_or_404(restaurant.Order, pk=pk)
customer = order.customer
customer_address = address.CustomerAddress.objects.get(customer=customer)
my_resturant = user.Deliverer.objects.get(user=my_user).restaurant
restaurant_address = address.RestaurantAddress.objects.get(restaurant=my_resturant)
if(request.method == "POST"):
body = parse_req_body(request.body)
rating = int(body['rating'])
        if 0 <= rating <= 5:
order.status = 'D'
order.customer_rating = rating
try:
customer_status = restaurant.CustomerStatus.objects.get(customer=customer, restaurant=my_resturant)
except:
customer_status = restaurant.CustomerStatus(customer=customer, restaurant=my_resturant, status='N')
customer_status.save()
customer_status.update_status(rating)
order.save()
rating = order.delivery_rating
return render(request, 'deliverer/order.html', context={
'order': order,
'customerAddress': customer_address,
'restaurantAddress': restaurant_address
})
|
[
"helper.parse_req_body",
"database.models.restaurant.Order.objects.filter",
"database.models.restaurant.DeliveryBid.objects.filter",
"database.models.user.Deliverer.objects.get",
"helper.userTypeChecker",
"django.shortcuts.redirect",
"database.models.restaurant.Restaurant.objects.all",
"database.models.address.RestaurantAddress.objects.get",
"database.models.restaurant.DeliveryBid",
"django.shortcuts.get_object_or_404",
"database.models.address.CustomerAddress.objects.get",
"database.models.restaurant.CustomerStatus.objects.get",
"database.models.user.Deliverer.objects.filter",
"django.shortcuts.render",
"database.models.restaurant.Order.objects.get",
"database.models.restaurant.Restaurant.objects.get",
"database.models.restaurant.CustomerStatus"
] |
[((715, 755), 'database.models.user.Deliverer.objects.get', 'user.Deliverer.objects.get', ([], {'user': 'my_user'}), '(user=my_user)\n', (741, 755), False, 'from database.models import user, restaurant, address\n'), ((2034, 2089), 'django.shortcuts.render', 'render', (['request', '"""deliverer/home.html"""'], {'context': 'context'}), "(request, 'deliverer/home.html', context=context)\n", (2040, 2089), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((2431, 2471), 'database.models.user.Deliverer.objects.get', 'user.Deliverer.objects.get', ([], {'user': 'my_user'}), '(user=my_user)\n', (2457, 2471), False, 'from database.models import user, restaurant, address\n'), ((2767, 2802), 'database.models.restaurant.Restaurant.objects.all', 'restaurant.Restaurant.objects.all', ([], {}), '()\n', (2800, 2802), False, 'from database.models import user, restaurant, address\n'), ((3190, 3249), 'django.shortcuts.render', 'render', (['request', '"""deliverer/register.html"""'], {'context': 'context'}), "(request, 'deliverer/register.html', context=context)\n", (3196, 3249), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((3314, 3356), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['restaurant.Order'], {'pk': 'pk'}), '(restaurant.Order, pk=pk)\n', (3331, 3356), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((3410, 3464), 'database.models.address.CustomerAddress.objects.get', 'address.CustomerAddress.objects.get', ([], {'customer': 'customer'}), '(customer=customer)\n', (3445, 3464), False, 'from database.models import user, restaurant, address\n'), ((3562, 3624), 'database.models.address.RestaurantAddress.objects.get', 'address.RestaurantAddress.objects.get', ([], {'restaurant': 'my_resturant'}), '(restaurant=my_resturant)\n', (3599, 3624), False, 'from database.models import user, restaurant, address\n'), ((4283, 4434), 'django.shortcuts.render', 'render', (['request', '"""deliverer/order.html"""'], {'context': "{'order': order, 'customerAddress': customer_address, 'restaurantAddress':\n restaurant_address}"}), "(request, 'deliverer/order.html', context={'order': order,\n 'customerAddress': customer_address, 'restaurantAddress':\n restaurant_address})\n", (4289, 4434), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((365, 389), 'helper.userTypeChecker', 'userTypeChecker', (['my_user'], {}), '(my_user)\n', (380, 389), False, 'from helper import parse_req_body, userTypeChecker\n'), ((958, 988), 'django.shortcuts.redirect', 'redirect', (['"""deliverer-register"""'], {}), "('deliverer-register')\n", (966, 988), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((1050, 1078), 'helper.parse_req_body', 'parse_req_body', (['request.body'], {}), '(request.body)\n', (1064, 1078), False, 'from helper import parse_req_body, userTypeChecker\n'), ((1162, 1203), 'database.models.restaurant.Order.objects.get', 'restaurant.Order.objects.get', ([], {'id': 'order_id'}), '(id=order_id)\n', (1190, 1203), False, 'from database.models import user, restaurant, address\n'), ((1222, 1310), 'database.models.restaurant.DeliveryBid', 'restaurant.DeliveryBid', ([], {'deliverer': 'my_deliverer', 'win': '(False)', 'price': 'amount', 'order': 'order'}), '(deliverer=my_deliverer, win=False, price=amount,\n order=order)\n', (1244, 1310), False, 'from database.models import user, restaurant, 
address\n'), ((2191, 2215), 'helper.userTypeChecker', 'userTypeChecker', (['my_user'], {}), '(my_user)\n', (2206, 2215), False, 'from helper import parse_req_body, userTypeChecker\n'), ((2640, 2666), 'django.shortcuts.redirect', 'redirect', (['"""deliverer-home"""'], {}), "('deliverer-home')\n", (2648, 2666), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((2921, 2949), 'helper.parse_req_body', 'parse_req_body', (['request.body'], {}), '(request.body)\n', (2935, 2949), False, 'from helper import parse_req_body, userTypeChecker\n'), ((3013, 3063), 'database.models.restaurant.Restaurant.objects.get', 'restaurant.Restaurant.objects.get', ([], {'id': 'resturant_id'}), '(id=resturant_id)\n', (3046, 3063), False, 'from database.models import user, restaurant, address\n'), ((3485, 3525), 'database.models.user.Deliverer.objects.get', 'user.Deliverer.objects.get', ([], {'user': 'my_user'}), '(user=my_user)\n', (3511, 3525), False, 'from database.models import user, restaurant, address\n'), ((3675, 3703), 'helper.parse_req_body', 'parse_req_body', (['request.body'], {}), '(request.body)\n', (3689, 3703), False, 'from helper import parse_req_body, userTypeChecker\n'), ((456, 478), 'django.shortcuts.redirect', 'redirect', (['"""home-nexus"""'], {}), "('home-nexus')\n", (464, 478), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((570, 592), 'django.shortcuts.redirect', 'redirect', (['"""home-nexus"""'], {}), "('home-nexus')\n", (578, 592), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((648, 670), 'django.shortcuts.redirect', 'redirect', (['"""home-nexus"""'], {}), "('home-nexus')\n", (656, 670), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((1352, 1400), 'database.models.restaurant.Order.objects.filter', 'restaurant.Order.objects.filter', ([], {'chose_bid': '(False)'}), '(chose_bid=False)\n', (1383, 1400), False, 'from database.models import user, restaurant, address\n'), ((1465, 1526), 'database.models.restaurant.DeliveryBid.objects.filter', 'restaurant.DeliveryBid.objects.filter', ([], {'deliverer': 'my_deliverer'}), '(deliverer=my_deliverer)\n', (1502, 1526), False, 'from database.models import user, restaurant, address\n'), ((1560, 1621), 'database.models.restaurant.DeliveryBid.objects.filter', 'restaurant.DeliveryBid.objects.filter', ([], {'deliverer': 'my_deliverer'}), '(deliverer=my_deliverer)\n', (1597, 1621), False, 'from database.models import user, restaurant, address\n'), ((2282, 2304), 'django.shortcuts.redirect', 'redirect', (['"""home-nexus"""'], {}), "('home-nexus')\n", (2290, 2304), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((2364, 2386), 'django.shortcuts.redirect', 'redirect', (['"""home-nexus"""'], {}), "('home-nexus')\n", (2372, 2386), False, 'from django.shortcuts import render, redirect, get_list_or_404, get_object_or_404\n'), ((3905, 3991), 'database.models.restaurant.CustomerStatus.objects.get', 'restaurant.CustomerStatus.objects.get', ([], {'customer': 'customer', 'restaurant': 'my_resturant'}), '(customer=customer, restaurant=\n my_resturant)\n', (3942, 3991), False, 'from database.models import user, restaurant, address\n'), ((4041, 4126), 'database.models.restaurant.CustomerStatus', 'restaurant.CustomerStatus', ([], {'customer': 'customer', 'restaurant': 'my_resturant', 'status': '"""N"""'}), "(customer=customer, 
restaurant=my_resturant,\n status='N')\n", (4066, 4126), False, 'from database.models import user, restaurant, address\n'), ((782, 825), 'database.models.user.Deliverer.objects.filter', 'user.Deliverer.objects.filter', ([], {'user': 'my_user'}), '(user=my_user)\n', (811, 825), False, 'from database.models import user, restaurant, address\n'), ((2493, 2536), 'database.models.user.Deliverer.objects.filter', 'user.Deliverer.objects.filter', ([], {'user': 'my_user'}), '(user=my_user)\n', (2522, 2536), False, 'from database.models import user, restaurant, address\n')]
|
from fastapi import APIRouter
router = APIRouter()
@router.get("/items2/{item_id}")
async def read_item2(item_id: int):
return {"item_id": item_id}
|
[
"fastapi.APIRouter"
] |
[((40, 51), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (49, 51), False, 'from fastapi import APIRouter\n')]
|
# Scrapy settings for edzapp project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/topics/settings.html
#
from edzapp import constants
BOT_NAME = 'edzapp'
BOT_VERSION = '1.0'
SPIDER_MODULES = ['edzapp.spiders']
NEWSPIDER_MODULE = 'edzapp.spiders'
USER_AGENT = 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'
ITEM_PIPELINES = [
# 'edzapp.pipelines.DjangoJobPipeline',
]
DOWNLOAD_DELAY = 3
ROLE = constants.ROLES['TEACHER/CLASSIFIED']
PARSE_JOB_PAGES = True
import sys
import os
# Directory containing django project
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'django_edzapp'))
# Set the django settings environment variable
os.environ['DJANGO_SETTINGS_MODULE'] = 'django_edzapp.settings'
try:
from local_settings import *
except ImportError:
pass
|
[
"os.path.realpath",
"os.path.join"
] |
[((728, 754), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (744, 754), False, 'import os\n'), ((776, 819), 'os.path.join', 'os.path.join', (['PROJECT_ROOT', '"""django_edzapp"""'], {}), "(PROJECT_ROOT, 'django_edzapp')\n", (788, 819), False, 'import os\n')]
|
import numpy as np
import cv2
import glob
import sys
sys.path.append("../")
import calipy
Rt_path = "./CameraData/Rt.json"
TVRt_path = "./CameraData/TVRt.json"
Rt_back_to_front = calipy.Transform(Rt_path).inv()
Rt_TV_to_back = calipy.Transform(TVRt_path)
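# Chain the transforms: TV -> back camera -> front camera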
Rt_TV_to_front = Rt_back_to_front.dot(Rt_TV_to_back)
#origin = calipy.Transform()
#ren = calipy.vtkRenderer()
#ren.addCamera("front_cam", Rt_TV_to_front.inv().R, Rt_TV_to_front.inv().T, cs=0.3)
#ren.addCamera("back_cam", Rt_TV_to_back.inv().R, Rt_TV_to_back.inv().T, cs=0.5)
#TV_width = 1.70
#TV_height = 0.95
#objectPoints = np.array( [ [0,0,0],
# [TV_width, 0, 0],
# [0, TV_height,0],
# [TV_width, TV_height, 0] ] ).astype(np.float64)
#tvpoints_on_camera = np.transpose(objectPoints)
#ren.addLines("TV", np.transpose(tvpoints_on_camera), [0,1,3,2,0])
##ren.addCamera("TV_origin", TVRt.R, TVRt.T, cs=0.5)
#ren.render()
#exit()
origin = calipy.Transform()
ren = calipy.vtkRenderer()
ren.addCamera("front_cam", cs=0.5)
ren.addCamera("back_cam", Rt_back_to_front.R, Rt_back_to_front.T, cs=0.5)
TV_width = 1.70
TV_height = 0.95
objectPoints = np.array( [ [0,0,0],
[TV_width, 0, 0],
[0, TV_height,0],
[TV_width, TV_height, 0] ] ).astype(np.float64)
tvpoints_on_camera = Rt_TV_to_front.move(np.transpose(objectPoints))
ren.addLines("TV", np.transpose(tvpoints_on_camera), [0,1,3,2,0])
#ren.addCamera("TV_origin", TVRt.R, TVRt.T, cs=0.5)
ren.render()
Rt_back_to_front.saveJson("./CameraData/Rt_back_to_front.json")
Rt_TV_to_front.saveJson("./CameraData/Rt_TV_to_front.json")
|
[
"sys.path.append",
"numpy.transpose",
"calipy.Transform",
"calipy.vtkRenderer",
"numpy.array"
] |
[((53, 75), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (68, 75), False, 'import sys\n'), ((229, 256), 'calipy.Transform', 'calipy.Transform', (['TVRt_path'], {}), '(TVRt_path)\n', (245, 256), False, 'import calipy\n'), ((950, 968), 'calipy.Transform', 'calipy.Transform', ([], {}), '()\n', (966, 968), False, 'import calipy\n'), ((975, 995), 'calipy.vtkRenderer', 'calipy.vtkRenderer', ([], {}), '()\n', (993, 995), False, 'import calipy\n'), ((1353, 1379), 'numpy.transpose', 'np.transpose', (['objectPoints'], {}), '(objectPoints)\n', (1365, 1379), True, 'import numpy as np\n'), ((1400, 1432), 'numpy.transpose', 'np.transpose', (['tvpoints_on_camera'], {}), '(tvpoints_on_camera)\n', (1412, 1432), True, 'import numpy as np\n'), ((181, 206), 'calipy.Transform', 'calipy.Transform', (['Rt_path'], {}), '(Rt_path)\n', (197, 206), False, 'import calipy\n'), ((1155, 1243), 'numpy.array', 'np.array', (['[[0, 0, 0], [TV_width, 0, 0], [0, TV_height, 0], [TV_width, TV_height, 0]]'], {}), '([[0, 0, 0], [TV_width, 0, 0], [0, TV_height, 0], [TV_width,\n TV_height, 0]])\n', (1163, 1243), True, 'import numpy as np\n')]
|
import json
from asyncio import create_task
from pathlib import Path
from redbot.core.bot import Red
from .pfpimgen import PfpImgen
with open(Path(__file__).parent / "info.json") as fp:
__red_end_user_data_statement__ = json.load(fp)["end_user_data_statement"]
# from https://github.com/phenom4n4n/Fixator10-Cogs/blob/V3/adminutils/__init__.py
async def setup_after_ready(bot):
await bot.wait_until_red_ready()
cog = PfpImgen(bot)
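    # Rename top-level commands/aliases that clash with existing ones by prefixing "i"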
for name, command in cog.all_commands.items():
if not command.parent:
if bot.get_command(name):
command.name = f"i{command.name}"
for alias in command.aliases:
if bot.get_command(alias):
command.aliases[command.aliases.index(alias)] = f"i{alias}"
bot.add_cog(cog)
def setup(bot):
create_task(setup_after_ready(bot))
|
[
"pathlib.Path",
"json.load"
] |
[((227, 240), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (236, 240), False, 'import json\n'), ((145, 159), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (149, 159), False, 'from pathlib import Path\n')]
|
from datetime import date
from django.test import TestCase
from django.shortcuts import reverse
from django_dynamic_fixture import G
from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, \
get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, \
add_to_group, clear_mailbox, get_emails
from wildlifelicensing.apps.applications.tests import helpers as app_helpers
from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment
from wildlifelicensing.apps.main.helpers import is_assessor, get_user_assessor_groups
from wildlifelicensing.apps.main.models import AssessorGroup
from ledger.accounts.models import EmailUser
class TestViewAccess(TestCase):
fixtures = ['licences.json', 'conditions.json', 'returns.json']
def setUp(self):
self.client = SocialClient()
self.user = get_or_create_default_customer()
self.officer = get_or_create_default_officer()
self.application = app_helpers.create_and_lodge_application(self.user, **{
'data': {
'title': 'My Application'
}
})
self.assessment = app_helpers.get_or_create_assessment(self.application)
self.condition = Condition.objects.first()
self.assessment_condition = AssessmentCondition.objects.create(assessment=self.assessment,
condition=self.condition,
order=1)
self.urls_get = [
reverse('wl_applications:enter_conditions', args=[self.application.pk]),
reverse('wl_applications:search_conditions')
]
self.urls_post = [
{
'url': reverse('wl_applications:create_condition', args=[self.application.pk]),
'data': {
'code': '123488374',
'text': 'condition text'
}
},
{
'url': reverse('wl_applications:set_assessment_condition_state'),
'data': {
'assessmentConditionID': self.assessment_condition.pk,
'acceptanceStatus': 'accepted',
}
},
{
'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
def tearDown(self):
self.client.logout()
def test_customer_access(self):
"""
A Customer cannot access any URL
"""
# not logged-in
for url in self.urls_get:
response = self.client.get(url, follow=True)
self.assertTrue(is_login_page(response))
for url in self.urls_post:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertTrue(is_login_page(response))
# logged-in. Should throw a 403 or redirect to login
self.client.login(self.user.email)
for url in self.urls_get:
response = self.client.get(url, follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_customer'), status_code=302,
target_status_code=200)
for url in self.urls_post:
response = self.client.post(url['url'], url['data'], follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_customer'), status_code=302,
target_status_code=200)
def test_officer_access(self):
"""
Officer should be able to access any views
"""
self.client.login(self.officer.email)
for url in self.urls_get:
response = self.client.get(url, follow=False)
self.assertEqual(200, response.status_code)
for url in self.urls_post:
response = self.client.post(url['url'], url['data'], follow=True)
            self.assertEqual(200, response.status_code)
def test_assessor_access_limited(self):
"""
        Test that an assessor cannot edit an assessment that doesn't belong to their group.
        All assessors can search conditions.
"""
assessor = get_or_create_default_assessor()
self.client.login(assessor.email)
# This assessor doesn't belong to a group
self.assertTrue(is_assessor(assessor))
self.assertFalse(get_user_assessor_groups(assessor))
# forbidden
urls_get_forbidden = [
reverse('wl_applications:enter_conditions', args=[self.application.pk]),
reverse('wl_applications:enter_conditions_assessor', args=[self.application.pk, self.assessment.pk]),
]
urls_post_forbidden = [
{
'url': reverse('wl_applications:create_condition', args=[self.application.pk]),
'data': {
'code': '123488374',
'text': 'condition text'
}
},
{
'url': reverse('wl_applications:set_assessment_condition_state'),
'data': {
'assessmentConditionID': self.assessment_condition.pk,
'acceptanceStatus': 'accepted',
}
},
{
'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
{
'url': reverse('wl_applications:enter_conditions_assessor',
args=[self.application.pk, self.assessment.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
# Allowed
urls_get_allowed = [
reverse('wl_applications:search_conditions')
]
urls_post_allowed = [
]
for url in urls_get_forbidden:
response = self.client.get(url, follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_post_forbidden:
response = self.client.post(url['url'], url['data'], follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_get_allowed:
response = self.client.get(url, follow=True)
self.assertEqual(200, response.status_code)
for url in urls_post_allowed:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertEqual(200, response.status_code)
def test_assessor_access_normal(self):
"""
Test that an assessor can edit an assessment that belongs to their group
"""
assessor = get_or_create_default_assessor()
self.client.login(assessor.email)
# This assessor doesn't belong to a group
self.assertTrue(is_assessor(assessor))
# add the assessor to the assessment group
self.assertTrue(Assessment.objects.filter(application=self.application).count() > 0)
for assessment in Assessment.objects.filter(application=self.application):
add_assessor_to_assessor_group(assessor, assessment.assessor_group)
# forbidden
urls_get_forbidden = [
reverse('wl_applications:enter_conditions', args=[self.application.pk]),
]
urls_post_forbidden = [
{
'url': reverse('wl_applications:create_condition', args=[self.application.pk]),
'data': {
'code': '123488374',
'text': 'condition text'
}
},
{
'url': reverse('wl_applications:set_assessment_condition_state'),
'data': {
'assessmentConditionID': self.assessment_condition.pk,
'acceptanceStatus': 'accepted',
}
},
{
'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
# Allowed
urls_get_allowed = [
reverse('wl_applications:search_conditions'),
reverse('wl_applications:enter_conditions_assessor', args=[self.application.pk, self.assessment.pk]),
]
urls_post_allowed = [
{
'url': reverse('wl_applications:enter_conditions_assessor',
args=[self.application.pk, self.assessment.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
for url in urls_get_forbidden:
response = self.client.get(url, follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_post_forbidden:
response = self.client.post(url['url'], url['data'], follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_get_allowed:
response = self.client.get(url, follow=True)
self.assertEqual(200, response.status_code)
for url in urls_post_allowed:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertEqual(200, response.status_code)
class TestAssignAssessor(TestCase):
fixtures = ['licences.json', 'conditions.json']
def setUp(self):
self.client = SocialClient()
self.user = get_or_create_default_customer()
self.officer = get_or_create_default_officer()
self.application = app_helpers.create_and_lodge_application(self.user, **{
'data': {
'title': 'My Application'
}
})
self.assessor_group = G(AssessorGroup, name='District7', email='<EMAIL>')
self.assessor_1 = G(EmailUser, email='<EMAIL>', dob='1967-04-04')
add_to_group(self.assessor_1, 'Assessors')
add_to_group(self.assessor_1, self.assessor_group)
self.assessor_2 = G(EmailUser, email='<EMAIL>', dob='1968-04-04')
add_to_group(self.assessor_2, 'Assessors')
add_to_group(self.assessor_2, self.assessor_group)
def _issue_assessment(self, application, assessor_group):
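        # Helper: the officer sends the application for assessment and the created Assessment is returned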
self.client.login(self.officer.email)
url = reverse('wl_applications:send_for_assessment')
payload = {
'applicationID': application.pk,
'assGroupID': assessor_group.pk
}
resp = self.client.post(url, data=payload)
self.assertEqual(resp.status_code, 200)
self.client.logout()
clear_mailbox()
data = resp.json()
return Assessment.objects.filter(pk=data['assessment']['id']).first()
def test_email_sent_to_assessor_group(self):
"""
        Test that when an officer issues an assessment, an email is sent to the group email address
"""
# officer issue assessment
self.client.login(self.officer.email)
url = reverse('wl_applications:send_for_assessment')
payload = {
'applicationID': self.application.pk,
'assGroupID': self.assessor_group.pk
}
resp = self.client.post(url, data=payload)
self.assertEqual(resp.status_code, 200)
# we should have one email sent to the assessor
emails = get_emails()
self.assertEqual(len(emails), 1)
email = emails[0]
recipients = email.to
self.assertEqual(len(recipients), 1)
expected_recipient = self.assessor_group.email
self.assertEqual(recipients[0], expected_recipient)
# the response is a json response. It should contain the assessment id
expected_content_type = 'application/json'
self.assertEqual(resp['content-type'], expected_content_type)
data = resp.json()
self.assertTrue('assessment' in data)
self.assertTrue('id' in data['assessment'])
assessment = Assessment.objects.filter(pk=data['assessment']['id']).first()
self.assertIsNotNone(assessment)
self.assertEqual(assessment.application, self.application)
expected_status = 'awaiting_assessment'
self.assertEqual(assessment.status, expected_status)
# check more data
self.assertEqual(assessment.assessor_group, self.assessor_group)
self.assertEqual(assessment.officer, self.officer)
self.assertEqual(assessment.date_last_reminded, date.today())
self.assertEqual(assessment.conditions.count(), 0)
self.assertEqual(assessment.comment, '')
self.assertEqual(assessment.purpose, '')
def test_assign_assessment_send_email(self):
"""
        Use case: assessor_1 assigns the assessment to assessor_2.
        Test that assessor_2 receives an email with a link.
        The email should also be logged in the communication log.
"""
assessment = self._issue_assessment(self.application, self.assessor_group)
previous_comm_log = app_helpers.get_communication_log(assessment.application)
previous_action_list = app_helpers.get_action_log(assessment.application)
url = reverse('wl_applications:assign_assessor')
self.client.login(self.assessor_1.email)
payload = {
'assessmentID': assessment.id,
'userID': self.assessor_2.id
}
resp = self.client.post(url, data=payload)
self.assertEqual(resp.status_code, 200)
# the response is a json response. It should contain the assessment id
expected_content_type = 'application/json'
self.assertEqual(resp['content-type'], expected_content_type)
# we should have one email sent to the assessor
emails = get_emails()
self.assertEqual(len(emails), 1)
email = emails[0]
recipients = email.to
self.assertEqual(len(recipients), 1)
expected_recipient = self.assessor_2.email
self.assertEqual(recipients[0], expected_recipient)
# the subject should contains 'assessment assigned'
self.assertTrue(email.subject.find('assessment assigned') > -1)
# the body should get a url to assess the application
expected_url = reverse('wl_applications:enter_conditions_assessor',
args=[assessment.application.pk, assessment.pk])
self.assertTrue(email.body.find(expected_url) > -1)
# test that the email has been logged.
new_comm_log = app_helpers.get_communication_log(assessment.application)
self.assertEqual(len(new_comm_log), len(previous_comm_log) + 1)
previous_recipients = [entry['to'] for entry in previous_comm_log]
self.assertNotIn(self.assessor_2.email, previous_recipients)
new_recipients = [entry['to'] for entry in new_comm_log]
self.assertIn(self.assessor_2.email, new_recipients)
# it should also be recorded in the action list
new_action_list = app_helpers.get_action_log(assessment.application)
self.assertEqual(len(new_action_list), len(previous_action_list) + 1)
def test_assign_to_me_no_email(self):
"""
        Use case: assessor_1 assigns the assessment to himself.
        Test that no email is sent.
"""
assessment = self._issue_assessment(self.application, self.assessor_group)
previous_comm_log = app_helpers.get_communication_log(assessment.application)
previous_action_list = app_helpers.get_action_log(assessment.application)
url = reverse('wl_applications:assign_assessor')
self.client.login(self.assessor_1.email)
payload = {
'assessmentID': assessment.id,
'userID': self.assessor_1.id
}
resp = self.client.post(url, data=payload)
# the response is a json response. It should contain the assessment id
expected_content_type = 'application/json'
self.assertEqual(resp['content-type'], expected_content_type)
# we should have one email sent to the assessor
emails = get_emails()
self.assertEqual(len(emails), 0)
# com log should be unchanged.
new_comm_log = app_helpers.get_communication_log(assessment.application)
self.assertEqual(new_comm_log, previous_comm_log)
# but should be recorded in the action list
new_action_list = app_helpers.get_action_log(assessment.application)
self.assertEqual(len(new_action_list), len(previous_action_list) + 1)
|
[
"wildlifelicensing.apps.main.helpers.is_assessor",
"wildlifelicensing.apps.main.tests.helpers.add_assessor_to_assessor_group",
"wildlifelicensing.apps.applications.tests.helpers.get_or_create_assessment",
"wildlifelicensing.apps.main.tests.helpers.add_to_group",
"wildlifelicensing.apps.main.tests.helpers.clear_mailbox",
"wildlifelicensing.apps.applications.tests.helpers.get_action_log",
"wildlifelicensing.apps.main.tests.helpers.is_login_page",
"wildlifelicensing.apps.main.tests.helpers.SocialClient",
"django_dynamic_fixture.G",
"wildlifelicensing.apps.main.tests.helpers.get_emails",
"wildlifelicensing.apps.main.helpers.get_user_assessor_groups",
"wildlifelicensing.apps.applications.tests.helpers.create_and_lodge_application",
"wildlifelicensing.apps.main.tests.helpers.get_or_create_default_officer",
"wildlifelicensing.apps.applications.tests.helpers.get_communication_log",
"datetime.date.today",
"wildlifelicensing.apps.main.tests.helpers.get_or_create_default_customer",
"wildlifelicensing.apps.applications.models.Condition.objects.first",
"django.shortcuts.reverse",
"wildlifelicensing.apps.main.tests.helpers.get_or_create_default_assessor",
"wildlifelicensing.apps.applications.models.AssessmentCondition.objects.create",
"wildlifelicensing.apps.applications.models.Assessment.objects.filter"
] |
[((910, 924), 'wildlifelicensing.apps.main.tests.helpers.SocialClient', 'SocialClient', ([], {}), '()\n', (922, 924), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((945, 977), 'wildlifelicensing.apps.main.tests.helpers.get_or_create_default_customer', 'get_or_create_default_customer', ([], {}), '()\n', (975, 977), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((1001, 1032), 'wildlifelicensing.apps.main.tests.helpers.get_or_create_default_officer', 'get_or_create_default_officer', ([], {}), '()\n', (1030, 1032), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((1060, 1156), 'wildlifelicensing.apps.applications.tests.helpers.create_and_lodge_application', 'app_helpers.create_and_lodge_application', (['self.user'], {}), "(self.user, **{'data': {'title':\n 'My Application'}})\n", (1100, 1156), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((1231, 1285), 'wildlifelicensing.apps.applications.tests.helpers.get_or_create_assessment', 'app_helpers.get_or_create_assessment', (['self.application'], {}), '(self.application)\n', (1267, 1285), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((1311, 1336), 'wildlifelicensing.apps.applications.models.Condition.objects.first', 'Condition.objects.first', ([], {}), '()\n', (1334, 1336), False, 'from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment\n'), ((1373, 1475), 'wildlifelicensing.apps.applications.models.AssessmentCondition.objects.create', 'AssessmentCondition.objects.create', ([], {'assessment': 'self.assessment', 'condition': 'self.condition', 'order': '(1)'}), '(assessment=self.assessment, condition=\n self.condition, order=1)\n', (1407, 1475), False, 'from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment\n'), ((4510, 4542), 'wildlifelicensing.apps.main.tests.helpers.get_or_create_default_assessor', 'get_or_create_default_assessor', ([], {}), '()\n', (4540, 4542), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((7372, 7404), 'wildlifelicensing.apps.main.tests.helpers.get_or_create_default_assessor', 'get_or_create_default_assessor', ([], {}), '()\n', (7402, 7404), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((7714, 7769), 'wildlifelicensing.apps.applications.models.Assessment.objects.filter', 'Assessment.objects.filter', ([], {'application': 'self.application'}), '(application=self.application)\n', (7739, 7769), 
False, 'from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment\n'), ((10447, 10461), 'wildlifelicensing.apps.main.tests.helpers.SocialClient', 'SocialClient', ([], {}), '()\n', (10459, 10461), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((10482, 10514), 'wildlifelicensing.apps.main.tests.helpers.get_or_create_default_customer', 'get_or_create_default_customer', ([], {}), '()\n', (10512, 10514), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((10538, 10569), 'wildlifelicensing.apps.main.tests.helpers.get_or_create_default_officer', 'get_or_create_default_officer', ([], {}), '()\n', (10567, 10569), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((10597, 10693), 'wildlifelicensing.apps.applications.tests.helpers.create_and_lodge_application', 'app_helpers.create_and_lodge_application', (['self.user'], {}), "(self.user, **{'data': {'title':\n 'My Application'}})\n", (10637, 10693), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((10773, 10824), 'django_dynamic_fixture.G', 'G', (['AssessorGroup'], {'name': '"""District7"""', 'email': '"""<EMAIL>"""'}), "(AssessorGroup, name='District7', email='<EMAIL>')\n", (10774, 10824), False, 'from django_dynamic_fixture import G\n'), ((10851, 10898), 'django_dynamic_fixture.G', 'G', (['EmailUser'], {'email': '"""<EMAIL>"""', 'dob': '"""1967-04-04"""'}), "(EmailUser, email='<EMAIL>', dob='1967-04-04')\n", (10852, 10898), False, 'from django_dynamic_fixture import G\n'), ((10907, 10949), 'wildlifelicensing.apps.main.tests.helpers.add_to_group', 'add_to_group', (['self.assessor_1', '"""Assessors"""'], {}), "(self.assessor_1, 'Assessors')\n", (10919, 10949), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((10958, 11008), 'wildlifelicensing.apps.main.tests.helpers.add_to_group', 'add_to_group', (['self.assessor_1', 'self.assessor_group'], {}), '(self.assessor_1, self.assessor_group)\n', (10970, 11008), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((11036, 11083), 'django_dynamic_fixture.G', 'G', (['EmailUser'], {'email': '"""<EMAIL>"""', 'dob': '"""1968-04-04"""'}), "(EmailUser, email='<EMAIL>', dob='1968-04-04')\n", (11037, 11083), False, 'from django_dynamic_fixture import G\n'), ((11092, 11134), 'wildlifelicensing.apps.main.tests.helpers.add_to_group', 'add_to_group', (['self.assessor_2', '"""Assessors"""'], {}), "(self.assessor_2, 'Assessors')\n", (11104, 11134), False, 'from 
wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((11143, 11193), 'wildlifelicensing.apps.main.tests.helpers.add_to_group', 'add_to_group', (['self.assessor_2', 'self.assessor_group'], {}), '(self.assessor_2, self.assessor_group)\n', (11155, 11193), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((11318, 11364), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:send_for_assessment"""'], {}), "('wl_applications:send_for_assessment')\n", (11325, 11364), False, 'from django.shortcuts import reverse\n'), ((11620, 11635), 'wildlifelicensing.apps.main.tests.helpers.clear_mailbox', 'clear_mailbox', ([], {}), '()\n', (11633, 11635), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((12001, 12047), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:send_for_assessment"""'], {}), "('wl_applications:send_for_assessment')\n", (12008, 12047), False, 'from django.shortcuts import reverse\n'), ((12349, 12361), 'wildlifelicensing.apps.main.tests.helpers.get_emails', 'get_emails', ([], {}), '()\n', (12359, 12361), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((14013, 14070), 'wildlifelicensing.apps.applications.tests.helpers.get_communication_log', 'app_helpers.get_communication_log', (['assessment.application'], {}), '(assessment.application)\n', (14046, 14070), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((14102, 14152), 'wildlifelicensing.apps.applications.tests.helpers.get_action_log', 'app_helpers.get_action_log', (['assessment.application'], {}), '(assessment.application)\n', (14128, 14152), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((14168, 14210), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:assign_assessor"""'], {}), "('wl_applications:assign_assessor')\n", (14175, 14210), False, 'from django.shortcuts import reverse\n'), ((14747, 14759), 'wildlifelicensing.apps.main.tests.helpers.get_emails', 'get_emails', ([], {}), '()\n', (14757, 14759), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((15230, 15336), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions_assessor"""'], {'args': '[assessment.application.pk, assessment.pk]'}), "('wl_applications:enter_conditions_assessor', args=[assessment.\n application.pk, assessment.pk])\n", (15237, 15336), False, 'from django.shortcuts import reverse\n'), ((15494, 15551), 'wildlifelicensing.apps.applications.tests.helpers.get_communication_log', 
'app_helpers.get_communication_log', (['assessment.application'], {}), '(assessment.application)\n', (15527, 15551), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((15977, 16027), 'wildlifelicensing.apps.applications.tests.helpers.get_action_log', 'app_helpers.get_action_log', (['assessment.application'], {}), '(assessment.application)\n', (16003, 16027), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((16383, 16440), 'wildlifelicensing.apps.applications.tests.helpers.get_communication_log', 'app_helpers.get_communication_log', (['assessment.application'], {}), '(assessment.application)\n', (16416, 16440), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((16472, 16522), 'wildlifelicensing.apps.applications.tests.helpers.get_action_log', 'app_helpers.get_action_log', (['assessment.application'], {}), '(assessment.application)\n', (16498, 16522), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((16538, 16580), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:assign_assessor"""'], {}), "('wl_applications:assign_assessor')\n", (16545, 16580), False, 'from django.shortcuts import reverse\n'), ((17069, 17081), 'wildlifelicensing.apps.main.tests.helpers.get_emails', 'get_emails', ([], {}), '()\n', (17079, 17081), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((17186, 17243), 'wildlifelicensing.apps.applications.tests.helpers.get_communication_log', 'app_helpers.get_communication_log', (['assessment.application'], {}), '(assessment.application)\n', (17219, 17243), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((17381, 17431), 'wildlifelicensing.apps.applications.tests.helpers.get_action_log', 'app_helpers.get_action_log', (['assessment.application'], {}), '(assessment.application)\n', (17407, 17431), True, 'from wildlifelicensing.apps.applications.tests import helpers as app_helpers\n'), ((1652, 1723), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions"""'], {'args': '[self.application.pk]'}), "('wl_applications:enter_conditions', args=[self.application.pk])\n", (1659, 1723), False, 'from django.shortcuts import reverse\n'), ((1737, 1781), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:search_conditions"""'], {}), "('wl_applications:search_conditions')\n", (1744, 1781), False, 'from django.shortcuts import reverse\n'), ((4659, 4680), 'wildlifelicensing.apps.main.helpers.is_assessor', 'is_assessor', (['assessor'], {}), '(assessor)\n', (4670, 4680), False, 'from wildlifelicensing.apps.main.helpers import is_assessor, get_user_assessor_groups\n'), ((4707, 4741), 'wildlifelicensing.apps.main.helpers.get_user_assessor_groups', 'get_user_assessor_groups', (['assessor'], {}), '(assessor)\n', (4731, 4741), False, 'from wildlifelicensing.apps.main.helpers import is_assessor, get_user_assessor_groups\n'), ((4807, 4878), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions"""'], {'args': '[self.application.pk]'}), "('wl_applications:enter_conditions', args=[self.application.pk])\n", (4814, 4878), False, 'from django.shortcuts import reverse\n'), ((4892, 4997), 'django.shortcuts.reverse', 'reverse', 
(['"""wl_applications:enter_conditions_assessor"""'], {'args': '[self.application.pk, self.assessment.pk]'}), "('wl_applications:enter_conditions_assessor', args=[self.application\n .pk, self.assessment.pk])\n", (4899, 4997), False, 'from django.shortcuts import reverse\n'), ((6152, 6196), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:search_conditions"""'], {}), "('wl_applications:search_conditions')\n", (6159, 6196), False, 'from django.shortcuts import reverse\n'), ((7521, 7542), 'wildlifelicensing.apps.main.helpers.is_assessor', 'is_assessor', (['assessor'], {}), '(assessor)\n', (7532, 7542), False, 'from wildlifelicensing.apps.main.helpers import is_assessor, get_user_assessor_groups\n'), ((7783, 7850), 'wildlifelicensing.apps.main.tests.helpers.add_assessor_to_assessor_group', 'add_assessor_to_assessor_group', (['assessor', 'assessment.assessor_group'], {}), '(assessor, assessment.assessor_group)\n', (7813, 7850), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((7915, 7986), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions"""'], {'args': '[self.application.pk]'}), "('wl_applications:enter_conditions', args=[self.application.pk])\n", (7922, 7986), False, 'from django.shortcuts import reverse\n'), ((8861, 8905), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:search_conditions"""'], {}), "('wl_applications:search_conditions')\n", (8868, 8905), False, 'from django.shortcuts import reverse\n'), ((8919, 9024), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions_assessor"""'], {'args': '[self.application.pk, self.assessment.pk]'}), "('wl_applications:enter_conditions_assessor', args=[self.application\n .pk, self.assessment.pk])\n", (8926, 9024), False, 'from django.shortcuts import reverse\n'), ((13461, 13473), 'datetime.date.today', 'date.today', ([], {}), '()\n', (13471, 13473), False, 'from datetime import date\n'), ((1857, 1928), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:create_condition"""'], {'args': '[self.application.pk]'}), "('wl_applications:create_condition', args=[self.application.pk])\n", (1864, 1928), False, 'from django.shortcuts import reverse\n'), ((2112, 2169), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:set_assessment_condition_state"""'], {}), "('wl_applications:set_assessment_condition_state')\n", (2119, 2169), False, 'from django.shortcuts import reverse\n'), ((2394, 2465), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions"""'], {'args': '[self.application.pk]'}), "('wl_applications:enter_conditions', args=[self.application.pk])\n", (2401, 2465), False, 'from django.shortcuts import reverse\n'), ((2892, 2915), 'wildlifelicensing.apps.main.tests.helpers.is_login_page', 'is_login_page', (['response'], {}), '(response)\n', (2905, 2915), False, 'from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((3059, 3082), 'wildlifelicensing.apps.main.tests.helpers.is_login_page', 'is_login_page', (['response'], {}), '(response)\n', (3072, 3082), False, 'from wildlifelicensing.apps.main.tests.helpers import 
get_or_create_default_customer, is_login_page, get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, add_to_group, clear_mailbox, get_emails\n'), ((5073, 5144), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:create_condition"""'], {'args': '[self.application.pk]'}), "('wl_applications:create_condition', args=[self.application.pk])\n", (5080, 5144), False, 'from django.shortcuts import reverse\n'), ((5328, 5385), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:set_assessment_condition_state"""'], {}), "('wl_applications:set_assessment_condition_state')\n", (5335, 5385), False, 'from django.shortcuts import reverse\n'), ((5610, 5681), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions"""'], {'args': '[self.application.pk]'}), "('wl_applications:enter_conditions', args=[self.application.pk])\n", (5617, 5681), False, 'from django.shortcuts import reverse\n'), ((5835, 5940), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions_assessor"""'], {'args': '[self.application.pk, self.assessment.pk]'}), "('wl_applications:enter_conditions_assessor', args=[self.application\n .pk, self.assessment.pk])\n", (5842, 5940), False, 'from django.shortcuts import reverse\n'), ((8067, 8138), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:create_condition"""'], {'args': '[self.application.pk]'}), "('wl_applications:create_condition', args=[self.application.pk])\n", (8074, 8138), False, 'from django.shortcuts import reverse\n'), ((8322, 8379), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:set_assessment_condition_state"""'], {}), "('wl_applications:set_assessment_condition_state')\n", (8329, 8379), False, 'from django.shortcuts import reverse\n'), ((8604, 8675), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions"""'], {'args': '[self.application.pk]'}), "('wl_applications:enter_conditions', args=[self.application.pk])\n", (8611, 8675), False, 'from django.shortcuts import reverse\n'), ((9098, 9203), 'django.shortcuts.reverse', 'reverse', (['"""wl_applications:enter_conditions_assessor"""'], {'args': '[self.application.pk, self.assessment.pk]'}), "('wl_applications:enter_conditions_assessor', args=[self.application\n .pk, self.assessment.pk])\n", (9105, 9203), False, 'from django.shortcuts import reverse\n'), ((11678, 11732), 'wildlifelicensing.apps.applications.models.Assessment.objects.filter', 'Assessment.objects.filter', ([], {'pk': "data['assessment']['id']"}), "(pk=data['assessment']['id'])\n", (11703, 11732), False, 'from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment\n'), ((12966, 13020), 'wildlifelicensing.apps.applications.models.Assessment.objects.filter', 'Assessment.objects.filter', ([], {'pk': "data['assessment']['id']"}), "(pk=data['assessment']['id'])\n", (12991, 13020), False, 'from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment\n'), ((3371, 3410), 'django.shortcuts.reverse', 'reverse', (['"""wl_dashboard:tables_customer"""'], {}), "('wl_dashboard:tables_customer')\n", (3378, 3410), False, 'from django.shortcuts import reverse\n'), ((3694, 3733), 'django.shortcuts.reverse', 'reverse', (['"""wl_dashboard:tables_customer"""'], {}), "('wl_dashboard:tables_customer')\n", (3701, 3733), False, 'from django.shortcuts import reverse\n'), ((6434, 6473), 'django.shortcuts.reverse', 'reverse', 
(['"""wl_dashboard:tables_assessor"""'], {}), "('wl_dashboard:tables_assessor')\n", (6441, 6473), False, 'from django.shortcuts import reverse\n'), ((6762, 6801), 'django.shortcuts.reverse', 'reverse', (['"""wl_dashboard:tables_assessor"""'], {}), "('wl_dashboard:tables_assessor')\n", (6769, 6801), False, 'from django.shortcuts import reverse\n'), ((9543, 9582), 'django.shortcuts.reverse', 'reverse', (['"""wl_dashboard:tables_assessor"""'], {}), "('wl_dashboard:tables_assessor')\n", (9550, 9582), False, 'from django.shortcuts import reverse\n'), ((9871, 9910), 'django.shortcuts.reverse', 'reverse', (['"""wl_dashboard:tables_assessor"""'], {}), "('wl_dashboard:tables_assessor')\n", (9878, 9910), False, 'from django.shortcuts import reverse\n'), ((7619, 7674), 'wildlifelicensing.apps.applications.models.Assessment.objects.filter', 'Assessment.objects.filter', ([], {'application': 'self.application'}), '(application=self.application)\n', (7644, 7674), False, 'from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment\n')]
|
from binomo import apiAlfaBinomo
if __name__ == '__main__':
    aApiAlfa = apiAlfaBinomo('','', timeBotWait = 120, loginError = True) # timeBotWait 120s (wait time until the captcha is solved), loginError True
    aApiAlfa.actionDV('EURUSD') # currency pair
    aApiAlfa.buy("CALL") # buy (CALL) or sell (PUT)
    aApiAlfa.listOder() # list of assets and their profit
    aApiAlfa.mount(3000) # 3000,15000,30000,60000,150000,300000,600000,3000000 purchase amounts
    aApiAlfa.timeBuy(1) # 1,2,3,4,5 minutes
|
[
"binomo.apiAlfaBinomo"
] |
[((75, 130), 'binomo.apiAlfaBinomo', 'apiAlfaBinomo', (['""""""', '""""""'], {'timeBotWait': '(120)', 'loginError': '(True)'}), "('', '', timeBotWait=120, loginError=True)\n", (88, 130), False, 'from binomo import apiAlfaBinomo\n')]
|
from icemac.addressbook.i18n import _
from .interfaces import IBirthDate
import icemac.addressbook.browser.base
import zope.component
class ExportList(icemac.addressbook.browser.base.BaseView):
"""List available export formats."""
title = _('Export person data')
def exporters(self):
"""Iterable of exporters having enough data so export something."""
# XXX: This has no API, the exporters should be subscription adapters
        # which return None if they do not have enough data to export
        # something and a dict consisting of title and URL otherwise.
birthdate_data = zope.component.getMultiAdapter(
(self.context, self.request), IBirthDate)
if birthdate_data.icalendar_event is not None:
yield dict(title=_('iCalendar export of birth date (.ics file)'),
url=self.url(self.context, 'iCalendar'))
def back_url(self):
return self.url(self.context)
|
[
"icemac.addressbook.i18n._"
] |
[((250, 273), 'icemac.addressbook.i18n._', '_', (['"""Export person data"""'], {}), "('Export person data')\n", (251, 273), False, 'from icemac.addressbook.i18n import _\n'), ((794, 841), 'icemac.addressbook.i18n._', '_', (['"""iCalendar export of birth date (.ics file)"""'], {}), "('iCalendar export of birth date (.ics file)')\n", (795, 841), False, 'from icemac.addressbook.i18n import _\n')]
|
import os
PKG_PATH = os.path.abspath(os.path.dirname(__file__))
class PrivaError(Exception):
def __init__(self, code, message):
super().__init__(code, message)
def __str__(self):
return ': '.join(map(str, self.args))
class BasePriva:
"""Base class for Priva (private battles)."""
@classmethod
def rules(cls, language='en'):
"""Return descriptions of the rules."""
return 'No rules for %s' % cls.__name__
def start(self, *args, **kwargs):
"""Start the Priva."""
raise PrivaError(-1, 'Priva unimplemented.')
def end(self, *args, **kwargs):
"""End the Priva."""
raise PrivaError(-1, 'Priva unimplemented.')
def start_battle(self, *args, **kwargs):
"""Start a battle."""
raise PrivaError(-1, 'Priva unimplemented.')
def end_battle(self, *args, **kwargs):
"""End a battle."""
raise PrivaError(-1, 'Priva unimplemented.')
|
[
"os.path.dirname"
] |
[((40, 65), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (55, 65), False, 'import os\n')]
|
"""
@author: <NAME>
@title: SmartSearch - An Intelligent Search Engine.
@date: 05/06/2019
"""
import time
import argparse
from crawl_all_sites import crawl_for_sites
from generate_data import create_documents
from generate_data import create_data_directory
from clean_documents import remove_extra_lines_and_tabs
# Parse Arguments
parser = argparse.ArgumentParser(description="Crawler for Search Engine")
parser.add_argument(
"--initial_url",
type=str,
help="The initial URL to start the crawling process from. For example: 'https://www.cs.uic.edu/'"
)
parser.add_argument(
"--number_of_pages",
type=int,
help="The number of pages to crawl and create database from."
)
parser.add_argument(
"--domain",
type=str,
help="The domain in which crawling should happen. For example: 'uic.edu'"
)
args = parser.parse_args()
def crawler_driving_function():
"""
Driver Function to crawl for sites and create database.
"""
# Time to record the start time of the program execution.
db_creation_start_time = time.time()
# Time to record the start time of the crawling.
crawl_start_time = time.time()
print("################################################################################################")
print("Web Crawling startes now.\n\n")
# Initialize the user arguments.
main_url = args.initial_url
min_pages_to_crawl = args.number_of_pages
domain = args.domain
# Get the crawled sites and unknown sites.
sites_list, unknown_urls, broken_urls, parent_children_url_map = crawl_for_sites(main_url, min_pages_to_crawl, domain)
# Record crawl end time.
crawl_end_time = time.time()
print("\n\nWeb Crawling finished now.\n")
print("################################################################################################")
print("Total time to crawl the web: {} Minutes".format((crawl_end_time - crawl_start_time)/60))
# Check if there are any duplicate pages in the list.
if len(sites_list) == len(list(set(sites_list))):
print("No duplicate sites included.")
else:
print("Duplicates found. Removing Duplicates.")
sites_list = list(set(sites_list))
print("################################################################################################")
print("Now, extracting the text data from the crawled websites.")
print("################################################################################################")
if create_data_directory():
print("################################################################################################\n\n")
creation_flag = create_documents(sites_list, parent_children_url_map)
print("\n\nText extracted from the crawled pages.")
else:
raise Exception("DirectoryError: You do not have write privilege in the directory.")
print("################################################################################################")
print("Total time to create the database: {db_creation_time} Minutes.".format(db_creation_time=(time.time() - db_creation_start_time) / 60))
print("################################################################################################")
print("Unknown Achors Found:\n")
print(unknown_urls)
print("################################################################################################")
if broken_urls != []:
print("Broken / Unreachable URLs Found:\n")
print(broken_urls)
print("################################################################################################")
# Main function starts here.
if __name__ == "__main__":
crawler_driving_function()
|
[
"argparse.ArgumentParser",
"generate_data.create_documents",
"generate_data.create_data_directory",
"time.time",
"crawl_all_sites.crawl_for_sites"
] |
[((358, 422), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Crawler for Search Engine"""'}), "(description='Crawler for Search Engine')\n", (381, 422), False, 'import argparse\n'), ((1084, 1095), 'time.time', 'time.time', ([], {}), '()\n', (1093, 1095), False, 'import time\n'), ((1170, 1181), 'time.time', 'time.time', ([], {}), '()\n', (1179, 1181), False, 'import time\n'), ((1583, 1636), 'crawl_all_sites.crawl_for_sites', 'crawl_for_sites', (['main_url', 'min_pages_to_crawl', 'domain'], {}), '(main_url, min_pages_to_crawl, domain)\n', (1598, 1636), False, 'from crawl_all_sites import crawl_for_sites\n'), ((1685, 1696), 'time.time', 'time.time', ([], {}), '()\n', (1694, 1696), False, 'import time\n'), ((2502, 2525), 'generate_data.create_data_directory', 'create_data_directory', ([], {}), '()\n', (2523, 2525), False, 'from generate_data import create_data_directory\n'), ((2663, 2716), 'generate_data.create_documents', 'create_documents', (['sites_list', 'parent_children_url_map'], {}), '(sites_list, parent_children_url_map)\n', (2679, 2716), False, 'from generate_data import create_documents\n'), ((3082, 3093), 'time.time', 'time.time', ([], {}), '()\n', (3091, 3093), False, 'import time\n')]
|
from random import randrange
from pygame import *
import project10.config
class SquishSprite(pygame.sprite.Sprite):
def __init__(self, image):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.__loader__(image).convert()
self.rect = self.image.get_rect()
screen = pygame.display.get_surface()
shrink = -project10.config.margin * 2
self.area = screen.get_rect().inflate(shrink, shrink)
class Weight(SquishSprite):
def __init__(self, speed):
SquishSprite.__init__(self, project10.config.Weight_image)
self.speed = speed
self.reset()
self.landed = False
def reset(self):
x = randrange(self.area.left, self.area.right)
self.rect.midbottom = x, 0
def update(self):
self.rect.top += self.speed
self.landed = self.rect.top >= self.area.bottom
class Banana(SquishSprite):
def __init__(self):
SquishSprite.__init__(self, project10.config.Banana_image)
self.rect.bottom = self.area.bottom
self.pad_top = project10.config.Banana_pad_top
self.pad_side = project10.config.Banana_pad_side
def update(self):
self.rect.centerx = pygame.mouse.get_pos()[0]
self.rect = self.rect.clamp(self.area)
def touches(self, other):
bounds = self.rect.inflate(-self.pad_side, -self.pad_top)
bounds.bottom = self.rect.bottom
return bounds.colliderect(other.rect)
|
[
"random.randrange"
] |
[((689, 731), 'random.randrange', 'randrange', (['self.area.left', 'self.area.right'], {}), '(self.area.left, self.area.right)\n', (698, 731), False, 'from random import randrange\n')]
|
import responses
import pytest
from binance.spot import Spot as Client
from tests.util import mock_http_response
from tests.util import random_str
from binance.lib.utils import encoded_string
from binance.error import ParameterRequiredError
mock_item = {"key_1": "value_1", "key_2": "value_2"}
key = random_str()
secret = random_str()
email = "<EMAIL>"
subAccountApiKey = random_str()
complete_params = {"email": email, "subAccountApiKey": subAccountApiKey}
parameterized_test_params = [
({"email": None, "subAccountApiKey": None}),
({"email": "", "subAccountApiKey": subAccountApiKey}),
({"email": email, "subAccountApiKey": ""}),
]
client = Client(key, secret)
@pytest.mark.parametrize("params", parameterized_test_params)
def test_sub_account_api_get_ip_restriction_without_missing_param(params):
"""Tests the API endpoint to get IP Restriction for a sub-account API key without subAccountApiKey"""
client.sub_account_api_get_ip_restriction.when.called_with(**params).should.throw(
ParameterRequiredError
)
@mock_http_response(
responses.GET,
"/sapi/v1/sub-account/subAccountApi/ipRestriction\\?"
+ encoded_string(complete_params),
mock_item,
200,
)
def test_sub_account_api_get_ip_restriction():
"""Tests the API endpoint to get IP Restriction for a sub-account API key"""
client.sub_account_api_get_ip_restriction(**complete_params).should.equal(mock_item)
|
[
"pytest.mark.parametrize",
"binance.spot.Spot",
"binance.lib.utils.encoded_string",
"tests.util.random_str"
] |
[((303, 315), 'tests.util.random_str', 'random_str', ([], {}), '()\n', (313, 315), False, 'from tests.util import random_str\n'), ((325, 337), 'tests.util.random_str', 'random_str', ([], {}), '()\n', (335, 337), False, 'from tests.util import random_str\n'), ((376, 388), 'tests.util.random_str', 'random_str', ([], {}), '()\n', (386, 388), False, 'from tests.util import random_str\n'), ((662, 681), 'binance.spot.Spot', 'Client', (['key', 'secret'], {}), '(key, secret)\n', (668, 681), True, 'from binance.spot import Spot as Client\n'), ((685, 745), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""params"""', 'parameterized_test_params'], {}), "('params', parameterized_test_params)\n", (708, 745), False, 'import pytest\n'), ((1158, 1189), 'binance.lib.utils.encoded_string', 'encoded_string', (['complete_params'], {}), '(complete_params)\n', (1172, 1189), False, 'from binance.lib.utils import encoded_string\n')]
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_vim import database
from nfv_vim.tables._table import Table
_image_table = None
class ImageTable(Table):
"""
Image Table
"""
def __init__(self):
super(ImageTable, self).__init__()
def _persist_value(self, value):
database.database_image_add(value)
def _unpersist_value(self, key):
database.database_image_delete(key)
def tables_get_image_table():
"""
Get the image table
"""
return _image_table
def image_table_initialize():
"""
Initialize the image table
"""
global _image_table
_image_table = ImageTable()
_image_table.persist = False
images = database.database_image_get_list()
for image in images:
_image_table[image.uuid] = image
_image_table.persist = True
def image_table_finalize():
"""
Finalize the image table
"""
global _image_table
del _image_table
|
[
"nfv_vim.database.database_image_delete",
"nfv_vim.database.database_image_add",
"nfv_vim.database.database_image_get_list"
] |
[((754, 788), 'nfv_vim.database.database_image_get_list', 'database.database_image_get_list', ([], {}), '()\n', (786, 788), False, 'from nfv_vim import database\n'), ((358, 392), 'nfv_vim.database.database_image_add', 'database.database_image_add', (['value'], {}), '(value)\n', (385, 392), False, 'from nfv_vim import database\n'), ((439, 474), 'nfv_vim.database.database_image_delete', 'database.database_image_delete', (['key'], {}), '(key)\n', (469, 474), False, 'from nfv_vim import database\n')]
|
#!/usr/bin/env python
#
# mcmandelbrot
#
# An example package for AsynQueue:
# Asynchronous task queueing based on the Twisted framework, with task
# prioritization and a powerful worker interface.
#
# Copyright (C) 2015 by <NAME>,
# http://edsuom.com/AsynQueue
#
# See edsuom.com for API documentation as well as information about
# Ed's background and other projects, software and otherwise.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
A Twisted web C{Resource} that serves clickable, zoomable
Mandelbrot Set images.
"""
import sys
from twisted.application import internet, service
from twisted.internet import defer
from twisted.web import server, resource, static, util, http
from mcmandelbrot import vroot, image
MY_PORT = 8080
VERBOSE = True
HTML_FILE = "mcm.html"
HOWTO = """
Click anywhere in the image to zoom in 5x at that location. Try
exploring the edges of the black “lakes.”
"""
ABOUT = """
Images genera­ted by the <i>mcmandelbrot</i> demo package
bun­dled with my <a
href="http://edsuom.com/AsynQueue">AsynQueue</a> asyn­chronous
processing pack­age, which is freely available per the Apache
License. A link back to <a
href="http://mcm.edsuom.com"><b>mcm.edsuom.com</b></a> would be
apprec­iated.
"""
BYLINE = " —<NAME>"
MORE_INFO = """
CPU and bandwidth resources for this site were con­tributed by <a
href="http://tellectual.com">Tellectual Press</a>, publisher of my
book <em>Evolving out of Eden</em>.
"""
class ResourceBag(object):
blankImage = ("blank.jpg", 'image/jpeg')
children = {}
def __init__(self, descriptions):
self.children[''] = RootResource(self.blankImage[0])
with vroot.openPackageFile(self.blankImage[0]) as fh:
imageData = fh.read()
self.children[self.blankImage[0]] = static.Data(
imageData, self.blankImage[1])
        self.ir = ImageResource(descriptions)
        self.children['image.png'] = self.ir
def shutdown(self):
return self.ir.shutdown()
def putChildren(self, root):
for path, res in self.children.iteritems():
root.putChild(path, res)
class RootResource(resource.Resource):
defaultParams = {
'cr': "-0.630",
'ci': "+0.000",
'crpm': "1.40" }
defaultTitle = \
"Interactive Mandelbrot Set: Driven by Twisted and AsynQueue"
formItems = (
("Real:", "cr" ),
("Imag:", "ci" ),
("+/-", "crpm" ))
inputSize = 10
pxHD = 2048
def __init__(self, blankImage):
self.blankImage = blankImage
self.vr = self.vRoot()
resource.Resource.__init__(self)
def render_GET(self, request):
request.setHeader("content-type", 'text/html')
kw = {'permalink': request.uri}
kw.update(self.defaultParams)
if request.args:
for key, values in request.args.iteritems():
kw[key] = http.unquote(values[0])
kw['onload'] = None
kw['img'] = self.imageURL(kw)
kw['hd'] = self.imageURL(kw, N=self.pxHD)
else:
kw['onload'] = "updateImage()"
kw['img'] = self.blankImage
kw['hd'] = self.imageURL(self.defaultParams, N=self.pxHD)
return self.vr(**kw)
def imageURL(self, params, **kw):
"""
Returns a URL for obtaining a Mandelbrot Set image with the
parameters in the supplied dict I{params}.
"""
def addPart():
parts.append("{}={}".format(name, value))
parts = []
for name, value in params.iteritems():
if name in self.defaultParams:
addPart()
for name, value in kw.iteritems():
addPart()
return "/image.png?{}".format('&'.join(parts))
def vRoot(self):
"""
Populates my vroot I{vr} with an etree that renders into the HTML
page.
"""
def heading():
with v.context():
v.nc('div', 'heading')
v.nc('p', 'bigger')
v.textX("Interactive Mandelbrot Set")
v.nc('div', 'subheading')
v.nc('p', 'smaller')
v.text("Powered by ")
v.nc('a')
v.text("Twisted")
v.set('href', "http://twistedmatrix.com")
v.tailX(" and ")
v.ns('a')
v.text("AsynQueue")
v.set('href', "http://edsuom.com/AsynQueue")
v.tail(".")
vr = vroot.VRoot(self.defaultTitle)
with vr as v:
v.nc('body')
v.addToMap('onload', 'onload')
v.nc('div', 'container')
v.set('id', 'container')
v.nc('div', 'first_part')
#--------------------------------------------------------
with v.context():
heading()
v.ngc('div', 'clear').text = " "
with v.context():
v.nc('div')
with v.context():
v.nc('form')
v.nc('div', 'form')
v.set('name', "position")
v.set('action', "javascript:updateImage()")
for label, name in v.nci(
self.formItems, 'div', 'form_item'):
v.nc('span', 'form_item')
v.text(label)
v.ns('input', 'position')
v.addToMap(name, 'value')
v.set('type', "text")
v.set('size', str(self.inputSize))
v.set('id', name)
v.nc('div', 'form_item')
e = v.ngc('input')
e.set('type', "submit")
e.set('value', "Reload")
v.ns('div', 'form_item')
e = v.ngc('button')
e.set('type', "button")
e.set('onclick', "zoomOut()")
e.text = "Zoom Out"
with v.context():
v.nc('div', 'about')
v.textX(ABOUT)
v.nc('span', 'byline')
v.textX(BYLINE)
v.nc('div', 'about large_only')
v.textX(MORE_INFO)
v.ns('div', 'second_part')
#--------------------------------------------------------
with v.context():
v.nc('div', 'image')
v.set('id', 'image')
with v.context():
v.nc('img', 'mandelbrot')
v.addToMap('img', 'src')
v.set('id', 'mandelbrot')
v.set('onclick', "zoomIn(event)")
v.set('onmousemove', "hover(event)")
v.nc('div', 'footer')
v.nc('div', 'left')
v.set('id', 'hover')
v.textX(HOWTO)
v.ns('div', 'right')
v.nc('a', 'bold')
v.text("2048px")
v.tailX(" | ")
v.set('id', 'hd')
v.addToMap('hd', 'href')
v.ns('a', 'bold')
v.text("Permalink")
v.set('id', 'permalink')
v.addToMap('permalink', 'href')
v.ns('div', 'about small_only')
v.set('id', 'more_info')
v.textX(MORE_INFO)
return vr
class ImageResource(resource.Resource):
isLeaf = True
def __init__(self, descriptions):
self.imager = image.Imager(descriptions, verbose=VERBOSE)
resource.Resource.__init__(self)
def shutdown(self):
return self.imager.shutdown()
def render_GET(self, request):
request.setHeader("content-disposition", "image.png")
request.setHeader("content-type", 'image/png')
self.imager.renderImage(request)
return server.NOT_DONE_YET
class MandelbrotSite(server.Site):
def __init__(self):
self.rb = ResourceBag([None])
siteResource = resource.Resource()
self.rb.putChildren(siteResource)
server.Site.__init__(self, siteResource)
def stopFactory(self):
super(MandelbrotSite, self).stopFactory()
return self.rb.shutdown()
if '/twistd' in sys.argv[0]:
site = MandelbrotSite()
application = service.Application("Interactive Mandelbrot Set HTTP Server")
internet.TCPServer(MY_PORT, site).setServiceParent(application)
|
[
"twisted.application.service.Application",
"twisted.application.internet.TCPServer",
"mcmandelbrot.vroot.VRoot",
"twisted.web.server.Site.__init__",
"mcmandelbrot.vroot.openPackageFile",
"twisted.web.static.Data",
"twisted.web.http.unquote",
"twisted.web.resource.Resource",
"mcmandelbrot.image.Imager",
"twisted.web.resource.Resource.__init__"
] |
[((8904, 8965), 'twisted.application.service.Application', 'service.Application', (['"""Interactive Mandelbrot Set HTTP Server"""'], {}), "('Interactive Mandelbrot Set HTTP Server')\n", (8923, 8965), False, 'from twisted.application import internet, service\n'), ((2325, 2367), 'twisted.web.static.Data', 'static.Data', (['imageData', 'self.blankImage[1]'], {}), '(imageData, self.blankImage[1])\n', (2336, 2367), False, 'from twisted.web import server, resource, static, util, http\n'), ((3113, 3145), 'twisted.web.resource.Resource.__init__', 'resource.Resource.__init__', (['self'], {}), '(self)\n', (3139, 3145), False, 'from twisted.web import server, resource, static, util, http\n'), ((5029, 5059), 'mcmandelbrot.vroot.VRoot', 'vroot.VRoot', (['self.defaultTitle'], {}), '(self.defaultTitle)\n', (5040, 5059), False, 'from mcmandelbrot import vroot, image\n'), ((8093, 8136), 'mcmandelbrot.image.Imager', 'image.Imager', (['descriptions'], {'verbose': 'VERBOSE'}), '(descriptions, verbose=VERBOSE)\n', (8105, 8136), False, 'from mcmandelbrot import vroot, image\n'), ((8145, 8177), 'twisted.web.resource.Resource.__init__', 'resource.Resource.__init__', (['self'], {}), '(self)\n', (8171, 8177), False, 'from twisted.web import server, resource, static, util, http\n'), ((8600, 8619), 'twisted.web.resource.Resource', 'resource.Resource', ([], {}), '()\n', (8617, 8619), False, 'from twisted.web import server, resource, static, util, http\n'), ((8670, 8710), 'twisted.web.server.Site.__init__', 'server.Site.__init__', (['self', 'siteResource'], {}), '(self, siteResource)\n', (8690, 8710), False, 'from twisted.web import server, resource, static, util, http\n'), ((2198, 2239), 'mcmandelbrot.vroot.openPackageFile', 'vroot.openPackageFile', (['self.blankImage[0]'], {}), '(self.blankImage[0])\n', (2219, 2239), False, 'from mcmandelbrot import vroot, image\n'), ((8970, 9003), 'twisted.application.internet.TCPServer', 'internet.TCPServer', (['MY_PORT', 'site'], {}), '(MY_PORT, site)\n', (8988, 9003), False, 'from twisted.application import internet, service\n'), ((3431, 3454), 'twisted.web.http.unquote', 'http.unquote', (['values[0]'], {}), '(values[0])\n', (3443, 3454), False, 'from twisted.web import server, resource, static, util, http\n')]
|
from .generator_traj import generate_traj, EmptyError
from .motion_type import random_rot
from ..features.prePostTools import traj_to_dist
import numpy as np
def generate_n_steps(N, nstep, ndim, sub=False, noise_level=0.25):
add = 0
if ndim == 3:
add = 1
size = nstep
X_train = np.zeros((N, size, (5 + add)))
if sub:
Y_trains = np.zeros((N, size, 10))
Y_train_cat = np.zeros((N, 27))
else:
Y_trains = np.zeros((N, size, 7))
Y_train_cat = np.zeros((N, 12))
Y_train_traj = []
# 12
for i in range(N):
# for i in range(1000):
# if i % 1000 == 0:
# print i
sigma = max(np.random.normal(0.5, 1), 0.05)
step = max(np.random.normal(1, 1), 0.2)
tryagain = True
while tryagain:
try:
clean = 4
if size >= 50:
clean = 8
clean = False
"""
ModelN,Model_num,s,sc,real_traj,norm,Z = generate_traj(size,sub=True,
clean=clean,diff_sigma=2.0,
delta_sigma_directed=1.,ndim=ndim,
anisentropy=0.1,deltav=0.2,rho_fixed=False)
"""
clean = 4
ModelN, Model_num, s, sc, real_traj, norm, Z = generate_traj(size, sub=sub,
clean=clean, diff_sigma=2.0,
delta_sigma_directed=6., ndim=ndim,
anisentropy=0.1, deltav=.4, rho_fixed=False,
random_rotation=False)
mu = 2
Ra0 = [0, 1.]
alpharot = 2 * 3.14 * np.random.random()
dt = real_traj[1:] - real_traj[:-1]
std = np.mean(np.sum(dt**2, axis=1) / 3)**0.5
noise_l = noise_level * np.random.rand()
real_traj += np.random.normal(0, noise_l * std, real_traj.shape)
real_traj = random_rot(real_traj, alpharot, ndim=ndim)
# print real_traj.shape
alligned_traj, normed, alpha, _ = traj_to_dist(real_traj, ndim=ndim)
simple = True
if not simple:
real_traj1 = np.array([Propertie(real_traj[::, 0]).smooth(2),
Propertie(real_traj[::, 1]).smooth(2)])
alligned_traj1, normed1, alpha1, _ = traj_to_dist(real_traj1.T, ndim=ndim)
real_traj2 = np.array([Propertie(real_traj[::, 0]).smooth(5),
Propertie(real_traj[::, 1]).smooth(5)])
alligned_traj2, normed2, alpha2, _ = traj_to_dist(real_traj2.T, ndim=ndim)
normed = np.concatenate((normed[::, :4], normed1[::, :4], normed2), axis=1)
for zero in Z:
normed[zero, ::] = 0
tryagain = False
except:
tryagain = True
Y_train_traj.append(real_traj)
X_train[i] = normed
Y_trains[i][range(size), np.array(sc, dtype=np.int)] = 1
Y_train_cat[i, Model_num] = 1
return X_train, Y_trains, Y_train_cat, Y_train_traj
# print np.sum(np.isnan(X_train))
|
[
"numpy.sum",
"numpy.zeros",
"numpy.random.random",
"numpy.array",
"numpy.random.normal",
"numpy.random.rand",
"numpy.concatenate"
] |
[((307, 335), 'numpy.zeros', 'np.zeros', (['(N, size, 5 + add)'], {}), '((N, size, 5 + add))\n', (315, 335), True, 'import numpy as np\n'), ((370, 393), 'numpy.zeros', 'np.zeros', (['(N, size, 10)'], {}), '((N, size, 10))\n', (378, 393), True, 'import numpy as np\n'), ((416, 433), 'numpy.zeros', 'np.zeros', (['(N, 27)'], {}), '((N, 27))\n', (424, 433), True, 'import numpy as np\n'), ((463, 485), 'numpy.zeros', 'np.zeros', (['(N, size, 7)'], {}), '((N, size, 7))\n', (471, 485), True, 'import numpy as np\n'), ((508, 525), 'numpy.zeros', 'np.zeros', (['(N, 12)'], {}), '((N, 12))\n', (516, 525), True, 'import numpy as np\n'), ((683, 707), 'numpy.random.normal', 'np.random.normal', (['(0.5)', '(1)'], {}), '(0.5, 1)\n', (699, 707), True, 'import numpy as np\n'), ((734, 756), 'numpy.random.normal', 'np.random.normal', (['(1)', '(1)'], {}), '(1, 1)\n', (750, 756), True, 'import numpy as np\n'), ((2264, 2315), 'numpy.random.normal', 'np.random.normal', (['(0)', '(noise_l * std)', 'real_traj.shape'], {}), '(0, noise_l * std, real_traj.shape)\n', (2280, 2315), True, 'import numpy as np\n'), ((3454, 3480), 'numpy.array', 'np.array', (['sc'], {'dtype': 'np.int'}), '(sc, dtype=np.int)\n', (3462, 3480), True, 'import numpy as np\n'), ((2043, 2061), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (2059, 2061), True, 'import numpy as np\n'), ((2218, 2234), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (2232, 2234), True, 'import numpy as np\n'), ((3125, 3189), 'numpy.concatenate', 'np.concatenate', (['(normed[:, :4], normed1[:, :4], normed2)'], {'axis': '(1)'}), '((normed[:, :4], normed1[:, :4], normed2), axis=1)\n', (3139, 3189), True, 'import numpy as np\n'), ((2145, 2168), 'numpy.sum', 'np.sum', (['(dt ** 2)'], {'axis': '(1)'}), '(dt ** 2, axis=1)\n', (2151, 2168), True, 'import numpy as np\n')]
|
import numpy as np, networkx as nx, math
from scipy.sparse.csgraph import dijkstra
from scipy.sparse import csr_matrix, identity
def make_Zs(Y,ind1,ind0,pscores1,pscores0,subsample=False):
"""Generates vector of Z_i's, used to construct HT estimator.
Parameters
----------
Y : numpy float array
n-dimensional outcome vector.
ind1 : numpy boolean array
n-dimensional vector of indicators for first exposure mapping.
ind0 : numpy boolean array
n-dimensional vector of indicators for second exposure mapping.
pscores1 : numpy float array
n-dimensional vector of probabilities of first exposure mapping for each unit.
pscores0 : numpy float array
n-dimensional vector of probabilities of second exposure mapping for each unit
subsample : numpy boolean array
When set to an object that's not a numpy array, the function will define subsample to be an n-dimensional array of ones, meaning it is assumed that all n units are included in the population. Otherwise, it must be an boolean array of the same dimension as Z where True components indicate population inclusion.
Returns
-------
n-dimensional numpy float array, where entries corresponding to the True entries of subsample are equal to the desired Z's, and entries corresponding to False subsample entries are set to -1000.
"""
if type(subsample) != np.ndarray: subsample = np.ones(Y.size, dtype=bool)
i1 = ind1[subsample]
i0 = ind0[subsample]
ps1 = pscores1[subsample]
ps0 = pscores0[subsample]
weight1 = i1.copy().astype('float')
weight0 = i0.copy().astype('float')
weight1[weight1 == 1] = i1[weight1 == 1] / ps1[weight1 == 1]
weight0[weight0 == 1] = i0[weight0 == 1] / ps0[weight0 == 1]
Z = np.ones(Y.size) * (-1000) # filler entries that won't be used
Z[subsample] = Y[subsample] * (weight1 - weight0)
return Z
def network_SE(Zs, A, subsample=False, K=0, exp_nbhd=True, disp=False, b=-1):
"""Network-dependence robust standard errors.
Returns our standard errors for the sample mean of each array in Zs.
Parameters
----------
Zs : a list of numpy float arrays
Each array is n-dimensional.
A : NetworkX undirected graph
Graph on n nodes. NOTE: Assumes nodes are labeled 0 through n-1, so that the data for node i is given by the ith component of each array in Zs.
subsample : numpy boolean array
When set to an object that's not a numpy array, the function will define subsample to be an n-dimensional array of ones, meaning it is assumed that all n units are included in the population. Otherwise, it must be an boolean array of the same dimension as each array in Zs where True components indicate population inclusion.
K : integer
K used to define the K-neighborhood exposure mapping.
exp_nbhd : boolean
Boolean for whether neighborhood growth is exponential (True) or polynomial (False). Used to determine recommended bandwidth.
b : float
User-specified bandwidth. If a negative value is specified, function will compute our recommended bandwidth choice.
disp : boolean
Boolean for whether to also return more than just the SE (see below).
Returns
-------
SE : float
List of network-dependence robust standard error, one for each array of Zs.
APL : float
Average path length of A.
b : int
Bandwidth.
PSD_failure : list of booleans
True if substitute PSD variance estimator needed to be used for that component of Zs.
"""
if type(Zs) == np.ndarray:
is_list = False
Z_list = [Zs] # handle case where Zs is just an array
else:
is_list = True
Z_list = Zs
if type(subsample) != np.ndarray:
subsample = np.ones(Z_list[0].size, dtype=bool) # handle case where subsample is False
n = subsample.sum()
SEs = []
PSD_failures = []
if b == 0:
for Z in Z_list:
SEs.append(Z[subsample].std() / math.sqrt(subsample.sum())) # iid SE
APL = 0
PSD_failures.append(False)
else:
# compute path distances
G = nx.to_scipy_sparse_matrix(A, nodelist=range(A.number_of_nodes()), format='csr')
dist_matrix = dijkstra(csgraph=G, directed=False, unweighted=True)
Gcc = [A.subgraph(c).copy() for c in sorted(nx.connected_components(A), key=len, reverse=True)]
giant = [i for i in Gcc[0]] # set of nodes in giant component
APL = dist_matrix[np.ix_(giant,giant)].sum() / len(giant) / (len(giant)-1) # average path length
# default bandwidth
if b < 0:
b = round(APL/2) if exp_nbhd else round(APL**(1/3)) # rec bandwidth
b = max(2*K,b)
weights = dist_matrix <= b # weight matrix
for Z in Z_list:
Zc = Z[subsample] - Z[subsample].mean() # demeaned data
# default variance estimator (not guaranteed PSD)
var_est = Zc.dot(weights[np.ix_(subsample,subsample)].dot(Zc[:,None])) / n
# PSD variance estimator from the older draft (Leung, 2019)
if var_est <= 0:
PSD_failures.append(True)
if b < 0: b = round(APL/4) if exp_nbhd else round(APL**(1/3)) # rec bandwidth
b = max(K,b)
b_neighbors = dist_matrix <= b
row_sums = np.squeeze(b_neighbors.dot(np.ones(Z.size)[:,None]))
b_norm = b_neighbors / np.sqrt(row_sums)[:,None]
weights = b_norm.dot(b_norm.T)
var_est = Zc.dot(weights[np.ix_(subsample,subsample)].dot(Zc[:,None])) / n
else:
PSD_failures.append(False)
SEs.append(math.sqrt(var_est / n))
if disp:
if is_list:
return SEs, APL, b, PSD_failures
else:
return SEs[0], APL, b, PSD_failures
else:
if is_list:
return SEs
else:
return SEs[0]
|
[
"math.sqrt",
"numpy.ix_",
"numpy.ones",
"networkx.connected_components",
"scipy.sparse.csgraph.dijkstra",
"numpy.sqrt"
] |
[((1433, 1460), 'numpy.ones', 'np.ones', (['Y.size'], {'dtype': 'bool'}), '(Y.size, dtype=bool)\n', (1440, 1460), True, 'import numpy as np, networkx as nx, math\n'), ((1789, 1804), 'numpy.ones', 'np.ones', (['Y.size'], {}), '(Y.size)\n', (1796, 1804), True, 'import numpy as np, networkx as nx, math\n'), ((3827, 3862), 'numpy.ones', 'np.ones', (['Z_list[0].size'], {'dtype': 'bool'}), '(Z_list[0].size, dtype=bool)\n', (3834, 3862), True, 'import numpy as np, networkx as nx, math\n'), ((4299, 4351), 'scipy.sparse.csgraph.dijkstra', 'dijkstra', ([], {'csgraph': 'G', 'directed': '(False)', 'unweighted': '(True)'}), '(csgraph=G, directed=False, unweighted=True)\n', (4307, 4351), False, 'from scipy.sparse.csgraph import dijkstra\n'), ((5764, 5786), 'math.sqrt', 'math.sqrt', (['(var_est / n)'], {}), '(var_est / n)\n', (5773, 5786), False, 'import numpy as np, networkx as nx, math\n'), ((4404, 4430), 'networkx.connected_components', 'nx.connected_components', (['A'], {}), '(A)\n', (4427, 4430), True, 'import numpy as np, networkx as nx, math\n'), ((5515, 5532), 'numpy.sqrt', 'np.sqrt', (['row_sums'], {}), '(row_sums)\n', (5522, 5532), True, 'import numpy as np, networkx as nx, math\n'), ((4552, 4572), 'numpy.ix_', 'np.ix_', (['giant', 'giant'], {}), '(giant, giant)\n', (4558, 4572), True, 'import numpy as np, networkx as nx, math\n'), ((5450, 5465), 'numpy.ones', 'np.ones', (['Z.size'], {}), '(Z.size)\n', (5457, 5465), True, 'import numpy as np, networkx as nx, math\n'), ((5031, 5059), 'numpy.ix_', 'np.ix_', (['subsample', 'subsample'], {}), '(subsample, subsample)\n', (5037, 5059), True, 'import numpy as np, networkx as nx, math\n'), ((5629, 5657), 'numpy.ix_', 'np.ix_', (['subsample', 'subsample'], {}), '(subsample, subsample)\n', (5635, 5657), True, 'import numpy as np, networkx as nx, math\n')]
|
from qtpy.QtWidgets import QLabel
from qthandy import opaque
def test_opaque(qtbot):
widget = QLabel('Test')
qtbot.addWidget(widget)
widget.show()
opaque(widget)
assert widget.graphicsEffect()
|
[
"qthandy.opaque",
"qtpy.QtWidgets.QLabel"
] |
[((101, 115), 'qtpy.QtWidgets.QLabel', 'QLabel', (['"""Test"""'], {}), "('Test')\n", (107, 115), False, 'from qtpy.QtWidgets import QLabel\n'), ((167, 181), 'qthandy.opaque', 'opaque', (['widget'], {}), '(widget)\n', (173, 181), False, 'from qthandy import opaque\n')]
|
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
import math
#getting a list of cytokines names/labels
cyt_list = 'IL1B,IL2,IL4,IL5,IL6,IL7,CXCL8,IL10,IL12B,IL13,IL17A,CSF3,CSF2,IFNG,CCL2,CCL4,TNF,IL1RN,IL9,IL15,CCL11,FGF2,CXCL10,PDGFB,CCL5,VEGFA,CCL3'.split(',')
#getting dataframe from the Excel database
cyt_df = pd.read_excel('../database/db.xlsx', sheet_name = 'SM NM', usecols = 'F:AF,CB')
cyt_list.append('class')
cyt_df.columns = cyt_list
cyt_list.pop()
#cleaning df from NaN values
cyt_df.dropna(inplace = True) #done: 450 rows of 490 preserved ('no liquor' out too)
#getting cyt_df for each patients' class:
cyt_ctrl = cyt_df[cyt_df['class'] == 6]
cyt_rr = cyt_df[cyt_df['class'] == 3]
cyt_pp = cyt_df[cyt_df['class'] == 5]
cyt_sp = cyt_df[cyt_df['class'] == 4]
#Getting the distribution for each cytokine and
#superimposing it to the control cytokine distribution
sns.distributions._has_statsmodels = False #needed to avoid kde error coming from sns using statsmodel
#CTRL VS PP
for cyt in cyt_list:
plt.title('{} - PP vs CTRL\nN = {}'.format(cyt, len(cyt_pp)))
sns.distplot(cyt_ctrl[cyt], color = 'grey')
sns.distplot(cyt_pp[cyt], color = 'darkgreen')
plt.legend(['Control', 'PP'])
plt.xlabel('{} levels'.format(cyt))
plt.savefig('./../plots/ctrl_pp/{}.png'.format(cyt), dpi = 300)
print('Saved ctrl_pp/{}'.format(cyt))
plt.clf()
#CTRL VS SP
for cyt in cyt_list:
plt.title('{} - SP vs CTRL\nN = {}'.format(cyt, len(cyt_sp)))
sns.distplot(cyt_ctrl[cyt], color = 'grey')
sns.distplot(cyt_sp[cyt], color = 'darkgreen')
plt.legend(['Control', 'SP'])
plt.xlabel('{} levels'.format(cyt))
plt.savefig('./../plots/ctrl_sp/{}.png'.format(cyt), dpi = 300)
print('Saved ctrl_sp/{}'.format(cyt))
plt.clf()
#CTRL VS RR
for cyt in cyt_list:
plt.title('{} - RR vs CTRL\nN = {}'.format(cyt, len(cyt_rr)))
sns.distplot(cyt_ctrl[cyt], color = 'grey')
sns.distplot(cyt_rr[cyt], color = 'darkgreen')
plt.legend(['Control', 'RR'])
plt.xlabel('{} levels'.format(cyt))
plt.savefig('./../plots/ctrl_rr/{}.png'.format(cyt), dpi = 300)
print('Saved ctrl_rr/{}'.format(cyt))
plt.clf()
#creating dictionary for ctrl mean cytokine levels
ctrl_mean_list = []
for cyt in cyt_list:
mean = cyt_ctrl[cyt].astype(float).mean()
ctrl_mean_list.append(mean)
ctrl_mean_dict = dict(zip(cyt_list, ctrl_mean_list))
#getting a tsv file with more statistics:
cyt_lev_dfs = [cyt_ctrl, cyt_rr, cyt_pp, cyt_sp]
with open('data/cytokine_statistics/full_stats.tsv', 'w') as f:
    f.write('cytokine\tctrl_mean\tctrl_std\tpp_mean\tpp_std\tsp_mean\tsp_std\trr_mean\trr_std\tpp_diff\tsp_diff\trr_diff\trr_d\n')
for cyt in cyt_list:
ctrl_mean = ctrl_mean_dict[cyt]
ctrl_std = cyt_ctrl[cyt].astype(float).std()
pp_mean = cyt_pp[cyt].astype(float).mean()
pp_std = cyt_pp[cyt].astype(float).std()
pp_diff = (pp_mean - ctrl_mean)/math.sqrt(pp_std * ctrl_std) #define what to do with this value
sp_mean = cyt_sp[cyt].astype(float).mean()
sp_std = cyt_sp[cyt].astype(float).std()
sp_diff = (sp_mean - ctrl_mean)/math.sqrt(sp_std * ctrl_std)
rr_mean = cyt_rr[cyt].astype(float).mean()
rr_std = cyt_rr[cyt].astype(float).std()
rr_diff = (rr_mean - ctrl_mean)/math.sqrt(rr_std * ctrl_std)
rr_d = (rr_mean - ctrl_mean)
line = '{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n'.format(cyt, ctrl_mean, ctrl_std, pp_mean, pp_std, sp_mean, sp_std, rr_mean, rr_std, pp_diff, sp_diff, rr_diff, rr_d)
f.write(line)
stats_df = pd.read_csv('data/cytokine_statistics/full_stats.tsv', sep='\t')
print(stats_df)
|
[
"math.sqrt",
"matplotlib.pyplot.clf",
"pandas.read_csv",
"matplotlib.pyplot.legend",
"pandas.read_excel",
"seaborn.distplot"
] |
[((352, 427), 'pandas.read_excel', 'pd.read_excel', (['"""../database/db.xlsx"""'], {'sheet_name': '"""SM NM"""', 'usecols': '"""F:AF,CB"""'}), "('../database/db.xlsx', sheet_name='SM NM', usecols='F:AF,CB')\n", (365, 427), True, 'import pandas as pd\n'), ((3467, 3531), 'pandas.read_csv', 'pd.read_csv', (['"""data/cytokine_statistics/full_stats.tsv"""'], {'sep': '"""\t"""'}), "('data/cytokine_statistics/full_stats.tsv', sep='\\t')\n", (3478, 3531), True, 'import pandas as pd\n'), ((1117, 1158), 'seaborn.distplot', 'sns.distplot', (['cyt_ctrl[cyt]'], {'color': '"""grey"""'}), "(cyt_ctrl[cyt], color='grey')\n", (1129, 1158), True, 'import seaborn as sns\n'), ((1162, 1206), 'seaborn.distplot', 'sns.distplot', (['cyt_pp[cyt]'], {'color': '"""darkgreen"""'}), "(cyt_pp[cyt], color='darkgreen')\n", (1174, 1206), True, 'import seaborn as sns\n'), ((1210, 1239), 'matplotlib.pyplot.legend', 'plt.legend', (["['Control', 'PP']"], {}), "(['Control', 'PP'])\n", (1220, 1239), True, 'from matplotlib import pyplot as plt\n'), ((1382, 1391), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1389, 1391), True, 'from matplotlib import pyplot as plt\n'), ((1490, 1531), 'seaborn.distplot', 'sns.distplot', (['cyt_ctrl[cyt]'], {'color': '"""grey"""'}), "(cyt_ctrl[cyt], color='grey')\n", (1502, 1531), True, 'import seaborn as sns\n'), ((1535, 1579), 'seaborn.distplot', 'sns.distplot', (['cyt_sp[cyt]'], {'color': '"""darkgreen"""'}), "(cyt_sp[cyt], color='darkgreen')\n", (1547, 1579), True, 'import seaborn as sns\n'), ((1583, 1612), 'matplotlib.pyplot.legend', 'plt.legend', (["['Control', 'SP']"], {}), "(['Control', 'SP'])\n", (1593, 1612), True, 'from matplotlib import pyplot as plt\n'), ((1755, 1764), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1762, 1764), True, 'from matplotlib import pyplot as plt\n'), ((1863, 1904), 'seaborn.distplot', 'sns.distplot', (['cyt_ctrl[cyt]'], {'color': '"""grey"""'}), "(cyt_ctrl[cyt], color='grey')\n", (1875, 1904), True, 'import seaborn as sns\n'), ((1908, 1952), 'seaborn.distplot', 'sns.distplot', (['cyt_rr[cyt]'], {'color': '"""darkgreen"""'}), "(cyt_rr[cyt], color='darkgreen')\n", (1920, 1952), True, 'import seaborn as sns\n'), ((1956, 1985), 'matplotlib.pyplot.legend', 'plt.legend', (["['Control', 'RR']"], {}), "(['Control', 'RR'])\n", (1966, 1985), True, 'from matplotlib import pyplot as plt\n'), ((2128, 2137), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2135, 2137), True, 'from matplotlib import pyplot as plt\n'), ((2861, 2889), 'math.sqrt', 'math.sqrt', (['(pp_std * ctrl_std)'], {}), '(pp_std * ctrl_std)\n', (2870, 2889), False, 'import math\n'), ((3047, 3075), 'math.sqrt', 'math.sqrt', (['(sp_std * ctrl_std)'], {}), '(sp_std * ctrl_std)\n', (3056, 3075), False, 'import math\n'), ((3198, 3226), 'math.sqrt', 'math.sqrt', (['(rr_std * ctrl_std)'], {}), '(rr_std * ctrl_std)\n', (3207, 3226), False, 'import math\n')]
|
# THE FOLLOWING CODE CAN BE USED IN YOUR SAGEMAKER NOTEBOOK TO TEST AN UPLOADED IMAGE TO YOUR S3 BUCKET AGAINST YOUR MODEL
import os
import urllib.request
import boto3
from IPython.display import Image
import cv2
import json
import numpy as np
# input the S3 bucket you are using for this project and the file path for a folder and file that contains your uploaded test image
test_image_bucket = 'deeplens-sagemaker-socksortingeast'
test_image_name = 'testimages/image0.jpeg'
tmp_file_name = 'tmp-test-image-jpg'
resized_file_name = 'resized-test-image.jpg'
s3 = boto3.client('s3')
with open(tmp_file_name, 'wb') as f:
s3.download_fileobj(test_image_bucket, test_image_name, f)
# width
W = 500
oriimg = cv2.imread(tmp_file_name)
height, width, depth = oriimg.shape
# scale the image
imgScale = W/width
newX,newY = oriimg.shape[1]*imgScale, oriimg.shape[0]*imgScale
newimg = cv2.resize(oriimg, (int(newX),int(newY)))
cv2.imwrite(resized_file_name, newimg)
with open(resized_file_name, 'rb') as f:
payload = f.read()
payload = bytearray(payload)
result = json.loads(ic_classifier.predict(payload, initial_args={'ContentType': 'application/x-image'}))
# find the index of the class that matches the test image with the highest probability
index = np.argmax(result)
# input your own output categories
object_categories = ['BlueStripes', 'DarkGray', 'IronMan']
print("Result: label - " + object_categories[index] + ", probability - " + str(result[index]))
print()
print(result)
print(ic._current_job_name)
Image(resized_file_name)
|
[
"boto3.client",
"numpy.argmax",
"cv2.imwrite",
"cv2.imread",
"IPython.display.Image"
] |
[((567, 585), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (579, 585), False, 'import boto3\n'), ((712, 737), 'cv2.imread', 'cv2.imread', (['tmp_file_name'], {}), '(tmp_file_name)\n', (722, 737), False, 'import cv2\n'), ((925, 963), 'cv2.imwrite', 'cv2.imwrite', (['resized_file_name', 'newimg'], {}), '(resized_file_name, newimg)\n', (936, 963), False, 'import cv2\n'), ((1263, 1280), 'numpy.argmax', 'np.argmax', (['result'], {}), '(result)\n', (1272, 1280), True, 'import numpy as np\n'), ((1520, 1544), 'IPython.display.Image', 'Image', (['resized_file_name'], {}), '(resized_file_name)\n', (1525, 1544), False, 'from IPython.display import Image\n')]
|
#!/usr/bin/env python3
# date: 2020.05.29
# It uses a normal loop to animate the point and checkMouse to close the program on click
from graphics import * # PEP8: `import *` is not preferred
import random
import time
# --- main ---
win = GraphWin("My Window",500,500)
win.setBackground(color_rgb(0,0,0))
pt = Point(250, 250)
pt.setOutline(color_rgb(255,255,0))
pt.draw(win)
while True:
if win.checkMouse():
break
dx = random.randint(-10, 10)
dy = random.randint(-10, 10)
pt.move(dx, dy)
time.sleep(0.1)
win.close()
|
[
"random.randint",
"time.sleep"
] |
[((430, 453), 'random.randint', 'random.randint', (['(-10)', '(10)'], {}), '(-10, 10)\n', (444, 453), False, 'import random\n'), ((463, 486), 'random.randint', 'random.randint', (['(-10)', '(10)'], {}), '(-10, 10)\n', (477, 486), False, 'import random\n'), ((511, 526), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (521, 526), False, 'import time\n')]
|
import unittest
from gaphas.examples import Box
from gaphor import UML
from gaphor.application import Application
from gaphor.diagram.general.comment import CommentItem
from gaphor.ui.mainwindow import DiagramPage
class DiagramPageTestCase(unittest.TestCase):
def setUp(self):
Application.init(
services=[
"event_manager",
"component_registry",
"element_factory",
"main_window",
"properties",
"namespace",
"diagrams",
"toolbox",
"elementeditor",
"export_menu",
"tools_menu",
]
)
main_window = Application.get_service("main_window")
main_window.open()
self.element_factory = Application.get_service("element_factory")
self.diagram = self.element_factory.create(UML.Diagram)
self.page = DiagramPage(
self.diagram,
Application.get_service("event_manager"),
self.element_factory,
Application.get_service("properties"),
)
self.page.construct()
assert self.page.diagram == self.diagram
assert self.page.view.canvas == self.diagram.canvas
assert len(self.element_factory.lselect()) == 1
def tearDown(self):
self.page.close()
del self.page
self.diagram.unlink()
del self.diagram
Application.shutdown()
assert len(self.element_factory.lselect()) == 0
def test_creation(self):
pass
def test_placement(self):
box = Box()
self.diagram.canvas.add(box)
self.diagram.canvas.update_now()
self.page.view.request_update([box])
self.diagram.create(
CommentItem, subject=self.element_factory.create(UML.Comment)
)
assert len(self.element_factory.lselect()) == 2
|
[
"gaphas.examples.Box",
"gaphor.application.Application.shutdown",
"gaphor.application.Application.get_service",
"gaphor.application.Application.init"
] |
[((293, 497), 'gaphor.application.Application.init', 'Application.init', ([], {'services': "['event_manager', 'component_registry', 'element_factory', 'main_window',\n 'properties', 'namespace', 'diagrams', 'toolbox', 'elementeditor',\n 'export_menu', 'tools_menu']"}), "(services=['event_manager', 'component_registry',\n 'element_factory', 'main_window', 'properties', 'namespace', 'diagrams',\n 'toolbox', 'elementeditor', 'export_menu', 'tools_menu'])\n", (309, 497), False, 'from gaphor.application import Application\n'), ((725, 763), 'gaphor.application.Application.get_service', 'Application.get_service', (['"""main_window"""'], {}), "('main_window')\n", (748, 763), False, 'from gaphor.application import Application\n'), ((822, 864), 'gaphor.application.Application.get_service', 'Application.get_service', (['"""element_factory"""'], {}), "('element_factory')\n", (845, 864), False, 'from gaphor.application import Application\n'), ((1468, 1490), 'gaphor.application.Application.shutdown', 'Application.shutdown', ([], {}), '()\n', (1488, 1490), False, 'from gaphor.application import Application\n'), ((1635, 1640), 'gaphas.examples.Box', 'Box', ([], {}), '()\n', (1638, 1640), False, 'from gaphas.examples import Box\n'), ((1000, 1040), 'gaphor.application.Application.get_service', 'Application.get_service', (['"""event_manager"""'], {}), "('event_manager')\n", (1023, 1040), False, 'from gaphor.application import Application\n'), ((1088, 1125), 'gaphor.application.Application.get_service', 'Application.get_service', (['"""properties"""'], {}), "('properties')\n", (1111, 1125), False, 'from gaphor.application import Application\n')]
|
import pandas as pd
files = [
'students_wt_15.csv',
'students_st_16.csv',
'students_wt_16.csv',
'students_st_17.csv',
]
for filename in files:
path = f'input/{filename}'
students = pd.read_csv(path, index_col=0)
print('From:', students.columns)
students = students[['hash', 'Sex', 'Nationality', 'Discipline']]
print('To:', students.columns, '\n')
students.to_csv(path)
|
[
"pandas.read_csv"
] |
[((207, 237), 'pandas.read_csv', 'pd.read_csv', (['path'], {'index_col': '(0)'}), '(path, index_col=0)\n', (218, 237), True, 'import pandas as pd\n')]
|
import urllib.request as url
from paderbox.io.cache_dir import get_cache_dir
def fetch_file_from_url(fpath, file=None):
"""
Checks if local cache directory possesses an example named <file>.
If not found, loads data from urlpath and stores it under <fpath>
Args:
fpath: url to the example repository
file: name of the testfile
Returns: Path to file
"""
path = get_cache_dir()
if file is None:
# remove difficult letters
file = fpath.replace(':', '_').replace('/', '_')
if not (path / file).exists():
datapath = url.urlopen(fpath)
data = datapath.read()
with open(path / file, "wb") as f:
f.write(data)
return path / file
def get_file_path(file_name):
"""
Looks up path to a test audio file and returns to the local file.
Args:
file: audio file needed for the test
Returns: Path to audio test file
"""
_pesq = "https://github.com/ludlows/python-pesq/raw/master/audio/"
_pb_bss = "https://github.com/fgnt/pb_test_data/raw/master/bss_data/" \
"low_reverberation/"
url_ = {
'sample.wav': _pb_bss + "speech_source_0.wav",
'observation.wav': _pb_bss+"observation.wav", # multi channel
'speech_source_0.wav': _pb_bss+"speech_source_0.wav",
'speech_source_1.wav': _pb_bss+"speech_source_1.wav",
'speech_image_0.wav': _pb_bss+"speech_image_0.wav", # multi channel
'speech_image_1.wav': _pb_bss+"speech_image_1.wav", # multi channel
'noise_image.wav': _pb_bss+"noise_image.wav", # multi channel
'speech.wav': _pesq + "speech.wav",
"speech_bab_0dB.wav": _pesq + "speech_bab_0dB.wav",
# pylint: disable=line-too-long
# Found on https://www.isip.piconepress.com/projects/speech/software/tutorials/production/fundamentals/v1.0/section_02/s02_01_p04.html
'speech.sph': 'https://www.isip.piconepress.com/projects/speech/software/tutorials/production/fundamentals/v1.0/section_02/data/speech.sph',
'123_1pcbe_shn.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_1pcbe_shn.sph',
'123_1pcle_shn.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_1pcle_shn.sph',
'123_1ulaw_shn.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_1ulaw_shn.sph',
'123_2alaw.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_2alaw.sph',
}[file_name]
return fetch_file_from_url(url_, file_name)
|
[
"paderbox.io.cache_dir.get_cache_dir",
"urllib.request.urlopen"
] |
[((409, 424), 'paderbox.io.cache_dir.get_cache_dir', 'get_cache_dir', ([], {}), '()\n', (422, 424), False, 'from paderbox.io.cache_dir import get_cache_dir\n'), ((594, 612), 'urllib.request.urlopen', 'url.urlopen', (['fpath'], {}), '(fpath)\n', (605, 612), True, 'import urllib.request as url\n')]
|
from django.test import Client, TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse, resolve
from .views import ProfileUpdateView
from .forms import CustomUserCreationForm
# Create your tests here.
class CustomUserTests(TestCase):
def test_create_user(self):
User = get_user_model()
user = User.objects.create_user(
username='test123',
email='<EMAIL>',
password = '<PASSWORD>',
)
self.assertEqual(user.username,'test123')
self.assertEqual(user.email, '<EMAIL>')
self.assertTrue(user.is_active)
self.assertFalse(user.is_staff)
self.assertFalse(user.is_superuser)
def test_create_superuser(self):
User = get_user_model()
user = User.objects.create_superuser(
username='test123',
email='<EMAIL>',
password = '<PASSWORD>',
)
self.assertEqual(user.username,'test123')
self.assertEqual(user.email, '<EMAIL>')
self.assertTrue(user.is_active)
self.assertTrue(user.is_staff)
self.assertTrue(user.is_superuser)
class SignupTests(TestCase):
username = 'newuser'
email = '<EMAIL>'
def setUp(self):
url = reverse('account_signup')
self.response = self.client.get(url)
def test_signup_template(self):
self.assertEqual(self.response.status_code, 200)
self.assertTemplateUsed(self.response, 'account/signup.html')
self.assertContains(self.response, 'Sign Up')
self.assertNotContains(
self.response, 'Hi there! I should not be on the page.')
def test_signup_form(self):
new_user = get_user_model().objects.create_user(
self.username, self.email)
self.assertEqual(get_user_model().objects.all().count(), 1)
self.assertEqual(get_user_model().objects.all()
[0].username, self.username)
self.assertEqual(get_user_model().objects.all()
[0].email, self.email)
class UpdateProfileTest(TestCase):
def setUp(self):
self.user = get_user_model().objects.create_user(
username='testuser',
email='<EMAIL>',
password='<PASSWORD>'
)
self.client.login(username='testuser', password='<PASSWORD>')
self.response = self.client.get(reverse('update_profile'))
def test_update_profile_template(self):
self.assertEqual(self.response.status_code, 200)
self.assertTemplateUsed(self.response, 'account/profile_update.html')
self.assertContains(self.response, 'Update Profile')
self.assertNotContains(
self.response, 'Hi there! I should not be on the page.')
|
[
"django.urls.reverse",
"django.contrib.auth.get_user_model"
] |
[((317, 333), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (331, 333), False, 'from django.contrib.auth import get_user_model\n'), ((763, 779), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (777, 779), False, 'from django.contrib.auth import get_user_model\n'), ((1273, 1298), 'django.urls.reverse', 'reverse', (['"""account_signup"""'], {}), "('account_signup')\n", (1280, 1298), False, 'from django.urls import reverse, resolve\n'), ((2407, 2432), 'django.urls.reverse', 'reverse', (['"""update_profile"""'], {}), "('update_profile')\n", (2414, 2432), False, 'from django.urls import reverse, resolve\n'), ((1715, 1731), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1729, 1731), False, 'from django.contrib.auth import get_user_model\n'), ((2153, 2169), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2167, 2169), False, 'from django.contrib.auth import get_user_model\n'), ((1817, 1833), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1831, 1833), False, 'from django.contrib.auth import get_user_model\n'), ((1885, 1901), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1899, 1901), False, 'from django.contrib.auth import get_user_model\n'), ((1995, 2011), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2009, 2011), False, 'from django.contrib.auth import get_user_model\n')]
|
import pickle
from collections import deque
from datetime import datetime
from textwrap import dedent
import pytest
from dateutil.parser import isoparse
from pytz import UTC
from validx import exc
def test_validation_error():
te = exc.InvalidTypeError(expected=int, actual=str)
assert te.context == deque([])
assert te.format_context() == ""
assert te.format_error() == "InvalidTypeError(expected=%r, actual=%r)" % (int, str)
assert repr(te) == "<InvalidTypeError(expected=%r, actual=%r)>" % (int, str)
te.add_context("x")
assert te.context == deque(["x"])
assert te.format_context() == "x"
assert repr(te) == "<x: InvalidTypeError(expected=%r, actual=%r)>" % (int, str)
te.add_context(1)
assert te.context == deque([1, "x"])
assert te.format_context() == "1.x"
assert repr(te) == "<1.x: InvalidTypeError(expected=%r, actual=%r)>" % (int, str)
te.add_context("a.b")
assert te.format_context() == "[a.b].1.x"
assert te.context == deque(["a.b", 1, "x"])
assert repr(te) == "<[a.b].1.x: InvalidTypeError(expected=%r, actual=%r)>" % (
int,
str,
)
assert repr(te) == str(te)
assert list(te) == [te]
assert len(te) == 1
assert te[0] == te
assert te == exc.InvalidTypeError(te.context, expected=int, actual=str)
assert te != exc.ConditionError(te.context, expected=int, actual=str)
assert te != exc.InvalidTypeError(te.context, expected=int, actual=float)
with pytest.raises(IndexError):
te[1]
te.sort()
assert list(te) == [te]
te.sort(reverse=True)
assert list(te) == [te]
assert pickle.loads(pickle.dumps(te)) == te
def test_mapping_key_error():
mke = exc.MissingKeyError("x")
fke = exc.ForbiddenKeyError("y")
assert mke.context == deque(["x"])
assert fke.context == deque(["y"])
assert repr(mke) == "<x: MissingKeyError()>"
assert repr(fke) == "<y: ForbiddenKeyError()>"
assert mke == exc.MissingKeyError(key="x")
assert mke == exc.MissingKeyError(deque(["x"]))
assert pickle.loads(pickle.dumps(mke)) == mke
assert pickle.loads(pickle.dumps(fke)) == fke
def test_schema_error():
mve_1 = exc.MaxValueError(context=deque(["y"]), expected=100, actual=200)
mve_2 = exc.MaxValueError(context=deque(["x"]), expected=100, actual=300)
se = exc.SchemaError(errors=[mve_1, mve_2])
assert se.context == deque([])
assert se == exc.SchemaError([mve_1, mve_2])
assert repr(se) == (
dedent(
"""
<SchemaError(errors=[
<y: MaxValueError(expected=100, actual=200)>,
<x: MaxValueError(expected=100, actual=300)>
])>
"""
).strip()
)
se.add_context("a")
assert se.context == deque([])
assert mve_1.context == deque(["a", "y"])
assert mve_2.context == deque(["a", "x"])
assert repr(se) == (
dedent(
"""
<SchemaError(errors=[
<a.y: MaxValueError(expected=100, actual=200)>,
<a.x: MaxValueError(expected=100, actual=300)>
])>
"""
).strip()
)
assert repr(se) == str(se)
assert list(se) == [mve_1, mve_2]
assert len(se) == 2
assert se[0] == mve_1
assert se[1] == mve_2
with pytest.raises(IndexError):
se[2]
se.sort()
assert list(se) == [mve_2, mve_1]
se.sort(reverse=True)
assert list(se) == [mve_1, mve_2]
assert pickle.loads(pickle.dumps(se)) == se
def test_extra():
assert exc.EXTRA_KEY == exc.Extra("KEY")
assert exc.EXTRA_VALUE == exc.Extra("VALUE")
assert exc.EXTRA_KEY != exc.EXTRA_VALUE
assert repr(exc.EXTRA_KEY) == "@KEY"
assert repr(exc.EXTRA_VALUE) == "@VALUE"
assert str(exc.EXTRA_KEY) == repr(exc.EXTRA_KEY)
assert pickle.loads(pickle.dumps(exc.EXTRA_KEY)) == exc.EXTRA_KEY
assert pickle.loads(pickle.dumps(exc.EXTRA_VALUE)) == exc.EXTRA_VALUE
def test_step():
step_1 = exc.Step(1)
step_2 = exc.Step(2)
assert step_1 != step_2
assert step_1 == exc.Step(1)
assert repr(step_1) == "#1"
assert repr(step_2) == "#2"
assert str(step_1) == repr(step_1)
assert pickle.loads(pickle.dumps(step_1)) == step_1
assert pickle.loads(pickle.dumps(step_2)) == step_2
def test_format_error():
assert exc.format_error(exc.InvalidTypeError(expected=int, actual=type(None))) == [
("", "Value should not be null.")
]
assert exc.format_error(exc.InvalidTypeError(expected=int, actual=str)) == [
("", "Expected type “int”, got “str”.")
]
assert exc.format_error(exc.OptionsError(expected=[1], actual=2)) == [
("", "Expected 1, got 2.")
]
assert exc.format_error(exc.OptionsError(expected=[1, 2, 3], actual=4)) == [
("", "Expected one of [1, 2, 3], got 4.")
]
assert exc.format_error(exc.MinValueError(expected=10, actual=5)) == [
("", "Expected value ≥ 10, got 5.")
]
assert exc.format_error(exc.MaxValueError(expected=10, actual=15)) == [
("", "Expected value ≤ 10, got 15.")
]
assert exc.format_error(
exc.FloatValueError(expected="finite", actual=float("-inf"))
) == [("", "Expected finite number, got -∞.")]
assert exc.format_error(
exc.FloatValueError(expected="finite", actual=float("+inf"))
) == [("", "Expected finite number, got +∞.")]
assert exc.format_error(
exc.FloatValueError(expected="number", actual=float("nan"))
) == [("", "Expected number, got NaN.")]
assert exc.format_error(exc.StrDecodeError(expected="utf-8", actual=b"\xFF")) == [
("", "Cannot decode value using “utf-8” encoding.")
]
assert exc.format_error(exc.MinLengthError(expected=10, actual=5)) == [
("", "Expected value length ≥ 10, got 5.")
]
assert exc.format_error(exc.MaxLengthError(expected=10, actual=15)) == [
("", "Expected value length ≤ 10, got 15.")
]
assert exc.format_error(exc.TupleLengthError(expected=1, actual=2)) == [
("", "Expected exactly 1 element, got 2.")
]
assert exc.format_error(exc.TupleLengthError(expected=3, actual=2)) == [
("", "Expected exactly 3 elements, got 2.")
]
assert exc.format_error(
exc.PatternMatchError(expected="^[0-9]+$", actual="xyz")
) == [("", "Cannot match “xyz” using “^[0-9]+$”.")]
assert exc.format_error(
exc.DatetimeParseError(expected="%Y-%m-%d", actual="08/18/2018")
) == [
("", "Cannot parse date/time value from “08/18/2018” using “%Y-%m-%d” format.")
]
assert exc.format_error(
exc.DatetimeParseError(expected=isoparse, actual="08/18/2018")
) == [("", "Cannot parse date/time value from “08/18/2018”.")]
assert exc.format_error(
exc.DatetimeTypeError(expected="naive", actual=datetime.now(UTC))
) == [("", "Naive date/time object is expected.")]
assert exc.format_error(
exc.DatetimeTypeError(expected="tzaware", actual=datetime.now())
) == [("", "Timezone-aware date/time object is expected.")]
assert exc.format_error(exc.RecursionMaxDepthError(expected=2, actual=3)) == [
("", "Too many nested structures, limit is 2.")
]
assert exc.format_error(exc.ForbiddenKeyError("x")) == [
("x", "Key is not allowed.")
]
assert exc.format_error(exc.MissingKeyError("x")) == [
("x", "Required key is not provided.")
]
# Test fallback
assert exc.format_error(exc.ConditionError(expected=1, actual=2)) == [
("", "ConditionError(expected=1, actual=2)")
]
assert exc.format_error(exc.FloatValueError(expected="something", actual=0.0)) == [
("", "FloatValueError(expected='something', actual=0.0)")
]
assert exc.format_error(
exc.DatetimeTypeError(expected="something", actual=datetime(2018, 12, 5))
) == [
(
"",
"DatetimeTypeError(expected='something', actual=datetime.datetime(2018, 12, 5, 0, 0))",
)
]
|
[
"validx.exc.OptionsError",
"validx.exc.MaxLengthError",
"validx.exc.Step",
"validx.exc.Extra",
"validx.exc.RecursionMaxDepthError",
"validx.exc.FloatValueError",
"validx.exc.ForbiddenKeyError",
"validx.exc.PatternMatchError",
"collections.deque",
"validx.exc.MaxValueError",
"validx.exc.MinLengthError",
"validx.exc.MinValueError",
"pytest.raises",
"validx.exc.SchemaError",
"validx.exc.StrDecodeError",
"datetime.datetime.now",
"pickle.dumps",
"validx.exc.MissingKeyError",
"validx.exc.ConditionError",
"validx.exc.DatetimeParseError",
"datetime.datetime",
"textwrap.dedent",
"validx.exc.InvalidTypeError",
"validx.exc.TupleLengthError"
] |
[((239, 285), 'validx.exc.InvalidTypeError', 'exc.InvalidTypeError', ([], {'expected': 'int', 'actual': 'str'}), '(expected=int, actual=str)\n', (259, 285), False, 'from validx import exc\n'), ((1708, 1732), 'validx.exc.MissingKeyError', 'exc.MissingKeyError', (['"""x"""'], {}), "('x')\n", (1727, 1732), False, 'from validx import exc\n'), ((1743, 1769), 'validx.exc.ForbiddenKeyError', 'exc.ForbiddenKeyError', (['"""y"""'], {}), "('y')\n", (1764, 1769), False, 'from validx import exc\n'), ((2340, 2378), 'validx.exc.SchemaError', 'exc.SchemaError', ([], {'errors': '[mve_1, mve_2]'}), '(errors=[mve_1, mve_2])\n', (2355, 2378), False, 'from validx import exc\n'), ((3993, 4004), 'validx.exc.Step', 'exc.Step', (['(1)'], {}), '(1)\n', (4001, 4004), False, 'from validx import exc\n'), ((4018, 4029), 'validx.exc.Step', 'exc.Step', (['(2)'], {}), '(2)\n', (4026, 4029), False, 'from validx import exc\n'), ((311, 320), 'collections.deque', 'deque', (['[]'], {}), '([])\n', (316, 320), False, 'from collections import deque\n'), ((576, 588), 'collections.deque', 'deque', (["['x']"], {}), "(['x'])\n", (581, 588), False, 'from collections import deque\n'), ((758, 773), 'collections.deque', 'deque', (["[1, 'x']"], {}), "([1, 'x'])\n", (763, 773), False, 'from collections import deque\n'), ((997, 1019), 'collections.deque', 'deque', (["['a.b', 1, 'x']"], {}), "(['a.b', 1, 'x'])\n", (1002, 1019), False, 'from collections import deque\n'), ((1260, 1318), 'validx.exc.InvalidTypeError', 'exc.InvalidTypeError', (['te.context'], {'expected': 'int', 'actual': 'str'}), '(te.context, expected=int, actual=str)\n', (1280, 1318), False, 'from validx import exc\n'), ((1336, 1392), 'validx.exc.ConditionError', 'exc.ConditionError', (['te.context'], {'expected': 'int', 'actual': 'str'}), '(te.context, expected=int, actual=str)\n', (1354, 1392), False, 'from validx import exc\n'), ((1410, 1470), 'validx.exc.InvalidTypeError', 'exc.InvalidTypeError', (['te.context'], {'expected': 'int', 'actual': 'float'}), '(te.context, expected=int, actual=float)\n', (1430, 1470), False, 'from validx import exc\n'), ((1480, 1505), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (1493, 1505), False, 'import pytest\n'), ((1796, 1808), 'collections.deque', 'deque', (["['x']"], {}), "(['x'])\n", (1801, 1808), False, 'from collections import deque\n'), ((1835, 1847), 'collections.deque', 'deque', (["['y']"], {}), "(['y'])\n", (1840, 1847), False, 'from collections import deque\n'), ((1966, 1994), 'validx.exc.MissingKeyError', 'exc.MissingKeyError', ([], {'key': '"""x"""'}), "(key='x')\n", (1985, 1994), False, 'from validx import exc\n'), ((2404, 2413), 'collections.deque', 'deque', (['[]'], {}), '([])\n', (2409, 2413), False, 'from collections import deque\n'), ((2431, 2462), 'validx.exc.SchemaError', 'exc.SchemaError', (['[mve_1, mve_2]'], {}), '([mve_1, mve_2])\n', (2446, 2462), False, 'from validx import exc\n'), ((2782, 2791), 'collections.deque', 'deque', (['[]'], {}), '([])\n', (2787, 2791), False, 'from collections import deque\n'), ((2820, 2837), 'collections.deque', 'deque', (["['a', 'y']"], {}), "(['a', 'y'])\n", (2825, 2837), False, 'from collections import deque\n'), ((2866, 2883), 'collections.deque', 'deque', (["['a', 'x']"], {}), "(['a', 'x'])\n", (2871, 2883), False, 'from collections import deque\n'), ((3314, 3339), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (3327, 3339), False, 'import pytest\n'), ((3568, 3584), 'validx.exc.Extra', 'exc.Extra', (['"""KEY"""'], {}), 
"('KEY')\n", (3577, 3584), False, 'from validx import exc\n'), ((3615, 3633), 'validx.exc.Extra', 'exc.Extra', (['"""VALUE"""'], {}), "('VALUE')\n", (3624, 3633), False, 'from validx import exc\n'), ((4079, 4090), 'validx.exc.Step', 'exc.Step', (['(1)'], {}), '(1)\n', (4087, 4090), False, 'from validx import exc\n'), ((1642, 1658), 'pickle.dumps', 'pickle.dumps', (['te'], {}), '(te)\n', (1654, 1658), False, 'import pickle\n'), ((2033, 2045), 'collections.deque', 'deque', (["['x']"], {}), "(['x'])\n", (2038, 2045), False, 'from collections import deque\n'), ((2071, 2088), 'pickle.dumps', 'pickle.dumps', (['mke'], {}), '(mke)\n', (2083, 2088), False, 'import pickle\n'), ((2121, 2138), 'pickle.dumps', 'pickle.dumps', (['fke'], {}), '(fke)\n', (2133, 2138), False, 'import pickle\n'), ((2212, 2224), 'collections.deque', 'deque', (["['y']"], {}), "(['y'])\n", (2217, 2224), False, 'from collections import deque\n'), ((2290, 2302), 'collections.deque', 'deque', (["['x']"], {}), "(['x'])\n", (2295, 2302), False, 'from collections import deque\n'), ((3496, 3512), 'pickle.dumps', 'pickle.dumps', (['se'], {}), '(se)\n', (3508, 3512), False, 'import pickle\n'), ((3841, 3868), 'pickle.dumps', 'pickle.dumps', (['exc.EXTRA_KEY'], {}), '(exc.EXTRA_KEY)\n', (3853, 3868), False, 'import pickle\n'), ((3911, 3940), 'pickle.dumps', 'pickle.dumps', (['exc.EXTRA_VALUE'], {}), '(exc.EXTRA_VALUE)\n', (3923, 3940), False, 'import pickle\n'), ((4218, 4238), 'pickle.dumps', 'pickle.dumps', (['step_1'], {}), '(step_1)\n', (4230, 4238), False, 'import pickle\n'), ((4274, 4294), 'pickle.dumps', 'pickle.dumps', (['step_2'], {}), '(step_2)\n', (4286, 4294), False, 'import pickle\n'), ((4497, 4543), 'validx.exc.InvalidTypeError', 'exc.InvalidTypeError', ([], {'expected': 'int', 'actual': 'str'}), '(expected=int, actual=str)\n', (4517, 4543), False, 'from validx import exc\n'), ((4632, 4672), 'validx.exc.OptionsError', 'exc.OptionsError', ([], {'expected': '[1]', 'actual': '(2)'}), '(expected=[1], actual=2)\n', (4648, 4672), False, 'from validx import exc\n'), ((4748, 4794), 'validx.exc.OptionsError', 'exc.OptionsError', ([], {'expected': '[1, 2, 3]', 'actual': '(4)'}), '(expected=[1, 2, 3], actual=4)\n', (4764, 4794), False, 'from validx import exc\n'), ((4885, 4925), 'validx.exc.MinValueError', 'exc.MinValueError', ([], {'expected': '(10)', 'actual': '(5)'}), '(expected=10, actual=5)\n', (4902, 4925), False, 'from validx import exc\n'), ((5010, 5051), 'validx.exc.MaxValueError', 'exc.MaxValueError', ([], {'expected': '(10)', 'actual': '(15)'}), '(expected=10, actual=15)\n', (5027, 5051), False, 'from validx import exc\n'), ((5577, 5629), 'validx.exc.StrDecodeError', 'exc.StrDecodeError', ([], {'expected': '"""utf-8"""', 'actual': "b'\\xff'"}), "(expected='utf-8', actual=b'\\xff')\n", (5595, 5629), False, 'from validx import exc\n'), ((5730, 5771), 'validx.exc.MinLengthError', 'exc.MinLengthError', ([], {'expected': '(10)', 'actual': '(5)'}), '(expected=10, actual=5)\n', (5748, 5771), False, 'from validx import exc\n'), ((5863, 5905), 'validx.exc.MaxLengthError', 'exc.MaxLengthError', ([], {'expected': '(10)', 'actual': '(15)'}), '(expected=10, actual=15)\n', (5881, 5905), False, 'from validx import exc\n'), ((5998, 6040), 'validx.exc.TupleLengthError', 'exc.TupleLengthError', ([], {'expected': '(1)', 'actual': '(2)'}), '(expected=1, actual=2)\n', (6018, 6040), False, 'from validx import exc\n'), ((6132, 6174), 'validx.exc.TupleLengthError', 'exc.TupleLengthError', ([], {'expected': '(3)', 'actual': '(2)'}), '(expected=3, 
actual=2)\n', (6152, 6174), False, 'from validx import exc\n'), ((6276, 6332), 'validx.exc.PatternMatchError', 'exc.PatternMatchError', ([], {'expected': '"""^[0-9]+$"""', 'actual': '"""xyz"""'}), "(expected='^[0-9]+$', actual='xyz')\n", (6297, 6332), False, 'from validx import exc\n'), ((6426, 6490), 'validx.exc.DatetimeParseError', 'exc.DatetimeParseError', ([], {'expected': '"""%Y-%m-%d"""', 'actual': '"""08/18/2018"""'}), "(expected='%Y-%m-%d', actual='08/18/2018')\n", (6448, 6490), False, 'from validx import exc\n'), ((6633, 6695), 'validx.exc.DatetimeParseError', 'exc.DatetimeParseError', ([], {'expected': 'isoparse', 'actual': '"""08/18/2018"""'}), "(expected=isoparse, actual='08/18/2018')\n", (6655, 6695), False, 'from validx import exc\n'), ((7115, 7163), 'validx.exc.RecursionMaxDepthError', 'exc.RecursionMaxDepthError', ([], {'expected': '(2)', 'actual': '(3)'}), '(expected=2, actual=3)\n', (7141, 7163), False, 'from validx import exc\n'), ((7260, 7286), 'validx.exc.ForbiddenKeyError', 'exc.ForbiddenKeyError', (['"""x"""'], {}), "('x')\n", (7281, 7286), False, 'from validx import exc\n'), ((7364, 7388), 'validx.exc.MissingKeyError', 'exc.MissingKeyError', (['"""x"""'], {}), "('x')\n", (7383, 7388), False, 'from validx import exc\n'), ((7497, 7537), 'validx.exc.ConditionError', 'exc.ConditionError', ([], {'expected': '(1)', 'actual': '(2)'}), '(expected=1, actual=2)\n', (7515, 7537), False, 'from validx import exc\n'), ((7631, 7684), 'validx.exc.FloatValueError', 'exc.FloatValueError', ([], {'expected': '"""something"""', 'actual': '(0.0)'}), "(expected='something', actual=0.0)\n", (7650, 7684), False, 'from validx import exc\n'), ((2496, 2706), 'textwrap.dedent', 'dedent', (['"""\n <SchemaError(errors=[\n <y: MaxValueError(expected=100, actual=200)>,\n <x: MaxValueError(expected=100, actual=300)>\n ])>\n """'], {}), '(\n """\n <SchemaError(errors=[\n <y: MaxValueError(expected=100, actual=200)>,\n <x: MaxValueError(expected=100, actual=300)>\n ])>\n """\n )\n', (2502, 2706), False, 'from textwrap import dedent\n'), ((2917, 3131), 'textwrap.dedent', 'dedent', (['"""\n <SchemaError(errors=[\n <a.y: MaxValueError(expected=100, actual=200)>,\n <a.x: MaxValueError(expected=100, actual=300)>\n ])>\n """'], {}), '(\n """\n <SchemaError(errors=[\n <a.y: MaxValueError(expected=100, actual=200)>,\n <a.x: MaxValueError(expected=100, actual=300)>\n ])>\n """\n )\n', (2923, 3131), False, 'from textwrap import dedent\n'), ((6847, 6864), 'datetime.datetime.now', 'datetime.now', (['UTC'], {}), '(UTC)\n', (6859, 6864), False, 'from datetime import datetime\n'), ((7007, 7021), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7019, 7021), False, 'from datetime import datetime\n'), ((7851, 7872), 'datetime.datetime', 'datetime', (['(2018)', '(12)', '(5)'], {}), '(2018, 12, 5)\n', (7859, 7872), False, 'from datetime import datetime\n')]
|
#!/usr/bin/env python
"""
PubMed (Scholar publications)
@website https://www.ncbi.nlm.nih.gov/pubmed/
@provide-api yes (https://www.ncbi.nlm.nih.gov/home/develop/api/)
@using-api yes
@results XML
@stable yes
@parse url, title, publishedDate, content
More info on api: https://www.ncbi.nlm.nih.gov/books/NBK25501/
"""
from flask_babel import gettext
from lxml import etree
from datetime import datetime
from searx.url_utils import urlencode
from searx.poolrequests import get
categories = ['science']
base_url = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi'\
+ '?db=pubmed&{query}&retstart={offset}&retmax={hits}'
# engine dependent config
number_of_results = 10
pubmed_url = 'https://www.ncbi.nlm.nih.gov/pubmed/'
def request(query, params):
# basic search
offset = (params['pageno'] - 1) * number_of_results
string_args = dict(query=urlencode({'term': query}),
offset=offset,
hits=number_of_results)
params['url'] = base_url.format(**string_args)
return params
def response(resp):
results = []
# First retrieve notice of each result
pubmed_retrieve_api_url = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?'\
+ 'db=pubmed&retmode=xml&id={pmids_string}'
pmids_results = etree.XML(resp.content)
pmids = pmids_results.xpath('//eSearchResult/IdList/Id')
pmids_string = ''
for item in pmids:
pmids_string += item.text + ','
retrieve_notice_args = dict(pmids_string=pmids_string)
retrieve_url_encoded = pubmed_retrieve_api_url.format(**retrieve_notice_args)
search_results_xml = get(retrieve_url_encoded).content
search_results = etree.XML(search_results_xml).xpath('//PubmedArticleSet/PubmedArticle/MedlineCitation')
for entry in search_results:
title = entry.xpath('.//Article/ArticleTitle')[0].text
pmid = entry.xpath('.//PMID')[0].text
url = pubmed_url + pmid
try:
content = entry.xpath('.//Abstract/AbstractText')[0].text
except:
content = gettext('No abstract is available for this publication.')
        # If a doi is available, add it to the snippet
try:
doi = entry.xpath('.//ELocationID[@EIdType="doi"]')[0].text
content = 'DOI: {doi} Abstract: {content}'.format(doi=doi, content=content)
except:
pass
if len(content) > 300:
content = content[0:300] + "..."
# TODO: center snippet on query term
res_dict = {'url': url,
'title': title,
'content': content}
try:
publishedDate = datetime.strptime(entry.xpath('.//DateCreated/Year')[0].text
+ '-' + entry.xpath('.//DateCreated/Month')[0].text
+ '-' + entry.xpath('.//DateCreated/Day')[0].text, '%Y-%m-%d')
res_dict['publishedDate'] = publishedDate
except:
pass
results.append(res_dict)
return results
|
[
"searx.url_utils.urlencode",
"lxml.etree.XML",
"flask_babel.gettext",
"searx.poolrequests.get"
] |
[((1364, 1387), 'lxml.etree.XML', 'etree.XML', (['resp.content'], {}), '(resp.content)\n', (1373, 1387), False, 'from lxml import etree\n'), ((1704, 1729), 'searx.poolrequests.get', 'get', (['retrieve_url_encoded'], {}), '(retrieve_url_encoded)\n', (1707, 1729), False, 'from searx.poolrequests import get\n'), ((910, 936), 'searx.url_utils.urlencode', 'urlencode', (["{'term': query}"], {}), "({'term': query})\n", (919, 936), False, 'from searx.url_utils import urlencode\n'), ((1759, 1788), 'lxml.etree.XML', 'etree.XML', (['search_results_xml'], {}), '(search_results_xml)\n', (1768, 1788), False, 'from lxml import etree\n'), ((2145, 2202), 'flask_babel.gettext', 'gettext', (['"""No abstract is available for this publication."""'], {}), "('No abstract is available for this publication.')\n", (2152, 2202), False, 'from flask_babel import gettext\n')]
|
"""
* Project Name: NAD-Logging-Service
* File Name: exception_test.py
* Programmer: <NAME>
* Date: Sun, Nov 15, 2020
* Description: This file contains exception tests for the Logger app.
"""
import pytest
from .sample_data import exception_logs as sample_logs
@pytest.mark.parametrize("data", sample_logs)
def test_all_bad_tests_fail(client, data):
""" All these tests should fail """
# Arrange
# Act
response = client.post(
"/logger/log",
content_type="application/json",
json=data,
headers={"x-access-token": data["authToken"]},
)
# Assert
assert response.status_code != 200
|
[
"pytest.mark.parametrize"
] |
[((272, 316), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data"""', 'sample_logs'], {}), "('data', sample_logs)\n", (295, 316), False, 'import pytest\n')]
|
from importlib import import_module
from pythonapm.instrumentation import instrument_method
from pythonapm.logger import agentlogger
from pythonapm import constants
methods = [
'process_request',
'process_view',
'process_exception',
'process_template_response',
'process_response'
]
def instrument_middlewares():
try:
from django.conf import settings
middleware = getattr(settings, "MIDDLEWARE", None) or \
getattr(settings, "MIDDLEWARE_CLASSES", None)
if middleware is None:
return
for each in middleware:
module_path, class_name = each.rsplit('.', 1)
act_module = import_module(module_path)
for each_method in methods:
method_info = {
constants.class_str : class_name,
constants.method_str : each_method,
}
instrument_method(module_path, act_module, method_info)
except Exception as exc:
agentlogger('django middleware instrumentation error', exc)
|
[
"pythonapm.instrumentation.instrument_method",
"importlib.import_module",
"pythonapm.logger.agentlogger"
] |
[((688, 714), 'importlib.import_module', 'import_module', (['module_path'], {}), '(module_path)\n', (701, 714), False, 'from importlib import import_module\n'), ((1026, 1085), 'pythonapm.logger.agentlogger', 'agentlogger', (['"""django middleware instrumentation error"""', 'exc'], {}), "('django middleware instrumentation error', exc)\n", (1037, 1085), False, 'from pythonapm.logger import agentlogger\n'), ((931, 986), 'pythonapm.instrumentation.instrument_method', 'instrument_method', (['module_path', 'act_module', 'method_info'], {}), '(module_path, act_module, method_info)\n', (948, 986), False, 'from pythonapm.instrumentation import instrument_method\n')]
|
import re
from flask import request
from website.app.api import api
from spider.database import *
from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error
@api.route('/popular_songs_list', endpoint='get_popular_song_list')
@error
@ParamCheck({'type': Param(int),
'offset': Param(int, optional=True),
'limit': Param(int, optional=True)})
def get_popular_song_list(params):
type_ = params['type']
offset = params.get('offset')
limit = params.get('limit')
if not offset:
offset = 0
if not limit:
limit = 20
filters = {
'type': int(type_)
}
result = search_song_list_by_filter(filters, int(offset), int(limit))
data = [{'song_id': item['_id'], 'name': item['song_name'],
'comment_id': item['comment_id'], 'source_url': item['source_url']} for item in result]
return JsonSuccess(data)
@api.route('/popular_song_comments', endpoint='get_popular_song_comments')
@error
@ParamCheck({'comment_id': Param(int)})
def get_popular_song_comments(params):
comment_id = params['comment_id']
filter = {
'_id': comment_id
}
result = search_by_comment_id(filter)
return JsonSuccess(result[0])
@api.route('/songs_list', endpoint='get_chinese_songs_list')
@error
@ParamCheck({'name': Param(str),
'type': Param(int),
'offset': Param(int, optional=True),
'limit': Param(int, optional=True)})
def get_chinese_songs_list(params):
list_name = params['name']
offset = params.get('offset')
limit = params.get('limit')
type_ = int(params['type'])
if not offset:
offset = 0
if not limit:
limit = 20
filters = {
'title': {'$regex': re.compile(re.escape(list_name)), '$options': 'i'}
}
if 1 == type_:
result = search_chinese_lists_by_filter(filters, int(offset), int(limit))
elif 2 == type_:
result = search_janpanese_lists_by_filter(filters, int(offset), int(limit))
else:
        raise Exception('this type value is not supported yet')
data = [{'song_id': item['_id'], 'name': item['song_name'],
'comment_id': item['comment_id'], 'source_url': item['source_url']} for item in result]
return JsonSuccess(data)
|
[
"website.app.api.api.route",
"website.app.util.Param",
"website.app.util.JsonSuccess",
"re.escape"
] |
[((182, 248), 'website.app.api.api.route', 'api.route', (['"""/popular_songs_list"""'], {'endpoint': '"""get_popular_song_list"""'}), "('/popular_songs_list', endpoint='get_popular_song_list')\n", (191, 248), False, 'from website.app.api import api\n'), ((917, 990), 'website.app.api.api.route', 'api.route', (['"""/popular_song_comments"""'], {'endpoint': '"""get_popular_song_comments"""'}), "('/popular_song_comments', endpoint='get_popular_song_comments')\n", (926, 990), False, 'from website.app.api import api\n'), ((1241, 1300), 'website.app.api.api.route', 'api.route', (['"""/songs_list"""'], {'endpoint': '"""get_chinese_songs_list"""'}), "('/songs_list', endpoint='get_chinese_songs_list')\n", (1250, 1300), False, 'from website.app.api import api\n'), ((896, 913), 'website.app.util.JsonSuccess', 'JsonSuccess', (['data'], {}), '(data)\n', (907, 913), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1216, 1238), 'website.app.util.JsonSuccess', 'JsonSuccess', (['result[0]'], {}), '(result[0])\n', (1227, 1238), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((2252, 2269), 'website.app.util.JsonSuccess', 'JsonSuccess', (['data'], {}), '(data)\n', (2263, 2269), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((277, 287), 'website.app.util.Param', 'Param', (['int'], {}), '(int)\n', (282, 287), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((312, 337), 'website.app.util.Param', 'Param', (['int'], {'optional': '(True)'}), '(int, optional=True)\n', (317, 337), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((361, 386), 'website.app.util.Param', 'Param', (['int'], {'optional': '(True)'}), '(int, optional=True)\n', (366, 386), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1025, 1035), 'website.app.util.Param', 'Param', (['int'], {}), '(int)\n', (1030, 1035), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1329, 1339), 'website.app.util.Param', 'Param', (['str'], {}), '(str)\n', (1334, 1339), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1362, 1372), 'website.app.util.Param', 'Param', (['int'], {}), '(int)\n', (1367, 1372), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1397, 1422), 'website.app.util.Param', 'Param', (['int'], {'optional': '(True)'}), '(int, optional=True)\n', (1402, 1422), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1446, 1471), 'website.app.util.Param', 'Param', (['int'], {'optional': '(True)'}), '(int, optional=True)\n', (1451, 1471), False, 'from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error\n'), ((1772, 1792), 're.escape', 're.escape', (['list_name'], {}), '(list_name)\n', (1781, 1792), False, 'import re\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# linkedin_login.py
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import parameters
# install webdrive when needed
driver = webdriver.Chrome(ChromeDriverManager().install())
# driver.get method() will navigate to a page given by the URL address
driver.get('https://www.linkedin.com/login')
# locate email form by element_by_id
username = driver.find_element_by_id('username')
# send_keys() to simulate key strokes
username.send_keys('<EMAIL>')
# locate password form by_class_name
password = driver.find_element_by_id('password')
# send_keys() to simulate key strokes
password.send_keys('<<PASSWORD>>')
# locate submit button by_class_name
log_in_button = driver.find_element_by_class_name('btn__primary--large')
# locate submit button by_xpath
log_in_button = driver.find_element_by_xpath('//*[@type="submit"]')
# .click() to mimic button click
log_in_button.click()
|
[
"webdriver_manager.chrome.ChromeDriverManager"
] |
[((232, 253), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ([], {}), '()\n', (251, 253), False, 'from webdriver_manager.chrome import ChromeDriverManager\n')]
|
from .models import Task
from django.http import HttpResponse
def index(request):
collection = Task.objects.all()
return HttpResponse(collection)
|
[
"django.http.HttpResponse"
] |
[((132, 156), 'django.http.HttpResponse', 'HttpResponse', (['collection'], {}), '(collection)\n', (144, 156), False, 'from django.http import HttpResponse\n')]
|
#!/usr/bin/python3
import random
from urizen.core.map import Map
NORTH = 'N'
SOUTH = 'S'
EAST = 'E'
WEST = 'W'
class LFD_WormSimpleFactory(object):
def generate(self, w, h, length=None, turn_chance=0.4):
if not length:
length = int(w*h/2)
return self._gen_main(w, h, length, turn_chance)
def _gen_main(self, xsize, ysize, length, turn_chance=0.4):
M = Map(xsize, ysize, fill_symbol='#')
worm_x = random.randint(int(xsize * 0.3), int(xsize * 0.6))
worm_y = random.randint(int(ysize * 0.3), int(ysize * 0.6))
move = random.choice([NORTH, SOUTH, EAST, WEST])
for _ in range(length):
worm_x, worm_y, move = self._move_worm(M, worm_x, worm_y, move, turn_chance)
return M
def _move_worm(self, M, x, y, move, turn_chance):
self._dig_cell(M, x, y)
if random.random() > turn_chance:
move = random.choice([NORTH, SOUTH, EAST, WEST])
xsize, ysize = M.get_size()
if x == xsize - 2 and move == EAST:
move = WEST
elif x == 1 and move == WEST:
move = EAST
elif y == ysize - 2 and move == SOUTH:
move = NORTH
elif y == 1 and move == NORTH:
move = SOUTH
if move == NORTH:
new_state = [x, y - 1]
elif move == SOUTH:
new_state = [x, y + 1]
elif move == EAST:
new_state = [x + 1, y]
else:
new_state = [x - 1, y]
new_state.append(move)
return new_state
def _dig_cell(self, M, x, y):
try:
M.cells[y][x].symbol = '.'
except IndexError:
pass
LFD_WormSimple = LFD_WormSimpleFactory()
|
[
"random.random",
"random.choice",
"urizen.core.map.Map"
] |
[((403, 437), 'urizen.core.map.Map', 'Map', (['xsize', 'ysize'], {'fill_symbol': '"""#"""'}), "(xsize, ysize, fill_symbol='#')\n", (406, 437), False, 'from urizen.core.map import Map\n'), ((590, 631), 'random.choice', 'random.choice', (['[NORTH, SOUTH, EAST, WEST]'], {}), '([NORTH, SOUTH, EAST, WEST])\n', (603, 631), False, 'import random\n'), ((869, 884), 'random.random', 'random.random', ([], {}), '()\n', (882, 884), False, 'import random\n'), ((919, 960), 'random.choice', 'random.choice', (['[NORTH, SOUTH, EAST, WEST]'], {}), '([NORTH, SOUTH, EAST, WEST])\n', (932, 960), False, 'import random\n')]
|
import os
import json
from urllib.request import urlopen
from io import BytesIO
from zipfile import ZipFile
import numpy as np
from torch.utils.data import Dataset
def download_and_unzip(url, extract_to='.'):
print(f"Waiting for response from {url}")
http_response = urlopen(url)
print(f"Downloading data from {url}")
zipfile = ZipFile(BytesIO(http_response.read()))
zipfile.extractall(path=extract_to)
def load_dataset_local(name, dataset_folder):
ds = {}
for root, dirs, files in os.walk(f"{dataset_folder}/TLiDB_{name}"):
for file in files:
if file.endswith(".json") and file!="sample_format.json":
ds[file[:-5]] = json.load(open(f"{root}/{file}"))
if len(ds.keys()) == 1:
ds = ds[list(ds.keys())[0]]
return ds
def load_dataset(name, dataset_folder, url):
# download and unzip dataset if needed
if f"TLiDB_{name}" not in os.listdir(dataset_folder):
assert(url is not None), "Must provide a url to download from"
download_and_unzip(url, dataset_folder)
print(f"Extracted files to {os.path.join(dataset_folder,name)}")
ds = load_dataset_local(name, dataset_folder)
return ds
def load_split_ids(name, dataset_folder, split, few_shot_percent=None):
if f"TLiDB_{name}" not in os.listdir(dataset_folder):
raise ValueError("Dataset not found")
if few_shot_percent and split!="test":
ids_file = f"{dataset_folder}/TLiDB_{name}/TTiDB_{few_shot_percent}_percent_few_shot_{split}_ids.txt"
else:
ids_file = f"{dataset_folder}/TLiDB_{name}/TTiDB_{split}_ids.txt"
with open(ids_file) as f:
ids = f.read().splitlines()
return ids
class TLiDB_Dataset(Dataset):
"""
Abstract dataset class for all TLiDB datasets
"""
def __init__(self, dataset_name, task, model_type, max_dialogue_length, dataset_folder):
super().__init__()
self.dataset = load_dataset(dataset_name, dataset_folder, self.url)
self._task = task
task_metadata = self.dataset['metadata']['task_metadata']
self.task_labels = []
self._max_dialogue_length = max_dialogue_length
self._model_type = model_type
if task in task_metadata and 'labels' in task_metadata[task]:
self.task_labels = task_metadata[task]['labels']
if task == "response_generation":
self.metrics = ['token_f1', 'bleu', 'bert_score', 'distinct_ngrams']
self.metric_kwargs = {
"bleu": [{"ngram_order": 1}, {"ngram_order": 2}, {"ngram_order": 3}, {"ngram_order": 4}],
"distinct_ngrams": [{"ngram_order": 1}, {"ngram_order": 2}, {"ngram_order": 3}]
}
self._collate = self._collate_response_generation
else:
self.metrics = task_metadata[task]['metrics']
self.metric_kwargs = task_metadata[task].get("metric_kwargs", dict())
if model_type == "Encoder":
self._collate = self._collate_encoder
elif model_type == "Decoder":
self._collate = self._collate_decoder
elif model_type == "EncoderDecoder":
self._collate = self._collate_encoderdecoder
else:
raise ValueError(f"{model_type} is not a valid algorithm type")
@property
def dataset_name(self):
return self._dataset_name
@property
def tasks(self):
return self._tasks
@property
def task(self):
return self._task
@property
def task_metadata(self):
return self._task_metadata
@property
def url(self):
return self._url
@property
def max_dialogue_length(self):
return self._max_dialogue_length
@property
def model_type(self):
return self._model_type
@property
def collate(self):
"""
Returns collate function to be used with dataloader
By default returns None -> uses default torch collate function
"""
return getattr(self, "_collate", None)
@property
def y_array(self):
"""
Targets for the model to predict, can be labels for classification or string for generation tasks
"""
return self._y_array
@property
def y_size(self):
"""
Number of elements in the target
For standard classification and text generation, y_size = 1
For multi-class or multi-task prediction tasks, y_size > 1
"""
return self._y_size
@property
def num_classes(self):
"""
Returns number of classes in the dataset
"""
return getattr(self, "_num_classes", None)
@property
def metadata_fields(self):
"""
Returns the fields that are stored in the metadata
Metadata should always contain the domains
If it is a classification task, then metadata should also contain the classes
"""
return self._metadata_fields
@property
def metadata_array(self):
"""
Returns the metadata array
"""
return self._metadata_array
def get_metadata_field(self, field):
return self.metadata_array[self.metadata_fields.index(field)]
def __getitem__(self, idx):
"""
Returns a single sample from the dataset
"""
x = self.get_input(idx)
y = self.get_target(idx)
m = self.get_metadata(idx)
return x, y, m
def get_input(self, idx):
return self._input_array[idx]
def get_target(self, idx):
return self.y_array[idx]
def get_metadata(self, idx):
return {}
    def _collate(self, batch):
        raise NotImplementedError
    def _collate_encoder(self, batch):
        raise NotImplementedError
    def _collate_decoder(self, batch):
        raise NotImplementedError
    def _collate_encoderdecoder(self, batch):
        raise NotImplementedError
def __len__(self):
"""
Returns the length of the dataset
"""
return len(self.y_array)
def _truncate_dialogue(self, input):
"""
Truncates the dialogue to the max dialogue length
"""
if self.max_dialogue_length:
dialogue = self._convert_dialogue_to_string(input)
while len(dialogue.split()) > self.max_dialogue_length:
input = input[1:]
dialogue = self._convert_dialogue_to_string(input)
return input
def _convert_dialogue_to_string(self, input):
dialogue = ""
for (speaker, utt) in input:
if speaker:
dialogue += f"{speaker}: "
dialogue += f"{utt} "
return dialogue[:-1]
def _join_strings(self, *args):
return " ".join(args)
def _load_response_generation_task(self, task, split_ids):
for datum in self.dataset['data']:
if datum['dialogue_id'] in split_ids:
dialogue = []
for turn in datum['dialogue']:
truncated_dialogue = self._truncate_dialogue(dialogue)
if turn['speakers']:
str_dialogue = self._convert_dialogue_to_string(truncated_dialogue)
str_dialogue += f" {' '.join(turn['speakers'])}: "
str_dialogue = str_dialogue.lstrip()
self._input_array.append(str_dialogue)
self._y_array.append(turn['utterance'])
dialogue.append([" ".join(turn['speakers']), turn['utterance']])
def _collate_response_generation(self, batch):
X, y, metadata = [], [], {}
for item in batch:
X.append(item[0])
y.append(item[1])
for k, v in item[2].items():
if k not in metadata:
                    metadata[k] = []
metadata[k].append(v)
return X, y, metadata
def random_subsample(self, frac=1.0):
"""
Subsamples the dataset
Args:
- frac (float): Fraction of the dataset to keep
"""
if frac < 1.0:
num_to_retain = int(self.y_size * frac)
if num_to_retain == 0:
return
idxs_to_retain = np.sort(np.random.permutation(len(self))[:num_to_retain]).tolist()
subsampled_input_array, subsampled_y_array, subsampled_metadata_array = [], [], []
for idx in idxs_to_retain:
input_item, y_item, metadata_item = self.__getitem__(idx)
subsampled_input_array.append(input_item)
subsampled_y_array.append(y_item)
subsampled_metadata_array.append(metadata_item)
self._input_array = subsampled_input_array
self._y_array = subsampled_y_array
metadata_iterated = list(metadata_item.keys())
metadata_not_iterated = [metadata_field for metadata_field in self.metadata_fields if metadata_field not in metadata_iterated]
subsampled_metadata_array = [subsampled_metadata_array]
for metadata_field in metadata_not_iterated:
subsampled_metadata_array.append(self.get_metadata_field(metadata_field))
self._metadata_array = subsampled_metadata_array
self._metadata_fields = metadata_iterated+metadata_not_iterated
self._y_size = num_to_retain
|
[
"os.listdir",
"os.walk",
"os.path.join",
"urllib.request.urlopen"
] |
[((277, 289), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (284, 289), False, 'from urllib.request import urlopen\n'), ((513, 554), 'os.walk', 'os.walk', (['f"""{dataset_folder}/TLiDB_{name}"""'], {}), "(f'{dataset_folder}/TLiDB_{name}')\n", (520, 554), False, 'import os\n'), ((921, 947), 'os.listdir', 'os.listdir', (['dataset_folder'], {}), '(dataset_folder)\n', (931, 947), False, 'import os\n'), ((1310, 1336), 'os.listdir', 'os.listdir', (['dataset_folder'], {}), '(dataset_folder)\n', (1320, 1336), False, 'import os\n'), ((1104, 1138), 'os.path.join', 'os.path.join', (['dataset_folder', 'name'], {}), '(dataset_folder, name)\n', (1116, 1138), False, 'import os\n')]
|
from starlette.responses import PlainTextResponse
def ping(_):
return PlainTextResponse('')
|
[
"starlette.responses.PlainTextResponse"
] |
[((76, 97), 'starlette.responses.PlainTextResponse', 'PlainTextResponse', (['""""""'], {}), "('')\n", (93, 97), False, 'from starlette.responses import PlainTextResponse\n')]
|
"""Parent class for each non-unit test. Creates and removes a new test table for each test."""
# TODO: integrate creating/removing a database
from unittest import TestCase
from flask_server_files.flask_app import app
from flask_server_files.sqla_instance import fsa
# from flask.ext.testing import TestCase
class BaseTest(TestCase):
def setUp(self):
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql+psycopg2://tester:tester@localhost:5432/test'
app.testing = True
self.app_context = app.app_context
self.app = app.test_client()
with self.app_context():
fsa.init_app(app)
fsa.create_all()
def tearDown(self):
with self.app_context():
fsa.session.remove()
fsa.drop_all()
|
[
"flask_server_files.sqla_instance.fsa.create_all",
"flask_server_files.sqla_instance.fsa.init_app",
"flask_server_files.sqla_instance.fsa.drop_all",
"flask_server_files.sqla_instance.fsa.session.remove",
"flask_server_files.flask_app.app.test_client"
] |
[((556, 573), 'flask_server_files.flask_app.app.test_client', 'app.test_client', ([], {}), '()\n', (571, 573), False, 'from flask_server_files.flask_app import app\n'), ((620, 637), 'flask_server_files.sqla_instance.fsa.init_app', 'fsa.init_app', (['app'], {}), '(app)\n', (632, 637), False, 'from flask_server_files.sqla_instance import fsa\n'), ((650, 666), 'flask_server_files.sqla_instance.fsa.create_all', 'fsa.create_all', ([], {}), '()\n', (664, 666), False, 'from flask_server_files.sqla_instance import fsa\n'), ((737, 757), 'flask_server_files.sqla_instance.fsa.session.remove', 'fsa.session.remove', ([], {}), '()\n', (755, 757), False, 'from flask_server_files.sqla_instance import fsa\n'), ((770, 784), 'flask_server_files.sqla_instance.fsa.drop_all', 'fsa.drop_all', ([], {}), '()\n', (782, 784), False, 'from flask_server_files.sqla_instance import fsa\n')]
|
import os
import sys
import json
import atexit
from argparse import ArgumentParser
from shutil import get_terminal_size
from subprocess import Popen, PIPE
from textwrap import dedent
from pkg_resources import get_distribution
from databricks_dbapi import databricks
from tabulate import tabulate
MAX_ROWS = 100
HISTORY = os.path.join(os.path.expanduser('~'), '.dbq_history')
def read_credentials():
filename = os.path.join(
os.path.expanduser('~'), '.databricks-credentials.json'
)
try:
with open(filename) as f:
return json.load(f)
except FileNotFoundError:
print('Databricks credentials missing!', file=sys.stderr)
print(
dedent(
f'''\
Please set up {filename} as follows:
{{
"cluster": "A CLUSTER NAME",
"host": "dbc-????????-????.cloud.databricks.com",
"token": "YOUR API ACCESS TOKEN"
}}'''
),
file=sys.stderr,
)
sys.exit(1)
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'query',
help='query to run, use - to read from stdin or omit for '
'interactive session',
metavar='QUERY',
nargs='?',
)
return parser.parse_args()
def render(cursor):
headers = [col[0] for col in cursor.description]
rows = cursor.fetchall()
table = tabulate(rows[:MAX_ROWS], headers)
return (
table
+ '\n\nshowing '
+ (
f'first {MAX_ROWS} rows'
if len(rows) > MAX_ROWS
else f'full result, {len(rows)} row(s)'
)
)
def get_text_size(text):
lines = text.split('\n')
column_count = max(map(len, lines))
line_count = len(lines)
return column_count, line_count
def page(output):
process = Popen(["less", "-R"], stdin=PIPE)
try:
process.communicate(output.encode("utf-8"))
except IOError:
pass
def display(text):
term_columns, term_lines = get_terminal_size()
text_columns, text_lines = get_text_size(text)
if (
text_lines + 2 <= term_lines and text_columns <= term_columns
) or not sys.stdout.isatty():
return print(text)
page(text)
def sanitize_query(query):
return query + f' LIMIT {MAX_ROWS + 1}'
def try_extract_error(exception):
try:
return exception.args[0].status.errorMessage
except Exception:
raise exception
def run_query(cursor, query):
try:
cursor.execute(sanitize_query(query))
except Exception as e:
print(try_extract_error(e), file=sys.stderr)
return
display(render(cursor))
def setup_readline():
import readline
try:
readline.read_history_file(HISTORY)
except FileNotFoundError:
pass
atexit.register(readline.write_history_file, HISTORY)
def run_interactive(cursor):
setup_readline()
print(get_distribution('dbq'))
print('running in interactive mode, go ahead and type some SQL...')
try:
while True:
query = input('> ')
if query:
run_query(cursor, query)
except (EOFError, KeyboardInterrupt):
print()
print('Bye!')
def run():
args = parse_args()
connection = databricks.connect(**read_credentials())
cursor = connection.cursor()
if args.query:
query = sys.stdin.read() if args.query == '-' else args.query
run_query(cursor, query)
else:
run_interactive(cursor)
|
[
"pkg_resources.get_distribution",
"atexit.register",
"subprocess.Popen",
"json.load",
"sys.stdin.read",
"argparse.ArgumentParser",
"textwrap.dedent",
"shutil.get_terminal_size",
"readline.read_history_file",
"tabulate.tabulate",
"sys.stdout.isatty",
"os.path.expanduser",
"sys.exit"
] |
[((336, 359), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (354, 359), False, 'import os\n'), ((1129, 1145), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (1143, 1145), False, 'from argparse import ArgumentParser\n'), ((1483, 1517), 'tabulate.tabulate', 'tabulate', (['rows[:MAX_ROWS]', 'headers'], {}), '(rows[:MAX_ROWS], headers)\n', (1491, 1517), False, 'from tabulate import tabulate\n'), ((1919, 1952), 'subprocess.Popen', 'Popen', (["['less', '-R']"], {'stdin': 'PIPE'}), "(['less', '-R'], stdin=PIPE)\n", (1924, 1952), False, 'from subprocess import Popen, PIPE\n'), ((2100, 2119), 'shutil.get_terminal_size', 'get_terminal_size', ([], {}), '()\n', (2117, 2119), False, 'from shutil import get_terminal_size\n'), ((2902, 2955), 'atexit.register', 'atexit.register', (['readline.write_history_file', 'HISTORY'], {}), '(readline.write_history_file, HISTORY)\n', (2917, 2955), False, 'import atexit\n'), ((440, 463), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (458, 463), False, 'import os\n'), ((2818, 2853), 'readline.read_history_file', 'readline.read_history_file', (['HISTORY'], {}), '(HISTORY)\n', (2844, 2853), False, 'import readline\n'), ((3018, 3041), 'pkg_resources.get_distribution', 'get_distribution', (['"""dbq"""'], {}), "('dbq')\n", (3034, 3041), False, 'from pkg_resources import get_distribution\n'), ((565, 577), 'json.load', 'json.load', (['f'], {}), '(f)\n', (574, 577), False, 'import json\n'), ((1084, 1095), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1092, 1095), False, 'import sys\n'), ((2264, 2283), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (2281, 2283), False, 'import sys\n'), ((3484, 3500), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (3498, 3500), False, 'import sys\n'), ((701, 1013), 'textwrap.dedent', 'dedent', (['f""" Please set up {filename} as follows:\n\n {{\n "cluster": "A CLUSTER NAME",\n "host": "dbc-????????-????.cloud.databricks.com",\n "token": "YOUR API ACCESS TOKEN"\n }}"""'], {}), '(\n f""" Please set up {filename} as follows:\n\n {{\n "cluster": "A CLUSTER NAME",\n "host": "dbc-????????-????.cloud.databricks.com",\n "token": "YOUR API ACCESS TOKEN"\n }}"""\n )\n', (707, 1013), False, 'from textwrap import dedent\n')]
|
import numpy as np
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import auc
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import roc_auc_score
from sklearn.metrics import roc_curve
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import f1_score
from scripts.visualization import plot_roc_curve
from scripts.visualization import plot_precision_recall_curve
from scripts.visualization import plot_confusion_matrix
def to_labels(pos_probs, threshold):
return (pos_probs >= threshold).astype('int')
def get_cut_off_threshold_from_precision_recall(precision:list,
recall:list,
thresholds:list)->int:
try:
# convert to f score
fscore = (2 * precision * recall) / (precision + recall)
# locate the index of the largest f score
ix = np.argmax(fscore)
print('PR-curve threshold=%f, F-Score=%.3f' % (thresholds[ix], fscore[ix]))
return ix
except Exception as error:
raise Exception('Caught this error: ' + repr(error))
def get_cut_off_threshold_through_iteration(pos_probs:list, y_test:list)->float:
"""
    Extracts the cut-off threshold by iterating over candidate values between 0 and 1
    in steps of 0.0001. Returns the value that maximizes the macro F1 score.
"""
try:
# define thresholds
thresholds = np.arange(0, 1, 0.0001)
# evaluate each threshold
scores = [f1_score(y_test, to_labels(pos_probs, t), average='macro') for t in thresholds]
# get best threshold
ix = np.argmax(scores)
print('Threshold=%.3f, Best macro F1-Score=%.5f' % (thresholds[ix], scores[ix]))
return thresholds[ix]
except Exception as error:
raise Exception('Caught this error: ' + repr(error))
def get_evaluation_report(test_set:list,
prediction_proba:list,
labels:list,
threshold:float = None,
plot:str='precision-recall',
save_path:str = None)->dict:
"""
Args:
test_set:list -> original target values
prediction_proba:list -> extension to use for serializing
labels:list -> target label names
threshold:float -> Probability cut off threshold
plot:str -> roc or precision-recall
save_path:str -> save directory
"""
try:
auc_score = 0
if plot=='roc':
fpr, tpr, _ = roc_curve(test_set, prediction_proba)
auc_score = roc_auc_score(test_set, prediction_proba)
plot_roc_curve(auc_score, fpr, tpr)
elif plot=='precision-recall':
precision, recall, thresholds = precision_recall_curve(test_set, prediction_proba)
auc_score = auc(recall, precision)
no_skill = np.sum(test_set==1)/test_set.shape
ix = get_cut_off_threshold_from_precision_recall(precision, recall, thresholds)
best_threshold_pos = (recall[ix], precision[ix])
plot_precision_recall_curve(auc_score,
recall,
precision,
best_threshold_pos,
round(no_skill[0], 2),
save_path)
#threshold = round(thresholds[ix], 3) if not threshold else None
if not threshold:
threshold = get_cut_off_threshold_through_iteration(prediction_proba, test_set)
predictions = prediction_proba>threshold
cr = classification_report(test_set, predictions, target_names=labels)
cm = confusion_matrix(test_set, predictions)
mcc = matthews_corrcoef(test_set, predictions)
print('\n',cr)
print('Matthews correlation coefficient: ', mcc)
plot_confusion_matrix(cm,
labels,
save_path=save_path)
return {'threshold':threshold,
'auc':auc_score,
'mcc':mcc,
'confusion_matrix': cm,
'classification_report':classification_report(test_set,
predictions,
target_names=labels,
output_dict=True)}
except Exception as error:
raise Exception('Caught this error: ' + repr(error))
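# Hypothetical usage sketch (not part of the original module), showing how the documented
# arguments fit together; `y_test` and `y_proba` are assumed to be equal-length 1-D numpy
# arrays of true binary labels and predicted positive-class probabilities:
#
#     report = get_evaluation_report(test_set=y_test,
#                                    prediction_proba=y_proba,
#                                    labels=['negative', 'positive'],
#                                    plot='precision-recall',
#                                    save_path=None)
#     print(report['threshold'], report['auc'], report['mcc'])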
|
[
"numpy.sum",
"scripts.visualization.plot_roc_curve",
"sklearn.metrics.roc_curve",
"numpy.argmax",
"scripts.visualization.plot_confusion_matrix",
"sklearn.metrics.classification_report",
"sklearn.metrics.roc_auc_score",
"sklearn.metrics.precision_recall_curve",
"sklearn.metrics.auc",
"sklearn.metrics.matthews_corrcoef",
"numpy.arange",
"sklearn.metrics.confusion_matrix"
] |
[((1023, 1040), 'numpy.argmax', 'np.argmax', (['fscore'], {}), '(fscore)\n', (1032, 1040), True, 'import numpy as np\n'), ((1546, 1569), 'numpy.arange', 'np.arange', (['(0)', '(1)', '(0.0001)'], {}), '(0, 1, 0.0001)\n', (1555, 1569), True, 'import numpy as np\n'), ((1746, 1763), 'numpy.argmax', 'np.argmax', (['scores'], {}), '(scores)\n', (1755, 1763), True, 'import numpy as np\n'), ((3875, 3940), 'sklearn.metrics.classification_report', 'classification_report', (['test_set', 'predictions'], {'target_names': 'labels'}), '(test_set, predictions, target_names=labels)\n', (3896, 3940), False, 'from sklearn.metrics import classification_report\n'), ((3954, 3993), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['test_set', 'predictions'], {}), '(test_set, predictions)\n', (3970, 3993), False, 'from sklearn.metrics import confusion_matrix\n'), ((4009, 4049), 'sklearn.metrics.matthews_corrcoef', 'matthews_corrcoef', (['test_set', 'predictions'], {}), '(test_set, predictions)\n', (4026, 4049), False, 'from sklearn.metrics import matthews_corrcoef\n'), ((4149, 4203), 'scripts.visualization.plot_confusion_matrix', 'plot_confusion_matrix', (['cm', 'labels'], {'save_path': 'save_path'}), '(cm, labels, save_path=save_path)\n', (4170, 4203), False, 'from scripts.visualization import plot_confusion_matrix\n'), ((2725, 2762), 'sklearn.metrics.roc_curve', 'roc_curve', (['test_set', 'prediction_proba'], {}), '(test_set, prediction_proba)\n', (2734, 2762), False, 'from sklearn.metrics import roc_curve\n'), ((2787, 2828), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['test_set', 'prediction_proba'], {}), '(test_set, prediction_proba)\n', (2800, 2828), False, 'from sklearn.metrics import roc_auc_score\n'), ((2841, 2876), 'scripts.visualization.plot_roc_curve', 'plot_roc_curve', (['auc_score', 'fpr', 'tpr'], {}), '(auc_score, fpr, tpr)\n', (2855, 2876), False, 'from scripts.visualization import plot_roc_curve\n'), ((4456, 4543), 'sklearn.metrics.classification_report', 'classification_report', (['test_set', 'predictions'], {'target_names': 'labels', 'output_dict': '(True)'}), '(test_set, predictions, target_names=labels,\n output_dict=True)\n', (4477, 4543), False, 'from sklearn.metrics import classification_report\n'), ((2962, 3012), 'sklearn.metrics.precision_recall_curve', 'precision_recall_curve', (['test_set', 'prediction_proba'], {}), '(test_set, prediction_proba)\n', (2984, 3012), False, 'from sklearn.metrics import precision_recall_curve\n'), ((3037, 3059), 'sklearn.metrics.auc', 'auc', (['recall', 'precision'], {}), '(recall, precision)\n', (3040, 3059), False, 'from sklearn.metrics import auc\n'), ((3083, 3104), 'numpy.sum', 'np.sum', (['(test_set == 1)'], {}), '(test_set == 1)\n', (3089, 3104), True, 'import numpy as np\n')]
|
#####################################################################################################################
#####################################################################################################################
# See how TROPOMI NO2 responds to the Suez Canal blockage
# When downloading the data, look at a larger domain (Suez and its surrounding + Mediterranean Sea)
import os
import glob
import numpy as np
import pandas as pd
from netCDF4 import Dataset
import xarray as xr
'''
Note on this Suez Canal blockage
Blockage period: 23-29 March 2021
Data download period: 5 January - 26 April 2021
Domain (lon_min,lat_min,lon_max,lat_max): -20,5,60,50
Corresponding hour windows for data donwload: [6,7,8,9,10,11,12,13,14]
First test: sample weekly data before, during and after the blockage, get maps and a time series plot
Second test: get daily maps and combine with GeoViews
'''
#####################################################################################################################
# build a function to read oversampled TROPOMI NO2 as pandas dataframes
def read_oversampled_NO2(TROPOMI_oversampled_NO2_output_file):
'''read the output file for oversampled TROPOMI NO2'''
df = pd.read_csv(TROPOMI_oversampled_NO2_output_file,sep="\s+",header=None)
df = df.iloc[:,2:7]
df.columns = ['lat','lon','NO2','Count','NO2_uncertainty']
return df
#####################################################################################################################
# the spatial coverage may not be consistent on different days or during different weeks
# read all the data from the weekly results
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output')
Oversampled_NO2_files = sorted(glob.glob("Oversample_output_Suez_NO2_week*"), key=lambda x: int(x.split("_")[-2]))
print(*Oversampled_NO2_files,sep="\n")
oversampled_data = [read_oversampled_NO2(file) for file in Oversampled_NO2_files]
# use all the data ever sampled to decide the max dimension
lat_min = []
lat_max = []
lon_min = []
lon_max = []
for i in range(len(oversampled_data)):
lat_min.append(oversampled_data[i].lat.min())
lat_max.append(oversampled_data[i].lat.max())
lon_min.append(oversampled_data[i].lon.min())
lon_max.append(oversampled_data[i].lon.max())
lat_min = min(lat_min)
lat_max = max(lat_max)
lon_min = min(lon_min)
lon_max = max(lon_max)
# check the full dimension
print("lat_min:",lat_min)
print("lat_max:",lat_max)
print("lon_min:",lon_min)
print("lon_max:",lon_max)
# With the dimension above and the resolution, we can create a consistent domain ("the full grid")
# so that we can combine the data from different days/weeks together
# first list all the lats and lons: use (min, max + 1/2 resolution, resolution) so the max value is kept in Python
# just round the floats created by Python to be safe
# as the "pd.merge" step later will require the values of the "keys" to be exactly the same
Res = 0.05
domain_lat = np.arange(lat_min,lat_max+Res/2,Res,dtype=None)
domain_lon = np.arange(lon_min,lon_max+Res/2,Res,dtype=None)
domain_lat = np.round(domain_lat,3)
domain_lon = np.round(domain_lon,3)
# build a function to create a "full grid" by listing the full combinations of lats and lons in the domain
def expand_grid(lat,lon):
'''list all combinations of lats and lons using expand_grid(lat,lon)'''
test = [(A,B) for A in lat for B in lon]
test = np.array(test)
test_lat = test[:,0]
test_lon = test[:,1]
full_grid = pd.DataFrame({'lat': test_lat, 'lon': test_lon})
return full_grid
# create the "full grid"
domain_grid = expand_grid(domain_lat,domain_lon)
print(domain_grid)
################################################################################################
# Now we can read each single dataset and match it with the full grid
# Step 1> select the oversampled data
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output')
# change input time to read daily data or weekly data
time = 'week_1'
Oversampled_NO2_file = "Oversample_output_Suez_NO2_"+str(time)+"_0.05"
# check the selected data
print(Oversampled_NO2_file)
# Step 2> feed the oversampled data into this data cleaning routine
# read oversampled NO2 data
NO2_data = read_oversampled_NO2(Oversampled_NO2_file)
# combine the data with the full domain grids
NO2_data = pd.merge(domain_grid,NO2_data,how='left', on=['lat','lon'])
NO2_data = NO2_data.sort_values(by=['lat','lon'], ascending=[True, True])
# reshape the variables from 1D in the dataframe to the map dimension
NO2 = NO2_data['NO2'].values.reshape(len(domain_lat),len(domain_lon))
NO2_uncertainty = NO2_data['NO2_uncertainty'].values.reshape(len(domain_lat),len(domain_lon))
Count = NO2_data['Count'].values.reshape(len(domain_lat),len(domain_lon))
# convert to xarray for plotting
NO2_xarray = xr.DataArray(NO2, coords=[('lat', domain_lat),('lon', domain_lon)])
NO2_uncertainty_xarray = xr.DataArray(NO2_uncertainty, coords=[('lat', domain_lat),('lon', domain_lon)])
Count_xarray = xr.DataArray(Count, coords=[('lat', domain_lat),('lon', domain_lon)])
# but it is complicated to save out the results one by one for multiple days or weeks
################################################################################################
################################################################################################
# So here we use the list comprehensions to process multiple files
#################
# weekly data
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output')
# select the files and sort them numerically
Oversampled_NO2_files_weekly = sorted(glob.glob("Oversample_output_Suez_NO2_week*"), key=lambda x: int(x.split("_")[-2]))
print(*Oversampled_NO2_files_weekly,sep="\n")
# read oversampled data and match with the "full grid"
Oversampled_NO2_week = [read_oversampled_NO2(file) for file in Oversampled_NO2_files_weekly]
Oversampled_NO2_week = [pd.merge(domain_grid,data,how='left', on=['lat','lon']) for data in Oversampled_NO2_week]
Oversampled_NO2_week = [data.sort_values(by=['lat','lon'], ascending=[True, True]) for data in Oversampled_NO2_week]
# convert the data to the xarray format for plotting
NO2_week = [data['NO2'].values.reshape(len(domain_lat),len(domain_lon)) for data in Oversampled_NO2_week]
NO2_week_xr = [xr.DataArray(data, coords=[('lat', domain_lat),('lon', domain_lon)]) for data in NO2_week]
#################
# daily data
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output')
# select the files and sort them numerically
Oversampled_NO2_files_daily = sorted(glob.glob("Oversample_output_Suez_NO2_day*"), key=lambda x: int(x.split("_")[-2]))
print(*Oversampled_NO2_files_daily,sep="\n")
# read oversampled data and match with the "full grid"
Oversampled_NO2_day = [read_oversampled_NO2(file) for file in Oversampled_NO2_files_daily]
Oversampled_NO2_day = [pd.merge(domain_grid,data,how='left', on=['lat','lon']) for data in Oversampled_NO2_day]
Oversampled_NO2_day = [data.sort_values(by=['lat','lon'], ascending=[True, True]) for data in Oversampled_NO2_day]
# convert the data to the xarray format for plotting
NO2_day = [data['NO2'].values.reshape(len(domain_lat),len(domain_lon)) for data in Oversampled_NO2_day]
NO2_day_xr = [xr.DataArray(data, coords=[('lat', domain_lat),('lon', domain_lon)]) for data in NO2_day]
################################################################################################
# Start making maps to have a quick look at the results
# avoid setting "%matplotlib inline" as it is time consuming when we need to produce many figures
import matplotlib.pyplot as plt
import cartopy.crs as crs
import geopandas as gpd
# read shape file (Global high resolution shoreline database from NOAA: https://www.ngdc.noaa.gov/mgg/shorelines/)
# use "full reolution" here to avoid misrepresentation of land and water
os.chdir("/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/shapefiles/gshhg-shp-2.3.7/GSHHS_shp/f")
world_shore = gpd.read_file("GSHHS_f_L1.shp")
################################################################################################
# build a function to quickly generate maps without a legend to save space on a slide
def quick_plot(input_xr,plot_domain,var_min,var_max,output_figure_name):
'''
    Input an xarray data array, define the map domain, provide the min and max of the values on the map, and provide an output file name.
'''
# set the figure size, the aspect ratio is set to be 2:1 due to the sampling region
fig = plt.figure(figsize=[20,10])
# set the map projection and domain: https://scitools.org.uk/cartopy/docs/v0.15/crs/projections.html#cartopy-projection
    ax = plt.axes(projection=crs.PlateCarree())
ax.set_extent(plot_domain)
# plot the value on map
im = input_xr.plot(ax=ax,cmap='jet',vmin=var_min,vmax=var_max)
# add shapefile
    ax.add_geometries(world_shore.geometry, crs=crs.PlateCarree(), edgecolor='black', facecolor='none')
    # remove the colorbar and title
plt.delaxes(fig.axes[1])
ax.set_title('')
# save out
fig.savefig(output_figure_name, dpi=100,bbox_inches='tight')
# close the figure to avoid taking CPU memory
plt.close()
################################################################################################
# build a function to generate the color bar for the figures above
def plot_color_bar(input_xr,plot_domain,label,var_min,var_max,output_figure_name):
'''
Draw the figure in the same way as above, but remove the plot rather than the colorbar.
'''
fig = plt.figure(figsize=[20,10])
cbar_keys = {'shrink': 1, 'pad' : 0.05,'orientation':'horizontal','label':label}
# set the map projection: https://scitools.org.uk/cartopy/docs/v0.15/crs/projections.html#cartopy-projection
    ax = plt.axes(projection=crs.PlateCarree())
ax.set_extent(plot_domain)
    # plot the value on the map
im = input_xr.plot(ax=ax,cmap='jet',cbar_kwargs=cbar_keys,vmin=var_min,vmax=var_max)
# set color bar label size
plt.rcParams.update({'font.size':25})
ax.xaxis.label.set_size(25)
# remove the plot
plt.delaxes(fig.axes[0])
# save out
fig.savefig(output_figure_name, dpi=100,bbox_inches='tight')
# close the figure to avoid taking CPU memory
plt.close()
################################################################################################
# check again the data for plotting
print("weekly data:",len(NO2_week_xr))
print("daily data:",len(NO2_day_xr))
# generate corresponding output file names
# weekly maps
Suez_weeks = list(range(1,17))
Suez_weeks = [str('Suez_NO2_map_week_') + str(week_number) for week_number in Suez_weeks]
print(*Suez_weeks,sep="\n")
# daily maps
Suez_days = list(range(1,29))
Suez_days = [str('Suez_NO2_map_day_') + str(date_number) for date_number in Suez_days]
print(*Suez_days,sep="\n")
################################################################################################
# output multiple plots together
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures')
# maps during the blockage
# week 12
# day 8-14
# plot weekly data
# plot over the big domain [lon_min,lon_max,lat_min,lat_max]
Suez_domain_big = [-20,60,5,50]
for i in range(len(NO2_week_xr)):
quick_plot(NO2_week_xr[i],Suez_domain_big,0,2,Suez_weeks[i]+str('_big'))
# plot over the small domain [lon_min,lon_max,lat_min,lat_max]
Suez_domain_small = [26,60,10,35]
for i in range(len(NO2_week_xr)):
quick_plot(NO2_week_xr[i],Suez_domain_small,0,2,Suez_weeks[i]+str('_small'))
# generate the color bar at the end
plot_color_bar(NO2_week_xr[0],Suez_domain_small,'NO$_2$ tropospheric column [$10^{15}$ molec. cm$^{-2}$]',0,2,"Suez_NO2_color_bar")
# plot daily data
# plot over the small domain [lon_min,lon_max,lat_min,lat_max]
Suez_domain_small = [26,60,10,35]
for i in range(len(NO2_day_xr)):
quick_plot(NO2_day_xr[i],Suez_domain_small,0,2,Suez_days[i]+str('_small'))
################################################################################################
################################################################################################
# Use GeoViews to combine the maps together in time series
# load GeoViews package
import geoviews as gv
import geoviews.feature as gf
import cartopy.crs as crs
# it is important to check your geoviews version, some commands may not work in a wrong version
# this script is written under version 1.9.1
print(gv.__version__)
# there are two backends ('bokeh', 'matplotlib') for the GeoViews
# later we will use "bokeh" for interactive plots
################################################################################################
# weekly maps
# list all the weeks
Suez_weeks = ['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16']
print(*Suez_weeks,sep="\n")
# combine the xarray data arrays from weekly results
# make a copy first
weekly_data = NO2_week_xr.copy()
# add the variable name
weekly_data = [data.rename('NO2') for data in weekly_data]
# add a time dimension to the data
for i in range(len(NO2_week_xr)):
NO2_week_xr[i] = NO2_week_xr[i].assign_coords(week=Suez_weeks[i])
NO2_week_xr[i] = NO2_week_xr[i].expand_dims('week')
# combine the data together
NO2_week_xr_combined = xr.concat(NO2_week_xr,'week')
# you can zoom in and change maps, so normally there is no need to make a small map
# but if you have to reduce the file size, you can subset over the small domain
# weekly_data = [data.sel(lat=slice(10,35),lon = slice(26,60)) for data in weekly_data]
# check the results
NO2_week_xr_combined
# output the plots
# first move to the output directory
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures')
# turn on "bokeh" backend to enable interactive map
gv.extension('bokeh')
# extract data from the combined xarray
gv_data = gv.Dataset(NO2_week_xr_combined,['lon','lat','week'],'NO2',crs=crs.PlateCarree())
# use the data to generate the geoviews image
gv_image = gv_data.to(gv.Image)
# decide features of the output figure
gv_image_out = gv_image.opts(cmap='jet', clim=(0,2), colorbar=True, width=800, height=500) * gf.coastline
# save out the interactive map
renderer = gv.renderer('bokeh')
renderer.save(gv_image_out, 'weekly_maps')
################################################################################################
# daily maps
# list all the dates
def list_dates_between(start_date,end_date):
    '''List all dates between the start date ('yyyymmdd') and the end date ('yyyymmdd')'''
# list all the dates between the start and the end
from datetime import date, timedelta
start_date = date(int(start_date[0:4]),int(start_date[4:6]),int(start_date[6:8]))
end_date = date(int(end_date[0:4]),int(end_date[4:6]),int(end_date[6:8]))
delta = end_date - start_date
sampling_dates = []
for i in range(delta.days + 1):
sampling_dates.append((start_date + timedelta(days=i)).strftime('%Y%m%d'))
# print out all the sampling dates
return sampling_dates
# list all the dates
Suez_days = list_dates_between("20210316","20210412")
print("number of days:",len(Suez_days))
print(*Suez_days,sep="\n")
# combine the xarray data arrays from daily results
# make a copy first
daily_data = NO2_day_xr.copy()
# add the variable name
daily_data = [data.rename('NO2') for data in daily_data]
# add a time dimension to the data
for i in range(len(NO2_day_xr)):
NO2_day_xr[i] = NO2_day_xr[i].assign_coords(date=Suez_days[i])
NO2_day_xr[i] = NO2_day_xr[i].expand_dims('date')
# combine the data together
NO2_day_xr_combined = xr.concat(NO2_day_xr,'date')
# check the results
NO2_day_xr_combined
# output the plots
# first move to the output directory
os.chdir('/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures')
# turn on "bokeh" backend to enable interactive map
gv.extension('bokeh')
# extract data from the combined xarray
gv_data = gv.Dataset(NO2_day_xr_combined,['lon','lat','date'],'NO2',crs=crs.PlateCarree())
# use the data to generate the geoviews image
gv_image = gv_data.to(gv.Image)
# decide features of the output figure
gv_image_out = gv_image.opts(cmap='jet', clim=(0,2), colorbar=True, width=800, height=500) * gf.coastline
# save out the interactive map
renderer = gv.renderer('bokeh')
renderer.save(gv_image_out, 'daily_maps')
# For now, the default coastline from GeoViews is used
# If you can crop and create your own shapefile, you should be able to use high resolution shorelines from NOAA
# Think about how to do this with geopandas
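# One possible (untested) sketch: wrap the GSHHS shorelines loaded earlier as `world_shore`
# in a GeoViews element and overlay it instead of gf.coastline, e.g.
#   shore = gv.Path(world_shore, crs=crs.PlateCarree()).opts(color='black')
#   gv_image_out = gv_image.opts(cmap='jet', clim=(0, 2), colorbar=True) * shore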
#####################################################################################################################
#####################################################################################################################
|
[
"pandas.read_csv",
"geoviews.extension",
"matplotlib.pyplot.figure",
"numpy.arange",
"glob.glob",
"geoviews.renderer",
"numpy.round",
"os.chdir",
"pandas.DataFrame",
"matplotlib.pyplot.close",
"pandas.merge",
"matplotlib.pyplot.rcParams.update",
"datetime.timedelta",
"geopandas.read_file",
"xarray.concat",
"matplotlib.pyplot.delaxes",
"numpy.array",
"xarray.DataArray",
"cartopy.crs.PlateCarree"
] |
[((1696, 1819), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output'\n )\n", (1704, 1819), False, 'import os\n'), ((3108, 3162), 'numpy.arange', 'np.arange', (['lat_min', '(lat_max + Res / 2)', 'Res'], {'dtype': 'None'}), '(lat_min, lat_max + Res / 2, Res, dtype=None)\n', (3117, 3162), True, 'import numpy as np\n'), ((3170, 3224), 'numpy.arange', 'np.arange', (['lon_min', '(lon_max + Res / 2)', 'Res'], {'dtype': 'None'}), '(lon_min, lon_max + Res / 2, Res, dtype=None)\n', (3179, 3224), True, 'import numpy as np\n'), ((3235, 3258), 'numpy.round', 'np.round', (['domain_lat', '(3)'], {}), '(domain_lat, 3)\n', (3243, 3258), True, 'import numpy as np\n'), ((3272, 3295), 'numpy.round', 'np.round', (['domain_lon', '(3)'], {}), '(domain_lon, 3)\n', (3280, 3295), True, 'import numpy as np\n'), ((4033, 4156), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output'\n )\n", (4041, 4156), False, 'import os\n'), ((4573, 4635), 'pandas.merge', 'pd.merge', (['domain_grid', 'NO2_data'], {'how': '"""left"""', 'on': "['lat', 'lon']"}), "(domain_grid, NO2_data, how='left', on=['lat', 'lon'])\n", (4581, 4635), True, 'import pandas as pd\n'), ((5072, 5140), 'xarray.DataArray', 'xr.DataArray', (['NO2'], {'coords': "[('lat', domain_lat), ('lon', domain_lon)]"}), "(NO2, coords=[('lat', domain_lat), ('lon', domain_lon)])\n", (5084, 5140), True, 'import xarray as xr\n'), ((5166, 5251), 'xarray.DataArray', 'xr.DataArray', (['NO2_uncertainty'], {'coords': "[('lat', domain_lat), ('lon', domain_lon)]"}), "(NO2_uncertainty, coords=[('lat', domain_lat), ('lon', domain_lon)]\n )\n", (5178, 5251), True, 'import xarray as xr\n'), ((5262, 5332), 'xarray.DataArray', 'xr.DataArray', (['Count'], {'coords': "[('lat', domain_lat), ('lon', domain_lon)]"}), "(Count, coords=[('lat', domain_lat), ('lon', domain_lon)])\n", (5274, 5332), True, 'import xarray as xr\n'), ((5723, 5846), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output'\n )\n", (5731, 5846), False, 'import os\n'), ((6746, 6869), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Oversample_output'\n )\n", (6754, 6869), False, 'import os\n'), ((8257, 8380), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/shapefiles/gshhg-shp-2.3.7/GSHHS_shp/f"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/shapefiles/gshhg-shp-2.3.7/GSHHS_shp/f'\n )\n", (8265, 8380), False, 'import os\n'), ((8386, 8417), 'geopandas.read_file', 'gpd.read_file', (['"""GSHHS_f_L1.shp"""'], {}), "('GSHHS_f_L1.shp')\n", (8399, 8417), True, 'import geopandas as gpd\n'), ((11533, 11646), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures"""'], {}), "(\n 
'/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures'\n )\n", (11541, 11646), False, 'import os\n'), ((13932, 13962), 'xarray.concat', 'xr.concat', (['NO2_week_xr', '"""week"""'], {}), "(NO2_week_xr, 'week')\n", (13941, 13962), True, 'import xarray as xr\n'), ((14327, 14440), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures'\n )\n", (14335, 14440), False, 'import os\n'), ((14487, 14508), 'geoviews.extension', 'gv.extension', (['"""bokeh"""'], {}), "('bokeh')\n", (14499, 14508), True, 'import geoviews as gv\n'), ((14924, 14944), 'geoviews.renderer', 'gv.renderer', (['"""bokeh"""'], {}), "('bokeh')\n", (14935, 14944), True, 'import geoviews as gv\n'), ((16385, 16414), 'xarray.concat', 'xr.concat', (['NO2_day_xr', '"""date"""'], {}), "(NO2_day_xr, 'date')\n", (16394, 16414), True, 'import xarray as xr\n'), ((16521, 16634), 'os.chdir', 'os.chdir', (['"""/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures"""'], {}), "(\n '/rds/projects/2018/maraisea-glu-01/Study/Research_Data/TROPOMI/project_2_Suez_Canal/Figures'\n )\n", (16529, 16634), False, 'import os\n'), ((16681, 16702), 'geoviews.extension', 'gv.extension', (['"""bokeh"""'], {}), "('bokeh')\n", (16693, 16702), True, 'import geoviews as gv\n'), ((17117, 17137), 'geoviews.renderer', 'gv.renderer', (['"""bokeh"""'], {}), "('bokeh')\n", (17128, 17137), True, 'import geoviews as gv\n'), ((1262, 1335), 'pandas.read_csv', 'pd.read_csv', (['TROPOMI_oversampled_NO2_output_file'], {'sep': '"""\\\\s+"""', 'header': 'None'}), "(TROPOMI_oversampled_NO2_output_file, sep='\\\\s+', header=None)\n", (1273, 1335), True, 'import pandas as pd\n'), ((1842, 1887), 'glob.glob', 'glob.glob', (['"""Oversample_output_Suez_NO2_week*"""'], {}), "('Oversample_output_Suez_NO2_week*')\n", (1851, 1887), False, 'import glob\n'), ((3567, 3581), 'numpy.array', 'np.array', (['test'], {}), '(test)\n', (3575, 3581), True, 'import numpy as np\n'), ((3651, 3699), 'pandas.DataFrame', 'pd.DataFrame', (["{'lat': test_lat, 'lon': test_lon}"], {}), "({'lat': test_lat, 'lon': test_lon})\n", (3663, 3699), True, 'import pandas as pd\n'), ((5924, 5969), 'glob.glob', 'glob.glob', (['"""Oversample_output_Suez_NO2_week*"""'], {}), "('Oversample_output_Suez_NO2_week*')\n", (5933, 5969), False, 'import glob\n'), ((6232, 6290), 'pandas.merge', 'pd.merge', (['domain_grid', 'data'], {'how': '"""left"""', 'on': "['lat', 'lon']"}), "(domain_grid, data, how='left', on=['lat', 'lon'])\n", (6240, 6290), True, 'import pandas as pd\n'), ((6619, 6688), 'xarray.DataArray', 'xr.DataArray', (['data'], {'coords': "[('lat', domain_lat), ('lon', domain_lon)]"}), "(data, coords=[('lat', domain_lat), ('lon', domain_lon)])\n", (6631, 6688), True, 'import xarray as xr\n'), ((6946, 6990), 'glob.glob', 'glob.glob', (['"""Oversample_output_Suez_NO2_day*"""'], {}), "('Oversample_output_Suez_NO2_day*')\n", (6955, 6990), False, 'import glob\n'), ((7249, 7307), 'pandas.merge', 'pd.merge', (['domain_grid', 'data'], {'how': '"""left"""', 'on': "['lat', 'lon']"}), "(domain_grid, data, how='left', on=['lat', 'lon'])\n", (7257, 7307), True, 'import pandas as pd\n'), ((7630, 7699), 'xarray.DataArray', 'xr.DataArray', (['data'], {'coords': "[('lat', domain_lat), ('lon', domain_lon)]"}), "(data, coords=[('lat', domain_lat), ('lon', domain_lon)])\n", (7642, 7699), True, 
'import xarray as xr\n'), ((8927, 8955), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[20, 10]'}), '(figsize=[20, 10])\n', (8937, 8955), True, 'import matplotlib.pyplot as plt\n'), ((9449, 9473), 'matplotlib.pyplot.delaxes', 'plt.delaxes', (['fig.axes[1]'], {}), '(fig.axes[1])\n', (9460, 9473), True, 'import matplotlib.pyplot as plt\n'), ((9638, 9649), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9647, 9649), True, 'import matplotlib.pyplot as plt\n'), ((10025, 10053), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[20, 10]'}), '(figsize=[20, 10])\n', (10035, 10053), True, 'import matplotlib.pyplot as plt\n'), ((10509, 10547), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'font.size': 25}"], {}), "({'font.size': 25})\n", (10528, 10547), True, 'import matplotlib.pyplot as plt\n'), ((10614, 10638), 'matplotlib.pyplot.delaxes', 'plt.delaxes', (['fig.axes[0]'], {}), '(fig.axes[0])\n', (10625, 10638), True, 'import matplotlib.pyplot as plt\n'), ((10789, 10800), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10798, 10800), True, 'import matplotlib.pyplot as plt\n'), ((14627, 14644), 'cartopy.crs.PlateCarree', 'crs.PlateCarree', ([], {}), '()\n', (14642, 14644), True, 'import cartopy.crs as crs\n'), ((16820, 16837), 'cartopy.crs.PlateCarree', 'crs.PlateCarree', ([], {}), '()\n', (16835, 16837), True, 'import cartopy.crs as crs\n'), ((15677, 15694), 'datetime.timedelta', 'timedelta', ([], {'days': 'i'}), '(days=i)\n', (15686, 15694), False, 'from datetime import date, timedelta\n')]
|
import torch
import torchvision
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.distributions as dists
import numpy as np
import scipy.io
import foolbox
import input_data
import argparse
from tqdm import tqdm
import data_loader
import math
import os
import tensorflow as tf
from cleverhans.attacks import FastGradientMethod
from cleverhans.model import CallableModelWrapper
from cleverhans.utils import AccuracyReport
from cleverhans.utils_pytorch import convert_pytorch_model_to_tf
parser = argparse.ArgumentParser()
parser.add_argument('--use_dropout', default=False, action='store_true')
parser.add_argument('--normalize', default=False, action='store_true')
parser.add_argument('--load', default=False, action='store_true')
parser.add_argument('--train_samples', type=int, default=1)
parser.add_argument('--n_samples', type=int, default=100)
parser.add_argument('--lr', type=float, default=1e-3)
parser.add_argument('--wd', type=float, default=0)
parser.add_argument('--lam', type=float, default=1e-7)
parser.add_argument('--n_hidden', type=int, default=100)
parser.add_argument('--n_hidden_hypernet', type=int, default=50)
parser.add_argument('--batch_size', type=int, default=200)
parser.add_argument('--n_iter', type=int, default=100)
parser.add_argument('--randseed', type=int, default=9999)
args = parser.parse_args()
np.random.seed(args.randseed)
torch.manual_seed(args.randseed)
name = 'mlcdn'
if args.use_dropout:
name = 'dropout'
os.makedirs('./results/cifar', exist_ok=True)
os.makedirs('./models/cifar', exist_ok=True)
# Load training data
trainset, testset = data_loader.load_dataset('cifar10_pretrained')
class ProbHypernet(nn.Module):
def __init__(self, in_dim, out_dim, h_dim=100):
super(ProbHypernet, self).__init__()
self.in_dim = in_dim + 1
self.out_dim = out_dim
self.h_dim = h_dim
self.M = nn.Parameter(torch.randn(self.in_dim, out_dim))
self.fc_xh = nn.Linear(in_dim, h_dim)
nn.init.uniform_(self.fc_xh.weight, -0.0001, 0.0001)
self.fc_hmu = nn.Linear(h_dim, self.in_dim)
nn.init.uniform_(self.fc_hmu.weight, -0.0001, 0.0001)
self.fc_hlogvar_in = nn.Linear(h_dim, self.in_dim)
nn.init.uniform_(self.fc_hlogvar_in.weight, -0.0001, 0.0001)
self.fc_hlogvar_out = nn.Linear(h_dim, out_dim)
nn.init.uniform_(self.fc_hlogvar_out.weight, -0.0001, 0.0001)
def forward(self, x, output_weight_params=False):
m = x.shape[0]
r, c = self.in_dim, self.out_dim
h = self.fc_xh(x)
h = F.relu(h)
mu_scaling = self.fc_hmu(h)
logvar_r = self.fc_hlogvar_in(h)
logvar_c = self.fc_hlogvar_out(h)
M = self.M
M = mu_scaling.view(m, r, 1) * M # Broadcasted: M is (m, r, c)
var_r = torch.exp(logvar_r)
var_c = torch.exp(logvar_c)
E = torch.randn(m, r, c, device='cuda')
# Reparametrization trick
W = M + torch.sqrt(var_r).view(m, r, 1) * E * torch.sqrt(var_c).view(m, 1, c)
# KL divergence to prior MVN(0, I, I)
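        # Closed-form KL between MN(M, diag(var_r), diag(var_c)) and MN(0, I, I):
        # 1/2 * [tr(var_r)*tr(var_c) + ||M||_F^2 - r*c - c*sum(log var_r) - r*sum(log var_c)]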
D_KL = torch.mean(
1/2 * (torch.sum(var_r, 1)*torch.sum(var_c, 1) \
+ torch.norm(M.view(m, -1), dim=1)**2 \
- r*c - c*torch.sum(logvar_r, 1) - r*torch.sum(logvar_c, 1))
)
x = torch.cat([x, torch.ones(m, 1, device='cuda')], 1)
h = torch.bmm(x.unsqueeze(1), W).squeeze()
if output_weight_params:
return h, D_KL, (M, var_r, var_c)
else:
return h, D_KL
class Model(nn.Module):
def __init__(self, h_dim=100, h_dim_hypernet=50, use_dropout=False):
super(Model, self).__init__()
self.use_dropout = use_dropout
if not self.use_dropout:
self.fc_xh = ProbHypernet(1024, h_dim, h_dim_hypernet)
self.fc_hy = ProbHypernet(h_dim, 10, h_dim_hypernet)
else:
self.fc_xh = nn.Linear(1024, h_dim)
self.fc_hy = nn.Linear(h_dim, 10)
def forward(self, X):
X = X.squeeze()
if not self.use_dropout:
h, D_KL1 = self.fc_xh(X)
h = F.relu(h)
y, D_KL2 = self.fc_hy(h)
return (y, D_KL1+D_KL2) if self.training else y
else:
h = F.relu(self.fc_xh(X))
if self.use_dropout:
h = F.dropout(h, p=0.5, training=True)
y = self.fc_hy(h)
return y
def validate(m=args.batch_size):
model.eval()
val_acc = 0
total = 0
for x, y in testset:
x = x.cuda()
y_i = model.forward(x)
val_acc += np.sum(y_i.argmax(dim=1).cpu().numpy() == y.numpy())
total += x.shape[0]
model.train()
return val_acc/total
""" Training """
S = args.train_samples
m = args.batch_size
lr = args.lr
lam = args.lam
h_dim = args.n_hidden
h_dim_hypernet = args.n_hidden_hypernet
model = Model(h_dim, h_dim_hypernet, args.use_dropout).cuda()
print(f'Parameter count: {np.sum([value.numel() for value in model.parameters()])}')
if args.load:
model.load_state_dict(torch.load(f'models/cifar/model_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.bin'))
else:
opt = optim.Adam(model.parameters(), lr, weight_decay=args.wd)
pbar = tqdm(range(args.n_iter))
for i in pbar:
for x, y in trainset:
x = x.cuda()
y = y.cuda()
if not args.use_dropout:
log_p_y = []
D_KL = 0
for _ in range(S):
y_s, D_KL = model.forward(x)
log_p_y_s = dists.Categorical(logits=y_s).log_prob(y)
log_p_y.append(log_p_y_s)
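                # Monte Carlo estimate of -log E_W[p(y|x,W)]: logsumexp over the S sampled weights minus log S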
loss = -torch.mean(torch.logsumexp(torch.stack(log_p_y), 0) - math.log(S))
loss += args.lam*D_KL
else:
out = model.forward(x)
loss = F.cross_entropy(out, y)
loss.backward()
nn.utils.clip_grad_value_(model.parameters(), 5)
opt.step()
opt.zero_grad()
val_acc = validate(m)
pbar.set_description(f'[Loss: {loss.data.item():.3f}; val acc: {val_acc:.3f}]')
# Save model
if not args.load:
torch.save(model.state_dict(), f'models/cifar/model_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.bin')
""" =============================== Validate ======================================= """
def test():
model.eval()
y = []
t = []
for x_test, y_test in testset:
x_test = x_test.cuda()
y_i = model.forward(x_test)
y.append(F.softmax(y_i, dim=1).cpu().data.numpy())
t.append(y_test)
y = np.concatenate(y, 0)
t = np.concatenate(t)
return y, t
y_val = 0
for _ in tqdm(range(args.n_samples)):
y_s, t = test()
y_val += 1/args.n_samples*y_s
# Print accuracy
acc = np.mean(y_val.argmax(1) == t)
print(f'Test accuracy on CIFAR-10: {acc:.3f}')
""" ======================= Adversarial examples experiments ======================= """
model.eval()
input_shape = (None, 3, 32, 32)
trainset, testset = data_loader.load_dataset('cifar10')
pretrained_model = torchvision.models.densenet121(pretrained=True).cuda()
pretrained_model = torch.nn.Sequential(*(list(pretrained_model.children())[:-1]))
pretrained_model.eval()
model = nn.Sequential(pretrained_model, model)
model.eval()
# We use tf for evaluation on adversarial data
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
x_op = tf.placeholder(tf.float32, shape=input_shape)
# Convert pytorch model to a tf_model and wrap it in cleverhans
tf_model_fn = convert_pytorch_model_to_tf(model, out_dims=10)
cleverhans_model = CallableModelWrapper(tf_model_fn, output_layer='logits')
adv_accs = []
adv_ents = []
def test_tf(use_adv=True):
preds = []
y_test = []
total = 0
for x, y in testset:
x = x.permute(0, 3, 1, 2)
if use_adv:
pred = sess.run(adv_preds_op, feed_dict={x_op: x})
pred = F.softmax(torch.from_numpy(pred), 1).numpy()
else:
pred = model.forward(x.cuda())
pred = F.softmax(pred, 1).cpu().data.numpy()
preds.append(pred)
y_test.append(y)
total += x.shape[0]
if total >= 1000:
break
preds = np.concatenate(preds, 0)
y_test = np.concatenate(y_test, 0)
return np.nan_to_num(preds), y_test
adv_preds = 0
for _ in tqdm(range(args.n_samples)):
preds, y_test = test_tf(False)
adv_preds += 1/args.n_samples * preds
# Compute acc and entropy
acc = (np.argmax(adv_preds, axis=1) == y_test).mean()
ent = (-adv_preds*np.log(adv_preds+1e-8)).sum(1).mean()
adv_accs.append(acc)
adv_ents.append(ent)
print('Adv accuracy: {:.3f}'.format(acc))
print('Avg entropy: {:.3f}'.format(ent))
for eps in np.arange(0.1, 1.01, 0.1):
# Create an FGSM attack
fgsm_op = FastGradientMethod(cleverhans_model, sess=sess)
fgsm_params = {'eps': eps,
'clip_min': 0.,
'clip_max': 1.}
adv_x_op = fgsm_op.generate(x_op, **fgsm_params)
adv_preds_op = tf_model_fn(adv_x_op)
# Run an evaluation of our model against fgsm
# Use M data
adv_preds = 0
for _ in tqdm(range(args.n_samples)):
preds, y_test = test_tf()
adv_preds += 1/args.n_samples * preds
# Compute acc and entropy
acc = (np.argmax(adv_preds, axis=1) == y_test).mean()
ent = (-adv_preds*np.log(adv_preds+1e-8)).sum(1).mean()
adv_accs.append(acc)
adv_ents.append(ent)
print('Adv accuracy: {:.3f}'.format(acc))
print('Avg entropy: {:.3f}'.format(ent))
sess.close()
# Save data
np.save(f'results/cifar/accs_adv_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.npy', adv_accs)
np.save(f'results/cifar/ents_adv_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.npy', adv_ents)
|
[
"numpy.random.seed",
"argparse.ArgumentParser",
"data_loader.load_dataset",
"numpy.nan_to_num",
"numpy.argmax",
"torch.sqrt",
"torch.nn.init.uniform_",
"cleverhans.attacks.FastGradientMethod",
"torch.nn.functional.dropout",
"torch.randn",
"tensorflow.ConfigProto",
"numpy.arange",
"cleverhans.utils_pytorch.convert_pytorch_model_to_tf",
"torch.ones",
"torch.load",
"tensorflow.placeholder",
"torch.exp",
"torch.nn.Linear",
"torch.nn.functional.relu",
"math.log",
"numpy.save",
"torch.manual_seed",
"tensorflow.Session",
"torch.nn.functional.cross_entropy",
"cleverhans.model.CallableModelWrapper",
"torch.sum",
"numpy.concatenate",
"torch.from_numpy",
"os.makedirs",
"numpy.log",
"torch.nn.Sequential",
"torch.stack",
"torchvision.models.densenet121",
"torch.nn.functional.softmax",
"torch.distributions.Categorical"
] |
[((542, 567), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (565, 567), False, 'import argparse\n'), ((1379, 1408), 'numpy.random.seed', 'np.random.seed', (['args.randseed'], {}), '(args.randseed)\n', (1393, 1408), True, 'import numpy as np\n'), ((1409, 1441), 'torch.manual_seed', 'torch.manual_seed', (['args.randseed'], {}), '(args.randseed)\n', (1426, 1441), False, 'import torch\n'), ((1502, 1547), 'os.makedirs', 'os.makedirs', (['"""./results/cifar"""'], {'exist_ok': '(True)'}), "('./results/cifar', exist_ok=True)\n", (1513, 1547), False, 'import os\n'), ((1548, 1592), 'os.makedirs', 'os.makedirs', (['"""./models/cifar"""'], {'exist_ok': '(True)'}), "('./models/cifar', exist_ok=True)\n", (1559, 1592), False, 'import os\n'), ((1635, 1681), 'data_loader.load_dataset', 'data_loader.load_dataset', (['"""cifar10_pretrained"""'], {}), "('cifar10_pretrained')\n", (1659, 1681), False, 'import data_loader\n'), ((7165, 7200), 'data_loader.load_dataset', 'data_loader.load_dataset', (['"""cifar10"""'], {}), "('cifar10')\n", (7189, 7200), False, 'import data_loader\n'), ((7389, 7427), 'torch.nn.Sequential', 'nn.Sequential', (['pretrained_model', 'model'], {}), '(pretrained_model, model)\n', (7402, 7427), True, 'import torch.nn as nn\n'), ((7499, 7515), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (7513, 7515), True, 'import tensorflow as tf\n'), ((7562, 7587), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (7572, 7587), True, 'import tensorflow as tf\n'), ((7595, 7640), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'input_shape'}), '(tf.float32, shape=input_shape)\n', (7609, 7640), True, 'import tensorflow as tf\n'), ((7721, 7768), 'cleverhans.utils_pytorch.convert_pytorch_model_to_tf', 'convert_pytorch_model_to_tf', (['model'], {'out_dims': '(10)'}), '(model, out_dims=10)\n', (7748, 7768), False, 'from cleverhans.utils_pytorch import convert_pytorch_model_to_tf\n'), ((7788, 7844), 'cleverhans.model.CallableModelWrapper', 'CallableModelWrapper', (['tf_model_fn'], {'output_layer': '"""logits"""'}), "(tf_model_fn, output_layer='logits')\n", (7808, 7844), False, 'from cleverhans.model import CallableModelWrapper\n'), ((8923, 8948), 'numpy.arange', 'np.arange', (['(0.1)', '(1.01)', '(0.1)'], {}), '(0.1, 1.01, 0.1)\n', (8932, 8948), True, 'import numpy as np\n'), ((9766, 9890), 'numpy.save', 'np.save', (['f"""results/cifar/accs_adv_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.npy"""', 'adv_accs'], {}), "(\n f'results/cifar/accs_adv_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.npy'\n , adv_accs)\n", (9773, 9890), True, 'import numpy as np\n'), ((9881, 10005), 'numpy.save', 'np.save', (['f"""results/cifar/ents_adv_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.npy"""', 'adv_ents'], {}), "(\n f'results/cifar/ents_adv_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.npy'\n , adv_ents)\n", (9888, 10005), True, 'import numpy as np\n'), ((6736, 6756), 'numpy.concatenate', 'np.concatenate', (['y', '(0)'], {}), '(y, 0)\n', (6750, 6756), True, 'import numpy as np\n'), ((6765, 6782), 'numpy.concatenate', 'np.concatenate', (['t'], {}), '(t)\n', (6779, 6782), True, 'import numpy as np\n'), ((8409, 8433), 'numpy.concatenate', 'np.concatenate', (['preds', '(0)'], {}), '(preds, 0)\n', (8423, 8433), True, 'import numpy as np\n'), ((8447, 8472), 'numpy.concatenate', 'np.concatenate', (['y_test', '(0)'], {}), '(y_test, 0)\n', (8461, 8472), 
True, 'import numpy as np\n'), ((8993, 9040), 'cleverhans.attacks.FastGradientMethod', 'FastGradientMethod', (['cleverhans_model'], {'sess': 'sess'}), '(cleverhans_model, sess=sess)\n', (9011, 9040), False, 'from cleverhans.attacks import FastGradientMethod\n'), ((1993, 2017), 'torch.nn.Linear', 'nn.Linear', (['in_dim', 'h_dim'], {}), '(in_dim, h_dim)\n', (2002, 2017), True, 'import torch.nn as nn\n'), ((2026, 2078), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['self.fc_xh.weight', '(-0.0001)', '(0.0001)'], {}), '(self.fc_xh.weight, -0.0001, 0.0001)\n', (2042, 2078), True, 'import torch.nn as nn\n'), ((2102, 2131), 'torch.nn.Linear', 'nn.Linear', (['h_dim', 'self.in_dim'], {}), '(h_dim, self.in_dim)\n', (2111, 2131), True, 'import torch.nn as nn\n'), ((2140, 2193), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['self.fc_hmu.weight', '(-0.0001)', '(0.0001)'], {}), '(self.fc_hmu.weight, -0.0001, 0.0001)\n', (2156, 2193), True, 'import torch.nn as nn\n'), ((2224, 2253), 'torch.nn.Linear', 'nn.Linear', (['h_dim', 'self.in_dim'], {}), '(h_dim, self.in_dim)\n', (2233, 2253), True, 'import torch.nn as nn\n'), ((2262, 2322), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['self.fc_hlogvar_in.weight', '(-0.0001)', '(0.0001)'], {}), '(self.fc_hlogvar_in.weight, -0.0001, 0.0001)\n', (2278, 2322), True, 'import torch.nn as nn\n'), ((2354, 2379), 'torch.nn.Linear', 'nn.Linear', (['h_dim', 'out_dim'], {}), '(h_dim, out_dim)\n', (2363, 2379), True, 'import torch.nn as nn\n'), ((2388, 2449), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['self.fc_hlogvar_out.weight', '(-0.0001)', '(0.0001)'], {}), '(self.fc_hlogvar_out.weight, -0.0001, 0.0001)\n', (2404, 2449), True, 'import torch.nn as nn\n'), ((2608, 2617), 'torch.nn.functional.relu', 'F.relu', (['h'], {}), '(h)\n', (2614, 2617), True, 'import torch.nn.functional as F\n'), ((2846, 2865), 'torch.exp', 'torch.exp', (['logvar_r'], {}), '(logvar_r)\n', (2855, 2865), False, 'import torch\n'), ((2882, 2901), 'torch.exp', 'torch.exp', (['logvar_c'], {}), '(logvar_c)\n', (2891, 2901), False, 'import torch\n'), ((2915, 2950), 'torch.randn', 'torch.randn', (['m', 'r', 'c'], {'device': '"""cuda"""'}), "(m, r, c, device='cuda')\n", (2926, 2950), False, 'import torch\n'), ((5133, 5246), 'torch.load', 'torch.load', (['f"""models/cifar/model_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.bin"""'], {}), "(\n f'models/cifar/model_{name}_{h_dim}_{h_dim_hypernet}_{m}_{lr}_{args.wd}_{args.lam}_{S}.bin'\n )\n", (5143, 5246), False, 'import torch\n'), ((7220, 7267), 'torchvision.models.densenet121', 'torchvision.models.densenet121', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (7250, 7267), False, 'import torchvision\n'), ((8485, 8505), 'numpy.nan_to_num', 'np.nan_to_num', (['preds'], {}), '(preds)\n', (8498, 8505), True, 'import numpy as np\n'), ((1936, 1969), 'torch.randn', 'torch.randn', (['self.in_dim', 'out_dim'], {}), '(self.in_dim, out_dim)\n', (1947, 1969), False, 'import torch\n'), ((3978, 4000), 'torch.nn.Linear', 'nn.Linear', (['(1024)', 'h_dim'], {}), '(1024, h_dim)\n', (3987, 4000), True, 'import torch.nn as nn\n'), ((4026, 4046), 'torch.nn.Linear', 'nn.Linear', (['h_dim', '(10)'], {}), '(h_dim, 10)\n', (4035, 4046), True, 'import torch.nn as nn\n'), ((4185, 4194), 'torch.nn.functional.relu', 'F.relu', (['h'], {}), '(h)\n', (4191, 4194), True, 'import torch.nn.functional as F\n'), ((8680, 8708), 'numpy.argmax', 'np.argmax', (['adv_preds'], {'axis': '(1)'}), '(adv_preds, axis=1)\n', (8689, 8708), True, 'import numpy as np\n'), 
((3386, 3417), 'torch.ones', 'torch.ones', (['m', '(1)'], {'device': '"""cuda"""'}), "(m, 1, device='cuda')\n", (3396, 3417), False, 'import torch\n'), ((4399, 4433), 'torch.nn.functional.dropout', 'F.dropout', (['h'], {'p': '(0.5)', 'training': '(True)'}), '(h, p=0.5, training=True)\n', (4408, 4433), True, 'import torch.nn.functional as F\n'), ((5954, 5977), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['out', 'y'], {}), '(out, y)\n', (5969, 5977), True, 'import torch.nn.functional as F\n'), ((9489, 9517), 'numpy.argmax', 'np.argmax', (['adv_preds'], {'axis': '(1)'}), '(adv_preds, axis=1)\n', (9498, 9517), True, 'import numpy as np\n'), ((8745, 8770), 'numpy.log', 'np.log', (['(adv_preds + 1e-08)'], {}), '(adv_preds + 1e-08)\n', (8751, 8770), True, 'import numpy as np\n'), ((3040, 3057), 'torch.sqrt', 'torch.sqrt', (['var_c'], {}), '(var_c)\n', (3050, 3057), False, 'import torch\n'), ((3325, 3347), 'torch.sum', 'torch.sum', (['logvar_c', '(1)'], {}), '(logvar_c, 1)\n', (3334, 3347), False, 'import torch\n'), ((8121, 8143), 'torch.from_numpy', 'torch.from_numpy', (['pred'], {}), '(pred)\n', (8137, 8143), False, 'import torch\n'), ((9558, 9583), 'numpy.log', 'np.log', (['(adv_preds + 1e-08)'], {}), '(adv_preds + 1e-08)\n', (9564, 9583), True, 'import numpy as np\n'), ((3002, 3019), 'torch.sqrt', 'torch.sqrt', (['var_r'], {}), '(var_r)\n', (3012, 3019), False, 'import torch\n'), ((3298, 3320), 'torch.sum', 'torch.sum', (['logvar_r', '(1)'], {}), '(logvar_r, 1)\n', (3307, 3320), False, 'import torch\n'), ((5656, 5685), 'torch.distributions.Categorical', 'dists.Categorical', ([], {'logits': 'y_s'}), '(logits=y_s)\n', (5673, 5685), True, 'import torch.distributions as dists\n'), ((5823, 5834), 'math.log', 'math.log', (['S'], {}), '(S)\n', (5831, 5834), False, 'import math\n'), ((5796, 5816), 'torch.stack', 'torch.stack', (['log_p_y'], {}), '(log_p_y)\n', (5807, 5816), False, 'import torch\n'), ((6660, 6681), 'torch.nn.functional.softmax', 'F.softmax', (['y_i'], {'dim': '(1)'}), '(y_i, dim=1)\n', (6669, 6681), True, 'import torch.nn.functional as F\n'), ((8232, 8250), 'torch.nn.functional.softmax', 'F.softmax', (['pred', '(1)'], {}), '(pred, 1)\n', (8241, 8250), True, 'import torch.nn.functional as F\n'), ((3166, 3185), 'torch.sum', 'torch.sum', (['var_r', '(1)'], {}), '(var_r, 1)\n', (3175, 3185), False, 'import torch\n'), ((3186, 3205), 'torch.sum', 'torch.sum', (['var_c', '(1)'], {}), '(var_c, 1)\n', (3195, 3205), False, 'import torch\n')]
|
"""
Mergence
^^^^^^^^
All parsed plain text files should be merged into a single file to handle them
as an unified large corpus data.
.. autoclass:: MergeFiles
"""
from langumo.building import Builder
from langumo.utils import AuxiliaryFile, AuxiliaryFileManager, colorful
class MergeFiles(Builder):
"""Merge files into a single one.
Note:
All documents are separated by new-line character(``\\n``) and this
builder automatically appends the new-line character to avoid mixing
the last document of a file and the first document of another one.
"""
def build(self, afm: AuxiliaryFileManager, *inputs: AuxiliaryFile
) -> AuxiliaryFile:
merged = afm.create()
print(colorful.render(f'<r>[*]</r> merge <m>{len(inputs)}</m> files '
f'into one'))
with merged.open('wb') as dst, \
AuxiliaryFile.opens(inputs, 'rb') as srcs:
for src in srcs:
for line in src:
# Add break-line character to the end of text to avoid
# being merged with other line.
line += b'\n' if not line.endswith(b'\n') else b''
dst.write(line)
return merged
|
[
"langumo.utils.AuxiliaryFile.opens"
] |
[((905, 938), 'langumo.utils.AuxiliaryFile.opens', 'AuxiliaryFile.opens', (['inputs', '"""rb"""'], {}), "(inputs, 'rb')\n", (924, 938), False, 'from langumo.utils import AuxiliaryFile, AuxiliaryFileManager, colorful\n')]
|
"""
From http://arxiv.org/pdf/1204.0375.pdf
"""
from numpy import dot, sum, tile, linalg, log, exp, pi
from numpy.linalg import inv, det
def kf_predict(X, P, A, Q, B, U):
"""
X: The mean state estimate of the previous step (k−1).
P: The state covariance of previous step (k−1).
A: The transition n × n matrix.
Q: The process noise covariance matrix.
B: The input effect matrix.
U: The control input.
"""
X = dot(A, X) + dot(B, U)
P = dot(A, dot(P, A.T)) + Q
    return (X, P)
def kf_update(X, P, Y, H, R):
"""
K: the Kalman Gain matrix
IM: the Mean of predictive distribution of Y
IS: the Covariance or predictive mean of Y
LH: the Predictive probability (likelihood) of measurement which is computed using the Python function gauss_pdf.
"""
IM = dot(H, X)
IS = R + dot(H, dot(P, H.T))
K = dot(P, dot(H.T, inv(IS)))
X = X + dot(K, (Y-IM))
P = P - dot(K, dot(IS, K.T))
LH = gauss_pdf(Y, IM, IS)
return (X,P,K,IM,IS,LH)
def gauss_pdf(X, M, S):
    if M.shape[1] == 1:
        DX = X - tile(M, X.shape[1])
        E = 0.5 * sum(DX * (dot(inv(S), DX)), axis=0)
        E = E + 0.5 * M.shape[0] * log(2 * pi) + 0.5 * log(det(S))
        P = exp(-E)
    elif X.shape[1] == 1:
        DX = tile(X, M.shape[1]) - M
        E = 0.5 * sum(DX * (dot(inv(S), DX)), axis=0)
        E = E + 0.5 * M.shape[0] * log(2 * pi) + 0.5 * log(det(S))
        P = exp(-E)
    else:
        DX = X - M
        E = 0.5 * dot(DX.T, dot(inv(S), DX))
        E = E + 0.5 * M.shape[0] * log(2 * pi) + 0.5 * log(det(S))
        P = exp(-E)
    return (P[0], E[0])
from numpy import *
from numpy.linalg import inv
from numpy.random import randn
#time step of mobile movement
dt = 0.1
# Initialization of state matrices
X = array([[0.0], [0.0], [0.1], [0.1]])
P = diag((0.01, 0.01, 0.01, 0.01))
A = array([[1, 0, dt , 0], [0, 1, 0, dt], [0, 0, 1, 0], [0, 0, 0, 1]])
Q = eye(X.shape[0])
B = eye(X.shape[0])
U = zeros((X.shape[0], 1))
# Measurement matrices
Y = array([[X[0,0] + abs(randn(1)[0])], [X[1,0] + abs(randn(1)[0])]])
H = array([[1, 0, 0, 0], [0, 1, 0, 0]])
R = eye(Y.shape[0])
# Number of iterations in Kalman Filter
N_iter = 50
# Applying the Kalman Filter
for i in arange(0, N_iter):
(X, P) = kf_predict(X, P, A, Q, B, U)
(X, P, K, IM, IS, LH) = kf_update(X, P, Y, H, R)
Y = array([[X[0,0] + abs(0.1 * randn(1)[0])],[X[1, 0] + abs(0.1 * randn(1)[0])]])
|
[
"numpy.dot",
"numpy.linalg.inv"
] |
[((811, 820), 'numpy.dot', 'dot', (['H', 'X'], {}), '(H, X)\n', (814, 820), False, 'from numpy import dot, sum, tile, linalg\n'), ((427, 436), 'numpy.dot', 'dot', (['A', 'X'], {}), '(A, X)\n', (430, 436), False, 'from numpy import dot, sum, tile, linalg\n'), ((439, 448), 'numpy.dot', 'dot', (['B', 'U'], {}), '(B, U)\n', (442, 448), False, 'from numpy import dot, sum, tile, linalg\n'), ((900, 914), 'numpy.dot', 'dot', (['K', '(Y - IM)'], {}), '(K, Y - IM)\n', (903, 914), False, 'from numpy import dot, sum, tile, linalg\n'), ((464, 475), 'numpy.dot', 'dot', (['P', 'A.T'], {}), '(P, A.T)\n', (467, 475), False, 'from numpy import dot, sum, tile, linalg\n'), ((841, 852), 'numpy.dot', 'dot', (['P', 'H.T'], {}), '(P, H.T)\n', (844, 852), False, 'from numpy import dot, sum, tile, linalg\n'), ((878, 885), 'numpy.linalg.inv', 'inv', (['IS'], {}), '(IS)\n', (881, 885), False, 'from numpy.linalg import inv\n'), ((934, 946), 'numpy.dot', 'dot', (['IS', 'K.T'], {}), '(IS, K.T)\n', (937, 946), False, 'from numpy import dot, sum, tile, linalg\n'), ((1128, 1134), 'numpy.linalg.inv', 'inv', (['S'], {}), '(S)\n', (1131, 1134), False, 'from numpy.linalg import inv\n'), ((1507, 1513), 'numpy.linalg.inv', 'inv', (['S'], {}), '(S)\n', (1510, 1513), False, 'from numpy.linalg import inv\n'), ((1337, 1343), 'numpy.linalg.inv', 'inv', (['S'], {}), '(S)\n', (1340, 1343), False, 'from numpy.linalg import inv\n')]
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
try:
long_description = open("README.md").read()
except IOError:
long_description = ""
setup(
name="word-embedder",
version="1.0.0",
description="Word Embedder",
license="MIT",
author="Solumilken",
packages=find_packages(),
install_requires=[
"mkdir-p>=0.1.1",
"numpy>=1.15.1",
"python-dotenv==0.9.1",
],
long_description=long_description,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
],
)
|
[
"os.path.dirname",
"setuptools.find_packages"
] |
[((103, 128), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (118, 128), False, 'import os\n'), ((371, 386), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (384, 386), False, 'from setuptools import setup, find_packages\n')]
|
"""The purpose of this module is to test the TemporaryResources context manager
The purpose of the TemporaryResources context manager is to enable using temporary, specific
configuration of resources when creating a custom Template.
If you use the global configuration `pn.config` for your templates you will include the same
css and js files in all templates. This is problematic if you want different templates, like for
example a light template, a dark template, a bootstrap template, a material template, a template
with Plotly Plots, a template without Plotly plots etc.
"""
import panel as pn
import pytest
from panel_components.resources import TemporaryResources
# pylint: disable=missing-function-docstring
@pytest.fixture(scope="function", autouse=True)
def clear_config_except_panel_css():
"""Reset pn.config except for panel css"""
# pylint: disable=protected-access
pn.config.raw_css = []
pn.config.js_files = {}
pn.config.css_files = [
file for file in pn.config.css_files if TemporaryResources._is_panel_style_file(file)
]
@pytest.fixture()
def clear_config():
"""Reset pn.config"""
pn.config.raw_css = []
pn.config.js_files = {}
pn.config.css_files = []
def _contains_bokeh_and_panel_resources(text):
return (
"bokeh-" in text
and "bokeh-widgets" in text
and "bokeh-tables" in text
and ".panel-widget-box"
)
def test_does_not_include_pn_config_raw_css():
# Given
pre_raw_css = "body {background: black;"
# When
pn.config.raw_css.append(pre_raw_css)
backup = pn.config.raw_css
with TemporaryResources():
text = pn.io.resources.Resources().render()
# Then
assert pre_raw_css not in text
assert pn.config.raw_css == backup
assert _contains_bokeh_and_panel_resources(text)
def test_does_not_include_pn_config_css_files():
# Given
pre_css_file = "https://somedomain.com/test.css"
# When
pn.config.css_files.append(pre_css_file)
backup = pn.config.css_files
with TemporaryResources():
text = pn.io.resources.Resources().render()
# Then
assert pre_css_file not in text
assert pn.config.css_files == backup
assert _contains_bokeh_and_panel_resources(text)
def test_does_not_include_pn_config_js_files():
# Given
pre_js = "http://some/domain.com/test.js"
# When
pn.config.js_files = {"somejs": pre_js}
backup = pn.config.js_files
with TemporaryResources():
text = pn.io.resources.Resources().render()
# Then
assert pre_js not in text
assert pn.config.js_files == backup
assert _contains_bokeh_and_panel_resources(text)
def test_does_not_include_pn_extension():
# Given
pre_extension = "plotly"
# When
pn.extension(pre_extension)
with TemporaryResources():
text = pn.io.resources.Resources().render()
# Then
assert pre_extension not in text
assert _contains_bokeh_and_panel_resources(text)
def test_includes_template_extension():
extension = "katex"
with TemporaryResources(extensions={extension}):
text = pn.io.resources.Resources().render()
assert extension in text
assert _contains_bokeh_and_panel_resources(text)
def test_includes_template_raw_css():
raw_css = "body {background: black;"
with TemporaryResources(raw_css=[raw_css]):
text = pn.io.resources.Resources().render()
assert raw_css in text
assert _contains_bokeh_and_panel_resources(text)
def test_includes_template_css_files():
css_file = "https://somedomain.com/test.css"
with TemporaryResources(css_files=[css_file]):
text = pn.io.resources.Resources().render()
assert css_file in text
assert _contains_bokeh_and_panel_resources(text)
def test_includes_template_js_files():
js_file = "http://some/domain.com/test.js"
with TemporaryResources(js_files={"somejs": js_file}):
text = pn.io.resources.Resources().render()
assert js_file in text
assert _contains_bokeh_and_panel_resources(text)
def test_can_exclude_panel_css():
with TemporaryResources(include_panel_css=False):
text = pn.io.resources.Resources().render()
assert ".panel-widget-box" not in text
def test_complex_use_case():
# Given
pre_raw_css = "body {background: black;"
pre_css_file = "https://somedomain.com/test.css"
pre_js = "http://some/domain.com/test.js"
pre_extension = "plotly"
extension = "katex"
# When
pn.extension(pre_extension)
pn.config.raw_css.append(pre_raw_css)
pn.config.css_files.append(pre_css_file)
pn.config.js_files = {"somejs": pre_js}
backup_css_files = pn.config.css_files
with TemporaryResources(extensions={extension}, include_panel_css=False):
text = pn.io.resources.Resources().render()
# Then
assert "bokeh-" in text
assert "bokeh-widgets" in text
assert "bokeh-tables" in text
assert ".panel-widget-box" not in text
assert extension in text
assert pre_raw_css not in text
assert pre_css_file not in text
assert pre_js not in text
assert pre_extension not in text
assert pn.config.raw_css == [pre_raw_css]
assert pn.config.js_files == {"somejs": pre_js}
assert pn.config.css_files == backup_css_files
|
[
"panel_components.resources.TemporaryResources._is_panel_style_file",
"panel.extension",
"pytest.fixture",
"panel_components.resources.TemporaryResources",
"panel.io.resources.Resources",
"panel.config.raw_css.append",
"panel.config.css_files.append"
] |
[((720, 766), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""', 'autouse': '(True)'}), "(scope='function', autouse=True)\n", (734, 766), False, 'import pytest\n'), ((1076, 1092), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1090, 1092), False, 'import pytest\n'), ((1541, 1578), 'panel.config.raw_css.append', 'pn.config.raw_css.append', (['pre_raw_css'], {}), '(pre_raw_css)\n', (1565, 1578), True, 'import panel as pn\n'), ((1965, 2005), 'panel.config.css_files.append', 'pn.config.css_files.append', (['pre_css_file'], {}), '(pre_css_file)\n', (1991, 2005), True, 'import panel as pn\n'), ((2779, 2806), 'panel.extension', 'pn.extension', (['pre_extension'], {}), '(pre_extension)\n', (2791, 2806), True, 'import panel as pn\n'), ((4511, 4538), 'panel.extension', 'pn.extension', (['pre_extension'], {}), '(pre_extension)\n', (4523, 4538), True, 'import panel as pn\n'), ((4543, 4580), 'panel.config.raw_css.append', 'pn.config.raw_css.append', (['pre_raw_css'], {}), '(pre_raw_css)\n', (4567, 4580), True, 'import panel as pn\n'), ((4585, 4625), 'panel.config.css_files.append', 'pn.config.css_files.append', (['pre_css_file'], {}), '(pre_css_file)\n', (4611, 4625), True, 'import panel as pn\n'), ((1620, 1640), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {}), '()\n', (1638, 1640), False, 'from panel_components.resources import TemporaryResources\n'), ((2048, 2068), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {}), '()\n', (2066, 2068), False, 'from panel_components.resources import TemporaryResources\n'), ((2469, 2489), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {}), '()\n', (2487, 2489), False, 'from panel_components.resources import TemporaryResources\n'), ((2816, 2836), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {}), '()\n', (2834, 2836), False, 'from panel_components.resources import TemporaryResources\n'), ((3068, 3110), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {'extensions': '{extension}'}), '(extensions={extension})\n', (3086, 3110), False, 'from panel_components.resources import TemporaryResources\n'), ((3338, 3375), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {'raw_css': '[raw_css]'}), '(raw_css=[raw_css])\n', (3356, 3375), False, 'from panel_components.resources import TemporaryResources\n'), ((3611, 3651), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {'css_files': '[css_file]'}), '(css_files=[css_file])\n', (3629, 3651), False, 'from panel_components.resources import TemporaryResources\n'), ((3885, 3933), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {'js_files': "{'somejs': js_file}"}), "(js_files={'somejs': js_file})\n", (3903, 3933), False, 'from panel_components.resources import TemporaryResources\n'), ((4113, 4156), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {'include_panel_css': '(False)'}), '(include_panel_css=False)\n', (4131, 4156), False, 'from panel_components.resources import TemporaryResources\n'), ((4723, 4790), 'panel_components.resources.TemporaryResources', 'TemporaryResources', ([], {'extensions': '{extension}', 'include_panel_css': '(False)'}), '(extensions={extension}, include_panel_css=False)\n', (4741, 4790), False, 'from panel_components.resources import TemporaryResources\n'), ((1021, 1066), 
'panel_components.resources.TemporaryResources._is_panel_style_file', 'TemporaryResources._is_panel_style_file', (['file'], {}), '(file)\n', (1060, 1066), False, 'from panel_components.resources import TemporaryResources\n'), ((1657, 1684), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (1682, 1684), True, 'import panel as pn\n'), ((2085, 2112), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (2110, 2112), True, 'import panel as pn\n'), ((2506, 2533), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (2531, 2533), True, 'import panel as pn\n'), ((2853, 2880), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (2878, 2880), True, 'import panel as pn\n'), ((3127, 3154), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (3152, 3154), True, 'import panel as pn\n'), ((3392, 3419), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (3417, 3419), True, 'import panel as pn\n'), ((3668, 3695), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (3693, 3695), True, 'import panel as pn\n'), ((3950, 3977), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (3975, 3977), True, 'import panel as pn\n'), ((4173, 4200), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (4198, 4200), True, 'import panel as pn\n'), ((4807, 4834), 'panel.io.resources.Resources', 'pn.io.resources.Resources', ([], {}), '()\n', (4832, 4834), True, 'import panel as pn\n')]
|
#!/usr/bin/env python
"""
4a. Add nxos1 to your my_devices.py file.
Ensure that you include the necessary information to set the NX-API port to 8443.
This is done using 'optional_args' in NAPALM so you should have the following key-value pair defined:
"optional_args": {"port": 8443}
4b. Create a new function named 'create_checkpoint'.
Add this function into your my_functions.py file.
This function should take one argument, the NAPALM connection object.
This function should use the NAPALM _get_checkpoint_file() method
to retrieve a checkpoint from the NX-OS device.
It should then write this checkpoint out to a file.
Recall that the NX-OS platform requires a 'checkpoint' file
for configuration replace operations.
Using this new function, retrieve a checkpoint
from nxos1 and write it to the local file system.
4c. Manually copy the saved checkpoint to a new file
and add an additional loopback interface to the configuration.
4d. Create a Python script that stages a complete configuration replace operation
(using the checkpoint file that you just retrieved and modified).
Once your candidate configuration is staged perform a compare_config (diff)
on the configuration to see your pending changes.
After the compare_config is complete, then use the discard_config() method
to eliminate the pending changes.
Next, perform an additional compare_config (diff) to verify that you have
no pending configuration changes.
Do not actually perform the commit_config as part of this exercise.
"""
import my_devices
from pprint import pprint
from napalm import get_network_driver
from my_functions import napalm_conn, create_backup, create_checkpoint
# Disable Self-signed Certificate Warnings
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
if __name__=="__main__":
# Creating the nxos napalm connection
nxos1 = my_devices.nxos1
nxos1_hostname = nxos1['hostname']
device_conn = napalm_conn(nxos1)
print("#" * 50)
print(f"Printing {nxos1_hostname} napalm connection: ")
print("#" * 50)
print(device_conn)
# Creating the nxos checkpoint file
filename = f"{nxos1_hostname}_checkpoint"
checkpoint = create_checkpoint(device_conn, filename)
# Napalm Config Replace staging
device_conn.load_replace_candidate(filename=f"{nxos1_hostname}_config")
print("#" * 50)
print(f"Printing {nxos1_hostname} DIFFS candidate vs running before commiting: ")
print("#" * 50)
print(device_conn.compare_config())
device_conn.discard_config()
print("#" * 50)
print(f"Printing {nxos1_hostname} DIFFS candidate vs running after discarding the staged candidate: ")
print("#" * 50)
print(device_conn.compare_config())
device_conn.close()
|
[
"requests.packages.urllib3.disable_warnings",
"my_functions.napalm_conn",
"my_functions.create_checkpoint"
] |
[((1808, 1874), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', (['InsecureRequestWarning'], {}), '(InsecureRequestWarning)\n', (1850, 1874), False, 'import requests\n'), ((2033, 2051), 'my_functions.napalm_conn', 'napalm_conn', (['nxos1'], {}), '(nxos1)\n', (2044, 2051), False, 'from my_functions import napalm_conn, create_backup, create_checkpoint\n'), ((2284, 2324), 'my_functions.create_checkpoint', 'create_checkpoint', (['device_conn', 'filename'], {}), '(device_conn, filename)\n', (2301, 2324), False, 'from my_functions import napalm_conn, create_backup, create_checkpoint\n')]
|
# -*- coding: utf-8 -*-
"""
{{NAMEPROJECT}}.users.controllers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
{{NAME}} user controllers module
:copyright: (c) {{YEAR}} by {{AUTHOR}}.
:license: BSD, see LICENSE for more details.
"""
from flask import current_app, render_template, Blueprint
from flask_security import login_required
blueprint = Blueprint('users', __name__, url_prefix='/users')
@blueprint.route('/profile')
@login_required
def profile():
"""return user profle."""
current_app.logger.debug(u'Get profile user.')
return render_template('users/profile.html')
|
[
"flask.Blueprint",
"flask.render_template",
"flask.current_app.logger.debug"
] |
[((353, 402), 'flask.Blueprint', 'Blueprint', (['"""users"""', '__name__'], {'url_prefix': '"""/users"""'}), "('users', __name__, url_prefix='/users')\n", (362, 402), False, 'from flask import current_app, render_template, Blueprint\n'), ((499, 545), 'flask.current_app.logger.debug', 'current_app.logger.debug', (['u"""Get profile user."""'], {}), "(u'Get profile user.')\n", (523, 545), False, 'from flask import current_app, render_template, Blueprint\n'), ((557, 594), 'flask.render_template', 'render_template', (['"""users/profile.html"""'], {}), "('users/profile.html')\n", (572, 594), False, 'from flask import current_app, render_template, Blueprint\n')]
|
# Ros Client
import rospy
# Standard Python Libraries
import threading
import os
import time
# Messages
from geometry_msgs.msg import Twist
# Third Party Libraries
from flask import Flask, request, Response
from pi_drone_server.html import html
from pi_drone_server.camera import Camera
# Globals
current_speed = 0
current_turn = 0
ping_time = 0
write_event = threading.Event()
app = Flask(__name__)
# Constants
TIMEOUT = 1.5 # Seconds
direction = rospy.Publisher("robot_twist", Twist, queue_size=10)
@app.route('/')
def view():
return html
def gen(camera):
"""Video streaming generator function."""
while True:
frame = camera.get_frame()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
@app.route('/video_feed')
def video_feed():
"""Video streaming route. Put this in the src attribute of an img tag."""
return Response(gen(Camera()), mimetype='multipart/x-mixed-replace; boundary=frame')
@app.route("/control")
def control():
global direction, current_speed, current_turn, write_event
# Decode Speed
if 'speed' in request.args and int(request.args["speed"]) != current_speed:
current_speed = request.args["speed"]
else:
current_speed = 0
# Decode Turn
if 'turn' in request.args and int(request.args["turn"]) != current_turn:
current_turn = request.args["turn"]
else:
current_turn = 0
# Signal To ros_thread That New Directions Have Been Received
write_event.set()
# Return Code 204
return ('', 204)
@app.route("/ping")
def ping():
global ping_time
ping_time = time.time()
return ('', 204)
def timeout_thread():
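    # Watchdog: if no /ping has arrived for more than TIMEOUT seconds, zero the
    # speed and turn and wake ros_thread so it publishes a stop command.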
global ping_time, current_speed, current_turn, write_event, TIMEOUT
time.sleep(1) # We need to wait for the rospy node to initialize before running.
while not rospy.is_shutdown():
if (time.time() - ping_time) > TIMEOUT:
current_speed = 0
current_turn = 0
write_event.set()
time.sleep(0.1)
def ros_thread():
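    # Waits until control() or timeout_thread() signals new data, then publishes
    # the current speed/turn as a Twist message on the "robot_twist" topic.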
global current_speed, current_turn, write_event, direction
rospy.init_node('pi_drone_server', disable_signals=True)
while not rospy.is_shutdown():
write_event.wait()
msg = Twist()
msg.linear.x = float(current_speed)
msg.angular.z = float(current_turn)
direction.publish(msg)
write_event.clear()
def pi_drone_server():
"""Executable"""
threading.Thread(target=ros_thread).start()
threading.Thread(target=timeout_thread).start()
app.run(host="0.0.0.0", threaded=True)
|
[
"threading.Thread",
"pi_drone_server.camera.Camera",
"flask.Flask",
"rospy.Publisher",
"geometry_msgs.msg.Twist",
"time.sleep",
"time.time",
"rospy.is_shutdown",
"threading.Event",
"rospy.init_node"
] |
[((364, 381), 'threading.Event', 'threading.Event', ([], {}), '()\n', (379, 381), False, 'import threading\n'), ((388, 403), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (393, 403), False, 'from flask import Flask, request, Response\n'), ((454, 506), 'rospy.Publisher', 'rospy.Publisher', (['"""robot_twist"""', 'Twist'], {'queue_size': '(10)'}), "('robot_twist', Twist, queue_size=10)\n", (469, 506), False, 'import rospy\n'), ((1643, 1654), 'time.time', 'time.time', ([], {}), '()\n', (1652, 1654), False, 'import time\n'), ((1776, 1789), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1786, 1789), False, 'import time\n'), ((2140, 2196), 'rospy.init_node', 'rospy.init_node', (['"""pi_drone_server"""'], {'disable_signals': '(True)'}), "('pi_drone_server', disable_signals=True)\n", (2155, 2196), False, 'import rospy\n'), ((1871, 1890), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (1888, 1890), False, 'import rospy\n'), ((2037, 2052), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2047, 2052), False, 'import time\n'), ((2212, 2231), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2229, 2231), False, 'import rospy\n'), ((2274, 2281), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (2279, 2281), False, 'from geometry_msgs.msg import Twist\n'), ((916, 924), 'pi_drone_server.camera.Camera', 'Camera', ([], {}), '()\n', (922, 924), False, 'from pi_drone_server.camera import Camera\n'), ((2479, 2514), 'threading.Thread', 'threading.Thread', ([], {'target': 'ros_thread'}), '(target=ros_thread)\n', (2495, 2514), False, 'import threading\n'), ((2527, 2566), 'threading.Thread', 'threading.Thread', ([], {'target': 'timeout_thread'}), '(target=timeout_thread)\n', (2543, 2566), False, 'import threading\n'), ((1904, 1915), 'time.time', 'time.time', ([], {}), '()\n', (1913, 1915), False, 'import time\n')]
|
# -*- mode:python; coding:utf-8; -*-
from sqlalchemy import create_engine
from html_templates import html_begin, html_end, html_links_li
from mysql import mysql_connect_data
__all__ = ["links"]
def links(lang, connect_data=mysql_connect_data):
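    # Build a SQLAlchemy engine from the MySQL connection string; any language
    # other than "ru" or "en" falls back to the Ukrainian ("uk") descriptions.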
e = create_engine(connect_data)
if lang not in ("ru", "en"):
lang = "uk"
link_list = [row for row in
e.execute("select uri, desc_{l} from links".format(l=lang))]
html = html_begin
for record in link_list:
html += html_links_li.format(link=record[0], desc=record[1])
return html + html_end
|
[
"sqlalchemy.create_engine",
"html_templates.html_links_li.format"
] |
[((258, 285), 'sqlalchemy.create_engine', 'create_engine', (['connect_data'], {}), '(connect_data)\n', (271, 285), False, 'from sqlalchemy import create_engine\n'), ((516, 568), 'html_templates.html_links_li.format', 'html_links_li.format', ([], {'link': 'record[0]', 'desc': 'record[1]'}), '(link=record[0], desc=record[1])\n', (536, 568), False, 'from html_templates import html_begin, html_end, html_links_li\n')]
|
# represents items that used by Storage;
# can be either a directory or media file, or non-media file
import abc
import hashlib
import os
import uuid
from enum import Enum
class StorageItemStatus(Enum):
UNKNOWN = 'Unknown'
ON_TARGET = 'On Target'
UPLOADING = 'Uploading'
UPLOAD_FAILED = 'Upload Failed'
UPLOADED = 'Uploaded'
class StorageItem(abc.ABC):
def __init__(self, path: str, uuid_str: str = None):
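        # Reuse the caller-supplied UUID string if given; otherwise generate a new time-based UUID.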
self._uuid = uuid_str if uuid_str else str(uuid.uuid1())
self._path = os.path.abspath(path)
self._hash = None
self._status = StorageItemStatus.UNKNOWN
@abc.abstractmethod
def is_dir(self):
pass
def uuid(self):
return self._uuid
def path(self):
return self._path
def hash(self):
return self._hash
def status(self):
return self._status
def set_status(self, status: StorageItemStatus):
self._status = status
def _calc_hash(self):
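        # Regular files are hashed by streaming their contents in 256 KiB chunks;
        # directories are hashed by their absolute path instead.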
if self._path is None or self._path == '':
self._hash = None
return
h = hashlib.sha256()
if not self.is_dir():
b = bytearray(256 * 1024)
mv = memoryview(b)
with open(self._path, 'rb', buffering=0) as f:
for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
self._hash = h.hexdigest()
else:
h.update(os.path.abspath(self._path))
digest = h.digest()
digest_str = ''
for b in digest:
digest_str += f'{b:02x}'
self._hash = digest_str
# class Directory(StorageItem):
# def __init__(self, path: str):
# super(Directory, self).__init__(path)
#
# def is_dir(self):
# return True
class File(StorageItem):
def __init__(self, path: str, uuid_str: str = None):
super(File, self).__init__(path, uuid_str)
def is_dir(self):
return False
class MediaFile(File):
def __init__(self, path: str, uuid_str: str = None):
super(MediaFile, self).__init__(path, uuid_str)
self._calc_hash()
|
[
"uuid.uuid1",
"hashlib.sha256",
"os.path.abspath"
] |
[((520, 541), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (535, 541), False, 'import os\n'), ((1093, 1109), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (1107, 1109), False, 'import hashlib\n'), ((485, 497), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (495, 497), False, 'import uuid\n'), ((1437, 1464), 'os.path.abspath', 'os.path.abspath', (['self._path'], {}), '(self._path)\n', (1452, 1464), False, 'import os\n')]
|
from aes import AES
from hmac import new as new_hmac, compare_digest
from hashlib import pbkdf2_hmac
import os
AES_KEY_SIZE = 16
HMAC_KEY_SIZE = 16
IV_SIZE = 16
SALT_SIZE = 16
HMAC_SIZE = 32
def get_key_iv(password, salt, workload=100000):
"""
Stretches the password and extracts an AES key, an HMAC key and an AES
initialization vector.
"""
stretched = pbkdf2_hmac('sha256', password, salt,
workload, AES_KEY_SIZE + IV_SIZE + HMAC_KEY_SIZE)
aes_key, stretched = stretched[:AES_KEY_SIZE], stretched[AES_KEY_SIZE:]
hmac_key, stretched = stretched[:HMAC_KEY_SIZE], stretched[HMAC_KEY_SIZE:]
iv = stretched[:IV_SIZE]
return aes_key, hmac_key, iv
def encrypt(key, plaintext, workload=100000):
"""
Encrypts `plaintext` with `key` using AES-128, an HMAC to verify integrity,
and PBKDF2 to stretch the given key.
The exact algorithm is specified in the module docstring.
"""
if isinstance(key, str):
key = key.encode('utf-8')
if isinstance(plaintext, str):
plaintext = plaintext.encode('utf-8')
salt = os.urandom(SALT_SIZE)
key, hmac_key, iv = get_key_iv(key, salt, workload)
ciphertext = AES(key).encrypt_cbc(plaintext, iv)
hmac = new_hmac(hmac_key, salt + ciphertext, 'sha256').digest()
assert len(hmac) == HMAC_SIZE
return hmac + salt + ciphertext
def decrypt(key, ciphertext, workload=100000):
"""
Decrypts `ciphertext` with `key` using AES-128, an HMAC to verify integrity,
and PBKDF2 to stretch the given key.
The exact algorithm is specified in the module docstring.
"""
assert len(
ciphertext) % 16 == 0, "Ciphertext must be made of full 16-byte blocks."
assert len(ciphertext) >= 32, """
Ciphertext must be at least 32 bytes long (16 byte salt + 16 byte block). To
encrypt or decrypt single blocks use `AES(key).decrypt_block(ciphertext)`.
"""
if isinstance(key, str):
key = key.encode('utf-8')
hmac, ciphertext = ciphertext[:HMAC_SIZE], ciphertext[HMAC_SIZE:]
salt, ciphertext = ciphertext[:SALT_SIZE], ciphertext[SALT_SIZE:]
key, hmac_key, iv = get_key_iv(key, salt, workload)
expected_hmac = new_hmac(hmac_key, salt + ciphertext, 'sha256').digest()
assert compare_digest(
hmac, expected_hmac), 'Ciphertext corrupted or tampered.'
return AES(key).decrypt_cbc(ciphertext, iv)
def benchmark():
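    # Rough speed test: encrypt 30,000 single 16-byte blocks with a fixed key.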
key = b'P' * 16
message = b'M' * 16
aes = AES(key)
for i in range(30000):
aes.encrypt_block(message)
__all__ = ['encrypt', 'decrypt', 'AES']
# Running the AES-128
if __name__ == '__main__':
key = 'master key'
message = 'a secret message'
ciphertext = encrypt(key, message)
print("Cipher text : {}".format(ciphertext))
plaintext = decrypt(key, ciphertext)
print("Plaintext : {}".format(str(plaintext, 'utf-8')))
|
[
"aes.AES",
"hmac.new",
"hashlib.pbkdf2_hmac",
"hmac.compare_digest",
"os.urandom"
] |
[((378, 469), 'hashlib.pbkdf2_hmac', 'pbkdf2_hmac', (['"""sha256"""', 'password', 'salt', 'workload', '(AES_KEY_SIZE + IV_SIZE + HMAC_KEY_SIZE)'], {}), "('sha256', password, salt, workload, AES_KEY_SIZE + IV_SIZE +\n HMAC_KEY_SIZE)\n", (389, 469), False, 'from hashlib import pbkdf2_hmac\n'), ((1115, 1136), 'os.urandom', 'os.urandom', (['SALT_SIZE'], {}), '(SALT_SIZE)\n', (1125, 1136), False, 'import os\n'), ((2290, 2325), 'hmac.compare_digest', 'compare_digest', (['hmac', 'expected_hmac'], {}), '(hmac, expected_hmac)\n', (2304, 2325), False, 'from hmac import new as new_hmac, compare_digest\n'), ((2494, 2502), 'aes.AES', 'AES', (['key'], {}), '(key)\n', (2497, 2502), False, 'from aes import AES\n'), ((1210, 1218), 'aes.AES', 'AES', (['key'], {}), '(key)\n', (1213, 1218), False, 'from aes import AES\n'), ((1257, 1304), 'hmac.new', 'new_hmac', (['hmac_key', '(salt + ciphertext)', '"""sha256"""'], {}), "(hmac_key, salt + ciphertext, 'sha256')\n", (1265, 1304), True, 'from hmac import new as new_hmac, compare_digest\n'), ((2222, 2269), 'hmac.new', 'new_hmac', (['hmac_key', '(salt + ciphertext)', '"""sha256"""'], {}), "(hmac_key, salt + ciphertext, 'sha256')\n", (2230, 2269), True, 'from hmac import new as new_hmac, compare_digest\n'), ((2384, 2392), 'aes.AES', 'AES', (['key'], {}), '(key)\n', (2387, 2392), False, 'from aes import AES\n')]
|
from django.test import TestCase
from .models import Image,Profile
from django.contrib.auth.models import User
# Create your tests here.
class ProfileTestCase(TestCase):
# SetUp method
def setUp(self):
#creating a user instance
self.user = User(username="chris",email="<EMAIL>",password="<PASSWORD>")
self.image = Profile(user=self.user,profile_avatar="ben_H62Kawu.jpeg",bio="Rolls-Royce Wraith")
def tearDown(self):
User.objects.all().delete()
Image.objects.all().delete()
# Testing Instance
def test_instance(self):
        self.assertTrue(isinstance(self.image, Profile))
def test_save_profile(self):
new_user = User(id=1,username="chris",email="<EMAIL>",password="<PASSWORD>")
new_user.save()
users = User.objects.all()
self.assertTrue(len(users)>=1)
def test_delete_profile(self):
new_user = User(id=1,username="chris",email="<EMAIL>",password="<PASSWORD>")
new_user.delete()
users = User.objects.all()
self.assertTrue(len(users)<=0)
class ImageTestCase(TestCase):
# SetUp method
def setUp(self):
#creating a user instance
self.user = User(username="chris",email="<EMAIL>",password="<PASSWORD>")
self.image = Image(image="default.jpg",tag_someone="ben2_2HRlWyC.jpeg",image_caption="ai at its best")
def tearDown(self):
User.objects.all().delete()
Image.objects.all().delete()
# Testing Instance
def test_instance(self):
self.assertTrue(isinstance(self.image,Image))
def test_save_image(self):
new_image =Image(image="default.jpg",tag_someone="ben2_2HRlWyC.jpeg",image_caption="ai at its best")
new_image.save()
images = Image.objects.all()
self.assertTrue(len(images)>=1)
def test_delete_image(self):
new_image =Image(id=1,image="default.jpg",tag_someone="ben2_2HRlWyC.jpeg",image_caption="ai at its best")
new_image.delete()
images = Image.objects.all()
self.assertTrue(len(images)==0)
|
[
"django.contrib.auth.models.User",
"django.contrib.auth.models.User.objects.all"
] |
[((283, 345), 'django.contrib.auth.models.User', 'User', ([], {'username': '"""chris"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='chris', email='<EMAIL>', password='<PASSWORD>')\n", (287, 345), False, 'from django.contrib.auth.models import User\n'), ((754, 822), 'django.contrib.auth.models.User', 'User', ([], {'id': '(1)', 'username': '"""chris"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(id=1, username='chris', email='<EMAIL>', password='<PASSWORD>')\n", (758, 822), False, 'from django.contrib.auth.models import User\n'), ((860, 878), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (876, 878), False, 'from django.contrib.auth.models import User\n'), ((981, 1049), 'django.contrib.auth.models.User', 'User', ([], {'id': '(1)', 'username': '"""chris"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(id=1, username='chris', email='<EMAIL>', password='<PASSWORD>')\n", (985, 1049), False, 'from django.contrib.auth.models import User\n'), ((1089, 1107), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (1105, 1107), False, 'from django.contrib.auth.models import User\n'), ((1277, 1339), 'django.contrib.auth.models.User', 'User', ([], {'username': '"""chris"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='chris', email='<EMAIL>', password='<PASSWORD>')\n", (1281, 1339), False, 'from django.contrib.auth.models import User\n'), ((1482, 1500), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (1498, 1500), False, 'from django.contrib.auth.models import User\n'), ((489, 507), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (505, 507), False, 'from django.contrib.auth.models import User\n')]
|
import numpy as np
def LoadData(FileName):
'''
Loads hollow data into structured numpy array of floats and returns a tuple
of column headers along with the structured array.
'''
data = np.genfromtxt(FileName, names=True, delimiter=',')
return data.dtype.names, data
def SegmentDataByAspect(FileName):
'''
Loads hollow data into structured numpy array of floats, and splits the
data into separate structured arrays by aspect band and returns a tuple
of column headers along with the structured arrays.
'''
Headers, A = LoadData(FileName)
NE = A[(A['Aspect'] >= 0) & (A['Aspect'] <= 85)]
SE = A[(A['Aspect'] > 85) & (A['Aspect'] <= 165)]
E = A[(A['Aspect'] >= 0) & (A['Aspect'] <= 165)]
W = A[(A['Aspect'] > 165)]
return Headers, NE, SE, E, W
def DataFilter(DataFile, Parameter, Value):
'''
    Split hollows around Value for the given Parameter. Returns Small and
    Large, two lists of IDs corresponding to hollows below and at/above
    Value, respectively.
'''
Headers, A = LoadData(DataFile)
Small = A[(A[Parameter] < Value)]['ID']
Large = A[(A[Parameter] >= Value)]['ID']
return Small, Large
def VegDataFilter(DataFile):
'''
    Split hollows into vegetation categories. Returns four lists of IDs, one
    per vegetation type (Veg codes 1-4).
'''
Headers, A = LoadData(DataFile)
a = A[(A['Veg'] == 1)]['ID']
b = A[(A['Veg'] == 2)]['ID']
c = A[(A['Veg'] == 3)]['ID']
d = A[(A['Veg'] == 4)]['ID']
return a, b, c, d
|
[
"numpy.genfromtxt"
] |
[((208, 258), 'numpy.genfromtxt', 'np.genfromtxt', (['FileName'], {'names': '(True)', 'delimiter': '""","""'}), "(FileName, names=True, delimiter=',')\n", (221, 258), True, 'import numpy as np\n')]
|
import os
from .version import __version__
def get_include():
''' Path of cython headers for compiling cython modules '''
return os.path.dirname(os.path.abspath(__file__))
|
[
"os.path.abspath"
] |
[((154, 179), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (169, 179), False, 'import os\n')]
|