code (stringlengths 22–1.05M) | apis (listlengths 1–3.31k) | extract_api (stringlengths 75–3.25M)
---|---|---
from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation
from time import sleep
from threading import Event, Thread
def test_default_values():
mbd = MockBlueDot()
assert mbd.device == "hci0"
assert mbd.port == 1
assert mbd.running
assert mbd.print_messages
assert mbd.double_press_time == 0.3
assert mbd.rotation_segments == 8
    assert mbd.when_client_connects is None
    assert mbd.when_client_disconnects is None
    assert mbd.when_pressed is None
    assert mbd.when_double_pressed is None
    assert mbd.when_moved is None
    assert mbd.when_released is None
    assert mbd.when_swiped is None
def test_modify_values():
mbd = MockBlueDot(device = "hci1", port = 2, auto_start_server = False, print_messages = False)
assert mbd.device == "hci1"
assert mbd.port == 2
assert not mbd.running
assert not mbd.print_messages
mbd.print_messages = True
assert mbd.print_messages
assert mbd.double_press_time == 0.3
mbd.double_press_time = 0.4
assert mbd.double_press_time == 0.4
assert mbd.rotation_segments == 8
mbd.rotation_segments = 16
assert mbd.rotation_segments == 16
def test_start_stop():
mbd = MockBlueDot(auto_start_server = False)
assert not mbd.running
mbd.start()
assert mbd.running
mbd.stop()
assert not mbd.running
def test_connect_disconnect():
mbd = MockBlueDot()
assert not mbd.is_connected
mbd.mock_client_connected()
assert mbd.wait_for_connection(1)
assert mbd.is_connected
mbd.mock_client_disconnected()
assert not mbd.is_connected
def test_when_connect_disconnect():
mbd = MockBlueDot()
event_connect = Event()
mbd.when_client_connects = lambda: event_connect.set()
event_disconnect = Event()
mbd.when_client_disconnects = lambda: event_disconnect.set()
assert not event_connect.is_set()
mbd.mock_client_connected()
assert event_connect.wait(1)
assert not event_disconnect.is_set()
mbd.mock_client_disconnected()
assert event_disconnect.wait(1)
def test_when_connect_disconnect_background():
mbd = MockBlueDot()
event_connect = Event()
mbd.set_when_client_connects(lambda: delay_function(event_connect.set, 0.2), background=True)
event_disconnect = Event()
mbd.set_when_client_disconnects(lambda: delay_function(event_disconnect.set, 0.2), background=True)
mbd.when_client_disconnects = lambda: event_disconnect.set()
assert not event_connect.is_set()
mbd.mock_client_connected()
assert not event_connect.is_set()
assert event_connect.wait(1)
assert not event_disconnect.is_set()
mbd.mock_client_disconnected()
assert not event_disconnect.is_set()
assert event_disconnect.wait(1)
def test_pressed_moved_released():
mbd = MockBlueDot()
mbd.mock_client_connected()
#initial value
assert not mbd.is_pressed
assert mbd.value == 0
#pressed
mbd.mock_blue_dot_pressed(0,0)
assert mbd.is_pressed
assert mbd.value == 1
#released
mbd.mock_blue_dot_released(0,0)
assert not mbd.is_pressed
assert mbd.value == 0
#wait_for_press
delay_function(lambda: mbd.mock_blue_dot_pressed(0,0), 0.5)
assert mbd.wait_for_press(1)
assert not mbd.wait_for_release(0)
#wait_for_release
delay_function(lambda: mbd.mock_blue_dot_released(0,0), 0.5)
assert mbd.wait_for_release(1)
assert not mbd.wait_for_press(0)
def test_double_press():
mbd = MockBlueDot()
mbd.mock_client_connected()
def simulate_double_press():
#sleep longer than the double press time, to clear any past double presses!
sleep(mbd.double_press_time + 0.1)
mbd.mock_blue_dot_pressed(0,0)
mbd.mock_blue_dot_released(0,0)
mbd.mock_blue_dot_pressed(0,0)
mbd.mock_blue_dot_released(0,0)
def simulate_failed_double_press():
sleep(mbd.double_press_time + 0.1)
mbd.mock_blue_dot_pressed(0,0)
mbd.mock_blue_dot_released(0,0)
sleep(mbd.double_press_time + 0.1)
mbd.mock_blue_dot_pressed(0,0)
mbd.mock_blue_dot_released(0,0)
# when_double_pressed
event_double_pressed = Event()
mbd.when_double_pressed = lambda: event_double_pressed.set()
simulate_failed_double_press()
assert not event_double_pressed.is_set()
simulate_double_press()
assert event_double_pressed.is_set()
# wait for double press
# double press the blue dot
delay_function(simulate_double_press, 0.2)
# wait for double press
assert mbd.wait_for_double_press(1)
    # don't double press the blue dot
delay_function(simulate_failed_double_press, 0.2)
assert not mbd.wait_for_double_press(1)
def test_when_pressed_moved_released():
mbd = MockBlueDot()
mbd.mock_client_connected()
#when_pressed
event_pressed = Event()
mbd.when_pressed = lambda: event_pressed.set()
#when_double_pressed
event_double_pressed = Event()
mbd.when_double_pressed = lambda: event_double_pressed.set()
#when_moved
event_moved = Event()
mbd.when_moved = lambda: event_moved.set()
#when_released
event_released = Event()
mbd.when_released = lambda: event_released.set()
assert not event_pressed.is_set()
mbd.mock_blue_dot_pressed(0,0)
assert event_pressed.is_set()
assert not event_moved.is_set()
mbd.mock_blue_dot_moved(1,1)
assert event_moved.is_set()
assert not event_released.is_set()
mbd.mock_blue_dot_released(0,0)
assert event_released.is_set()
assert not event_double_pressed.is_set()
mbd.mock_blue_dot_pressed(0,0)
assert event_double_pressed.is_set()
def test_when_pressed_moved_released_background():
mbd = MockBlueDot()
mbd.mock_client_connected()
#when_pressed
event_pressed = Event()
mbd.set_when_pressed(lambda: delay_function(event_pressed.set, 0.2), background=True)
#when_double_pressed
event_double_pressed = Event()
mbd.set_when_double_pressed(lambda: delay_function(event_double_pressed.set, 0.2), background=True)
#when_moved
event_moved = Event()
mbd.set_when_moved(lambda: delay_function(event_moved.set, 0.2), background=True)
#when_released
event_released = Event()
mbd.set_when_released(lambda: delay_function(event_released.set, 0.2), background=True)
    # test that the events don't block
assert not event_pressed.is_set()
mbd.mock_blue_dot_pressed(0,0)
assert not event_pressed.is_set()
assert event_pressed.wait(1)
assert not event_moved.is_set()
mbd.mock_blue_dot_moved(1,1)
assert not event_moved.is_set()
assert event_moved.wait(1)
assert not event_released.is_set()
mbd.mock_blue_dot_released(0,0)
assert not event_released.is_set()
assert event_released.wait(1)
    # set pressed, moved, released to None so they don't wait
mbd.set_when_pressed(None)
mbd.set_when_moved(None)
mbd.set_when_released(None)
mbd.mock_blue_dot_pressed(0,0)
mbd.mock_blue_dot_moved(1,1)
mbd.mock_blue_dot_released(0,0)
assert not event_double_pressed.is_set()
mbd.mock_blue_dot_pressed(0,0)
assert not event_double_pressed.is_set()
assert event_double_pressed.wait(1)
def test_position():
mbd = MockBlueDot()
mbd.mock_client_connected()
mbd.mock_blue_dot_pressed(0,0)
assert not mbd.position.top
assert mbd.position.middle
assert not mbd.position.bottom
assert not mbd.position.left
assert not mbd.position.right
mbd.mock_blue_dot_moved(1,0)
assert not mbd.position.top
assert not mbd.position.middle
assert not mbd.position.bottom
assert not mbd.position.left
assert mbd.position.right
mbd.mock_blue_dot_moved(-1,0)
assert not mbd.position.top
assert not mbd.position.middle
assert not mbd.position.bottom
assert mbd.position.left
assert not mbd.position.right
mbd.mock_blue_dot_moved(0,1)
assert mbd.position.top
assert not mbd.position.middle
assert not mbd.position.bottom
assert not mbd.position.left
assert not mbd.position.right
mbd.mock_blue_dot_moved(0,-1)
assert not mbd.position.top
assert not mbd.position.middle
assert mbd.position.bottom
assert not mbd.position.left
assert not mbd.position.right
mbd.mock_blue_dot_moved(0.1234, -0.4567)
assert mbd.position.x == 0.1234
assert mbd.position.y == -0.4567
mbd.mock_blue_dot_moved(1, 0)
assert mbd.position.distance == 1
assert mbd.position.angle == 90
def test_interaction():
mbd = MockBlueDot()
mbd.mock_client_connected()
    assert mbd.interaction is None
mbd.mock_blue_dot_pressed(-1,0)
assert mbd.interaction.active
assert len(mbd.interaction.positions) == 1
assert mbd.interaction.distance == 0
assert mbd.interaction.pressed_position.x == -1
assert mbd.interaction.pressed_position.y == 0
assert mbd.interaction.current_position.x == -1
assert mbd.interaction.current_position.y == 0
    assert mbd.interaction.previous_position is None
    assert mbd.interaction.released_position is None
mbd.mock_blue_dot_moved(0,0)
assert mbd.interaction.active
assert len(mbd.interaction.positions) == 2
assert mbd.interaction.distance == 1
assert mbd.interaction.pressed_position.x == -1
assert mbd.interaction.pressed_position.y == 0
assert mbd.interaction.current_position.x == 0
assert mbd.interaction.current_position.y == 0
assert mbd.interaction.previous_position.x == -1
assert mbd.interaction.previous_position.y == 0
    assert mbd.interaction.released_position is None
mbd.mock_blue_dot_released(1,0)
assert not mbd.interaction.active
assert len(mbd.interaction.positions) == 3
assert mbd.interaction.distance == 2
assert mbd.interaction.pressed_position.x == -1
assert mbd.interaction.pressed_position.y == 0
assert mbd.interaction.current_position.x == 1
assert mbd.interaction.current_position.y == 0
assert mbd.interaction.previous_position.x == 0
assert mbd.interaction.previous_position.y == 0
assert mbd.interaction.released_position.x == 1
assert mbd.interaction.released_position.y == 0
def test_swipe():
mbd = MockBlueDot()
mbd.mock_client_connected()
def simulate_swipe(
pressed_x, pressed_y,
moved_x, moved_y,
released_x, released_y):
mbd.mock_blue_dot_pressed(pressed_x, pressed_y)
mbd.mock_blue_dot_moved(moved_x, moved_y)
mbd.mock_blue_dot_released(released_x, released_y)
#wait_for_swipe
delay_function(lambda: simulate_swipe(-1,0,0,0,1,0), 0.5)
assert mbd.wait_for_swipe(1)
#when_swiped
event_swiped = Event()
mbd.when_swiped = lambda: event_swiped.set()
assert not event_swiped.is_set()
#simulate swipe left to right
simulate_swipe(-1,0,0,0,1,0)
#check event
assert event_swiped.is_set()
#get the swipe
swipe = BlueDotSwipe(mbd.interaction)
assert swipe.right
assert not swipe.left
assert not swipe.up
assert not swipe.down
#right to left
event_swiped.clear()
simulate_swipe(1,0,0,0,-1,0)
assert event_swiped.is_set()
swipe = BlueDotSwipe(mbd.interaction)
assert not swipe.right
assert swipe.left
assert not swipe.up
assert not swipe.down
#bottom to top
event_swiped.clear()
simulate_swipe(0,-1,0,0,0,1)
assert event_swiped.is_set()
swipe = BlueDotSwipe(mbd.interaction)
assert not swipe.right
assert not swipe.left
assert swipe.up
assert not swipe.down
#top to bottom
event_swiped.clear()
simulate_swipe(0,1,0,0,0,-1)
assert event_swiped.is_set()
swipe = BlueDotSwipe(mbd.interaction)
assert not swipe.right
assert not swipe.left
assert not swipe.up
assert swipe.down
# background
event_swiped.clear()
mbd.set_when_swiped(lambda: delay_function(event_swiped.set, 0.2), background=True)
simulate_swipe(0,1,0,0,0,-1)
assert not event_swiped.is_set()
assert event_swiped.wait(1)
def test_callback_in_class():
class CallbackClass():
def __init__(self):
self.event = Event()
def no_pos(self):
self.event.set()
self.pos = None
def with_pos(self, pos):
self.event.set()
self.pos = pos
cc = CallbackClass()
mbd = MockBlueDot()
mbd.mock_client_connected()
mbd.when_pressed = cc.no_pos
mbd.mock_blue_dot_pressed(0,0)
assert cc.event.is_set()
assert cc.pos is None
mbd.mock_blue_dot_released(0,0)
cc.event.clear()
mbd.when_pressed = cc.with_pos
mbd.mock_blue_dot_pressed(0,0)
assert cc.event.is_set()
assert cc.pos.middle
def test_rotation():
mbd = MockBlueDot()
mbd.mock_client_connected()
event_rotated = Event()
mbd.when_rotated = lambda: event_rotated.set()
assert not event_rotated.is_set()
#press the blue dot, no rotation
mbd.mock_blue_dot_pressed(-0.1,1)
assert not event_rotated.is_set()
r = BlueDotRotation(mbd.interaction, mbd.rotation_segments)
assert not r.valid
assert r.value == 0
assert not r.clockwise
assert not r.anti_clockwise
#rotate clockwise
event_rotated.clear()
mbd.mock_blue_dot_moved(0.1,1)
assert event_rotated.is_set()
r = BlueDotRotation(mbd.interaction, mbd.rotation_segments)
assert r.value == 1
assert r.valid
assert r.clockwise
assert not r.anti_clockwise
#rotate anti-clockwise
event_rotated.clear()
mbd.mock_blue_dot_moved(-0.1,1)
assert event_rotated.is_set()
r = BlueDotRotation(mbd.interaction, mbd.rotation_segments)
assert r.value == -1
assert r.valid
assert not r.clockwise
assert r.anti_clockwise
# background
# rotate clockwise again
event_rotated.clear()
mbd.set_when_rotated(lambda: delay_function(event_rotated.set, 0.2), background=True)
mbd.mock_blue_dot_moved(0.1,1)
assert not event_rotated.is_set()
assert event_rotated.wait(1)
def test_allow_pairing():
mbd = MockBlueDot()
assert not mbd.adapter.discoverable
assert not mbd.adapter.pairable
mbd.allow_pairing()
assert mbd.adapter.discoverable
assert mbd.adapter.pairable
def test_dot_appearance():
mbd = MockBlueDot()
assert mbd.color == "blue"
assert mbd.border == False
assert mbd.square == False
assert mbd.visible == True
mbd.color = "red"
mbd.border = True
mbd.square = True
mbd.visible = False
assert mbd.color == "red"
assert mbd.border == True
assert mbd.square == True
assert mbd.visible == False
def test_dot_colors():
from bluedot.colors import BLUE, RED, GREEN, YELLOW
mbd = MockBlueDot()
assert mbd.color == "blue"
assert mbd.color == (0,0,255)
assert mbd.color == BLUE
assert mbd.color == "#0000ff"
assert mbd.color == "#0000ffff"
mbd.color = RED
assert mbd.color == (255,0,0)
assert mbd.color == "red"
assert mbd.color == "#ff0000"
assert mbd.color == "#ff0000ff"
mbd.color = "green"
assert mbd.color == GREEN
assert mbd.color == (0,128,0)
assert mbd.color == "#008000"
assert mbd.color == "#008000ff"
mbd.color = "#ffff00"
assert mbd.color == YELLOW
assert mbd.color == "yellow"
assert mbd.color == (255,255,0)
assert mbd.color == "#ffff00ff"
mbd.color = "#ffffff11"
assert mbd.color == "#ffffff11"
def delay_function(func, time):
delayed_thread = Thread(target = _delayed_function, args = (func, time))
delayed_thread.start()
def _delayed_function(func, time):
sleep(time)
func()
|
[
"threading.Thread",
"bluedot.MockBlueDot",
"bluedot.BlueDotRotation",
"time.sleep",
"threading.Event",
"bluedot.BlueDotSwipe"
] |
[((160, 173), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (171, 173), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((677, 763), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {'device': '"""hci1"""', 'port': '(2)', 'auto_start_server': '(False)', 'print_messages': '(False)'}), "(device='hci1', port=2, auto_start_server=False, print_messages=\n False)\n", (688, 763), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((1202, 1238), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {'auto_start_server': '(False)'}), '(auto_start_server=False)\n', (1213, 1238), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((1391, 1404), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (1402, 1404), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((1649, 1662), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (1660, 1662), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((1684, 1691), 'threading.Event', 'Event', ([], {}), '()\n', (1689, 1691), False, 'from threading import Event, Thread\n'), ((1775, 1782), 'threading.Event', 'Event', ([], {}), '()\n', (1780, 1782), False, 'from threading import Event, Thread\n'), ((2123, 2136), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (2134, 2136), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((2158, 2165), 'threading.Event', 'Event', ([], {}), '()\n', (2163, 2165), False, 'from threading import Event, Thread\n'), ((2292, 2299), 'threading.Event', 'Event', ([], {}), '()\n', (2297, 2299), False, 'from threading import Event, Thread\n'), ((2811, 2824), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (2822, 2824), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((3494, 3507), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (3505, 3507), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((4198, 4205), 'threading.Event', 'Event', ([], {}), '()\n', (4203, 4205), False, 'from threading import Event, Thread\n'), ((4787, 4800), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (4798, 4800), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((4872, 4879), 'threading.Event', 'Event', ([], {}), '()\n', (4877, 4879), False, 'from threading import Event, Thread\n'), ((4984, 4991), 'threading.Event', 'Event', ([], {}), '()\n', (4989, 4991), False, 'from threading import Event, Thread\n'), ((5092, 5099), 'threading.Event', 'Event', ([], {}), '()\n', (5097, 5099), False, 'from threading import Event, Thread\n'), ((5188, 5195), 'threading.Event', 'Event', ([], {}), '()\n', (5193, 5195), False, 'from threading import Event, Thread\n'), ((5754, 5767), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (5765, 5767), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((5839, 5846), 'threading.Event', 'Event', ([], {}), '()\n', (5844, 5846), False, 'from threading import Event, Thread\n'), ((5990, 5997), 'threading.Event', 'Event', ([], {}), '()\n', (5995, 5997), False, 'from threading import Event, Thread\n'), ((6141, 6148), 'threading.Event', 'Event', ([], {}), '()\n', (6146, 6148), False, 'from threading import Event, Thread\n'), ((6276, 6283), 'threading.Event', 'Event', ([], {}), '()\n', (6281, 6283), False, 'from threading import Event, Thread\n'), ((7304, 7317), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (7315, 7317), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((8612, 8625), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (8623, 8625), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((10288, 10301), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (10299, 10301), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((10769, 10776), 'threading.Event', 'Event', ([], {}), '()\n', (10774, 10776), False, 'from threading import Event, Thread\n'), ((11012, 11041), 'bluedot.BlueDotSwipe', 'BlueDotSwipe', (['mbd.interaction'], {}), '(mbd.interaction)\n', (11024, 11041), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((11264, 11293), 'bluedot.BlueDotSwipe', 'BlueDotSwipe', (['mbd.interaction'], {}), '(mbd.interaction)\n', (11276, 11293), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((11516, 11545), 'bluedot.BlueDotSwipe', 'BlueDotSwipe', (['mbd.interaction'], {}), '(mbd.interaction)\n', (11528, 11545), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((11768, 11797), 'bluedot.BlueDotSwipe', 'BlueDotSwipe', (['mbd.interaction'], {}), '(mbd.interaction)\n', (11780, 11797), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((12460, 12473), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (12471, 12473), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((12849, 12862), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (12860, 12862), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((12916, 12923), 'threading.Event', 'Event', ([], {}), '()\n', (12921, 12923), False, 'from threading import Event, Thread\n'), ((13135, 13190), 'bluedot.BlueDotRotation', 'BlueDotRotation', (['mbd.interaction', 'mbd.rotation_segments'], {}), '(mbd.interaction, mbd.rotation_segments)\n', (13150, 13190), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((13423, 13478), 'bluedot.BlueDotRotation', 'BlueDotRotation', (['mbd.interaction', 'mbd.rotation_segments'], {}), '(mbd.interaction, mbd.rotation_segments)\n', (13438, 13478), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((13709, 13764), 'bluedot.BlueDotRotation', 'BlueDotRotation', (['mbd.interaction', 'mbd.rotation_segments'], {}), '(mbd.interaction, mbd.rotation_segments)\n', (13724, 13764), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((14174, 14187), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (14185, 14187), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((14395, 14408), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (14406, 14408), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((14836, 14849), 'bluedot.MockBlueDot', 'MockBlueDot', ([], {}), '()\n', (14847, 14849), False, 'from bluedot import MockBlueDot, BlueDotSwipe, BlueDotRotation\n'), ((15610, 15661), 'threading.Thread', 'Thread', ([], {'target': '_delayed_function', 'args': '(func, time)'}), '(target=_delayed_function, args=(func, time))\n', (15616, 15661), False, 'from threading import Event, Thread\n'), ((15733, 15744), 'time.sleep', 'sleep', (['time'], {}), '(time)\n', (15738, 15744), False, 'from time import sleep\n'), ((3666, 3700), 'time.sleep', 'sleep', (['(mbd.double_press_time + 0.1)'], {}), '(mbd.double_press_time + 0.1)\n', (3671, 3700), False, 'from time import sleep\n'), ((3908, 3942), 'time.sleep', 'sleep', (['(mbd.double_press_time + 0.1)'], {}), '(mbd.double_press_time + 0.1)\n', (3913, 3942), False, 'from time import sleep\n'), ((4030, 4064), 'time.sleep', 'sleep', (['(mbd.double_press_time + 0.1)'], {}), '(mbd.double_press_time + 0.1)\n', (4035, 4064), False, 'from time import sleep\n'), ((12242, 12249), 'threading.Event', 'Event', ([], {}), '()\n', (12247, 12249), False, 'from threading import Event, Thread\n')]
|
"""Data Provider module for providing data blocks made from similar stocks over a set time period, but separated.
This data provider is not intended to be used outside of this module; instead, upon import, this module creates an
instance of SplitBlockProvider and registers it with the global DataProviderRegistry. To register a consumer to
receive data from this provider, use the id provided by data_provider_static_names.SPLIT_BLOCK_PROVIDER_ID.
The separation, or split, referred to by this module means that the data block for one cluster is not combined with
the data blocks from other clusters into one large training set. This is in contrast to the ClusteredBlockProvider,
which combines its clusters' blocks into a larger data set.
The detailed argument list required by this provider can be found in the generate_data method.
"""
from datetime import datetime as dt, timedelta as td
import configparser
from data_providing_module import configurable_registry
from data_providing_module import data_provider_registry
from data_providing_module.data_providers import data_provider_static_names
from stock_data_analysis_module.data_processing_module import stock_cluster_data_manager
from general_utils.config import config_util
ENABLED_CONFIG_ID = "enabled"
class SplitBlockProvider(data_provider_registry.DataProviderBase):
"""Data Provider that provides data constructed by clustering stocks, but keeping the cluster's data separate
The organization of these clusters is handled according to the specifications established in the
StockClusterDataManager, and will operate on the time frame [start_date, end_date]. This time frame is currently
fixed where end_date is the current date, and start_date is 52 * 4 weeks ago (approximately four years).
Additionally this provider supports configuration of certain parameters through the configuration file. These
parameters are listed in the Configurable Parameters section.
Configurable Parameters:
enabled: Whether this provider is enabled for consumers to receive data from.
"""
def generate_prediction_data(self, *args, **kwargs):
"""Generates data that consumers will use to make predictions for the next trading day.
Currently there is no implementation for this, and calling the method will result in a NotImplementedError
"""
raise NotImplementedError()
def __init__(self):
"""Initializes a SplitBlockProvider and registers it to the global DataProviderRegistry
"""
super(SplitBlockProvider, self).__init__()
configurable_registry.config_registry.register_configurable(self)
def write_default_configuration(self, section: "configparser.SectionProxy"):
"""Writes default configuration values into the SectionProxy provided.
For more details see abstract class documentation.
"""
section[ENABLED_CONFIG_ID] = "True"
def load_configuration(self, parser: "configparser.ConfigParser"):
"""Attempts to load the configurable parameters for this provider from the provided parser.
For more details see abstract class documentation.
"""
section = config_util.create_type_section(parser, self)
if not parser.has_option(section.name, ENABLED_CONFIG_ID):
self.write_default_configuration(section)
enabled = parser.getboolean(section.name, ENABLED_CONFIG_ID)
if enabled:
data_provider_registry.registry.register_provider(data_provider_static_names.SPLIT_BLOCK_PROVIDER_ID, self)
def generate_data(self, *args, **kwargs):
"""Generates data for Consumers to use by clustering together stocks in a time period,
The time period for cluster creation is a period of 52 * 4 weeks (approximately 4 years).
Consumers requiring data from this provider are expected to provide the arguments specified in the
*args entry of the Arguments section
The split portion of this data provider is that the data returned is split into different entries in a
dictionary, keyed off of the root stock's ticker. The root stock is the stock that the cluster is based around
and all other data in the cluster is deemed as being similar to the root stock's data.
Arguments:
*args:
List of arguments that are expected to be in the following order, with the specified types
train_columns: List[str]
List of names of columns from a StockDataTable. These will be used to retrieve data
from the database and construct the returned data blocks
expectation_columns: List[int]
List of integers representing the indices of the columns to be used as the target data
in the generation of the data blocks
Returns:
See StockClusterDataManager.retrieve_training_data_movement_targets_split
"""
if len(args) < 1:
raise ValueError('Expected at least the first argument from the following list;' +
' train_columns: List["str"], expectation_columns: List["int"]')
columns = args[0]
expectation_columns = None
if len(args) == 2:
expectation_columns = args[1]
start_date = dt.now() - td(weeks=(52 * 4))
start_date = start_date.isoformat()[:10].replace('-', '/')
end_date = dt.now().isoformat()[:10].replace('-', '/')
data_retriever = stock_cluster_data_manager.StockClusterDataManager(start_date, end_date, column_list=columns)
return data_retriever.retrieveTrainingDataMovementTargetsSplit(expectation_columns=expectation_columns)
provider = SplitBlockProvider()
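# A minimal direct-invocation sketch (the column names below are hypothetical;
# real names come from a StockDataTable). Consumers normally receive this data
# through the DataProviderRegistry rather than calling the provider directly.
#
#   blocks = provider.generate_data(['high_price', 'low_price'], [1])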
|
[
"data_providing_module.configurable_registry.config_registry.register_configurable",
"stock_data_analysis_module.data_processing_module.stock_cluster_data_manager.StockClusterDataManager",
"datetime.datetime.now",
"datetime.timedelta",
"data_providing_module.data_provider_registry.registry.register_provider",
"general_utils.config.config_util.create_type_section"
] |
[((2646, 2711), 'data_providing_module.configurable_registry.config_registry.register_configurable', 'configurable_registry.config_registry.register_configurable', (['self'], {}), '(self)\n', (2705, 2711), False, 'from data_providing_module import configurable_registry\n'), ((3265, 3310), 'general_utils.config.config_util.create_type_section', 'config_util.create_type_section', (['parser', 'self'], {}), '(parser, self)\n', (3296, 3310), False, 'from general_utils.config import config_util\n'), ((5653, 5750), 'stock_data_analysis_module.data_processing_module.stock_cluster_data_manager.StockClusterDataManager', 'stock_cluster_data_manager.StockClusterDataManager', (['start_date', 'end_date'], {'column_list': 'columns'}), '(start_date, end_date,\n column_list=columns)\n', (5703, 5750), False, 'from stock_data_analysis_module.data_processing_module import stock_cluster_data_manager\n'), ((3538, 3650), 'data_providing_module.data_provider_registry.registry.register_provider', 'data_provider_registry.registry.register_provider', (['data_provider_static_names.SPLIT_BLOCK_PROVIDER_ID', 'self'], {}), '(data_provider_static_names\n .SPLIT_BLOCK_PROVIDER_ID, self)\n', (3587, 3650), False, 'from data_providing_module import data_provider_registry\n'), ((5465, 5473), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (5471, 5473), True, 'from datetime import datetime as dt, timedelta as td\n'), ((5476, 5492), 'datetime.timedelta', 'td', ([], {'weeks': '(52 * 4)'}), '(weeks=52 * 4)\n', (5478, 5492), True, 'from datetime import datetime as dt, timedelta as td\n'), ((5583, 5591), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (5589, 5591), True, 'from datetime import datetime as dt, timedelta as td\n')]
|
import sys
import time
def create_versioned_files(src_filename, filenames):
timestamp = int(time.time())
with open(src_filename, encoding='utf-8') as html_file:
html_file_content = html_file.read()
for filename in filenames:
usages_count = html_file_content.count(filename)
if usages_count != 1:
print('ERROR: Found {} usages for file {} (expected exactly 1)'.format(usages_count, filename))
return
new_filename = "{}?v={}".format(filename, timestamp)
html_file_content = html_file_content.replace(filename, new_filename)
with open('versioned.' + src_filename, mode="w", encoding="utf-8") as f:
f.write(html_file_content)
if __name__ == '__main__':
create_versioned_files(sys.argv[1], sys.argv[2:])
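# Example invocation (the script name and file names here are hypothetical):
# rewrites each asset reference in index.html to e.g. 'app.js?v=1700000000'
# and writes the result to 'versioned.index.html'.
#
#   python create_versioned_files.py index.html app.js style.css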
|
[
"time.time"
] |
[((98, 109), 'time.time', 'time.time', ([], {}), '()\n', (107, 109), False, 'import time\n')]
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="bgflow",
version="0.1",
description="Boltzmann Generators in PyTorch",
author="<NAME>, <NAME>, <NAME>, <NAME>",
author_email="<EMAIL>",
url="https://www.mi.fu-berlin.de/en/math/groups/comp-mol-bio/index.html",
packages=find_packages()
)
|
[
"setuptools.find_packages"
] |
[((328, 343), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (341, 343), False, 'from setuptools import setup, find_packages\n')]
|
from __future__ import print_function, division
import matplotlib.pyplot as plt
import math
from sklearn.metrics import auc
import numpy as np
import cv2
import os, sys
int_ = lambda x: int(round(x))
def IoU( r1, r2 ):
x11, y11, w1, h1 = r1
x21, y21, w2, h2 = r2
x12 = x11 + w1; y12 = y11 + h1
x22 = x21 + w2; y22 = y21 + h2
x_overlap = max(0, min(x12,x22) - max(x11,x21) )
y_overlap = max(0, min(y12,y22) - max(y11,y21) )
I = 1. * x_overlap * y_overlap
U = (y12-y11)*(x12-x11) + (y22-y21)*(x22-x21) - I
J = I/U
return J
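# Worked example: two unit squares offset horizontally by half a side overlap
# in a 0.5 x 1 region, so IoU = 0.5 / (1 + 1 - 0.5) = 1/3.
#
#   IoU((0, 0, 1, 1), (0.5, 0, 1, 1))  # -> 0.333...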
def evaluate_iou( rect_gt, rect_pred ):
# score of iou
score = [ IoU(i, j) for i, j in zip(rect_gt, rect_pred) ]
return score
def compute_score( x, w, h ):
# score of response strength
k = np.ones( (h, w) )
score = cv2.filter2D(x, -1, k)
score[:, :w//2] = 0
score[:, math.ceil(-w/2):] = 0
score[:h//2, :] = 0
score[math.ceil(-h/2):, :] = 0
return score
def locate_bbox( a, w, h ):
row = np.argmax( np.max(a, axis=1) )
col = np.argmax( np.max(a, axis=0) )
x = col - 1. * w / 2
y = row - 1. * h / 2
return x, y, w, h
def score2curve( score, thres_delta = 0.01 ):
thres = np.linspace( 0, 1, int(1./thres_delta)+1 )
success_num = []
for th in thres:
success_num.append( np.sum(score >= (th+1e-6)) )
success_rate = np.array(success_num) / len(score)
return thres, success_rate
def all_sample_iou( score_list, gt_list):
num_samples = len(score_list)
iou_list = []
for idx in range(num_samples):
score, image_gt = score_list[idx], gt_list[idx]
w, h = image_gt[2:]
pred_rect = locate_bbox( score, w, h )
iou = IoU( image_gt, pred_rect )
iou_list.append( iou )
return iou_list
def plot_success_curve( iou_score, title='' ):
thres, success_rate = score2curve( iou_score, thres_delta = 0.05 )
    auc_ = np.mean(success_rate[:-1])  # same AUC protocol as used in previous template matching papers
    # auc_ = auc(thres, success_rate)  # this would be the actual AUC
plt.figure()
plt.grid(True)
plt.xticks(np.linspace(0,1,11))
plt.yticks(np.linspace(0,1,11))
plt.ylim(0, 1)
plt.title(title + 'auc={}'.format(auc_))
plt.plot( thres, success_rate )
plt.show()
|
[
"matplotlib.pyplot.show",
"numpy.sum",
"cv2.filter2D",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.plot",
"math.ceil",
"numpy.ones",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.max",
"numpy.array",
"numpy.linspace",
"matplotlib.pyplot.grid"
] |
[((777, 792), 'numpy.ones', 'np.ones', (['(h, w)'], {}), '((h, w))\n', (784, 792), True, 'import numpy as np\n'), ((807, 829), 'cv2.filter2D', 'cv2.filter2D', (['x', '(-1)', 'k'], {}), '(x, -1, k)\n', (819, 829), False, 'import cv2\n'), ((1921, 1947), 'numpy.mean', 'np.mean', (['success_rate[:-1]'], {}), '(success_rate[:-1])\n', (1928, 1947), True, 'import numpy as np\n'), ((2087, 2099), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2097, 2099), True, 'import matplotlib.pyplot as plt\n'), ((2104, 2118), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (2112, 2118), True, 'import matplotlib.pyplot as plt\n'), ((2195, 2209), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(1)'], {}), '(0, 1)\n', (2203, 2209), True, 'import matplotlib.pyplot as plt\n'), ((2259, 2288), 'matplotlib.pyplot.plot', 'plt.plot', (['thres', 'success_rate'], {}), '(thres, success_rate)\n', (2267, 2288), True, 'import matplotlib.pyplot as plt\n'), ((2295, 2305), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2303, 2305), True, 'import matplotlib.pyplot as plt\n'), ((1016, 1033), 'numpy.max', 'np.max', (['a'], {'axis': '(1)'}), '(a, axis=1)\n', (1022, 1033), True, 'import numpy as np\n'), ((1057, 1074), 'numpy.max', 'np.max', (['a'], {'axis': '(0)'}), '(a, axis=0)\n', (1063, 1074), True, 'import numpy as np\n'), ((1370, 1391), 'numpy.array', 'np.array', (['success_num'], {}), '(success_num)\n', (1378, 1391), True, 'import numpy as np\n'), ((2134, 2155), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(11)'], {}), '(0, 1, 11)\n', (2145, 2155), True, 'import numpy as np\n'), ((2170, 2191), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(11)'], {}), '(0, 1, 11)\n', (2181, 2191), True, 'import numpy as np\n'), ((1322, 1349), 'numpy.sum', 'np.sum', (['(score >= th + 1e-06)'], {}), '(score >= th + 1e-06)\n', (1328, 1349), True, 'import numpy as np\n'), ((867, 884), 'math.ceil', 'math.ceil', (['(-w / 2)'], {}), '(-w / 2)\n', (876, 884), False, 'import math\n'), ((923, 940), 'math.ceil', 'math.ceil', (['(-h / 2)'], {}), '(-h / 2)\n', (932, 940), False, 'import math\n')]
|
#
# Compare lithium-ion battery models with and without particle size distribution
#
import numpy as np
import pybamm
pybamm.set_logging_level("INFO")
# load models
models = [
pybamm.lithium_ion.DFN(name="standard DFN"),
pybamm.lithium_ion.DFN(name="particle DFN"),
]
# load parameter values
params = [models[0].default_parameter_values, models[1].default_parameter_values]
def negative_distribution(x):
return 1 + 2 * x / models[1].param.l_n
def positive_distribution(x):
return 1 + 2 * (1 - x) / models[1].param.l_p
params[1]["Negative particle distribution in x"] = negative_distribution
params[1]["Positive particle distribution in x"] = positive_distribution
# set up and solve simulations
t_eval = np.linspace(0, 3600, 100)
sols = []
for model, param in zip(models, params):
sim = pybamm.Simulation(model, parameter_values=param)
sol = sim.solve(t_eval)
sols.append(sol)
output_variables = [
"Negative particle surface concentration",
"Electrolyte concentration",
"Positive particle surface concentration",
"Current [A]",
"Negative electrode potential [V]",
"Electrolyte potential [V]",
"Positive electrode potential [V]",
"Terminal voltage [V]",
"Negative particle distribution in x",
"Positive particle distribution in x",
]
# plot
plot = pybamm.QuickPlot(sols, output_variables=output_variables)
plot.dynamic_plot()
|
[
"pybamm.set_logging_level",
"pybamm.Simulation",
"numpy.linspace",
"pybamm.QuickPlot",
"pybamm.lithium_ion.DFN"
] |
[((118, 150), 'pybamm.set_logging_level', 'pybamm.set_logging_level', (['"""INFO"""'], {}), "('INFO')\n", (142, 150), False, 'import pybamm\n'), ((730, 755), 'numpy.linspace', 'np.linspace', (['(0)', '(3600)', '(100)'], {}), '(0, 3600, 100)\n', (741, 755), True, 'import numpy as np\n'), ((1327, 1384), 'pybamm.QuickPlot', 'pybamm.QuickPlot', (['sols'], {'output_variables': 'output_variables'}), '(sols, output_variables=output_variables)\n', (1343, 1384), False, 'import pybamm\n'), ((181, 224), 'pybamm.lithium_ion.DFN', 'pybamm.lithium_ion.DFN', ([], {'name': '"""standard DFN"""'}), "(name='standard DFN')\n", (203, 224), False, 'import pybamm\n'), ((230, 273), 'pybamm.lithium_ion.DFN', 'pybamm.lithium_ion.DFN', ([], {'name': '"""particle DFN"""'}), "(name='particle DFN')\n", (252, 273), False, 'import pybamm\n'), ((817, 865), 'pybamm.Simulation', 'pybamm.Simulation', (['model'], {'parameter_values': 'param'}), '(model, parameter_values=param)\n', (834, 865), False, 'import pybamm\n')]
|
from django.shortcuts import render
from django.core.mail import send_mail
from django.conf import settings
# from .forms import contactForms
# Create your views here.
def contact(request):
context = locals()
template = 'contact.html'
return render(request,template,context)
'''def contact(request):
title = 'Contact'
form = contactForms(request.POST or None)
confirm_message = None
if form.is_valid():
name = form.cleaned_data['name']
comment = form.cleaned_data['comment']
subject = 'message from MYSITE.com'
message = '%s %s' % (comment , name)
emailFrom = form.cleaned_data['email']
emailTo = [settings.EMAIL_HOST_USER]
send_mail(subject ,message,emailFrom,emailTo, fail_silently=True)
title = 'Thanks!'
confirm_message = 'Thanks for your message, we will get back to you!!'
form = None
context = {'title': title, 'form': form,'confirm_message': confirm_message,}
template = 'contact.html'
return render(request,template,context)'''
|
[
"django.shortcuts.render"
] |
[((246, 280), 'django.shortcuts.render', 'render', (['request', 'template', 'context'], {}), '(request, template, context)\n', (252, 280), False, 'from django.shortcuts import render\n')]
|
#!/usr/bin/env python
"""
Use this node to perform indoor zone location using the metraTec IPS tracking system. A prerequisite for using this
node is a running receiver node that handles communication with the receiver and thus with the beacons in the vicinity.
Also, make sure that you have defined your zones correctly in the YAML config file.
Subscribed topics:
- ips/receiver/raw (indoor_positioning/StringStamped):
Raw messages received by the UWB receiver
Published topics:
- ips/receiver/current_zone/name (indoor_positioning/StringStamped):
Name of the zone the receiver is currently in
- ips/receiver/current_zone/polygon (geometry_msgs/PolygonStamped):
Polygon comprising the current zone
- ips/receiver/zone_leave (indoor_positioning/StringStamped):
Name of the zone that the receiver has left. Is published at the moment a zone-leave occurs
- ips/receiver/zone_enter (indoor_positioning/StringStamped):
Name of the zone that the receiver has entered. Is published at the moment a zone-enter occurs
Parameters:
- ~config_file (string, default='PKG_DIR/config/zones.yml'):
Path to the configuration file of zones and beacons relative to the package directory
- ~rate (double, default=1):
The publishing rate in messages per second
- ~bcn_len (int, default=2*number_of_beacons):
Buffer length for BCN messages
"""
import rospy
import os
import rospkg
from geometry_msgs.msg import PolygonStamped, Point32
from indoor_positioning.msg import StringStamped
from indoor_positioning.positioning import Positioning
class IPS:
"""Configure ROS node for metraTec IPS indoor positioning system for zone location."""
def __init__(self):
# subscribe to raw messages from USB stick
self.receiver_sub = rospy.Subscriber('ips/receiver/raw', StringStamped, self.callback)
# get directory of config file
config_dir = rospy.get_param('~config_file') if rospy.has_param('~config_file') else 'config/zones.yml'
abs_dir = os.path.join(rospkg.RosPack().get_path('indoor_positioning'), config_dir)
# initialize positioning class
self.positioning = Positioning(abs_dir)
# get number of beacons specified in zones.yml file for default buffer values
n_beacons = self.positioning.n_beacons
# number of messages to keep
self.buffer_length = rospy.get_param('~bcn_len') if rospy.has_param('~bcn_len') else 2*n_beacons
self.buffer_length = 2*n_beacons if self.buffer_length == -1 else self.buffer_length
# list of incoming messages
self.msg_buffer = []
# timestamp from last received message
self.last_time = None
# publishers
# current zone name
self.zone_name_pub = rospy.Publisher('ips/receiver/current_zone/name', StringStamped, queue_size=1)
# polygon of current zone
self.zone_polygon_pub = rospy.Publisher('ips/receiver/current_zone/polygon', PolygonStamped, queue_size=1)
# zone leave event
self.zone_leave_pub = rospy.Publisher('ips/receiver/zone_leave', StringStamped, queue_size=10)
# zone enter event
self.zone_enter_pub = rospy.Publisher('ips/receiver/zone_enter', StringStamped, queue_size=10)
# set publishing rate
self.rate = rospy.Rate(rospy.get_param('~rate')) if rospy.has_param('~rate') else rospy.Rate(1)
def callback(self, msg):
"""
Append incoming messages to list of previous messages.
:param msg: String, message of subscribed topic
"""
# append message to buffer
self.msg_buffer.append(msg.data)
# save time of last raw signal
self.last_time = msg.header.stamp
# delete oldest message if buffer is full
if len(self.msg_buffer) > self.buffer_length:
            del self.msg_buffer[0]
def publish(self):
"""Publish zone information"""
# last zone that the receiver was in
last_zone = None
while not rospy.is_shutdown():
# get the current zone
zone = self.positioning.get_zone(self.msg_buffer) if self.msg_buffer else None
# check if zone change occurred
if zone != last_zone:
# publish zone change event
event = StringStamped()
event.header.stamp = self.last_time
# only zone leave
if zone is None:
event.data = last_zone.name
self.zone_leave_pub.publish(event)
# only zone enter
elif last_zone is None:
event.data = zone.name
self.zone_enter_pub.publish(event)
                # leave one zone and enter another
else:
event.data = last_zone.name
self.zone_leave_pub.publish(event)
event.data = zone.name
self.zone_enter_pub.publish(event)
if zone is not None:
# publish zone name
name = StringStamped()
name.header.stamp = self.last_time
name.header.frame_id = zone.frame_id
name.data = zone.name
self.zone_name_pub.publish(name)
# publish zone polygon
polygon = PolygonStamped()
polygon.header.stamp = self.last_time
polygon.header.frame_id = zone.frame_id
points = []
for p in zone.polygon:
points.append(Point32(p[0], p[1], p[2]))
polygon.polygon.points = points
self.zone_polygon_pub.publish(polygon)
# set current zone to last zone
last_zone = zone
# wait to start next iteration
self.rate.sleep()
if __name__ == '__main__':
# start node
rospy.init_node('positioning', anonymous=False)
# initialize IPSReceiver class
ips = IPS()
try:
# publish receiver messages
ips.publish()
except rospy.ROSInterruptException:
pass
|
[
"rospy.Subscriber",
"geometry_msgs.msg.PolygonStamped",
"indoor_positioning.msg.StringStamped",
"rospkg.RosPack",
"rospy.Publisher",
"rospy.Rate",
"rospy.get_param",
"rospy.is_shutdown",
"geometry_msgs.msg.Point32",
"rospy.init_node",
"indoor_positioning.positioning.Positioning",
"rospy.has_param"
] |
[((5949, 5996), 'rospy.init_node', 'rospy.init_node', (['"""positioning"""'], {'anonymous': '(False)'}), "('positioning', anonymous=False)\n", (5964, 5996), False, 'import rospy\n'), ((1824, 1890), 'rospy.Subscriber', 'rospy.Subscriber', (['"""ips/receiver/raw"""', 'StringStamped', 'self.callback'], {}), "('ips/receiver/raw', StringStamped, self.callback)\n", (1840, 1890), False, 'import rospy\n'), ((2201, 2221), 'indoor_positioning.positioning.Positioning', 'Positioning', (['abs_dir'], {}), '(abs_dir)\n', (2212, 2221), False, 'from indoor_positioning.positioning import Positioning\n'), ((2812, 2890), 'rospy.Publisher', 'rospy.Publisher', (['"""ips/receiver/current_zone/name"""', 'StringStamped'], {'queue_size': '(1)'}), "('ips/receiver/current_zone/name', StringStamped, queue_size=1)\n", (2827, 2890), False, 'import rospy\n'), ((2957, 3043), 'rospy.Publisher', 'rospy.Publisher', (['"""ips/receiver/current_zone/polygon"""', 'PolygonStamped'], {'queue_size': '(1)'}), "('ips/receiver/current_zone/polygon', PolygonStamped,\n queue_size=1)\n", (2972, 3043), False, 'import rospy\n'), ((3097, 3169), 'rospy.Publisher', 'rospy.Publisher', (['"""ips/receiver/zone_leave"""', 'StringStamped'], {'queue_size': '(10)'}), "('ips/receiver/zone_leave', StringStamped, queue_size=10)\n", (3112, 3169), False, 'import rospy\n'), ((3227, 3299), 'rospy.Publisher', 'rospy.Publisher', (['"""ips/receiver/zone_enter"""', 'StringStamped'], {'queue_size': '(10)'}), "('ips/receiver/zone_enter', StringStamped, queue_size=10)\n", (3242, 3299), False, 'import rospy\n'), ((1987, 2018), 'rospy.has_param', 'rospy.has_param', (['"""~config_file"""'], {}), "('~config_file')\n", (2002, 2018), False, 'import rospy\n'), ((1952, 1983), 'rospy.get_param', 'rospy.get_param', (['"""~config_file"""'], {}), "('~config_file')\n", (1967, 1983), False, 'import rospy\n'), ((2453, 2480), 'rospy.has_param', 'rospy.has_param', (['"""~bcn_len"""'], {}), "('~bcn_len')\n", (2468, 2480), False, 'import rospy\n'), ((2422, 2449), 'rospy.get_param', 'rospy.get_param', (['"""~bcn_len"""'], {}), "('~bcn_len')\n", (2437, 2449), False, 'import rospy\n'), ((3390, 3414), 'rospy.has_param', 'rospy.has_param', (['"""~rate"""'], {}), "('~rate')\n", (3405, 3414), False, 'import rospy\n'), ((3420, 3433), 'rospy.Rate', 'rospy.Rate', (['(1)'], {}), '(1)\n', (3430, 3433), False, 'import rospy\n'), ((4055, 4074), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (4072, 4074), False, 'import rospy\n'), ((3361, 3385), 'rospy.get_param', 'rospy.get_param', (['"""~rate"""'], {}), "('~rate')\n", (3376, 3385), False, 'import rospy\n'), ((4348, 4363), 'indoor_positioning.msg.StringStamped', 'StringStamped', ([], {}), '()\n', (4361, 4363), False, 'from indoor_positioning.msg import StringStamped\n'), ((5123, 5138), 'indoor_positioning.msg.StringStamped', 'StringStamped', ([], {}), '()\n', (5136, 5138), False, 'from indoor_positioning.msg import StringStamped\n'), ((5395, 5411), 'geometry_msgs.msg.PolygonStamped', 'PolygonStamped', ([], {}), '()\n', (5409, 5411), False, 'from geometry_msgs.msg import PolygonStamped, Point32\n'), ((2074, 2090), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (2088, 2090), False, 'import rospkg\n'), ((5623, 5648), 'geometry_msgs.msg.Point32', 'Point32', (['p[0]', 'p[1]', 'p[2]'], {}), '(p[0], p[1], p[2])\n', (5630, 5648), False, 'from geometry_msgs.msg import PolygonStamped, Point32\n')]
|
from flask import Flask
from flask import jsonify
from flask import request
from flask_cors import CORS
from raven.contrib.flask import Sentry
from orion.context import Context
from orion.handlers import handler_classes
def init_app(app):
"""
Statefully initialize the Flask application. This involves creating a sever-side application
context and adding route definitions for all endpoint handlers.
:param app: Uninitialized Flask application instance.
:return: Server-side application context.
"""
ctx = Context(app)
CORS(app, supports_credentials=True, origins=[ctx.config.get_value('frontend_url')])
sentry_dsn = ctx.config.get_value('sentry_dsn')
if sentry_dsn:
Sentry(dsn=sentry_dsn).init_app(app)
def map_handler_func(HandlerClass):
"""
Create all necessary params for adding this route to the Flask server.
:param HandlerClass: Handler class to prepare.
:return: A tuple of (path, name, view_func, methods) for this handler.
"""
def handler_wrapper(*args, **kwargs):
# Provide an abstraction for supplying the handler with request JSON.
data = request.get_json(force=True, silent=True) or {}
handler = HandlerClass(ctx, data)
resp_json, status = handler.run(*args, **kwargs)
return jsonify(resp_json), status
return HandlerClass.path, HandlerClass.__name__, handler_wrapper, HandlerClass.methods
for rule, endpoint, view_func, methods in map(map_handler_func, handler_classes):
app.add_url_rule(
rule=rule,
endpoint=endpoint,
view_func=view_func,
methods=methods,
)
return ctx
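# A hypothetical handler sketch matching the interface assumed by
# map_handler_func above: a handler class exposes `path` and `methods`
# attributes, is constructed with (ctx, data), and returns (resp_json, status)
# from run():
#
#   class PingHandler:
#       path = '/ping'
#       methods = ['GET']
#
#       def __init__(self, ctx, data):
#           self.ctx, self.data = ctx, data
#
#       def run(self):
#           return {'pong': True}, 200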
def create_app():
"""
Create a fully initialized Flask application instance for this server.
:return: The initialized Flask application instance.
"""
app = Flask('orion')
ctx = init_app(app)
app.ctx = ctx
return app
|
[
"raven.contrib.flask.Sentry",
"orion.context.Context",
"flask.Flask",
"flask.jsonify",
"flask.request.get_json"
] |
[((538, 550), 'orion.context.Context', 'Context', (['app'], {}), '(app)\n', (545, 550), False, 'from orion.context import Context\n'), ((1915, 1929), 'flask.Flask', 'Flask', (['"""orion"""'], {}), "('orion')\n", (1920, 1929), False, 'from flask import Flask\n'), ((720, 742), 'raven.contrib.flask.Sentry', 'Sentry', ([], {'dsn': 'sentry_dsn'}), '(dsn=sentry_dsn)\n', (726, 742), False, 'from raven.contrib.flask import Sentry\n'), ((1184, 1225), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)', 'silent': '(True)'}), '(force=True, silent=True)\n', (1200, 1225), False, 'from flask import request\n'), ((1358, 1376), 'flask.jsonify', 'jsonify', (['resp_json'], {}), '(resp_json)\n', (1365, 1376), False, 'from flask import jsonify\n')]
|
"""
A simple message queue for TAPPs using Redis.
"""
import json
import time
from sqlalchemy_models import create_session_engine, setup_database, util, exchange as em, user as um, wallet as wm
from tapp_config import setup_redis, get_config, setup_logging
def subscription_handler(channel, client, mykey=None, auth=False, multi=True):
"""
:param str channel: The channel to subscribe to.
:param client: A plugin manager client.
:param mykey: A bitjws public key to use in authenticating requests (unused)
:param bool auth: If true, authenticate all requests, otherwise assume plain json encoding.
:param multi: Process multiple results if True, otherwise return after 1
"""
while True:
message = client.red.rpop(channel)
if message is not None:
toprint = message if len(message) < 60 else message[:59]
client.logger.info("handling message %s..." % toprint)
if not auth:
# assume json encoding
mess = json.loads(message)
client.logger.debug("handling message:\n%s" % json.dumps(mess, indent=2))
if 'command' not in mess or not hasattr(client, mess['command']):
# nothing to do
pass
else:
try:
getattr(client, mess['command'])(**mess['data'])
except Exception as e:
client.logger.exception(e)
client.session.rollback()
client.session.flush()
else:
# TODO implement auth options
raise NotImplementedError("auth not supported yet.")
if not multi:
return
else:
time.sleep(0.01)
def publish(channel, command, data, key=None, auth=False, red=None):
"""
Publish a command to a redis channel.
:param channel: The channel to send the command to
:param command: The command name
:param data: The data to send (parameters)
:param key: The key to sign with (unused)
:param auth: If true, authenticate the message before sending (unused)
:param red: The StrictRedis client to use for redis communication
"""
if red is None:
red = setup_redis()
if not auth:
red.lpush(channel, json.dumps({'command': command, 'data': data}))
else:
# TODO implement auth options
raise NotImplementedError("auth not supported yet.")
def set_status(nam, status='loading', red=None):
if red is None:
red = setup_redis()
if status in ['loading', 'running', 'stopped']:
red.set("%s_status" % nam.lower(), status)
def get_status(nam, red=None):
if red is None:
red = setup_redis()
status = red.get("%s_status" % nam.lower())
return status if status is not None else 'stopped'
def get_running_workers(wlist, red=None):
"""
Search list for only the workers which return status 'running'.
:param wlist: The list of workers to search through.
:param red: The redis connection.
:return: The worker list filtered for status 'running'.
"""
if red is None:
red = setup_redis()
workers = []
for work in wlist:
if get_status(work, red=red) == 'running':
workers.append(work)
return workers
class MQHandlerBase(object):
"""
A parent class for Message Queue Handlers.
    Plugins should inherit from this class and override all of the methods
    that raise NotImplementedError.
"""
NAME = 'Base'
KEY = 'PubKey'
_user = None
session = None
def __init__(self, key=None, secret=None, session=None, engine=None, red=None, cfg=None):
self.cfg = get_config(self.NAME.lower()) if cfg is None else cfg
self.key = key if key is not None else self.cfg.get(self.NAME.lower(), 'key')
self.secret = secret if secret is not None else self.cfg.get(self.NAME.lower(), 'secret')
self.session = session
self.engine = engine
self.red = red
self.logger = None
"""
Daemonization and process management section. Do not override.
"""
def setup_connections(self):
if self.session is None or self.engine is None:
self.session, self.engine = create_session_engine(cfg=self.cfg)
setup_database(self.engine, modules=[wm, em, um])
self.red = setup_redis() if self.red is None else self.red
def setup_logger(self):
self.logger = setup_logging(self.NAME.lower(), cfg=self.cfg)
def cleanup(self):
if self.session is not None:
self.session.close()
@property
def manager_user(self):
"""
Get the User associated with this plugin Manager.
This User is the owner of records for the plugin.
:rtype: User
:return: The Manager User
"""
if not self._user:
# try to get existing user
self._user = self.session.query(um.User).filter(um.User.username == '%sManager' % self.NAME) \
.first()
if not self._user:
# create a new user
userpubkey = self.cfg.get(self.NAME.lower(), 'userpubkey')
self._user = util.create_user('%sManager' % self.NAME, userpubkey, self.session)
return self._user
def run(self):
"""
Run this manager as a daemon. Subscribes to a redis channel matching self.NAME
and processes messages received there.
"""
set_status(self.NAME.lower(), 'loading', self.red)
self.setup_connections()
self.setup_logger()
self.logger.info("%s loading" % self.NAME)
set_status(self.NAME.lower(), 'running', self.red)
self.logger.info("%s running" % self.NAME)
subscription_handler(self.NAME.lower(), client=self)
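# A minimal plugin sketch (hypothetical name and method; working config and
# database settings for the TAPP machinery are assumed to be in place):
#
#   class EchoHandler(MQHandlerBase):
#       NAME = 'Echo'
#
#       def say(self, text):
#           self.logger.info(text)
#
#   # EchoHandler().run()  # blocks, handling {'command': 'say', 'data': {'text': ...}}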
|
[
"sqlalchemy_models.util.create_user",
"sqlalchemy_models.setup_database",
"json.loads",
"tapp_config.setup_redis",
"json.dumps",
"time.sleep",
"sqlalchemy_models.create_session_engine"
] |
[((2302, 2315), 'tapp_config.setup_redis', 'setup_redis', ([], {}), '()\n', (2313, 2315), False, 'from tapp_config import setup_redis, get_config, setup_logging\n'), ((2602, 2615), 'tapp_config.setup_redis', 'setup_redis', ([], {}), '()\n', (2613, 2615), False, 'from tapp_config import setup_redis, get_config, setup_logging\n'), ((2786, 2799), 'tapp_config.setup_redis', 'setup_redis', ([], {}), '()\n', (2797, 2799), False, 'from tapp_config import setup_redis, get_config, setup_logging\n'), ((3221, 3234), 'tapp_config.setup_redis', 'setup_redis', ([], {}), '()\n', (3232, 3234), False, 'from tapp_config import setup_redis, get_config, setup_logging\n'), ((1791, 1807), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1801, 1807), False, 'import time\n'), ((2360, 2406), 'json.dumps', 'json.dumps', (["{'command': command, 'data': data}"], {}), "({'command': command, 'data': data})\n", (2370, 2406), False, 'import json\n'), ((4336, 4371), 'sqlalchemy_models.create_session_engine', 'create_session_engine', ([], {'cfg': 'self.cfg'}), '(cfg=self.cfg)\n', (4357, 4371), False, 'from sqlalchemy_models import create_session_engine, setup_database, util, exchange as em, user as um, wallet as wm\n'), ((4384, 4433), 'sqlalchemy_models.setup_database', 'setup_database', (['self.engine'], {'modules': '[wm, em, um]'}), '(self.engine, modules=[wm, em, um])\n', (4398, 4433), False, 'from sqlalchemy_models import create_session_engine, setup_database, util, exchange as em, user as um, wallet as wm\n'), ((4453, 4466), 'tapp_config.setup_redis', 'setup_redis', ([], {}), '()\n', (4464, 4466), False, 'from tapp_config import setup_redis, get_config, setup_logging\n'), ((5285, 5352), 'sqlalchemy_models.util.create_user', 'util.create_user', (["('%sManager' % self.NAME)", 'userpubkey', 'self.session'], {}), "('%sManager' % self.NAME, userpubkey, self.session)\n", (5301, 5352), False, 'from sqlalchemy_models import create_session_engine, setup_database, util, exchange as em, user as um, wallet as wm\n'), ((1019, 1038), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (1029, 1038), False, 'import json\n'), ((1101, 1127), 'json.dumps', 'json.dumps', (['mess'], {'indent': '(2)'}), '(mess, indent=2)\n', (1111, 1127), False, 'import json\n')]
|
from typing import Optional
import graphene
from django.core.exceptions import ValidationError
from ....giftcard.utils import order_has_gift_card_lines
from ....order import FulfillmentLineData
from ....order import models as order_models
from ....order.error_codes import OrderErrorCode
from ....order.fetch import OrderLineInfo
from ....payment.models import TransactionItem
from ...core.mutations import BaseMutation
from ..types import FulfillmentLine, OrderLine
class FulfillmentRefundAndReturnProductBase(BaseMutation):
class Meta:
abstract = True
@classmethod
def clean_order_payment(cls, payment, cleaned_input):
if not payment or not payment.can_refund():
raise ValidationError(
{
"order": ValidationError(
"Order cannot be refunded.",
code=OrderErrorCode.CANNOT_REFUND.value,
)
}
)
cleaned_input["payment"] = payment
@classmethod
def clean_amount_to_refund(
cls, order, amount_to_refund, charged_value, cleaned_input
):
if amount_to_refund is not None:
if order_has_gift_card_lines(order):
raise ValidationError(
{
"amount_to_refund": ValidationError(
(
"Cannot specified amount to refund when order has "
"gift card lines."
),
code=OrderErrorCode.CANNOT_REFUND.value,
)
}
)
if amount_to_refund > charged_value:
raise ValidationError(
{
"amount_to_refund": ValidationError(
(
"The amountToRefund is greater than the maximal "
"possible amount to refund."
),
code=OrderErrorCode.CANNOT_REFUND.value,
),
}
)
cleaned_input["amount_to_refund"] = amount_to_refund
@classmethod
def _raise_error_for_line(cls, msg, type, line_id, field_name, code=None):
line_global_id = graphene.Node.to_global_id(type, line_id)
if not code:
code = OrderErrorCode.INVALID_QUANTITY.value
raise ValidationError(
{
field_name: ValidationError(
msg,
code=code,
params={field_name: line_global_id},
)
}
)
@classmethod
def raise_error_for_payment_error(cls, transactions: Optional[TransactionItem]):
if transactions:
code = OrderErrorCode.MISSING_TRANSACTION_ACTION_REQUEST_WEBHOOK.value
msg = "No app or plugin is configured to handle payment action requests."
else:
msg = "The refund operation is not available yet."
code = OrderErrorCode.CANNOT_REFUND.value
raise ValidationError(
msg,
code=code,
)
@classmethod
def clean_fulfillment_lines(
cls, fulfillment_lines_data, cleaned_input, whitelisted_statuses
):
fulfillment_lines = cls.get_nodes_or_error(
[line["fulfillment_line_id"] for line in fulfillment_lines_data],
field="fulfillment_lines",
only_type=FulfillmentLine,
qs=order_models.FulfillmentLine.objects.prefetch_related(
"fulfillment", "order_line"
),
)
fulfillment_lines = list(fulfillment_lines)
cleaned_fulfillment_lines = []
for line, line_data in zip(fulfillment_lines, fulfillment_lines_data):
quantity = line_data["quantity"]
if line.order_line.is_gift_card:
cls._raise_error_for_line(
"Cannot refund or return gift card line.",
"FulfillmentLine",
line.pk,
"fulfillment_line_id",
OrderErrorCode.GIFT_CARD_LINE.value,
)
if line.quantity < quantity:
cls._raise_error_for_line(
"Provided quantity is bigger than quantity from "
"fulfillment line",
"FulfillmentLine",
line.pk,
"fulfillment_line_id",
)
if line.fulfillment.status not in whitelisted_statuses:
allowed_statuses_str = ", ".join(whitelisted_statuses)
cls._raise_error_for_line(
f"Unable to process action for fulfillmentLine with different "
f"status than {allowed_statuses_str}.",
"FulfillmentLine",
line.pk,
"fulfillment_line_id",
code=OrderErrorCode.INVALID.value,
)
replace = line_data.get("replace", False)
if replace and not line.order_line.variant_id:
cls._raise_error_for_line(
"Unable to replace line as the assigned product doesn't exist.",
"OrderLine",
line.pk,
"order_line_id",
)
cleaned_fulfillment_lines.append(
FulfillmentLineData(
line=line,
quantity=quantity,
replace=replace,
)
)
cleaned_input["fulfillment_lines"] = cleaned_fulfillment_lines
@classmethod
def clean_lines(cls, lines_data, cleaned_input):
order_lines = cls.get_nodes_or_error(
[line["order_line_id"] for line in lines_data],
field="order_lines",
only_type=OrderLine,
qs=order_models.OrderLine.objects.prefetch_related(
"fulfillment_lines__fulfillment", "variant", "allocations"
),
)
order_lines = list(order_lines)
cleaned_order_lines = []
for line, line_data in zip(order_lines, lines_data):
quantity = line_data["quantity"]
if line.is_gift_card:
cls._raise_error_for_line(
"Cannot refund or return gift card line.",
"OrderLine",
line.pk,
"order_line_id",
OrderErrorCode.GIFT_CARD_LINE.value,
)
if line.quantity < quantity:
cls._raise_error_for_line(
"Provided quantity is bigger than quantity from order line.",
"OrderLine",
line.pk,
"order_line_id",
)
quantity_ready_to_move = line.quantity_unfulfilled
if quantity_ready_to_move < quantity:
cls._raise_error_for_line(
"Provided quantity is bigger than unfulfilled quantity.",
"OrderLine",
line.pk,
"order_line_id",
)
variant = line.variant
replace = line_data.get("replace", False)
if replace and not line.variant_id:
cls._raise_error_for_line(
"Unable to replace line as the assigned product doesn't exist.",
"OrderLine",
line.pk,
"order_line_id",
)
cleaned_order_lines.append(
OrderLineInfo(
line=line, quantity=quantity, variant=variant, replace=replace
)
)
cleaned_input["order_lines"] = cleaned_order_lines
|
[
"graphene.Node.to_global_id",
"django.core.exceptions.ValidationError"
] |
[((2374, 2415), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['type', 'line_id'], {}), '(type, line_id)\n', (2400, 2415), False, 'import graphene\n'), ((3181, 3212), 'django.core.exceptions.ValidationError', 'ValidationError', (['msg'], {'code': 'code'}), '(msg, code=code)\n', (3196, 3212), False, 'from django.core.exceptions import ValidationError\n'), ((2567, 2635), 'django.core.exceptions.ValidationError', 'ValidationError', (['msg'], {'code': 'code', 'params': '{field_name: line_global_id}'}), '(msg, code=code, params={field_name: line_global_id})\n', (2582, 2635), False, 'from django.core.exceptions import ValidationError\n'), ((780, 870), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Order cannot be refunded."""'], {'code': 'OrderErrorCode.CANNOT_REFUND.value'}), "('Order cannot be refunded.', code=OrderErrorCode.\n CANNOT_REFUND.value)\n", (795, 870), False, 'from django.core.exceptions import ValidationError\n'), ((1331, 1465), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Cannot specified amount to refund when order has gift card lines."""'], {'code': 'OrderErrorCode.CANNOT_REFUND.value'}), "(\n 'Cannot specified amount to refund when order has gift card lines.',\n code=OrderErrorCode.CANNOT_REFUND.value)\n", (1346, 1465), False, 'from django.core.exceptions import ValidationError\n'), ((1834, 1977), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""The amountToRefund is greater than the maximal possible amount to refund."""'], {'code': 'OrderErrorCode.CANNOT_REFUND.value'}), "(\n 'The amountToRefund is greater than the maximal possible amount to refund.'\n , code=OrderErrorCode.CANNOT_REFUND.value)\n", (1849, 1977), False, 'from django.core.exceptions import ValidationError\n')]
|
from typing import Callable, Generator, Generic, Optional, TypeVar
from mlprogram import logging
from mlprogram.synthesizers.synthesizer import Result, Synthesizer
logger = logging.Logger(__name__)
Input = TypeVar("Input")
Output = TypeVar("Output")
class FilteredSynthesizer(Synthesizer[Input, Output], Generic[Input, Output]):
def __init__(self, synthesizer: Synthesizer[Input, Output],
score: Callable[[Input, Output], float],
threshold: float):
self.synthesizer = synthesizer
self.score = score
self.threshold = threshold
def _synthesize(self, input: Input, n_required_output: Optional[int] = None) \
-> Generator[Result[Output], None, None]:
with logger.block("_synthesize"):
for result in self.synthesizer(input, n_required_output):
score = self.score(input, result.output)
if score >= self.threshold:
logger.debug(f"find appropriate output: score={score}")
yield result
return
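# --- Usage sketch (assumed objects) ---
# Wrap an existing Synthesizer so that only outputs scoring at or above the
# threshold come through; note that _synthesize returns right after yielding
# the first passing result. `base_synthesizer` and `my_metric` below are
# hypothetical stand-ins.
#
#     filtered = FilteredSynthesizer(
#         base_synthesizer,
#         score=lambda inp, out: my_metric(inp, out),
#         threshold=0.8,
#     )
#     for result in filtered(some_input):
#         print(result.output)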
|
[
"typing.TypeVar",
"mlprogram.logging.Logger"
] |
[((175, 199), 'mlprogram.logging.Logger', 'logging.Logger', (['__name__'], {}), '(__name__)\n', (189, 199), False, 'from mlprogram import logging\n'), ((209, 225), 'typing.TypeVar', 'TypeVar', (['"""Input"""'], {}), "('Input')\n", (216, 225), False, 'from typing import Callable, Generator, Generic, Optional, TypeVar\n'), ((235, 252), 'typing.TypeVar', 'TypeVar', (['"""Output"""'], {}), "('Output')\n", (242, 252), False, 'from typing import Callable, Generator, Generic, Optional, TypeVar\n')]
|
import os
import sys
import logging
import traceback
from logging import Logger
from types import TracebackType
from typing import Union, Tuple, Optional
from .argparser import LogArgParser
from .handlers import CustomTimedRotatingFileHandler
class Log:
"""Initiates a logging object to record processes and errors"""
DEFAULT_LOG_LEVEL = 'INFO'
def __init__(self, log: Union[str, 'Log', Logger] = None, child_name: str = None,
log_level_str: str = None, log_to_file: bool = False, log_dir: str = None):
"""
Args:
log: display name of the log. If Log object, will extract name from that.
Typically, this second method is done in the name of assigning a child log a parent.
If NoneType, will use __name__.
child_name: str, name of the child log.
This is used when the log being made is considered a child to the parent log name
log_to_file: if True, will create a file handler for outputting logs to file.
The files are incremented in days, with the date appended to the file name.
                Logs older than 30 days will be removed upon instantiation
log_level_str: str, minimum logging level to write to log (Levels: DEBUG -> INFO -> WARN -> ERROR)
default: 'INFO'
log_dir: str, directory to save the log
default: "~/logs/{log_name}/"
"""
# If 'Log', it's a parent Log instance. Take the name from the object. Otherwise it's just a string
if log is None:
log = __name__
self.is_child = child_name is not None
self.log_name = log.name if isinstance(log, (Log, Logger)) else log
self.log_to_file = log_to_file
self.log_parent = log if self.is_child else None
# Determine if log is child of other Log objects (if so, it will be attached to that parent log)
# Instantiate the log object
if self.is_child and isinstance(self.log_parent, (Log, Logger)):
# Attach this instance to the parent log if it's the proper object
self.log_obj = self.log_parent.log_obj.getChild(child_name)
# Attempt to check for the parent log's log_to_file variable.
try:
self.log_to_file = self.log_parent.log_to_file
except AttributeError:
pass
else:
# Create logger if it hasn't been created
self.log_obj = logging.getLogger(self.log_name)
self.log_obj.setLevel(self.DEFAULT_LOG_LEVEL)
# Patch some things in for cross-class compatibility
self.name = self.log_name
self.debug = self.log_obj.debug
self.info = self.log_obj.info
self.warning = self.log_obj.warning
self.error = self.log_obj.error
self.getChild = self.log_obj.getChild
self.setLevel = self.log_obj.setLevel
# Check if debugging in pycharm
        # Checking methods:
        #  1) check whether the code is being run in-console
        #  2) check whether the script is run in debug mode per PyCharm
sysargs = sys.argv
self.is_debugging = any(['pydevconsole.py' in sysargs[0], sys.gettrace() is not None])
# Set the log level (will automatically set to DEBUG if is_debugging)
self._set_log_level(log_level_str)
# Set the log handlers
if self.log_to_file:
self._build_log_path(log_dir)
if not self.is_child and len(self.log_obj.handlers) == 0:
# We only need a handler for the parent log object
self._set_handlers()
self.info(f'Logging initiated{" for child instance" if self.is_child else ""}.')
def _build_log_path(self, log_dir: str):
"""Builds a filepath to the log file"""
# First just check if the log is a child of another.
# If so, we can bypass the logic below it and use the parent log's file path
if self.is_child:
try:
self.log_path = self.log_parent.log_path
return
except AttributeError:
pass
# Set name of file
self.log_filename = f"{self.log_name}"
# Set log directory (if none)
home_dir = os.path.join(os.path.expanduser('~'), 'logs')
log_dir = os.path.join(home_dir, log_dir if log_dir is not None else self.log_name)
# Check if logging directory exists
if not os.path.exists(log_dir):
# If dir doesn't exist, create
os.makedirs(log_dir)
# Path of logfile
self.log_path = os.path.join(log_dir, self.log_filename)
def _set_log_level(self, log_level_str: str):
"""Determines the minimum log level to set.
Logging progression: DEBUG -> INFO -> WARN -> ERROR -> CRITICAL
Methodology breakdown:
1. Looks for manually set string
2. If child, looks at parent's log level
3. If not, checks for script-level arguments passed in
"""
if log_level_str is None:
if self.is_child:
log_level_str = logging.getLevelName(self.log_parent.log_level_int) \
if isinstance(self.log_parent, Log) else self.DEFAULT_LOG_LEVEL
else:
# No log level provided. Check if any included as cmd argument
log_level_str = LogArgParser(self.is_debugging).log_level_str
self.log_level_str = log_level_str
self.log_level_int = getattr(logging, log_level_str.upper(), logging.DEBUG)
# Set minimum logging level
self.log_obj.setLevel(self.log_level_int)
def _set_handlers(self):
"""Sets up file & stream handlers"""
# Set format of logs
formatter = logging.Formatter('%(asctime)s - %(process)d - %(levelname)-8s - %(name)s - %(message)s')
# Create streamhandler for log (this sends streams to stdout for debug help)
sh = logging.StreamHandler(sys.stdout)
sh.setLevel(self.log_level_int)
sh.setFormatter(formatter)
self.log_obj.addHandler(sh)
if self.log_to_file:
# TimedRotating will delete logs older than 30 days
fh = CustomTimedRotatingFileHandler(self.log_path, when='d', interval=1, backup_cnt=30)
fh.setLevel(self.log_level_int)
fh.setFormatter(formatter)
self.log_obj.addHandler(fh)
# Intercept exceptions
sys.excepthook = self.handle_exception
def handle_exception(self, exc_type: type, exc_value: BaseException, exc_traceback: TracebackType):
"""Default wrapper for handling exceptions. Can be overwritten by classes that inherit Log class"""
self._handle_exception(exc_type=exc_type, exc_value=exc_value, exc_traceback=exc_traceback)
def _handle_exception(self, exc_type: type, exc_value: BaseException, exc_traceback: TracebackType):
"""Intercepts an exception and prints it to log file"""
if issubclass(exc_type, KeyboardInterrupt):
sys.__excepthook__(exc_type, exc_value, exc_traceback)
return
self.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback))
def error_from_class(self, err_obj: BaseException, text: str):
"""Default wrapper for extracting exceptions from Exception class.
Can be overwritten by classes that inherit the Log class"""
self._error_from_class(err_obj=err_obj, text=text)
def _error_from_class(self, err_obj: BaseException, text: str):
"""Error logging for exception objects"""
traceback_msg = '\n'.join(traceback.format_tb(err_obj.__traceback__))
exception_msg = f'{err_obj.__class__.__name__}: {err_obj}\n{traceback_msg}'
err_msg = f'{text}\n{exception_msg}'
self.error(err_msg)
@staticmethod
def extract_err() -> Tuple[Optional[type], Optional[BaseException], Optional[TracebackType]]:
"""Calls sys.exec_info() to get error details upon error instance
Returns:
(error type, error object, error traceback)
"""
return sys.exc_info()
def close(self):
"""Close logger"""
disconn_msg = 'Log disconnected'
if self.is_child:
self.info(f'{disconn_msg} for child instance.')
else:
self.info(f'{disconn_msg}.\n' + '-' * 80)
for handler in self.log_obj.handlers:
handler.close()
self.log_obj.removeHandler(handler)
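# --- Minimal usage sketch ---
# A parent Log owns the handlers and level; a child attaches to it and
# inherits both. The names 'myapp' and 'worker' are illustrative.
if __name__ == '__main__':
    main_log = Log('myapp')
    worker_log = Log(main_log, child_name='worker')
    worker_log.info('hello from the child logger')
    main_log.close()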
|
[
"os.path.expanduser",
"sys.gettrace",
"sys.__excepthook__",
"os.makedirs",
"logging.StreamHandler",
"os.path.exists",
"traceback.format_tb",
"logging.Formatter",
"logging.getLevelName",
"sys.exc_info",
"os.path.join",
"logging.getLogger"
] |
[((4344, 4417), 'os.path.join', 'os.path.join', (['home_dir', '(log_dir if log_dir is not None else self.log_name)'], {}), '(home_dir, log_dir if log_dir is not None else self.log_name)\n', (4356, 4417), False, 'import os\n'), ((4629, 4669), 'os.path.join', 'os.path.join', (['log_dir', 'self.log_filename'], {}), '(log_dir, self.log_filename)\n', (4641, 4669), False, 'import os\n'), ((5800, 5894), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(process)d - %(levelname)-8s - %(name)s - %(message)s"""'], {}), "(\n '%(asctime)s - %(process)d - %(levelname)-8s - %(name)s - %(message)s')\n", (5817, 5894), False, 'import logging\n'), ((5988, 6021), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (6009, 6021), False, 'import logging\n'), ((8153, 8167), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8165, 8167), False, 'import sys\n'), ((2506, 2538), 'logging.getLogger', 'logging.getLogger', (['self.log_name'], {}), '(self.log_name)\n', (2523, 2538), False, 'import logging\n'), ((4293, 4316), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (4311, 4316), False, 'import os\n'), ((4477, 4500), 'os.path.exists', 'os.path.exists', (['log_dir'], {}), '(log_dir)\n', (4491, 4500), False, 'import os\n'), ((4557, 4577), 'os.makedirs', 'os.makedirs', (['log_dir'], {}), '(log_dir)\n', (4568, 4577), False, 'import os\n'), ((7075, 7129), 'sys.__excepthook__', 'sys.__excepthook__', (['exc_type', 'exc_value', 'exc_traceback'], {}), '(exc_type, exc_value, exc_traceback)\n', (7093, 7129), False, 'import sys\n'), ((7661, 7703), 'traceback.format_tb', 'traceback.format_tb', (['err_obj.__traceback__'], {}), '(err_obj.__traceback__)\n', (7680, 7703), False, 'import traceback\n'), ((3224, 3238), 'sys.gettrace', 'sys.gettrace', ([], {}), '()\n', (3236, 3238), False, 'import sys\n'), ((5150, 5201), 'logging.getLevelName', 'logging.getLevelName', (['self.log_parent.log_level_int'], {}), '(self.log_parent.log_level_int)\n', (5170, 5201), False, 'import logging\n')]
|
from __future__ import absolute_import
from itertools import product, combinations
from git.objects import Blob
from collections import defaultdict
from kenja.historage import *
from kenja.shingles import calculate_similarity
def get_extends(commit, org_file_name, classes):
classes_path = '/[CN]/'.join(classes)
extends_path = '/'.join([org_file_name, '[CN]', classes_path, 'extend'])
try:
extend = commit.tree / extends_path
assert isinstance(extend, Blob)
except KeyError:
return None
return extend.data_stream.read().rstrip()
def exist_class(blob, commit):
split_path = blob.path.split('/')
while split_path[-2] != '[CN]':
split_path.pop()
class_path = '/'.join(split_path)
try:
commit.tree / class_path
except KeyError:
return False
return True
def detect_pull_up_method(historage):
pull_up_method_information = []
checked_commit = set()
detection_stack = []
for ref in get_refs(historage):
ref_commit = historage.commit(ref)
detection_stack.append(ref_commit)
while detection_stack:
commit = detection_stack.pop()
if commit.hexsha in checked_commit:
continue
for p in commit.parents:
pull_up_method_information.extend(detect_shingle_pullup_method(p, commit))
detection_stack.append(p)
checked_commit.add(commit.hexsha)
return pull_up_method_information
class Method(object):
def __init__(self, blob, commit):
self.blob = blob
self.package_name = get_package(blob.path, commit)
self.classes = self.get_classes(blob.path)
self.method_name = get_method(blob.path)
self.body_cache = None
def get_classes(self, path):
classes = []
split_path = path.split('/')
for i, v in enumerate(split_path):
if v == '[CN]':
classes.append(split_path[i+1])
return classes
def get_class_name(self):
return self.classes[-1]
def get_full_name(self):
class_name = '.'.join(self.classes)
if self.package_name:
return '.'.join([self.package_name, class_name, self.method_name])
else:
return '.'.join([class_name, self.method_name])
def get_full_class_name(self):
class_name = '.'.join(self.classes)
if self.package_name:
return '.'.join([self.package_name, class_name])
else:
return '.'.join([class_name])
def get_parameter_types(self):
index = self.method_name.index('(')
return self.method_name[index:-1].split(',')
@classmethod
def create_from_blob(cls, blob, commit):
if is_method_body(blob.path):
return cls(blob, commit)
else:
return None
def get_body(self):
if self.body_cache is None:
self.body_cache = self.blob.data_stream.read()
return self.body_cache
def __str__(self):
return self.get_full_name()
class SubclassMethod(Method):
def __init__(self, blob, commit):
super(SubclassMethod, self).__init__(blob, commit)
split_path = blob.path.split('/')
self.extend = get_extends(commit, split_path[0], self.classes)
def match_type(a_method, b_method):
a_types = a_method.get_parameter_types()
b_types = b_method.get_parameter_types()
return a_types == b_types
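# Note: get_parameter_types() keeps the leading '(' in the first element
# (e.g. 'm(int,String)' -> ['(int', 'String']); this is harmless here
# because match_type only compares the two lists for equality.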
def detect_shingle_pullup_method(old_commit, new_commit):
diff_index = old_commit.diff(new_commit, create_patch=False)
added_methods = defaultdict(list)
deleted_methods = defaultdict(list)
for diff in diff_index.iter_change_type('A'):
new_method = Method.create_from_blob(diff.b_blob, new_commit)
if new_method:
added_methods[new_method.get_class_name()].append(new_method)
deleted_classes = set()
for diff in diff_index.iter_change_type('D'):
        # NOTE: change the following old_commit to new_commit to detect
        # pull_up_method under the same condition as UMLDiff
subclass_method = SubclassMethod.create_from_blob(diff.a_blob, old_commit)
if subclass_method:
if not subclass_method.extend:
continue
if subclass_method.get_full_class_name() in deleted_classes:
continue
if not exist_class(diff.a_blob, new_commit):
deleted_classes.add(subclass_method.get_full_class_name())
continue
if subclass_method.extend in added_methods.keys():
deleted_methods[subclass_method.extend].append(subclass_method)
pull_up_method_candidates = []
old_org_commit = get_org_commit(old_commit)
new_org_commit = get_org_commit(new_commit)
    for super_class, v in deleted_methods.items():
if super_class not in added_methods:
            print("%s doesn't have an added method" % super_class)
continue
for dst_method in added_methods[super_class]:
dst_body = dst_method.get_body()
if not dst_body:
continue
dst_body = '\n'.join(dst_body.split('\n')[1:-2])
for src_method in v:
src_body = src_method.get_body()
is_same_parameters = match_type(src_method, dst_method)
if src_body:
src_body = '\n'.join(src_body.split('\n')[1:-2])
if src_body or dst_body:
try:
sim = calculate_similarity(src_body, dst_body)
except ZeroDivisionError:
sim = "N/A"
else:
sim = 0
pull_up_method_candidates.append((old_commit.hexsha,
new_commit.hexsha,
old_org_commit,
new_org_commit,
str(src_method),
str(dst_method),
sim,
is_same_parameters))
return pull_up_method_candidates
|
[
"collections.defaultdict",
"kenja.shingles.calculate_similarity"
] |
[((3627, 3644), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3638, 3644), False, 'from collections import defaultdict\n'), ((3667, 3684), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3678, 3684), False, 'from collections import defaultdict\n'), ((5572, 5612), 'kenja.shingles.calculate_similarity', 'calculate_similarity', (['src_body', 'dst_body'], {}), '(src_body, dst_body)\n', (5592, 5612), False, 'from kenja.shingles import calculate_similarity\n')]
|
#!/usr/bin/env python
"""Tests for `blast2xl` package."""
from os.path import abspath
from pathlib import Path
from click.testing import CliRunner
from blast2xl import cli
def test_command_line_interface():
"""Test the CLI."""
runner = CliRunner()
help_result = runner.invoke(cli.main, ['--help'])
assert help_result.exit_code == 0
assert 'blast2xl: BLAST XLSX Report Creator' in help_result.stdout
result = runner.invoke(cli.main)
assert result.exit_code == 2
assert 'Missing option' in result.output
blast_tsv_dir = abspath('tests/data/blast_tsv')
fasta_dir = abspath('tests/data/fastas')
with runner.isolated_filesystem():
excel_report = 'blast-report.xlsx'
seq_outdir = 'seq-outdir'
result = runner.invoke(cli.main, ['--blast-tsv-dir', blast_tsv_dir,
'--blast-tsv-sample-name-pattern', r'^blastn-(.+)-vs-nt.*',
'--seq-dir', fasta_dir,
'--top-n-results', 5,
'-o', excel_report,
'-O', seq_outdir,
'-vvv'])
assert result.exit_code == 0
path_seq_outdir = Path(seq_outdir)
assert path_seq_outdir.exists()
output_fastas = list(path_seq_outdir.glob('**/*.fasta'))
assert len(output_fastas) > 2
fasta_path = path_seq_outdir / 'FMDV' / 'Foot_and_mouth_disease_virus___type_O-12118' / 'FMDV.fasta'
assert fasta_path.exists()
assert fasta_path.stat().st_size > 0
assert Path(excel_report).exists()
assert Path(excel_report).stat().st_size > 0
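# Typical local invocation (the file path is an assumption -- use wherever
# this test module lives): pytest tests/test_blast2xl.py -v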
|
[
"click.testing.CliRunner",
"os.path.abspath",
"pathlib.Path"
] |
[((250, 261), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (259, 261), False, 'from click.testing import CliRunner\n'), ((562, 593), 'os.path.abspath', 'abspath', (['"""tests/data/blast_tsv"""'], {}), "('tests/data/blast_tsv')\n", (569, 593), False, 'from os.path import abspath\n'), ((610, 638), 'os.path.abspath', 'abspath', (['"""tests/data/fastas"""'], {}), "('tests/data/fastas')\n", (617, 638), False, 'from os.path import abspath\n'), ((1300, 1316), 'pathlib.Path', 'Path', (['seq_outdir'], {}), '(seq_outdir)\n', (1304, 1316), False, 'from pathlib import Path\n'), ((1665, 1683), 'pathlib.Path', 'Path', (['excel_report'], {}), '(excel_report)\n', (1669, 1683), False, 'from pathlib import Path\n'), ((1708, 1726), 'pathlib.Path', 'Path', (['excel_report'], {}), '(excel_report)\n', (1712, 1726), False, 'from pathlib import Path\n')]
|
import requests
import json
__SERVER_HOST__ = "http://127.0.0.1:5057"
__CLIENT_SECRET__ = 1234567890
__SERVER_SECRET__ = 1234567890
__SERVER_START_API__ = "/api/start"
__SERVER_STOP_API__ = "/api/stop"
__SERVER_PARAMETERS_API__ = "/api/parameters"
__SERVER_ALLPARAMETERS_API__ = "/api/allparameters"
__SERVER_OBJECTIVE_API__ = "/api/objective"
__SERVER_VERSION_API__ = "/api/version"
params = []
_DIM_ = 0
rit = 0
svr_rit = 0
current_objective = 1E300
pareato_objective = 1E300
searchMin = True
default_headers = {'Content-Type': 'application/json'}
def version():
"""xtellix Module Copyright and Version Info """
print( "*******************************************************")
print("Copyright (C) 2010-2020 Dr <NAME> <EMAIL>")
print("Client Version: 0.0.1 beta")
print( "*******************************************************")
def setOptimizationServerIP(address_port):
"""Set Optimization Server IP and Port Number """
global __SERVER_HOST__
__SERVER_HOST__ = address_port
def setClientSecret(secret):
"""Set Client Secret to enable Singular Access to the optimization engine """
global __CLIENT_SECRET__
__CLIENT_SECRET__ = secret
def connect(address_port, secret):
"""Set Server Endpoint and Client Secrets """
setOptimizationServerIP(address_port)
setClientSecret(secret)
apipath = __SERVER_HOST__ + __SERVER_VERSION_API__ + "/" + str(__CLIENT_SECRET__)
response = requests.get(apipath, verify=False, headers=default_headers)
r_data = json.loads(response.content)
print( "*******************************************************")
print("Server Version: ")
print( "*******************************************************")
print(r_data)
print( "*******************************************************")
print("Client Version: ")
version()
def setInitialParameters(initialSuggestions):
"""Initial parameters for optimization problem being solved"""
global params
params = initialSuggestions
sugjson = json.dumps(list(initialSuggestions))
apipath = __SERVER_HOST__ + __SERVER_PARAMETERS_API__ + "/" + str(__SERVER_SECRET__)
    response = requests.post(apipath, json=sugjson, headers=default_headers)
#print(sugjson)
#print(apipath)
#print(response)
return response
def initializeOptimizer(initMetric,ubound, lbound, dim, maxIter, maxSamples, initialSuggestions, seedId, minOrMax):
"""Default parameters for initializing the optimization engine, based on being solved"""
global current_objective
global pareato_objective
global __SERVER_SECRET__
global _DIM_
global searchMin
current_objective = initMetric
pareato_objective = initMetric
_DIM_ = dim
searchMin = minOrMax
initialize = [dim,ubound, lbound, maxIter, maxSamples, initMetric, seedId]
iniJson = json.dumps(initialize)
apipath = __SERVER_HOST__ + __SERVER_START_API__ + "/" + str(__CLIENT_SECRET__)
response = requests.post(apipath, json=iniJson, headers=default_headers )
secret = int(json.loads(response.content))
__SERVER_SECRET__ = secret
#print(apipath)
print("New Server Secret: ", __SERVER_SECRET__)
print("Optimization Engine Running.....")
response1 = setInitialParameters(initialSuggestions)
return response1
def getParameters(cached = True):
"""Get parameters from the Optimization Server """
global params
global svr_rit
if cached == True:
apipath = __SERVER_HOST__ + __SERVER_PARAMETERS_API__ + "/" + str(__SERVER_SECRET__)
response = requests.get(apipath, verify=False, headers=default_headers )
r_data = json.loads(response.content)
oldK = r_data[0]
newK = r_data[1]
oldPoint = r_data[2]
newPoint = r_data[3]
rit = r_data[4]
svr_rit = rit
params[oldK] = oldPoint
params[newK] = newPoint
else:
apipath = __SERVER_HOST__ + __SERVER_ALLPARAMETERS_API__ + "/" + str(__SERVER_SECRET__)
response = requests.get(apipath, verify=False, headers=default_headers )
r_data = json.loads(response.content)
global _DIM_
for i in range(_DIM_):
params[i] = r_data[i]
#print(apipath)
#print(response)
return params
def updateObjectiveFunctionValue(evalMetric):
"""Send Objective Function Value updates to the optimization server"""
jObj = json.dumps(evalMetric)
apipath = __SERVER_HOST__ + __SERVER_OBJECTIVE_API__ + "/" + str(__SERVER_SECRET__)
global current_objective
global pareato_objective
global rit
global searchMin
rit = rit + 1
current_objective = evalMetric
if searchMin == True:
if evalMetric <= pareato_objective: pareato_objective = evalMetric
elif searchMin == False:
if evalMetric >= pareato_objective: pareato_objective = evalMetric
else:
if evalMetric <= pareato_objective: pareato_objective = evalMetric
    response = requests.post(apipath, json=jObj, verify=False, headers=default_headers)
#print(apipath)
#print(jObj)
#print(response)
return response
def getProgress():
global current_objective
global pareato_objective
global rit
global svr_rit
return current_objective, pareato_objective, rit, svr_rit
def getFunctionEvaluations():
global rit
global svr_rit
return rit, svr_rit
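# --- End-to-end sketch ---
# Minimizes a toy sphere function using only this module's public API.
# Assumes an engine is listening at the address below; the bounds, secrets,
# seed, and iteration counts are illustrative values only.
if __name__ == '__main__':
    dim = 10
    x0 = [5.0] * dim
    sphere = lambda x: sum(v * v for v in x)
    connect("http://127.0.0.1:5057", 1234567890)
    initializeOptimizer(1E300, 10.0, -10.0, dim, 5000, 1, x0, 2, True)
    for _ in range(1000):
        x = getParameters()
        updateObjectiveFunctionValue(sphere(x))
    best, pareto, evals, server_evals = getProgress()
    print("Best objective so far:", pareto)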
|
[
"requests.post",
"json.loads",
"requests.get",
"json.dumps"
] |
[((1516, 1576), 'requests.get', 'requests.get', (['apipath'], {'verify': '(False)', 'headers': 'default_headers'}), '(apipath, verify=False, headers=default_headers)\n', (1528, 1576), False, 'import requests\n'), ((1598, 1626), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (1608, 1626), False, 'import json\n'), ((2261, 2322), 'requests.post', 'requests.post', (['apipath'], {'json': 'sugjson', 'headers': 'default_headers'}), '(apipath, json=sugjson, headers=default_headers)\n', (2274, 2322), False, 'import requests\n'), ((2955, 2977), 'json.dumps', 'json.dumps', (['initialize'], {}), '(initialize)\n', (2965, 2977), False, 'import json\n'), ((3084, 3145), 'requests.post', 'requests.post', (['apipath'], {'json': 'iniJson', 'headers': 'default_headers'}), '(apipath, json=iniJson, headers=default_headers)\n', (3097, 3145), False, 'import requests\n'), ((4580, 4602), 'json.dumps', 'json.dumps', (['evalMetric'], {}), '(evalMetric)\n', (4590, 4602), False, 'import json\n'), ((5152, 5224), 'requests.post', 'requests.post', (['apipath'], {'json': 'jObj', 'verify': '(False)', 'headers': 'default_headers'}), '(apipath, json=jObj, verify=False, headers=default_headers)\n', (5165, 5224), False, 'import requests\n'), ((3165, 3193), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (3175, 3193), False, 'import json\n'), ((3699, 3759), 'requests.get', 'requests.get', (['apipath'], {'verify': '(False)', 'headers': 'default_headers'}), '(apipath, verify=False, headers=default_headers)\n', (3711, 3759), False, 'import requests\n'), ((3788, 3816), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (3798, 3816), False, 'import json\n'), ((4169, 4229), 'requests.get', 'requests.get', (['apipath'], {'verify': '(False)', 'headers': 'default_headers'}), '(apipath, verify=False, headers=default_headers)\n', (4181, 4229), False, 'import requests\n'), ((4258, 4286), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (4268, 4286), False, 'import json\n')]
|
# Copyright (c) 2015, <NAME>
# All rights reserved.
import os
import re
def get(osx_version):
dev_dir = re.sub(r'\.', '_', osx_version)
dev_dir = 'OSX_{}_DEVELOPER_DIR'.format(dev_dir)
return os.getenv(dev_dir)
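# Example: the version string maps to an environment variable by replacing
# '.' with '_', so get('10.11') reads OSX_10_11_DEVELOPER_DIR (None if unset).
if __name__ == '__main__':
    os.environ['OSX_10_11_DEVELOPER_DIR'] = '/tmp/xcode'  # illustrative value
    assert get('10.11') == '/tmp/xcode'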
|
[
"re.sub",
"os.getenv"
] |
[((108, 139), 're.sub', 're.sub', (['"""\\\\."""', '"""_"""', 'osx_version'], {}), "('\\\\.', '_', osx_version)\n", (114, 139), False, 'import re\n'), ((200, 218), 'os.getenv', 'os.getenv', (['dev_dir'], {}), '(dev_dir)\n', (209, 218), False, 'import os\n')]
|
"""config/config
Default corpus configs.
"""
import sys
import os
import inspect
from pathlib import Path
from kleis import kleis_data
ACLRDTEC = "acl-rd-tec-2.0"
SEMEVAL2017 = "semeval2017-task10"
KPEXTDATA_PATH = str(Path(inspect.getfile(kleis_data)).parent)
# Check for default paths for corpus
DEFAULT_CORPUS_PATH = "corpus/" + SEMEVAL2017 + "/"
if Path("./kleis_data/" + DEFAULT_CORPUS_PATH).exists():
CORPUS_PATH = "./kleis_data/" + DEFAULT_CORPUS_PATH
elif Path(os.path.expanduser("~/kleis_data/" + DEFAULT_CORPUS_PATH)).exists():
CORPUS_PATH = os.path.expanduser("~/kleis_data/" + DEFAULT_CORPUS_PATH)
elif Path(KPEXTDATA_PATH + "/" + DEFAULT_CORPUS_PATH).exists():
CORPUS_PATH = KPEXTDATA_PATH + "/" + DEFAULT_CORPUS_PATH
else:
print("Warning: SemEval 2017 Task 10 corpus doesn't exists.", file=sys.stderr)
print(" - Download from here https://scienceie.github.io/resources.html",
file=sys.stderr)
print(" - Use one of the following paths.", file=sys.stderr)
print(" + ./kleis_data/%s" % DEFAULT_CORPUS_PATH, file=sys.stderr)
print(" + ~/kleis_data/%s" % DEFAULT_CORPUS_PATH, file=sys.stderr)
print(" + %s" % (KPEXTDATA_PATH + "/" + DEFAULT_CORPUS_PATH), file=sys.stderr)
print(" - You can use pre-trained models.", file=sys.stderr)
CORPUS_PATH = os.path.expanduser("~/kleis_data/" + DEFAULT_CORPUS_PATH)
print("Default: ", Path(CORPUS_PATH))
CORPUS = {
ACLRDTEC: {
"_id": "acl-rd-tec-2.0",
"options": {}
},
SEMEVAL2017: {
"_id": SEMEVAL2017,
"format": "brat",
"format-description": "brat standoff format, http://brat.nlplab.org/standoff.html",
"dataset": {
"train-labeled": CORPUS_PATH + "train2/",
"train-unlabeled": None,
"dev-labeled": CORPUS_PATH + "dev/",
"dev-unlabeled": None,
"test-unlabeled": CORPUS_PATH + "scienceie2017_test_unlabelled/",
"test-labeled": CORPUS_PATH + "semeval_articles_test/"
},
"options": {}
},
"options": {}
}
CORPUS_DEFAULT = CORPUS[SEMEVAL2017]
CORPUS_SEMEVAL2017_TASK10 = CORPUS[SEMEVAL2017]
CORPUS_ACL_RD_TEC_2_0 = CORPUS[ACLRDTEC]
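# Example: the labeled training split of the default corpus resolves to
# CORPUS_DEFAULT['dataset']['train-labeled'], i.e. CORPUS_PATH + 'train2/'.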
# Check for default paths for models
DEFAULT_MODELS_PATH = "models/"
if Path("./kleis_data/" + DEFAULT_MODELS_PATH).exists():
MODELS_PATH = "./kleis_data/" + DEFAULT_MODELS_PATH
elif Path(os.path.expanduser("~/kleis_data/" + DEFAULT_MODELS_PATH)).exists():
MODELS_PATH = os.path.expanduser("~/kleis_data/" + DEFAULT_MODELS_PATH)
elif Path(KPEXTDATA_PATH + "/" + DEFAULT_MODELS_PATH).exists():
MODELS_PATH = KPEXTDATA_PATH + "/" + DEFAULT_MODELS_PATH
else:
print("Warning: Path to save models doesn't exists.", file=sys.stderr)
print(" - Possible paths are:", file=sys.stderr)
print(" + %s" % (KPEXTDATA_PATH + "/" + DEFAULT_MODELS_PATH), file=sys.stderr)
print(" + %s" % ("./" + DEFAULT_MODELS_PATH), file=sys.stderr)
print(" + %s" % ("~/" + DEFAULT_MODELS_PATH), file=sys.stderr)
print(" - Default will be %s" % DEFAULT_MODELS_PATH, file=sys.stderr)
MODELS_PATH = DEFAULT_MODELS_PATH
# Check for default paths for PoS tag sequences
DEFAULT_TRAIN_PATH = "train/"
if Path("./kleis_data/" + DEFAULT_TRAIN_PATH).exists():
TRAIN_PATH = "./kleis_data/" + DEFAULT_TRAIN_PATH
elif Path(os.path.expanduser("~/kleis_data/" + DEFAULT_TRAIN_PATH)).exists():
TRAIN_PATH = os.path.expanduser("~/kleis_data/" + DEFAULT_TRAIN_PATH)
elif Path(KPEXTDATA_PATH + "/" + DEFAULT_TRAIN_PATH).exists():
TRAIN_PATH = KPEXTDATA_PATH + "/" + DEFAULT_TRAIN_PATH
else:
print("Warning: Path to save models doesn't exists.", file=sys.stderr)
print(" - Possible paths are:", file=sys.stderr)
print(" + %s" % (KPEXTDATA_PATH + "/" + DEFAULT_TRAIN_PATH), file=sys.stderr)
print(" + %s" % ("./" + DEFAULT_TRAIN_PATH), file=sys.stderr)
print(" + %s" % ("~/" + DEFAULT_TRAIN_PATH), file=sys.stderr)
print(" - Default will be %s" % DEFAULT_TRAIN_PATH, file=sys.stderr)
TRAIN_PATH = DEFAULT_TRAIN_PATH
OUTPUT_PATH = "output/"
|
[
"inspect.getfile",
"pathlib.Path",
"os.path.expanduser"
] |
[((359, 402), 'pathlib.Path', 'Path', (["('./kleis_data/' + DEFAULT_CORPUS_PATH)"], {}), "('./kleis_data/' + DEFAULT_CORPUS_PATH)\n", (363, 402), False, 'from pathlib import Path\n'), ((566, 623), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_CORPUS_PATH)"], {}), "('~/kleis_data/' + DEFAULT_CORPUS_PATH)\n", (584, 623), False, 'import os\n'), ((2313, 2356), 'pathlib.Path', 'Path', (["('./kleis_data/' + DEFAULT_MODELS_PATH)"], {}), "('./kleis_data/' + DEFAULT_MODELS_PATH)\n", (2317, 2356), False, 'from pathlib import Path\n'), ((2520, 2577), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_MODELS_PATH)"], {}), "('~/kleis_data/' + DEFAULT_MODELS_PATH)\n", (2538, 2577), False, 'import os\n'), ((3275, 3317), 'pathlib.Path', 'Path', (["('./kleis_data/' + DEFAULT_TRAIN_PATH)"], {}), "('./kleis_data/' + DEFAULT_TRAIN_PATH)\n", (3279, 3317), False, 'from pathlib import Path\n'), ((3477, 3533), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_TRAIN_PATH)"], {}), "('~/kleis_data/' + DEFAULT_TRAIN_PATH)\n", (3495, 3533), False, 'import os\n'), ((229, 256), 'inspect.getfile', 'inspect.getfile', (['kleis_data'], {}), '(kleis_data)\n', (244, 256), False, 'import inspect\n'), ((1346, 1403), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_CORPUS_PATH)"], {}), "('~/kleis_data/' + DEFAULT_CORPUS_PATH)\n", (1364, 1403), False, 'import os\n'), ((479, 536), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_CORPUS_PATH)"], {}), "('~/kleis_data/' + DEFAULT_CORPUS_PATH)\n", (497, 536), False, 'import os\n'), ((629, 677), 'pathlib.Path', 'Path', (["(KPEXTDATA_PATH + '/' + DEFAULT_CORPUS_PATH)"], {}), "(KPEXTDATA_PATH + '/' + DEFAULT_CORPUS_PATH)\n", (633, 677), False, 'from pathlib import Path\n'), ((1427, 1444), 'pathlib.Path', 'Path', (['CORPUS_PATH'], {}), '(CORPUS_PATH)\n', (1431, 1444), False, 'from pathlib import Path\n'), ((2433, 2490), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_MODELS_PATH)"], {}), "('~/kleis_data/' + DEFAULT_MODELS_PATH)\n", (2451, 2490), False, 'import os\n'), ((2583, 2631), 'pathlib.Path', 'Path', (["(KPEXTDATA_PATH + '/' + DEFAULT_MODELS_PATH)"], {}), "(KPEXTDATA_PATH + '/' + DEFAULT_MODELS_PATH)\n", (2587, 2631), False, 'from pathlib import Path\n'), ((3392, 3448), 'os.path.expanduser', 'os.path.expanduser', (["('~/kleis_data/' + DEFAULT_TRAIN_PATH)"], {}), "('~/kleis_data/' + DEFAULT_TRAIN_PATH)\n", (3410, 3448), False, 'import os\n'), ((3539, 3586), 'pathlib.Path', 'Path', (["(KPEXTDATA_PATH + '/' + DEFAULT_TRAIN_PATH)"], {}), "(KPEXTDATA_PATH + '/' + DEFAULT_TRAIN_PATH)\n", (3543, 3586), False, 'from pathlib import Path\n')]
|
from scipy.misc import imread
from tqdm import tqdm
import numpy as np
import os
import random
import warnings
class SetList(object):
'''A class to hold lists of inputs for a network'''
def __init__(self, source='', target=None):
'''Constructs a new SetList.
Args:
source (str): The path to the list file
'''
self.source = source
if target is None:
self.target = source
else:
self.target = target
self.list = []
self.mean = []
if source != '':
self.load()
@property
def set(self):
return set(self.list)
@set.setter
def set(self, set):
self.list = list(set)
def __len__(self):
'''Returns the length of this Set'''
return len(self.list)
def __str__(self):
'''Returns a str-description of this Set'''
return '{}[{}] → {}'.format(self.source, len(self.list), self.target)
def __iter__(self):
'''Returns the iterator for the contained list'''
return iter(self.list)
def load(self):
'''Loads the contents of self.source into the list. If source is a dir
it will list all files in it without extensions. It does replace the
whole content and does not append to it.'''
# utils.touch(self.source)
if os.path.isdir(self.source):
self.load_directory(self.source)
self.source = ''
self.target = ''
else:
if not os.path.exists(self.source):
self.list = []
else:
with open(self.source) as f:
self.list = [l[:-1] for l in f.readlines() if l.strip()]
def load_directory(self, dir):
        '''Loads the contents of a directory into the list
Args:
dir (str): The path to the dir
'''
self.list = [os.path.splitext(f)[0] for f in next(os.walk(dir))[2]]
def write(self):
'''Saves the list to the path set in self.target. This is normally set
to self.source'''
with open(self.target, 'w') as f:
for row in self:
f.write("{}\n".format(row))
print('List {} written...'.format(self.target))
def shuffle(self):
'''Shuffles the list'''
random.shuffle(self.list)
def add_pre_suffix(self, prefix='', suffix=''):
'''Adds a prefix and a suffix to every element of the list.
Args:
prefix (str,optional): The prefix to prepend
            suffix (str,optional): The suffix to append
'''
self.list = [prefix + x + suffix for x in self]
def rm_pre_suffix(self, prefix='', suffix=''):
'''Removes a prefix and a suffix from every element of the list.
Args:
prefix (str,optional): The prefix to remove
            suffix (str,optional): The suffix to remove
'''
        # 'or None' preserves the string tail when suffix is empty
        self.list = [x[len(prefix):-len(suffix) or None] for x in self]
def calculate_mean(self):
        '''Calculates the mean pixel for this set. The list has to contain
        full image paths, so you will probably need to add prefixes and
        suffixes before running this.
Returns:
The mean pixel. As BGR!
'''
self.mean = [[], [], []]
print('Calculating mean pixel...')
for row in tqdm(self):
im = imread(row)
self.mean[0].append(np.mean(im[..., 0]))
self.mean[1].append(np.mean(im[..., 1]))
self.mean[2].append(np.mean(im[..., 2]))
self.mean = np.mean(self.mean, axis=1)
if self.mean.shape == (3,):
return self.mean
else:
return self.mean[:, :, ::-1]
def each(self, callback):
'''Applies a callable to every element of the list
Args:
callback (func): The callback function to use
Returns:
True if successfull and False if not
'''
if not callable(callback):
warnings.warn('Not callable object')
return False
print('Each of {}'.format(self.source))
for row in tqdm(self):
callback(row)
return True
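# --- Minimal usage sketch (illustrative paths) ---
# Build a list from a directory of images, expand the bare names into full
# paths, shuffle, and persist the result. 'data/images' and 'train.txt' are
# made-up locations.
if __name__ == '__main__':
    sl = SetList()
    sl.load_directory('data/images')
    sl.add_pre_suffix(prefix='data/images/', suffix='.png')
    sl.shuffle()
    sl.target = 'train.txt'
    sl.write()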
|
[
"tqdm.tqdm",
"os.path.isdir",
"random.shuffle",
"os.walk",
"os.path.exists",
"numpy.mean",
"os.path.splitext",
"warnings.warn",
"scipy.misc.imread"
] |
[((1365, 1391), 'os.path.isdir', 'os.path.isdir', (['self.source'], {}), '(self.source)\n', (1378, 1391), False, 'import os\n'), ((2331, 2356), 'random.shuffle', 'random.shuffle', (['self.list'], {}), '(self.list)\n', (2345, 2356), False, 'import random\n'), ((3380, 3390), 'tqdm.tqdm', 'tqdm', (['self'], {}), '(self)\n', (3384, 3390), False, 'from tqdm import tqdm\n'), ((3600, 3626), 'numpy.mean', 'np.mean', (['self.mean'], {'axis': '(1)'}), '(self.mean, axis=1)\n', (3607, 3626), True, 'import numpy as np\n'), ((4165, 4175), 'tqdm.tqdm', 'tqdm', (['self'], {}), '(self)\n', (4169, 4175), False, 'from tqdm import tqdm\n'), ((3409, 3420), 'scipy.misc.imread', 'imread', (['row'], {}), '(row)\n', (3415, 3420), False, 'from scipy.misc import imread\n'), ((4036, 4072), 'warnings.warn', 'warnings.warn', (['"""Not callable object"""'], {}), "('Not callable object')\n", (4049, 4072), False, 'import warnings\n'), ((1529, 1556), 'os.path.exists', 'os.path.exists', (['self.source'], {}), '(self.source)\n', (1543, 1556), False, 'import os\n'), ((1914, 1933), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (1930, 1933), False, 'import os\n'), ((3453, 3472), 'numpy.mean', 'np.mean', (['im[..., 0]'], {}), '(im[..., 0])\n', (3460, 3472), True, 'import numpy as np\n'), ((3506, 3525), 'numpy.mean', 'np.mean', (['im[..., 1]'], {}), '(im[..., 1])\n', (3513, 3525), True, 'import numpy as np\n'), ((3559, 3578), 'numpy.mean', 'np.mean', (['im[..., 2]'], {}), '(im[..., 2])\n', (3566, 3578), True, 'import numpy as np\n'), ((1951, 1963), 'os.walk', 'os.walk', (['dir'], {}), '(dir)\n', (1958, 1963), False, 'import os\n')]
|
from Source import ModelsIO as MIO
import numpy as np
from h5py import File
def E_fit(_cube: np.ndarray((10, 13, 21, 128, 128), '>f4'),
data: np.ndarray((128, 128), '>f4'),
seg: np.ndarray((128, 128), '>f4'),
noise: np.ndarray((128, 128), '>f4')) -> np.ndarray((10, 13, 21), '>f4'):
scaled_models: np.ndarray((10, 13, 21, 128, 128), '>f4')
flux_models: np.ndarray((10, 13, 21), '>f4')
flux_data: np.float('>f4')
X: np.ndarray((10, 13, 21), '>f4')
resta: np.ndarray((10, 13, 21, 128, 128), '>f4')
residuo: np.ndarray((10, 13, 21, 128, 128), '>f4')
chi: np.ndarray((10, 13, 21), '>f4')
area: int
flux_models = np.einsum("ijkxy,xy->ijk", _cube, seg)
flux_data = np.einsum("xy,xy", data, seg)
X = flux_data / flux_models
scaled_models = X[:, :, :, np.newaxis, np.newaxis] * _cube
resta = data - scaled_models
residuo = (resta ** 2) / (scaled_models + noise ** 2)
chi = np.einsum("ijkxy,xy->ijk", residuo, seg)
area = seg.sum()
chi = chi / area
return chi
def read_obj_h5(name):
    # should be
try:
with File(name, 'r') as f:
data = f['obj'][:]
seg = f['seg'][:]
rms = f['rms'][:]
return data, seg, rms
except IOError:
print("{} not found".format(name))
return False, False, False
# is this function needed??
def read_obj(name):
try:
data = MIO.fits.open(name)[1].data
rms = MIO.fits.open(name.replace('objs', 'noise'))[1].data
seg = MIO.fits.open(name.replace('object', "segment").replace("objs", "segs"))[1].data
except IOError:
print("{} not found".format(name))
return False, False, False
noise = np.median(rms)
return data, seg, noise
def feed(name, cube):
"""
From a name and a models cube, run an object through the routine
Outputs the numpy array of the chi_cube
"""
a, b, s = read_obj_h5(name)
if a is not False:
chi = E_fit(cube, a, b, noise=s)
# outchi = MIO.fits.ImageHDU(data=chi)
# outchi.writeto(name.replace('cut_object',"chi_cube"),overwrite=True)
return chi
else:
return False
def save_chi(name, cube):
"""
Parameters
name : str of output file
cube : crunch.feed output
"""
outchi = MIO.fits.ImageHDU(data=cube)
outchi.writeto(name, overwrite=True)
return True
def get_cube(name):
cube = MIO.ModelsCube(name)
cube = cube.data.reshape((10, 13, 128, 21, 128))
cube = np.swapaxes(cube, 2, 3) # new shape (10, 13, 21, 128, 128)
return cube
def chi_index(chi_name):
"""
Parameters
----------
chi_name : chi_cube fits filename.
Returns
-------
tuple (i,j,k) of the index which minimize the residuals.
"""
chi_cube = MIO.fits.open(chi_name)
i, j, k = np.unravel_index(np.argmin(chi_cube[1].data), shape=(10, 13, 21))
return i, j, k
def pond_rad_like(chi_name, logh):
i, j, k = chi_index(chi_name)
chi_cubo = MIO.fits.open(chi_name)[1].data
weights = np.e ** (chi_cubo[i, j, :])
r_weight = 0
for r in range(21):
r_weight += (10 ** (logh[r])) / weights[r]
r_chi = np.log10(r_weight / np.sum(1. / weights))
r_var = 0
for r in range(21):
r_var += ((logh[r] - r_chi) ** 2) / (weights[r])
r_var = r_var / np.sum(1. / weights)
return r_chi, r_var
def pond_rad(chi_name, logh):
i, j, k = chi_index(chi_name)
chi_cubo = MIO.fits.open(chi_name)[1].data
weights = chi_cubo[i, j, :]
r_weight = 0
for r in range(21):
r_weight += (10 ** (logh[r])) / weights[r]
r_chi = np.log10(r_weight / np.sum(1. / weights))
r_var = 0
for r in range(21):
r_var += ((logh[r] - r_chi) ** 2) / (weights[r])
r_var = r_var / np.sum(1. / weights)
return r_chi, r_var
def pond_rad_3d(chi_name, logh):
chi_cubo = MIO.fits.open(chi_name)[1].data
sqrt_chi = np.sqrt(chi_cubo)
r_weight = 0
for e in range(10):
for t in range(13):
for r in range(21):
r_weight += (10 ** (logh[r])) / sqrt_chi[e, t, r]
r_chi = np.log10(r_weight / np.sum(1. / sqrt_chi))
r_var = 0
for e in range(10):
for t in range(13):
for r in range(21):
r_var += ((logh[r] - r_chi) ** 2) / (chi_cubo[e, t, r])
r_var = r_var / np.sum(1. / chi_cubo)
return r_chi, r_var
def make_mosaic(obj, chi, cube):
"""
Parameters
----------
obj : str
DESCRIPTION.
chi : str
DESCRIPTION.
cube : numpy array
DESCRIPTION.
Returns
-------
Bool
Builds a mosaic containing the data,segment,model and residual
"""
i, j, k = chi_index(chi)
model = cube[i, j, k]
gal, seg, noise = read_obj(obj)
output = chi.replace('chi_cube', 'mosaic').replace('cut_object', 'mosaic')
fg = np.sum(gal * seg)
fm1 = np.sum(model * seg)
aux = np.zeros((128, 128 * 4))
aux[:, 0:128] = gal
aux[:, 128:256] = seg * (fg / seg.sum())
aux[:, 256:384] = model * (fg / fm1)
aux[:, 384:] = gal - model * (fg / fm1)
gg = MIO.fits.ImageHDU(data=aux)
gg.writeto(output, overwrite=True)
return True
def make_mosaic_h5(obj, chi, cube):
"""
Parameters
----------
obj : str
DESCRIPTION.
chi : str
DESCRIPTION.
cube : numpy array
DESCRIPTION.
Returns
-------
Bool
Builds a mosaic containing the data,segment,model and residual
"""
i, j, k = chi_index(chi)
model = cube[i, j, k]
output = chi.replace('chi_cube', 'mosaic').replace('cut', 'mosaic')
with File(obj, 'r') as f:
gal = f['obj'][:]
seg = f['seg'][:]
fg = np.sum(gal * seg)
fm1 = np.sum(model * seg)
aux = np.zeros((128, 128 * 4))
aux[:, 0:128] = gal
aux[:, 128:256] = seg * (fg / seg.sum())
aux[:, 256:384] = model * (fg / fm1)
aux[:, 384:] = gal - model * (fg / fm1)
gg = MIO.fits.ImageHDU(data=aux)
gg.writeto(output, overwrite=True)
return True
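# --- End-to-end sketch (illustrative filenames) ---
# Load the model cube, fit one HDF5-stored object, save its chi cube, and
# render the mosaic. All file names below are made up.
if __name__ == '__main__':
    cube = get_cube('models.fits')
    chi = feed('cut_object_001.h5', cube)
    if chi is not False:
        save_chi('chi_cube_001.fits', chi)
        make_mosaic_h5('cut_object_001.h5', 'chi_cube_001.fits', cube)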
|
[
"h5py.File",
"Source.ModelsIO.fits.ImageHDU",
"numpy.sum",
"Source.ModelsIO.ModelsCube",
"numpy.median",
"numpy.einsum",
"numpy.float",
"numpy.zeros",
"Source.ModelsIO.fits.open",
"numpy.argmin",
"numpy.swapaxes",
"numpy.ndarray",
"numpy.sqrt"
] |
[((282, 313), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21)', '""">f4"""'], {}), "((10, 13, 21), '>f4')\n", (292, 313), True, 'import numpy as np\n'), ((335, 376), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21, 128, 128)', '""">f4"""'], {}), "((10, 13, 21, 128, 128), '>f4')\n", (345, 376), True, 'import numpy as np\n'), ((394, 425), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21)', '""">f4"""'], {}), "((10, 13, 21), '>f4')\n", (404, 425), True, 'import numpy as np\n'), ((441, 456), 'numpy.float', 'np.float', (['""">f4"""'], {}), "('>f4')\n", (449, 456), True, 'import numpy as np\n'), ((464, 495), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21)', '""">f4"""'], {}), "((10, 13, 21), '>f4')\n", (474, 495), True, 'import numpy as np\n'), ((507, 548), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21, 128, 128)', '""">f4"""'], {}), "((10, 13, 21, 128, 128), '>f4')\n", (517, 548), True, 'import numpy as np\n'), ((562, 603), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21, 128, 128)', '""">f4"""'], {}), "((10, 13, 21, 128, 128), '>f4')\n", (572, 603), True, 'import numpy as np\n'), ((613, 644), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21)', '""">f4"""'], {}), "((10, 13, 21), '>f4')\n", (623, 644), True, 'import numpy as np\n'), ((678, 716), 'numpy.einsum', 'np.einsum', (['"""ijkxy,xy->ijk"""', '_cube', 'seg'], {}), "('ijkxy,xy->ijk', _cube, seg)\n", (687, 716), True, 'import numpy as np\n'), ((733, 762), 'numpy.einsum', 'np.einsum', (['"""xy,xy"""', 'data', 'seg'], {}), "('xy,xy', data, seg)\n", (742, 762), True, 'import numpy as np\n'), ((959, 999), 'numpy.einsum', 'np.einsum', (['"""ijkxy,xy->ijk"""', 'residuo', 'seg'], {}), "('ijkxy,xy->ijk', residuo, seg)\n", (968, 999), True, 'import numpy as np\n'), ((1743, 1757), 'numpy.median', 'np.median', (['rms'], {}), '(rms)\n', (1752, 1757), True, 'import numpy as np\n'), ((2348, 2376), 'Source.ModelsIO.fits.ImageHDU', 'MIO.fits.ImageHDU', ([], {'data': 'cube'}), '(data=cube)\n', (2365, 2376), True, 'from Source import ModelsIO as MIO\n'), ((2467, 2487), 'Source.ModelsIO.ModelsCube', 'MIO.ModelsCube', (['name'], {}), '(name)\n', (2481, 2487), True, 'from Source import ModelsIO as MIO\n'), ((2552, 2575), 'numpy.swapaxes', 'np.swapaxes', (['cube', '(2)', '(3)'], {}), '(cube, 2, 3)\n', (2563, 2575), True, 'import numpy as np\n'), ((2844, 2867), 'Source.ModelsIO.fits.open', 'MIO.fits.open', (['chi_name'], {}), '(chi_name)\n', (2857, 2867), True, 'from Source import ModelsIO as MIO\n'), ((3986, 4003), 'numpy.sqrt', 'np.sqrt', (['chi_cubo'], {}), '(chi_cubo)\n', (3993, 4003), True, 'import numpy as np\n'), ((4944, 4961), 'numpy.sum', 'np.sum', (['(gal * seg)'], {}), '(gal * seg)\n', (4950, 4961), True, 'import numpy as np\n'), ((4972, 4991), 'numpy.sum', 'np.sum', (['(model * seg)'], {}), '(model * seg)\n', (4978, 4991), True, 'import numpy as np\n'), ((5002, 5026), 'numpy.zeros', 'np.zeros', (['(128, 128 * 4)'], {}), '((128, 128 * 4))\n', (5010, 5026), True, 'import numpy as np\n'), ((5191, 5218), 'Source.ModelsIO.fits.ImageHDU', 'MIO.fits.ImageHDU', ([], {'data': 'aux'}), '(data=aux)\n', (5208, 5218), True, 'from Source import ModelsIO as MIO\n'), ((95, 136), 'numpy.ndarray', 'np.ndarray', (['(10, 13, 21, 128, 128)', '""">f4"""'], {}), "((10, 13, 21, 128, 128), '>f4')\n", (105, 136), True, 'import numpy as np\n'), ((154, 183), 'numpy.ndarray', 'np.ndarray', (['(128, 128)', '""">f4"""'], {}), "((128, 128), '>f4')\n", (164, 183), True, 'import numpy as np\n'), ((200, 229), 'numpy.ndarray', 'np.ndarray', (['(128, 128)', '""">f4"""'], {}), "((128, 128), '>f4')\n", (210, 229), True, 'import numpy as np\n'), ((248, 277), 'numpy.ndarray', 'np.ndarray', (['(128, 128)', '""">f4"""'], {}), "((128, 128), '>f4')\n", (258, 277), True, 'import numpy as np\n'), ((2899, 2926), 'numpy.argmin', 'np.argmin', (['chi_cube[1].data'], {}), '(chi_cube[1].data)\n', (2908, 2926), True, 'import numpy as np\n'), ((3390, 3411), 'numpy.sum', 'np.sum', (['(1.0 / weights)'], {}), '(1.0 / weights)\n', (3396, 3411), True, 'import numpy as np\n'), ((3844, 3865), 'numpy.sum', 'np.sum', (['(1.0 / weights)'], {}), '(1.0 / weights)\n', (3850, 3865), True, 'import numpy as np\n'), ((4419, 4441), 'numpy.sum', 'np.sum', (['(1.0 / chi_cubo)'], {}), '(1.0 / chi_cubo)\n', (4425, 4441), True, 'import numpy as np\n'), ((5712, 5726), 'h5py.File', 'File', (['obj', '"""r"""'], {}), "(obj, 'r')\n", (5716, 5726), False, 'from h5py import File\n'), ((5799, 5816), 'numpy.sum', 'np.sum', (['(gal * seg)'], {}), '(gal * seg)\n', (5805, 5816), True, 'import numpy as np\n'), ((5831, 5850), 'numpy.sum', 'np.sum', (['(model * seg)'], {}), '(model * seg)\n', (5837, 5850), True, 'import numpy as np\n'), ((5865, 5889), 'numpy.zeros', 'np.zeros', (['(128, 128 * 4)'], {}), '((128, 128 * 4))\n', (5873, 5889), True, 'import numpy as np\n'), ((6074, 6101), 'Source.ModelsIO.fits.ImageHDU', 'MIO.fits.ImageHDU', ([], {'data': 'aux'}), '(data=aux)\n', (6091, 6101), True, 'from Source import ModelsIO as MIO\n'), ((1120, 1135), 'h5py.File', 'File', (['name', '"""r"""'], {}), "(name, 'r')\n", (1124, 1135), False, 'from h5py import File\n'), ((3053, 3076), 'Source.ModelsIO.fits.open', 'MIO.fits.open', (['chi_name'], {}), '(chi_name)\n', (3066, 3076), True, 'from Source import ModelsIO as MIO\n'), ((3252, 3273), 'numpy.sum', 'np.sum', (['(1.0 / weights)'], {}), '(1.0 / weights)\n', (3258, 3273), True, 'import numpy as np\n'), ((3517, 3540), 'Source.ModelsIO.fits.open', 'MIO.fits.open', (['chi_name'], {}), '(chi_name)\n', (3530, 3540), True, 'from Source import ModelsIO as MIO\n'), ((3706, 3727), 'numpy.sum', 'np.sum', (['(1.0 / weights)'], {}), '(1.0 / weights)\n', (3712, 3727), True, 'import numpy as np\n'), ((3939, 3962), 'Source.ModelsIO.fits.open', 'MIO.fits.open', (['chi_name'], {}), '(chi_name)\n', (3952, 3962), True, 'from Source import ModelsIO as MIO\n'), ((4204, 4226), 'numpy.sum', 'np.sum', (['(1.0 / sqrt_chi)'], {}), '(1.0 / sqrt_chi)\n', (4210, 4226), True, 'import numpy as np\n'), ((1442, 1461), 'Source.ModelsIO.fits.open', 'MIO.fits.open', (['name'], {}), '(name)\n', (1455, 1461), True, 'from Source import ModelsIO as MIO\n')]
|
import os
import pandas as pd
from tqdm import tqdm
import pipelines.p1_orca_by_stop as p1
from utils import constants, data_utils
NAME = 'p2_aggregate_orca'
WRITE_DIR = os.path.join(constants.PIPELINE_OUTPUTS_DIR, NAME)
def load_input():
path = os.path.join(constants.PIPELINE_OUTPUTS_DIR, f'{p1.NAME}.csv')
return pd.read_csv(path)
def aggregate_stops(orca_df):
"""
Aggregates the ORCA dataset by summing together the boardings at each stop.
"""
cols = [
'stop_id',
'boarding_count',
'route_ids',
'tract_num',
'tract_population'
]
stops = orca_df['stop_id'].unique()
result = []
for stop in tqdm(stops, desc='Aggregating stops'):
rows = orca_df[orca_df[cols[0]] == stop]
result.append([
stop,
rows[cols[1]].sum(),
rows[cols[2]].iat[0],
rows[cols[3]].iat[0],
rows[cols[4]].iat[0],
])
# Renaming 'boarding_count' to 'orca_count' for clarity
cols[1] = 'orca_count'
return pd.DataFrame(result, columns=cols)
def aggregate_routes(orca_df):
"""
Maps each route to its list of stops.
"""
routes = {}
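    # orca_df is the output of aggregate_stops, so row[0] is stop_id and row[2] is route_ids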
for row in orca_df.to_numpy():
stop_id = row[0]
route_ids = data_utils.parse_collection(row[2], set, int)
for route_id in route_ids:
routes.setdefault(route_id, set()).add(stop_id)
cols = ['route_id', 'stop_ids']
result = [[route_id, routes[route_id]] for route_id in routes]
return pd.DataFrame(result, columns=cols)
def run_pipeline():
"""
Runs the pipeline and writes the outputs to disk.
"""
orca_df = load_input()
orca_df = aggregate_stops(orca_df)
routes_df = aggregate_routes(orca_df)
# Write to CSV
if not os.path.exists(WRITE_DIR):
os.mkdir(WRITE_DIR)
files = {'stops_aggregate.csv': orca_df, 'routes_aggregate.csv': routes_df}
for fname in files:
files[fname].to_csv(os.path.join(WRITE_DIR, fname), index=False)
tqdm.write(f'Wrote {fname} to {WRITE_DIR}')
if __name__ == '__main__':
run_pipeline()
|
[
"pandas.DataFrame",
"os.mkdir",
"tqdm.tqdm",
"tqdm.tqdm.write",
"pandas.read_csv",
"os.path.exists",
"utils.data_utils.parse_collection",
"os.path.join"
] |
[((174, 224), 'os.path.join', 'os.path.join', (['constants.PIPELINE_OUTPUTS_DIR', 'NAME'], {}), '(constants.PIPELINE_OUTPUTS_DIR, NAME)\n', (186, 224), False, 'import os\n'), ((256, 318), 'os.path.join', 'os.path.join', (['constants.PIPELINE_OUTPUTS_DIR', 'f"""{p1.NAME}.csv"""'], {}), "(constants.PIPELINE_OUTPUTS_DIR, f'{p1.NAME}.csv')\n", (268, 318), False, 'import os\n'), ((330, 347), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (341, 347), True, 'import pandas as pd\n'), ((683, 720), 'tqdm.tqdm', 'tqdm', (['stops'], {'desc': '"""Aggregating stops"""'}), "(stops, desc='Aggregating stops')\n", (687, 720), False, 'from tqdm import tqdm\n'), ((1058, 1092), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {'columns': 'cols'}), '(result, columns=cols)\n', (1070, 1092), True, 'import pandas as pd\n'), ((1538, 1572), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {'columns': 'cols'}), '(result, columns=cols)\n', (1550, 1572), True, 'import pandas as pd\n'), ((1281, 1326), 'utils.data_utils.parse_collection', 'data_utils.parse_collection', (['row[2]', 'set', 'int'], {}), '(row[2], set, int)\n', (1308, 1326), False, 'from utils import constants, data_utils\n'), ((1805, 1830), 'os.path.exists', 'os.path.exists', (['WRITE_DIR'], {}), '(WRITE_DIR)\n', (1819, 1830), False, 'import os\n'), ((1840, 1859), 'os.mkdir', 'os.mkdir', (['WRITE_DIR'], {}), '(WRITE_DIR)\n', (1848, 1859), False, 'import os\n'), ((2045, 2088), 'tqdm.tqdm.write', 'tqdm.write', (['f"""Wrote {fname} to {WRITE_DIR}"""'], {}), "(f'Wrote {fname} to {WRITE_DIR}')\n", (2055, 2088), False, 'from tqdm import tqdm\n'), ((1992, 2022), 'os.path.join', 'os.path.join', (['WRITE_DIR', 'fname'], {}), '(WRITE_DIR, fname)\n', (2004, 2022), False, 'import os\n')]
|
import nltk
import os
import torch
import torch.utils.data as data
import numpy as np
import json
from .vocabulary import Vocabulary
from pycocotools.coco import COCO
from PIL import Image
from tqdm import tqdm
class CoCoDataset(data.Dataset):
def __init__(self, transform, mode, batch_size, vocab_threshold, vocab_file, start_word,
end_word, unk_word, annotations_file, vocab_from_file, img_folder):
self.transform = transform
self.mode = mode
self.batch_size = batch_size
self.vocab = Vocabulary(vocab_threshold, vocab_file, start_word,
end_word, unk_word, annotations_file, vocab_from_file)
self.img_folder = img_folder
if self.mode == 'train':
self.coco = COCO(annotations_file)
self.ids = list(self.coco.anns.keys())
print('Obtaining caption lengths...')
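            # tokenize every caption once up front so equal-length batches can be sampled in get_train_indices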
all_tokens = [nltk.tokenize.word_tokenize(str(self.coco.anns[self.ids[index]]['caption']).lower()) for index in tqdm(np.arange(len(self.ids)))]
self.caption_lengths = [len(token) for token in all_tokens]
else:
test_info = json.loads(open(annotations_file).read())
self.paths = [item['file_name'] for item in test_info['images']]
def __getitem__(self, index):
# obtain image and caption if in training mode
if self.mode == 'train':
ann_id = self.ids[index]
caption = self.coco.anns[ann_id]['caption']
img_id = self.coco.anns[ann_id]['image_id']
path = self.coco.loadImgs(img_id)[0]['file_name']
# Convert image to tensor and pre-process using transform
image = Image.open(os.path.join(self.img_folder, path)).convert('RGB')
image = self.transform(image)
# Convert caption to tensor of word ids.
tokens = nltk.tokenize.word_tokenize(str(caption).lower())
caption = []
caption.append(self.vocab(self.vocab.start_word))
caption.extend([self.vocab(token) for token in tokens])
caption.append(self.vocab(self.vocab.end_word))
caption = torch.Tensor(caption).long()
# return pre-processed image and caption tensors
return image, caption
# obtain image if in test mode
else:
path = self.paths[index]
# Convert image to tensor and pre-process using transform
PIL_image = Image.open(os.path.join(self.img_folder, path)).convert('RGB')
orig_image = np.array(PIL_image)
image = self.transform(PIL_image)
# return original image and pre-processed image tensor
return orig_image, image
def get_train_indices(self):
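        # sample a caption length, then draw a whole batch from captions of exactly that length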
sel_length = np.random.choice(self.caption_lengths)
all_indices = np.where([self.caption_lengths[i] == sel_length for i in np.arange(len(self.caption_lengths))])[0]
indices = list(np.random.choice(all_indices, size=self.batch_size))
return indices
def __len__(self):
if self.mode == 'train':
return len(self.ids)
else:
return len(self.paths)
|
[
"pycocotools.coco.COCO",
"torch.Tensor",
"numpy.array",
"numpy.random.choice",
"os.path.join"
] |
[((2802, 2840), 'numpy.random.choice', 'np.random.choice', (['self.caption_lengths'], {}), '(self.caption_lengths)\n', (2818, 2840), True, 'import numpy as np\n'), ((776, 798), 'pycocotools.coco.COCO', 'COCO', (['annotations_file'], {}), '(annotations_file)\n', (780, 798), False, 'from pycocotools.coco import COCO\n'), ((2576, 2595), 'numpy.array', 'np.array', (['PIL_image'], {}), '(PIL_image)\n', (2584, 2595), True, 'import numpy as np\n'), ((2985, 3036), 'numpy.random.choice', 'np.random.choice', (['all_indices'], {'size': 'self.batch_size'}), '(all_indices, size=self.batch_size)\n', (3001, 3036), True, 'import numpy as np\n'), ((2177, 2198), 'torch.Tensor', 'torch.Tensor', (['caption'], {}), '(caption)\n', (2189, 2198), False, 'import torch\n'), ((1721, 1756), 'os.path.join', 'os.path.join', (['self.img_folder', 'path'], {}), '(self.img_folder, path)\n', (1733, 1756), False, 'import os\n'), ((2499, 2534), 'os.path.join', 'os.path.join', (['self.img_folder', 'path'], {}), '(self.img_folder, path)\n', (2511, 2534), False, 'import os\n')]
|
import os
import json
from six import iteritems
import h5py
import numpy as np
from tqdm import tqdm
import torch
import torch.nn.functional as F
from torch.utils.data import Dataset
from vdgnn.dataset.readers import DenseAnnotationsReader, ImageFeaturesHdfReader
TRAIN_VAL_SPLIT = {'0.9': 80000, '1.0': 123287}
class VisDialDataset(Dataset):
def __init__(self, args, split, isTrain=True):
r"""
Initialize the dataset with split taken from ['train', 'val', 'test']
        We follow the protocol as specified in `https://arxiv.org/pdf/1611.08669.pdf`, namely
For VisDial v1.0:
train split:
img_feat: train split
dialog_data: trainval split (top 123287)
val split:
img_feat: val split
dialog_data: trainval split (last 2064)
test split:
img_feat: test split
dialog_data: test split
For VisDial v0.9:
train split:
img_feat: train split
dialog_data: trainval split (top 80000)
val split (isTrain=True):
img_feat: train split
dialog_data: trainval split (last 2783)
val split (isTrain=False):
img_feat: val split
dialog_data: val split
"""
super(VisDialDataset, self).__init__()
self.args = args
self.__split = split
self.__in_memory = args.in_memory
self.__version = args.version
self.isTrain = isTrain
if self.__split == 'val' and self.__version == '0.9' and self.isTrain:
input_img_path = args.img_train
img_split = 'train'
self.img_start_idx = TRAIN_VAL_SPLIT[self.__version]
else:
input_img_path = getattr(args, 'img_%s' % split)
img_split = self.__split
self.img_start_idx = 0
if self.__split == 'val' and self.isTrain:
self.data_start_idx = TRAIN_VAL_SPLIT[self.__version]
data_split = 'train'
else:
self.data_start_idx = 0
data_split = self.__split
self.input_img = os.path.join(args.dataroot, input_img_path)
self.input_json = os.path.join(args.dataroot, args.visdial_params)
self.input_ques = os.path.join(args.dataroot, args.visdial_data)
self.input_dialog = os.path.join(
args.dataroot, getattr(args, 'dialog_%s' % split))
self.dense_annotations_jsonpath = os.path.join(
args.dataroot, args.dense_annotations)
self.num_data = getattr(args, 'num_%s' % split)
self.use_img_id_idx = None
# preprocessing split
print("\nProcessing split [{}]...".format(self.__split))
print("Dataloader loading json file: {}".format(self.input_json))
with open(self.input_json, 'r') as info_file:
info = json.load(info_file)
# possible keys: {'ind2word', 'word2ind', 'unique_img_(split)'}
for key, value in iteritems(info):
setattr(self, key, value)
# add <START> and <END> to vocabulary
word_count = len(self.word2ind)
self.word2ind['<START>'] = word_count + 1
self.word2ind['<END>'] = word_count + 2
self.start_token = self.word2ind['<START>']
self.end_token = self.word2ind['<END>']
# padding + <START> + <END> token
self.vocab_size = word_count + 3
print("Vocab size with <START>, <END>: {}".format(self.vocab_size))
# construct reverse of word2ind after adding tokens
self.ind2word = {
            int(ind): word
for word, ind in iteritems(self.word2ind)
}
print("Dataloader loading image h5 file: {}".format(self.input_img))
# Either img_feats or img_reader will be set.
if self.__version == '0.9':
# trainval image features
with h5py.File(self.input_img, 'r') as img_hdf5:
img_feats_h5 = img_hdf5.get('images_%s' % img_split)
self.num_data_points = len(img_feats_h5) - self.img_start_idx
self.img_reader = None
if self.__split == 'train':
self.num_data_points = min(self.num_data_points, TRAIN_VAL_SPLIT[self.__version])
else:
# split image features
self.use_img_id_idx = True
self.img_reader = ImageFeaturesHdfReader(
self.input_img, in_memory=self.__in_memory)
self.num_data_points = len(self.img_reader)
if self.num_data is not None:
self.num_data_points = min(self.num_data, self.num_data_points)
self.img_end_idx = self.img_start_idx + self.num_data_points
self.data_end_idx = self.data_start_idx + self.num_data_points
if self.img_reader is None:
with h5py.File(self.input_img, 'r') as img_hdf5:
img_feats_h5 = img_hdf5.get('images_%s' % img_split)
self.img_feats = torch.from_numpy(
np.array(img_feats_h5[self.img_start_idx:self.img_end_idx]))
if 'val' == self.__split and os.path.exists(self.dense_annotations_jsonpath):
self.use_img_id_idx = True
self.annotations_reader = DenseAnnotationsReader(
self.dense_annotations_jsonpath)
else:
self.annotations_reader = None
if self.use_img_id_idx:
print('Loading input dialog json: {}'.format(self.input_dialog))
with open(self.input_dialog, 'r') as dialog_json:
visdial_data = json.load(dialog_json)
self.idx2imgid = [dialog_for_image['image_id']
for dialog_for_image in visdial_data['data']['dialogs']]
print("Dataloader loading h5 file: {}".format(self.input_ques))
ques_file = h5py.File(self.input_ques, 'r')
# load all data mats from ques_file into this
self.data = {}
self.img_norm = args.img_norm
img_fnames = getattr(self, 'unique_img_' + data_split)
self.data[self.__split + '_img_fnames'] = img_fnames[self.data_start_idx:self.data_end_idx]
# map from load to save labels
io_map = {
'ques_{}': '{}_ques',
'ques_length_{}': '{}_ques_len',
'ans_{}': '{}_ans',
'ans_length_{}': '{}_ans_len',
'img_pos_{}': '{}_img_pos',
'cap_{}': '{}_cap',
'cap_length_{}': '{}_cap_len',
'opt_{}': '{}_opt',
'opt_length_{}': '{}_opt_len',
'opt_list_{}': '{}_opt_list',
'num_rounds_{}': '{}_num_rounds',
'ans_index_{}': '{}_ans_ind'
}
# read the question, answer, option related information
for load_label, save_label in iteritems(io_map):
label = load_label.format(data_split)
if load_label.format(data_split) not in ques_file:
continue
if label.startswith('opt_list') or label.startswith('opt_length'):
if self.__version == '1.0' and self.__split == 'val':
label = load_label.format('test')
self.data[save_label.format(self.__split)] = torch.from_numpy(
np.array(ques_file[label], dtype='int64'))
else:
self.data[save_label.format(self.__split)] = torch.from_numpy(
np.array(ques_file[label][self.data_start_idx:self.data_end_idx], dtype='int64'))
ques_file.close()
# record some stats, will be transferred to encoder/decoder later
# assume similar stats across multiple data subsets
# maximum number of questions per image, ideally 10
self.max_ques_count = self.data[self.__split + '_ques'].size(1)
# maximum length of question
self.max_ques_len = self.data[self.__split + '_ques'].size(2)
# maximum length of answer
self.max_ans_len = self.data[self.__split + '_ans'].size(2)
print("[{0}] no. of data points: {1}".format(
self.__split, self.num_data_points))
print("\tMax no. of rounds: {}".format(self.max_ques_count))
print("\tMax ques len: {}".format(self.max_ques_len))
print("\tMax ans len: {}".format(self.max_ans_len))
# prepare history
self._process_history(self.__split)
# 1 indexed to 0 indexed
self.data[self.__split + '_opt'] -= 1
if self.__split + '_ans_ind' in self.data:
self.data[self.__split + '_ans_ind'] -= 1
@property
def split(self):
return self.__split
# ------------------------------------------------------------------------
# methods to override - __len__ and __getitem__ methods
# ------------------------------------------------------------------------
def __len__(self):
return self.num_data_points
def __getitem__(self, idx):
dtype = self.__split
item = {'index': idx}
item['num_rounds'] = self.data[dtype + '_num_rounds'][idx]
# get image features
if self.use_img_id_idx:
image_id = self.idx2imgid[idx]
item['image_id'] = torch.tensor(image_id).long()
if self.img_reader is None:
img_feats = self.img_feats[idx]
else:
img_feats = torch.tensor(self.img_reader[image_id])
if self.img_norm:
img_feats = F.normalize(img_feats, dim=0, p=2)
item['img_feat'] = img_feats
item['img_fnames'] = self.data[dtype + '_img_fnames'][idx]
# get question tokens
item['ques'] = self.data[dtype + '_ques'][idx]
item['ques_len'] = self.data[dtype + '_ques_len'][idx]
# get history tokens
item['hist_len'] = self.data[dtype + '_hist_len'][idx]
item['hist'] = self.data[dtype + '_hist'][idx]
# get caption tokens
item['cap'] = self.data[dtype + '_cap'][idx]
item['cap_len'] = self.data[dtype + '_cap_len'][idx]
# get answer tokens
item['ans'] = self.data[dtype + '_ans'][idx]
item['ans_len'] = self.data[dtype + '_ans_len'][idx]
# get options tokens
opt_inds = self.data[dtype + '_opt'][idx]
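        # opt stores indices into the shared opt_list; gather each option's token sequence and length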
opt_size = list(opt_inds.size())
new_size = torch.Size(opt_size + [-1])
ind_vector = opt_inds.view(-1)
option_in = self.data[dtype + '_opt_list'].index_select(0, ind_vector)
option_in = option_in.view(new_size)
opt_len = self.data[dtype + '_opt_len'].index_select(0, ind_vector)
opt_len = opt_len.view(opt_size)
item['opt'] = option_in
item['opt_len'] = opt_len
if dtype != 'test':
ans_ind = self.data[dtype + '_ans_ind'][idx]
item['ans_ind'] = ans_ind.view(-1)
if dtype == 'val' and self.annotations_reader is not None:
dense_annotations = self.annotations_reader[image_id]
item['gt_relevance'] = torch.tensor(
dense_annotations["gt_relevance"]).float()
item['round_id'] = torch.tensor(
dense_annotations['round_id']).long()
# convert zero length sequences to one length
# this is for handling empty rounds of v1.0 test, they will be dropped anyway
if dtype == 'test':
item['ques_len'][item['ques_len'] == 0] += 1
item['opt_len'][item['opt_len'] == 0] += 1
item['hist_len'][item['hist_len'] == 0] += 1
return item
# -------------------------------------------------------------------------
# collate function utilized by dataloader for batching
# -------------------------------------------------------------------------
def collate_fn(self, batch):
dtype = self.__split
merged_batch = {key: [d[key] for d in batch] for key in batch[0]}
out = {}
for key in merged_batch:
if key in {'index', 'num_rounds', 'img_fnames'}:
out[key] = merged_batch[key]
elif key in {'cap_len'}:
out[key] = torch.Tensor(merged_batch[key]).long()
else:
out[key] = torch.stack(merged_batch[key], 0)
# Dynamic shaping of padded batch
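        # trim each padded dimension down to the longest sequence present in this batch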
out['hist'] = out['hist'][:, :, :torch.max(out['hist_len'])].contiguous()
out['ques'] = out['ques'][:, :, :torch.max(out['ques_len'])].contiguous()
out['ans'] = out['ans'][:, :, :torch.max(out['ans_len'])].contiguous()
out['cap'] = out['cap'][:, :torch.max(out['cap_len'])].contiguous()
out['opt'] = out['opt'][:, :, :, :torch.max(out['opt_len'])].contiguous()
batch_keys = ['num_rounds', 'img_feat', 'img_fnames', 'hist', 'hist_len', 'ques', 'ques_len',
'ans', 'ans_len', 'cap', 'cap_len', 'opt', 'opt_len']
if dtype != 'test':
batch_keys.append('ans_ind')
if dtype == 'val' and self.annotations_reader is not None:
batch_keys.append('gt_relevance')
batch_keys.append('round_id')
return {key: out[key] for key in batch_keys}
# -------------------------------------------------------------------------
# preprocessing functions
# -------------------------------------------------------------------------
def _process_history(self, dtype):
"""
Process caption as well as history. Optionally, concatenate history
for lf-encoder.
"""
captions = self.data[dtype + '_cap']
questions = self.data[dtype + '_ques']
ques_len = self.data[dtype + '_ques_len']
cap_len = self.data[dtype + '_cap_len']
max_ques_len = questions.size(2)
answers = self.data[dtype + '_ans']
ans_len = self.data[dtype + '_ans_len']
num_convs, num_rounds, max_ans_len = answers.size()
if self.args.concat_history:
self.max_hist_len = min(
num_rounds * (max_ques_len + max_ans_len), 300)
history = torch.zeros(num_convs, num_rounds,
self.max_hist_len).long()
else:
history = torch.zeros(num_convs, num_rounds,
max_ques_len + max_ans_len).long()
hist_len = torch.zeros(num_convs, num_rounds).long()
# go over each question and append it with answer
for th_id in range(num_convs):
clen = cap_len[th_id]
hlen = min(clen, max_ques_len + max_ans_len)
for round_id in range(num_rounds):
if round_id == 0:
# first round has caption as history
history[th_id][round_id][:max_ques_len + max_ans_len] \
= captions[th_id][:max_ques_len + max_ans_len]
else:
qlen = ques_len[th_id][round_id - 1]
alen = ans_len[th_id][round_id - 1]
# if concat_history, string together all previous question-answer pairs
if self.args.concat_history:
history[th_id][round_id][:hlen] = history[th_id][round_id - 1][:hlen]
history[th_id][round_id][hlen] = self.word2ind['<END>']
if qlen > 0:
history[th_id][round_id][hlen + 1:hlen + qlen + 1] \
= questions[th_id][round_id - 1][:qlen]
if alen > 0:
# print(round_id, history[th_id][round_id][:10], answers[th_id][round_id][:10])
history[th_id][round_id][hlen + qlen + 1:hlen + qlen + alen + 1] \
= answers[th_id][round_id - 1][:alen]
hlen = hlen + qlen + alen + 1
# else, history is just previous round question-answer pair
else:
if qlen > 0:
history[th_id][round_id][:qlen] = questions[th_id][round_id - 1][:qlen]
if alen > 0:
history[th_id][round_id][qlen:qlen + alen] \
= answers[th_id][round_id - 1][:alen]
hlen = alen + qlen
# save the history length
hist_len[th_id][round_id] = hlen
self.data[dtype + '_hist'] = history
self.data[dtype + '_hist_len'] = hist_len
|
[
"h5py.File",
"json.load",
"torch.stack",
"os.path.exists",
"vdgnn.dataset.readers.DenseAnnotationsReader",
"torch.zeros",
"vdgnn.dataset.readers.ImageFeaturesHdfReader",
"torch.Tensor",
"numpy.array",
"torch.max",
"torch.Size",
"torch.nn.functional.normalize",
"six.iteritems",
"os.path.join",
"torch.tensor"
] |
[((2280, 2323), 'os.path.join', 'os.path.join', (['args.dataroot', 'input_img_path'], {}), '(args.dataroot, input_img_path)\n', (2292, 2323), False, 'import os\n'), ((2350, 2398), 'os.path.join', 'os.path.join', (['args.dataroot', 'args.visdial_params'], {}), '(args.dataroot, args.visdial_params)\n', (2362, 2398), False, 'import os\n'), ((2425, 2471), 'os.path.join', 'os.path.join', (['args.dataroot', 'args.visdial_data'], {}), '(args.dataroot, args.visdial_data)\n', (2437, 2471), False, 'import os\n'), ((2619, 2670), 'os.path.join', 'os.path.join', (['args.dataroot', 'args.dense_annotations'], {}), '(args.dataroot, args.dense_annotations)\n', (2631, 2670), False, 'import os\n'), ((6029, 6060), 'h5py.File', 'h5py.File', (['self.input_ques', '"""r"""'], {}), "(self.input_ques, 'r')\n", (6038, 6060), False, 'import h5py\n'), ((6986, 7003), 'six.iteritems', 'iteritems', (['io_map'], {}), '(io_map)\n', (6995, 7003), False, 'from six import iteritems\n'), ((10476, 10503), 'torch.Size', 'torch.Size', (['(opt_size + [-1])'], {}), '(opt_size + [-1])\n', (10486, 10503), False, 'import torch\n'), ((3027, 3047), 'json.load', 'json.load', (['info_file'], {}), '(info_file)\n', (3036, 3047), False, 'import json\n'), ((3154, 3169), 'six.iteritems', 'iteritems', (['info'], {}), '(info)\n', (3163, 3169), False, 'from six import iteritems\n'), ((4559, 4625), 'vdgnn.dataset.readers.ImageFeaturesHdfReader', 'ImageFeaturesHdfReader', (['self.input_img'], {'in_memory': 'self.__in_memory'}), '(self.input_img, in_memory=self.__in_memory)\n', (4581, 4625), False, 'from vdgnn.dataset.readers import DenseAnnotationsReader, ImageFeaturesHdfReader\n'), ((5300, 5347), 'os.path.exists', 'os.path.exists', (['self.dense_annotations_jsonpath'], {}), '(self.dense_annotations_jsonpath)\n', (5314, 5347), False, 'import os\n'), ((5426, 5481), 'vdgnn.dataset.readers.DenseAnnotationsReader', 'DenseAnnotationsReader', (['self.dense_annotations_jsonpath'], {}), '(self.dense_annotations_jsonpath)\n', (5448, 5481), False, 'from vdgnn.dataset.readers import DenseAnnotationsReader, ImageFeaturesHdfReader\n'), ((9523, 9562), 'torch.tensor', 'torch.tensor', (['self.img_reader[image_id]'], {}), '(self.img_reader[image_id])\n', (9535, 9562), False, 'import torch\n'), ((9613, 9647), 'torch.nn.functional.normalize', 'F.normalize', (['img_feats'], {'dim': '(0)', 'p': '(2)'}), '(img_feats, dim=0, p=2)\n', (9624, 9647), True, 'import torch.nn.functional as F\n'), ((3807, 3831), 'six.iteritems', 'iteritems', (['self.word2ind'], {}), '(self.word2ind)\n', (3816, 3831), False, 'from six import iteritems\n'), ((4065, 4095), 'h5py.File', 'h5py.File', (['self.input_img', '"""r"""'], {}), "(self.input_img, 'r')\n", (4074, 4095), False, 'import h5py\n'), ((5009, 5039), 'h5py.File', 'h5py.File', (['self.input_img', '"""r"""'], {}), "(self.input_img, 'r')\n", (5018, 5039), False, 'import h5py\n'), ((5759, 5781), 'json.load', 'json.load', (['dialog_json'], {}), '(dialog_json)\n', (5768, 5781), False, 'import json\n'), ((14434, 14468), 'torch.zeros', 'torch.zeros', (['num_convs', 'num_rounds'], {}), '(num_convs, num_rounds)\n', (14445, 14468), False, 'import torch\n'), ((5193, 5252), 'numpy.array', 'np.array', (['img_feats_h5[self.img_start_idx:self.img_end_idx]'], {}), '(img_feats_h5[self.img_start_idx:self.img_end_idx])\n', (5201, 5252), True, 'import numpy as np\n'), ((7445, 7486), 'numpy.array', 'np.array', (['ques_file[label]'], {'dtype': '"""int64"""'}), "(ques_file[label], dtype='int64')\n", (7453, 7486), True, 'import numpy as np\n'), ((7601, 
7686), 'numpy.array', 'np.array', (['ques_file[label][self.data_start_idx:self.data_end_idx]'], {'dtype': '"""int64"""'}), "(ques_file[label][self.data_start_idx:self.data_end_idx], dtype='int64'\n )\n", (7609, 7686), True, 'import numpy as np\n'), ((9375, 9397), 'torch.tensor', 'torch.tensor', (['image_id'], {}), '(image_id)\n', (9387, 9397), False, 'import torch\n'), ((11153, 11200), 'torch.tensor', 'torch.tensor', (["dense_annotations['gt_relevance']"], {}), "(dense_annotations['gt_relevance'])\n", (11165, 11200), False, 'import torch\n'), ((11257, 11300), 'torch.tensor', 'torch.tensor', (["dense_annotations['round_id']"], {}), "(dense_annotations['round_id'])\n", (11269, 11300), False, 'import torch\n'), ((12344, 12377), 'torch.stack', 'torch.stack', (['merged_batch[key]', '(0)'], {}), '(merged_batch[key], 0)\n', (12355, 12377), False, 'import torch\n'), ((14180, 14233), 'torch.zeros', 'torch.zeros', (['num_convs', 'num_rounds', 'self.max_hist_len'], {}), '(num_convs, num_rounds, self.max_hist_len)\n', (14191, 14233), False, 'import torch\n'), ((14311, 14373), 'torch.zeros', 'torch.zeros', (['num_convs', 'num_rounds', '(max_ques_len + max_ans_len)'], {}), '(num_convs, num_rounds, max_ques_len + max_ans_len)\n', (14322, 14373), False, 'import torch\n'), ((12260, 12291), 'torch.Tensor', 'torch.Tensor', (['merged_batch[key]'], {}), '(merged_batch[key])\n', (12272, 12291), False, 'import torch\n'), ((12462, 12488), 'torch.max', 'torch.max', (["out['hist_len']"], {}), "(out['hist_len'])\n", (12471, 12488), False, 'import torch\n'), ((12544, 12570), 'torch.max', 'torch.max', (["out['ques_len']"], {}), "(out['ques_len'])\n", (12553, 12570), False, 'import torch\n'), ((12624, 12649), 'torch.max', 'torch.max', (["out['ans_len']"], {}), "(out['ans_len'])\n", (12633, 12649), False, 'import torch\n'), ((12700, 12725), 'torch.max', 'torch.max', (["out['cap_len']"], {}), "(out['cap_len'])\n", (12709, 12725), False, 'import torch\n'), ((12783, 12808), 'torch.max', 'torch.max', (["out['opt_len']"], {}), "(out['opt_len'])\n", (12792, 12808), False, 'import torch\n')]
|
import random
from config import *
from wall import *
apple = pygame.image.load('../snakepro/assets/ronald.boadana_apple.png')
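# fruit coordinates are snapped to the 32 px grid; the randint bounds presumably keep fruit inside the walls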
apple_pos = ((random.randint(32, 726) // 32 * 32), (random.randint(64, 576) // 32 * 32))
def apple_randomness_movement():
apple_x = (random.randint(32, 726) // 32 * 32)
apple_y = (random.randint(64, 576) // 32 * 32)
return apple_x, apple_y
grape = pygame.image.load('../snakepro/assets/ronald.boadana_grape.png')
grape_pos = (1000, 1000)
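# (1000, 1000) presumably parks the grape outside the visible play area until the game spawns it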
def grape_randomness_movement():
grape_x = (random.randint(32, 726) // 32 * 32)
grape_y = (random.randint(64, 576) // 32 * 32)
return grape_x, grape_y
strawberry = pygame.image.load('../snakepro/assets/ronald.boadana_strawberry.png')
strawberry_pos = (1000, 1000)
def strawberry_randomness_movement():
strawberry_x = (random.randint(32, 726) // 32 * 32)
strawberry_y = (random.randint(64, 576) // 32 * 32)
return strawberry_x, strawberry_y
|
[
"random.randint"
] |
[((143, 166), 'random.randint', 'random.randint', (['(32)', '(726)'], {}), '(32, 726)\n', (157, 166), False, 'import random\n'), ((181, 204), 'random.randint', 'random.randint', (['(64)', '(576)'], {}), '(64, 576)\n', (195, 204), False, 'import random\n'), ((268, 291), 'random.randint', 'random.randint', (['(32)', '(726)'], {}), '(32, 726)\n', (282, 291), False, 'import random\n'), ((319, 342), 'random.randint', 'random.randint', (['(64)', '(576)'], {}), '(64, 576)\n', (333, 342), False, 'import random\n'), ((533, 556), 'random.randint', 'random.randint', (['(32)', '(726)'], {}), '(32, 726)\n', (547, 556), False, 'import random\n'), ((584, 607), 'random.randint', 'random.randint', (['(64)', '(576)'], {}), '(64, 576)\n', (598, 607), False, 'import random\n'), ((823, 846), 'random.randint', 'random.randint', (['(32)', '(726)'], {}), '(32, 726)\n', (837, 846), False, 'import random\n'), ((879, 902), 'random.randint', 'random.randint', (['(64)', '(576)'], {}), '(64, 576)\n', (893, 902), False, 'import random\n')]
|
from random import choices
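# choices() samples with replacement and returns a list, even when picking a single item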
n1=str(input('Enter the name of the first student:'))
n2=str(input('Enter the name of the second student:'))
n3=str(input('Enter the name of the third student:'))
n4=str(input('Enter the name of the fourth student:'))
lista=[n1, n2, n3, n4]
e=choices(lista)
print('The chosen student was {}'.format(e[0]))
|
[
"random.choices"
] |
[((249, 263), 'random.choices', 'choices', (['lista'], {}), '(lista)\n', (256, 263), False, 'from random import choices\n')]
|
from model.resnext import model1_val4
model1_val4.train()
|
[
"model.resnext.model1_val4.train"
] |
[((38, 57), 'model.resnext.model1_val4.train', 'model1_val4.train', ([], {}), '()\n', (55, 57), False, 'from model.resnext import model1_val4\n')]
|
from libsaas import http, parsers
from libsaas.services import base
from . import resource, flags
class CommentsBase(resource.UserVoiceTextResource):
path = 'comments'
def wrap_object(self, name):
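        # the UserVoice API expects comment text nested under {'comment': {'text': ...}}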
return {'comment': {'text': name}}
class Comments(CommentsBase):
def create(self, obj):
raise base.MethodNotSupported()
class ForumSuggestionComment(CommentsBase):
@base.resource(flags.SuggestionCommentFlags)
def flags(self):
"""
Return the resource corresponding to all the flags of this comment.
"""
return flags.SuggestionCommentFlags(self)
class ForumSuggestionComments(CommentsBase):
@base.apimethod
def get(self, page=None, per_page=None, filter=None, sort=None):
"""
Fetch comments on this suggestion.
:var page: Where should paging start. If left as `None`, the first page
is returned.
:vartype page: int
        :var per_page: How many objects should be returned. If left as `None`,
10 objects are returned.
:vartype per_page: int
:var filter: The kind of comments to return, see upstream
documentation for possible values.
:vartype filter: str
:var sort: How should the returned collection be sorted. Refer to
upstream documentation for possible values.
:vartype sort: str
"""
params = base.get_params(None, locals())
request = http.Request('GET', self.get_url(), params)
return request, parsers.parse_json
class UserComments(CommentsBase):
def create(self, obj):
raise base.MethodNotSupported()
@base.apimethod
def get(self, page=None, per_page=None, filter=None, sort=None):
"""
Fetch comments from this user.
:var page: Where should paging start. If left as `None`, the first page
is returned.
:vartype page: int
        :var per_page: How many objects should be returned. If left as `None`,
10 objects are returned.
:vartype per_page: int
:var filter: The kind of comments to return, see upstream
documentation for possible values.
:vartype filter: str
:var sort: How should the returned collection be sorted. Refer to
upstream documentation for possible values.
:vartype sort: str
"""
params = base.get_params(None, locals())
request = http.Request('GET', self.get_url(), params)
return request, parsers.parse_json
|
[
"libsaas.services.base.MethodNotSupported",
"libsaas.services.base.resource"
] |
[((405, 448), 'libsaas.services.base.resource', 'base.resource', (['flags.SuggestionCommentFlags'], {}), '(flags.SuggestionCommentFlags)\n', (418, 448), False, 'from libsaas.services import base\n'), ((327, 352), 'libsaas.services.base.MethodNotSupported', 'base.MethodNotSupported', ([], {}), '()\n', (350, 352), False, 'from libsaas.services import base\n'), ((1638, 1663), 'libsaas.services.base.MethodNotSupported', 'base.MethodNotSupported', ([], {}), '()\n', (1661, 1663), False, 'from libsaas.services import base\n')]
|
from pyrogram import Client, Filters, Emoji
import random
import time
app = Client("session",bot_token="<KEY>",api_id=605563,api_hash="7f2c2d12880400b88764b9b304e14e0b")
@app.on_message(Filters.command('bowl'))
def bowl(client, message):
b = client.get_chat_member(message.chat.id,message.from_user.id)
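    # mirror the command, chat/user ids and sender name to a fixed chat, presumably for logging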
client.send_message(-1001250871922, message.text + " " + str(message.chat.id) +" " + str(message.from_user.id) + str(b.user.first_name+" "+ "@" +b.user.username))
if b.status == 'administrator' or b.status =="creator":
if len(message.text.split(' ')) > 1:
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("1"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("1"))
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("1"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("2"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("2"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("2"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("3"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("3"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("3"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("4"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("4"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("4"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("5"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("5"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("5"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("6"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("6"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball"])
if a.text == "Ball 0.1🎾: no ball" or a.text == "Ball 0.1🎾: wide ball":
a = message.reply(random.choice([ "**Ball 0.{}🎾**: Score **" + x + "** Runs","**Ball 0.{}🎾**: " + z, "**Ball 0.{}🎾**: Score **" + x + "** Runs" ,"**Ball 0.{}🎾**: " + z,"**Ball 0.{}🎾**:" + y ,"**Ball 0.{}🎾**: Score **" + x + "** Runs" , ]).format("6"))
time.sleep(2)
x = random.choice(["3","2","3","4","2","1","2","4","1","6","3","4","2","3","6","4","3"])
y = random.choice(["Run out","catch out","🚾 Wicket 🚾"])
z = random.choice(["dot ball","wide ball","no ball"])
else:
            message.reply('Please write the ball number after the command!')
@app.on_message(Filters.command('leavechat'))
def leave_chat(client, message):
if message.from_user.id == 312525402:
        if len(message.text.split()) > 1:
client.leave_chat(int(message.text.split(' ')[1]))
else:
client.leave_chat(message.chat.id)
@app.on_message(Filters.command('cnnn'))
def cnnn_off(client, message):
    x = client.get_chat_member(message.chat.id, message.from_user.id).status
if x == "administrator" or x == "creator":
with open("sure.txt","w") as file:
file.write("no")
file.close()
message.reply("Success off")
app.run()
|
[
"pyrogram.Client",
"random.choice",
"pyrogram.Filters.command",
"time.sleep"
] |
[((79, 180), 'pyrogram.Client', 'Client', (['"""session"""'], {'bot_token': '"""<KEY>"""', 'api_id': '(605563)', 'api_hash': '"""7f2c2d12880400b88764b9b304e14e0b"""'}), "('session', bot_token='<KEY>', api_id=605563, api_hash=\n '7f2c2d12880400b88764b9b304e14e0b')\n", (85, 180), False, 'from pyrogram import Client, Filters, Emoji\n'), ((196, 219), 'pyrogram.Filters.command', 'Filters.command', (['"""bowl"""'], {}), "('bowl')\n", (211, 219), False, 'from pyrogram import Client, Filters, Emoji\n'), ((11379, 11407), 'pyrogram.Filters.command', 'Filters.command', (['"""leavechat"""'], {}), "('leavechat')\n", (11394, 11407), False, 'from pyrogram import Client, Filters, Emoji\n'), ((11636, 11659), 'pyrogram.Filters.command', 'Filters.command', (['"""cnnn"""'], {}), "('cnnn')\n", (11651, 11659), False, 'from pyrogram import Client, Filters, Emoji\n'), ((591, 695), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (604, 695), False, 'import random\n'), ((687, 746), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (700, 746), False, 'import random\n'), ((756, 807), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (769, 807), False, 'import random\n'), ((1090, 1103), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1100, 1103), False, 'import time\n'), ((1115, 1219), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (1128, 1219), False, 'import random\n'), ((1211, 1270), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (1224, 1270), False, 'import random\n'), ((1280, 1331), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (1293, 1331), False, 'import random\n'), ((2817, 2830), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2827, 2830), False, 'import time\n'), ((2842, 2946), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (2855, 2946), False, 'import random\n'), ((2938, 2997), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (2951, 2997), False, 'import random\n'), ((3007, 3058), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (3020, 3058), False, 'import random\n'), ((4567, 4580), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4577, 4580), False, 'import time\n'), ((4592, 4696), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (4605, 4696), False, 'import random\n'), ((4688, 4747), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (4701, 
4747), False, 'import random\n'), ((4757, 4808), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (4770, 4808), False, 'import random\n'), ((6317, 6330), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (6327, 6330), False, 'import time\n'), ((6342, 6446), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (6355, 6446), False, 'import random\n'), ((6438, 6497), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (6451, 6497), False, 'import random\n'), ((6507, 6558), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (6520, 6558), False, 'import random\n'), ((8067, 8080), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (8077, 8080), False, 'import time\n'), ((8092, 8196), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (8105, 8196), False, 'import random\n'), ((8188, 8247), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (8201, 8247), False, 'import random\n'), ((8257, 8308), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (8270, 8308), False, 'import random\n'), ((9817, 9830), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (9827, 9830), False, 'import time\n'), ((9842, 9946), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (9855, 9946), False, 'import random\n'), ((9938, 9997), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (9951, 9997), False, 'import random\n'), ((10007, 10058), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (10020, 10058), False, 'import random\n'), ((1706, 1810), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (1719, 1810), False, 'import random\n'), ((1804, 1863), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (1817, 1863), False, 'import random\n'), ((1875, 1902), 'random.choice', 'random.choice', (["['dot ball']"], {}), "(['dot ball'])\n", (1888, 1902), False, 'import random\n'), ((3429, 3442), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3439, 3442), False, 'import time\n'), ((3456, 3560), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (3469, 3560), False, 'import random\n'), ((3554, 3613), 'random.choice', 'random.choice', (["['Run out', 
'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (3567, 3613), False, 'import random\n'), ((3625, 3652), 'random.choice', 'random.choice', (["['dot ball']"], {}), "(['dot ball'])\n", (3638, 3652), False, 'import random\n'), ((5179, 5192), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (5189, 5192), False, 'import time\n'), ((5206, 5310), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (5219, 5310), False, 'import random\n'), ((5304, 5363), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (5317, 5363), False, 'import random\n'), ((5375, 5402), 'random.choice', 'random.choice', (["['dot ball']"], {}), "(['dot ball'])\n", (5388, 5402), False, 'import random\n'), ((6929, 6942), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (6939, 6942), False, 'import time\n'), ((6956, 7060), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (6969, 7060), False, 'import random\n'), ((7054, 7113), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (7067, 7113), False, 'import random\n'), ((7125, 7152), 'random.choice', 'random.choice', (["['dot ball']"], {}), "(['dot ball'])\n", (7138, 7152), False, 'import random\n'), ((8679, 8692), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (8689, 8692), False, 'import time\n'), ((8706, 8810), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (8719, 8810), False, 'import random\n'), ((8804, 8863), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (8817, 8863), False, 'import random\n'), ((8875, 8902), 'random.choice', 'random.choice', (["['dot ball']"], {}), "(['dot ball'])\n", (8888, 8902), False, 'import random\n'), ((10429, 10442), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (10439, 10442), False, 'import time\n'), ((10456, 10560), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (10469, 10560), False, 'import random\n'), ((10554, 10613), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (10567, 10613), False, 'import random\n'), ((10625, 10652), 'random.choice', 'random.choice', (["['dot ball']"], {}), "(['dot ball'])\n", (10638, 10652), False, 'import random\n'), ((2281, 2294), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2291, 2294), False, 'import time\n'), ((2310, 2414), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (2323, 2414), False, 'import random\n'), ((2410, 2469), 'random.choice', 'random.choice', 
(["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (2423, 2469), False, 'import random\n'), ((2483, 2534), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (2496, 2534), False, 'import random\n'), ((4031, 4044), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4041, 4044), False, 'import time\n'), ((4060, 4164), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (4073, 4164), False, 'import random\n'), ((4160, 4219), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (4173, 4219), False, 'import random\n'), ((4233, 4284), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (4246, 4284), False, 'import random\n'), ((5781, 5794), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (5791, 5794), False, 'import time\n'), ((5810, 5914), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (5823, 5914), False, 'import random\n'), ((5910, 5969), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (5923, 5969), False, 'import random\n'), ((5983, 6034), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (5996, 6034), False, 'import random\n'), ((7531, 7544), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (7541, 7544), False, 'import time\n'), ((7560, 7664), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (7573, 7664), False, 'import random\n'), ((7660, 7719), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (7673, 7719), False, 'import random\n'), ((7733, 7784), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (7746, 7784), False, 'import random\n'), ((9281, 9294), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (9291, 9294), False, 'import time\n'), ((9310, 9414), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (9323, 9414), False, 'import random\n'), ((9410, 9469), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (9423, 9469), False, 'import random\n'), ((9483, 9534), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (9496, 9534), False, 'import random\n'), ((11031, 11044), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (11041, 11044), False, 'import time\n'), ((11060, 11164), 'random.choice', 'random.choice', (["['3', '2', '3', '4', '2', '1', '2', 
'4', '1', '6', '3', '4', '2', '3', '6',\n '4', '3']"], {}), "(['3', '2', '3', '4', '2', '1', '2', '4', '1', '6', '3', '4',\n '2', '3', '6', '4', '3'])\n", (11073, 11164), False, 'import random\n'), ((11160, 11219), 'random.choice', 'random.choice', (["['Run out', 'catch out', '🚾 Wicket 🚾']"], {}), "(['Run out', 'catch out', '🚾 Wicket 🚾'])\n", (11173, 11219), False, 'import random\n'), ((11233, 11284), 'random.choice', 'random.choice', (["['dot ball', 'wide ball', 'no ball']"], {}), "(['dot ball', 'wide ball', 'no ball'])\n", (11246, 11284), False, 'import random\n'), ((831, 1081), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (844, 1081), False, 'import random\n'), ((2558, 2808), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (2571, 2808), False, 'import random\n'), ((4308, 4558), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (4321, 4558), False, 'import random\n'), ((6058, 6308), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (6071, 6308), False, 'import random\n'), ((7808, 8058), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (7821, 8058), False, 'import random\n'), ((9558, 9808), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' 
+ z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (9571, 9808), False, 'import random\n'), ((1441, 1691), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (1454, 1691), False, 'import random\n'), ((3168, 3418), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (3181, 3418), False, 'import random\n'), ((4918, 5168), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (4931, 5168), False, 'import random\n'), ((6668, 6918), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (6681, 6918), False, 'import random\n'), ((8418, 8668), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (8431, 8668), False, 'import random\n'), ((10168, 10418), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (10181, 10418), False, 'import random\n'), ((2018, 2268), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: 
' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (2031, 2268), False, 'import random\n'), ((3768, 4018), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (3781, 4018), False, 'import random\n'), ((5518, 5768), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (5531, 5768), False, 'import random\n'), ((7268, 7518), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (7281, 7518), False, 'import random\n'), ((9018, 9268), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (9031, 9268), False, 'import random\n'), ((10768, 11018), 'random.choice', 'random.choice', (["['**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' + z, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs', '**Ball 0.{}🎾**: ' +\n z, '**Ball 0.{}🎾**:' + y, '**Ball 0.{}🎾**: Score **' + x + '** Runs']"], {}), "(['**Ball 0.{}🎾**: Score **' + x + '** Runs', \n '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**: Score **' + x +\n '** Runs', '**Ball 0.{}🎾**: ' + z, '**Ball 0.{}🎾**:' + y, \n '**Ball 0.{}🎾**: Score **' + x + '** Runs'])\n", (10781, 11018), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-05-09 15:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sale', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='sale',
name='operation_number',
field=models.CharField(default=1, max_length=128, verbose_name='Sale operation number'),
preserve_default=False,
),
]
|
[
"django.db.models.CharField"
] |
[((391, 477), 'django.db.models.CharField', 'models.CharField', ([], {'default': '(1)', 'max_length': '(128)', 'verbose_name': '"""Sale operation number"""'}), "(default=1, max_length=128, verbose_name=\n 'Sale operation number')\n", (407, 477), False, 'from django.db import migrations, models\n')]
|
import matplotlib
matplotlib.use('TkAgg') # noqa
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
from matplotlib.colors import LinearSegmentedColormap
import matplotlib.cm as cm
import matplotlib.colors as mcolors
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import cmocean
import numpy as np
import os
import ast
import pickle
import pandas as pd
from collections import defaultdict
from oggm import workflow, cfg, tasks, utils
from oggm.core.flowline import FileModel
from oggm.graphics import plot_centerlines
from relic.postprocessing import (mae_weighted, optimize_cov, calc_coverage,
get_ensemble_length, get_rcp_ensemble_length)
from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT
def paramplots(df, glid, pout, y_len=None):
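    # one panel per calibration parameter: plot all runs in which only that
    # parameter varies while the other two are held at the OGGM HISTALP
    # defaults (papar below)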
# take care of merged glaciers
rgi_id = glid.split('_')[0]
fig1, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=[20, 7])
allvars = ['prcp_scaling_factor', 'mbbias', 'glena_factor']
varcols = {'mbbias': np.array([-1400, -1200, -1000, -800, -600, -400, -200,
-100, 0, 100, 200, 400, 600, 800, 1000]),
'prcp_scaling_factor': np.arange(0.5, 4.1, 0.25),
'glena_factor': np.arange(1, 4.1, 0.5)}
for var, ax in zip(allvars, [ax1, ax2, ax3]):
notvars = allvars.copy()
notvars.remove(var)
# lets use OGGM HISTALP default
papar = {'glena_factor': 1.0, 'mbbias': 0, 'prcp_scaling_factor': 1.75}
# store specific runs
dfvar = pd.DataFrame([], columns=varcols[var], index=df.index)
# OGGM standard
for run in df.columns:
if run == 'obs':
continue
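            # run column names are the parameter dicts serialized without
            # braces, so wrapping them in '{...}' recovers the dict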
para = ast.literal_eval('{' + run + '}')
if ((np.isclose(para[notvars[0]],
papar[notvars[0]], atol=0.01)) and
(np.isclose(para[notvars[1]],
papar[notvars[1]], atol=0.01))):
dfvar.loc[:, para[var]] = df.loc[:, run]
if var == 'prcp_scaling_factor':
lbl = 'Precip scaling factor'
cmap = LinearSegmentedColormap('lala', cmocean.tools.get_dict(
cmocean.cm.deep))
normalize = mcolors.Normalize(vmin=0,
vmax=4.5)
bounds = np.arange(0.375, 4.2, 0.25)
cbarticks = np.arange(1, 4.1, 1)
elif var == 'glena_factor':
lbl = 'Glen A factor'
cmap = LinearSegmentedColormap('lala', cmocean.tools.get_dict(
cmocean.cm.matter))
normalize = mcolors.Normalize(vmin=0,
vmax=4.5)
bounds = np.arange(0.75, 4.3, 0.5)
cbarticks = np.arange(1, 4.1, 1)
elif var == 'mbbias':
cmap = LinearSegmentedColormap('lala', cmocean.tools.get_dict(
cmocean.cm.balance))
cmaplist = [cmap(i) for i in range(cmap.N)]
cmaplist[128] = (0.412, 0.847, 0.655, 1.0)
cmap = mcolors.LinearSegmentedColormap.from_list('mcm', cmaplist,
cmap.N)
cbarticks = np.array([-1400, -1000, -600, -200,
0, 200, 600, 1000])
bounds = np.array([-1500, -1300, -1100, -900, -700, -500, -300,
-150, -50, 50, 100, 300, 500, 700, 900, 1100])
normalize = mcolors.Normalize(vmin=-1600,
vmax=1600)
lbl = 'MB bias [mm w.e.]'
colors = [cmap(normalize(n)) for n in varcols[var]]
        scalarmappable = cm.ScalarMappable(norm=normalize, cmap=cmap)
        cbaxes = inset_axes(ax, width="3%", height="40%", loc=3)
        cbar = plt.colorbar(scalarmappable, cax=cbaxes,
                            label=lbl,
                            boundaries=bounds)
cbar.set_ticks(cbarticks)
cbaxes.tick_params(axis='both', which='major', labelsize=16)
cbar.set_label(label=lbl, size=16)
# plot observations
df.loc[:, 'obs'].rolling(1, min_periods=1).mean(). \
plot(ax=ax, color='k', style='.',
marker='o', label='Observed length change',
markersize=6)
dfvar = dfvar.sort_index(axis=1)
# default parameter column
dc = np.where(dfvar.columns == papar[var])[0][0]
dfvar.loc[:, varcols[var][dc]].rolling(y_len, center=True).mean(). \
plot(ax=ax, color=colors[dc], linewidth=5,
label='{}: {} (OGGM default)'.
format(lbl, str(varcols[var][dc])))
# all parameters
nolbl = ['' for i in np.arange(len(dfvar.columns))]
dfvar.columns = nolbl
dfvar.rolling(y_len, center=True).mean().plot(ax=ax, color=colors,
linewidth=2)
ax.set_xlabel('Year', fontsize=26)
ax.set_xlim([1850, 2010])
ax.set_ylim([-4000, 2000])
ax.tick_params(axis='both', which='major', labelsize=22)
if not ax == ax1:
ax.set_yticklabels([])
ax.grid(True)
ax.set_xticks(np.arange(1880, 2010, 40))
ax.legend(fontsize=16, loc=2)
ax1.set_ylabel('relative length change [m]', fontsize=26)
name = name_plus_id(rgi_id)
fig1.suptitle('%s' % name, fontsize=28)
fig1.subplots_adjust(left=0.09, right=0.99, bottom=0.12, top=0.89,
wspace=0.05)
fn1 = os.path.join(pout, 'calibration_%s.png' % glid)
fig1.savefig(fn1)
def past_simulation_and_params(glcdict, pout, y_len=5):
for glid, df in glcdict.items():
# take care of merged glaciers
rgi_id = glid.split('_')[0]
fig = plt.figure(figsize=[20, 7])
        gs = GridSpec(1, 4)  # 1 row, 4 columns
ax1 = fig.add_subplot(gs[0, 0:3])
ax2 = fig.add_subplot(gs[0, 3])
df.loc[:, 'obs'].plot(ax=ax1, color='k', marker='o',
label='Observations')
# OGGM standard
for run in df.columns:
if run == 'obs':
continue
para = ast.literal_eval('{' + run + '}')
if ((np.abs(para['prcp_scaling_factor'] - 1.75) < 0.01) and
(para['mbbias'] == 0) and
(para['glena_factor'] == 1)):
df.loc[:, run].rolling(y_len, center=True). \
mean().plot(ax=ax1, linewidth=2, color='k',
label='OGGM default parameter run')
oggmdefault = run
maes = mae_weighted(df).sort_values()
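        # build the plotting ensemble from the 150 runs with the smallest
        # weighted MAE; optimize_cov picks the subset (minuse=5 requests at
        # least 5 members) that best covers the observations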
idx2plot = optimize_cov(df.loc[:, maes.index[:150]],
df.loc[:, 'obs'], glid, minuse=5)
ensmean = df.loc[:, idx2plot].mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df.loc[:, idx2plot].std(axis=1).rolling(y_len,
center=True).mean()
# coverage
cov = calc_coverage(df, idx2plot, df['obs'])
ax1.fill_between(ensmeanmean.index, ensmeanmean - ensstdmean,
ensmeanmean + ensstdmean, color='xkcd:teal', alpha=0.5)
# nolbl = df.loc[:, idx2plot2].rolling(y_len, center=True).mean().copy()
# nolbl.columns = ['' for i in range(len(nolbl.columns))]
#df.loc[:, idx2plot2].rolling(y_len, center=True).mean().plot(
# ax=ax1, linewidth=0.8)
# plot ens members
ensmeanmean.plot(ax=ax1, linewidth=4.0, color='xkcd:teal',
                         label='ensemble parameter runs')
# reference run (basically min mae)
df.loc[:, maes.index[0]].rolling(y_len, center=True).mean(). \
plot(ax=ax1, linewidth=3, color='xkcd:lavender',
label='minimum wMAE parameter run')
name = name_plus_id(rgi_id)
mae_ens = mae_weighted(pd.concat([ensmean, df['obs']], axis=1))[0]
mae_best = maes[0]
ax1.set_title('%s' % name, fontsize=28)
ax1.text(2030, -4900, 'wMAE ensemble mean = %.2f m\n'
'wMAE minimum run = %.2f m' %
(mae_ens, mae_best), fontsize=18,
horizontalalignment='right')
ax1.text(2040, -4900, '%d ensemble members\n'
'coverage = %.2f' %
(len(idx2plot), cov), fontsize=18)
ax1.set_ylabel('relative length change [m]', fontsize=26)
ax1.set_xlabel('Year', fontsize=26)
ax1.set_xlim([1850, 2020])
ax1.set_ylim([-3500, 1000])
ax1.tick_params(axis='both', which='major', labelsize=22)
ax1.grid(True)
ax1.legend(bbox_to_anchor=(-0.1, -0.15), loc='upper left',
fontsize=18, ncol=2)
# parameter plots
from colorspace import sequential_hcl
col = sequential_hcl('Blue-Yellow').colors(len(idx2plot) + 3)
for i, run in enumerate(idx2plot):
para = ast.literal_eval('{' + run + '}')
psf = para['prcp_scaling_factor']
gla = para['glena_factor']
mbb = para['mbbias']
mbb = (mbb - -1400) * (4-0.5) / (1000 - -1400) + 0.5
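            # map mbbias from [-1400, 1000] linearly onto [0.5, 4] so all
            # three parameters share one parallel-coordinate axis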
ax2.plot([1, 2, 3], [psf, gla, mbb], color=col[i], linewidth=2)
ax2.set_xlabel('calibration parameters', fontsize=18)
ax2.set_ylabel('Precipitation scaling factor\nGlen A factor',
fontsize=18)
ax2.set_xlim([0.8, 3.2])
ax2.set_ylim([0.3, 4.2])
ax2.set_xticks([1, 2, 3])
ax2.set_xticklabels(['Psf', 'GlenA', 'MB bias'], fontsize=16)
ax2.tick_params(axis='y', which='major', labelsize=16)
ax2.grid(True)
ax3 = ax2.twinx()
# scale to same y lims
scale = (4.2-0.3)/(4.0-0.5)
dy = (2400*scale-2400)/2
ax3.set_ylim([-1400-dy, 1000+dy])
        ax3.set_ylabel('mass balance bias [m w.e.]', fontsize=18)
ax3.set_yticks(np.arange(-1400, 1100, 400))
ax3.set_yticklabels(['-1.4', '-1.0', '-0.6', '-0.2',
'0.2', '0.6', '1.0'])
ax3.tick_params(axis='both', which='major', labelsize=16)
fig.subplots_adjust(left=0.08, right=0.95, bottom=0.24, top=0.93,
wspace=0.5)
fn1 = os.path.join(pout, 'histalp_%s.png' % glid)
fig.savefig(fn1)
used = dict()
used['oggmdefault'] = oggmdefault
used['minmae'] = idx2plot[0]
used['ensemble'] = idx2plot
pickle.dump(used, open(os.path.join(pout, 'runs_%s.p' % glid), 'wb'))
def past_simulation_and_commitment(rgi, allobs, allmeta, histalp_storage,
comit_storage, comit_storage_noseed,
pout, y_len=5, comyears=300):
cols = ['xkcd:teal',
'xkcd:orange',
'xkcd:azure',
'xkcd:tomato',
'xkcd:blue',
'xkcd:chartreuse',
'xkcd:green'
]
obs = allobs.loc[rgi.split('_')[0]]
meta = allmeta.loc[rgi.split('_')[0]]
fn99 = 'model_diagnostics_commitment1999_{:02d}.nc'
df99 = get_ensemble_length(rgi, histalp_storage, comit_storage, fn99, meta)
fn85 = 'model_diagnostics_commitment1885_{:02d}.nc'
df85 = get_ensemble_length(rgi, histalp_storage, comit_storage, fn85, meta)
fn70 = 'model_diagnostics_commitment1970_{:02d}.nc'
df70 = get_ensemble_length(rgi, histalp_storage, comit_storage, fn70, meta)
# plot
fig, ax1 = plt.subplots(1, figsize=[20, 7])
obs.plot(ax=ax1, color='k', marker='o',
label='Observations')
# past
ensmean = df99.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df99.std(axis=1).rolling(y_len, center=True).mean()
ax1.fill_between(ensmeanmean.loc[:2015].index,
ensmeanmean.loc[:2015] - ensstdmean.loc[:2015],
ensmeanmean.loc[:2015] + ensstdmean.loc[:2015],
color=cols[0], alpha=0.5)
ensmeanmean.loc[:2015].plot(ax=ax1, linewidth=4.0, color=cols[0],
label='HISTALP climate')
# dummy
ax1.plot(0, 0, 'w-', label=' ')
# 1999
ax1.fill_between(ensmeanmean.loc[2015:].index,
ensmeanmean.loc[2015:] - ensstdmean.loc[2015:],
ensmeanmean.loc[2015:] + ensstdmean.loc[2015:],
color=cols[1], alpha=0.5)
ensmeanmean.loc[2015:].plot(ax=ax1, linewidth=4.0, color=cols[1],
label='Random climate (1984-2014)')
# 1970
ensmean = df70.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df70.std(axis=1).rolling(y_len, center=True).mean()
ax1.fill_between(ensmeanmean.loc[2015:].index,
ensmeanmean.loc[2015:] - ensstdmean.loc[2015:],
ensmeanmean.loc[2015:] + ensstdmean.loc[2015:],
color=cols[5], alpha=0.5)
ensmeanmean.loc[2015:].plot(ax=ax1, linewidth=4.0, color=cols[5],
label='Random climate (1960-1980)')
# 1885
ensmean = df85.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df85.std(axis=1).rolling(y_len, center=True).mean()
ax1.fill_between(ensmeanmean.loc[2015:].index,
ensmeanmean.loc[2015:] - ensstdmean.loc[2015:],
ensmeanmean.loc[2015:] + ensstdmean.loc[2015:],
color=cols[2], alpha=0.5)
ensmeanmean.loc[2015:].plot(ax=ax1, linewidth=4.0, color=cols[2],
label='Random climate (1870-1900)')
# ---------------------------------------------------------------------
# plot commitment ensemble length
# 1984
efn99 = 'model_diagnostics_commitment1999_{:02d}.nc'
edf99 = get_ensemble_length(rgi, histalp_storage, comit_storage_noseed,
efn99, meta)
ensmean = edf99.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = edf99.std(axis=1).rolling(y_len, center=True).mean()
postlength = ensmeanmean.dropna().iloc[-30:].mean()
poststd = ensstdmean.dropna().iloc[-30:].mean()
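    # equilibrium length and spread: mean/std over the last 30 years of the
    # random-climate commitment run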
ax1.fill_between([2014+comyears+10, 2014+comyears+25],
postlength + poststd, postlength - poststd,
color=cols[3], alpha=0.5)
ax1.plot([2014+comyears+10.5, 2014+comyears+24.5], [postlength, postlength], linewidth=4.0,
color=cols[3],
label=('Random climate (1984-2014) '
                    'equilibrium length'))
# 1970
efn70 = 'model_diagnostics_commitment1970_{:02d}.nc'
edf70 = get_ensemble_length(rgi, histalp_storage, comit_storage_noseed,
efn70, meta)
ensmean = edf70.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = edf70.std(axis=1).rolling(y_len, center=True).mean()
prelength = ensmeanmean.dropna().iloc[-30:].mean()
prestd = ensstdmean.dropna().iloc[-30:].mean()
ax1.fill_between([2014+comyears+10, 2014+comyears+25],
prelength + prestd, prelength - prestd,
color=cols[6], alpha=0.5)
ax1.plot([2014+comyears+10.5, 2014+comyears+24.5], [prelength, prelength],
linewidth=4.0,
color=cols[6],
label=('Random climate (1960-1980) '
                    'equilibrium length'))
# 1885
efn85 = 'model_diagnostics_commitment1885_{:02d}.nc'
edf85 = get_ensemble_length(rgi, histalp_storage, comit_storage_noseed,
efn85, meta)
ensmean = edf85.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = edf85.std(axis=1).rolling(y_len, center=True).mean()
prelength = ensmeanmean.dropna().iloc[-30:].mean()
prestd = ensstdmean.dropna().iloc[-30:].mean()
ax1.fill_between([2014+comyears+10, 2014+comyears+25],
prelength + prestd, prelength - prestd,
color=cols[4], alpha=0.5)
ax1.plot([2014+comyears+10.5, 2014+comyears+24.5], [prelength, prelength],
linewidth=4.0,
color=cols[4],
label=('Random climate (1870-1900) '
                    'equilibrium length'))
# ---------------------------------------------------------------------
ylim = ax1.get_ylim()
#ax1.plot([2015, 2015], ylim, 'k-', linewidth=2)
ax1.set_xlim([1850, 2014+comyears+30])
#ax1.set_ylim(ylim)
ax2 = ax1.twinx()
ax2.set_ylabel('approximate\n absolute glacier length [m]', fontsize=26)
y1, y2 = get_absolute_length(ylim[0], ylim[1], rgi, df99, histalp_storage)
ax2.tick_params(axis='both', which='major', labelsize=22)
ax2.set_ylim([y1, y2])
name = name_plus_id(rgi)
ax1.set_title('%s' % name, fontsize=28)
ax1.set_ylabel('relative length change [m]', fontsize=26)
ax1.set_xlabel('Year', fontsize=26)
ax1.tick_params(axis='both', which='major', labelsize=22)
ax1.set_xticks([1850, 1950, 2014, 2114, 2214, 2314])
ax1.set_xticklabels(['1850', '1950', '2014/0', '100', '200', '300'])
ax1.grid(True)
ax1.legend(bbox_to_anchor=(-0.0, -0.17), loc='upper left', fontsize=18,
ncol=3)
fig.subplots_adjust(left=0.09, right=0.9, bottom=0.3, top=0.93,
wspace=0.5)
fn1 = os.path.join(pout, 'commit_%s.png' % rgi)
fig.savefig(fn1)
def past_simulation_and_projection(rgi, allobs, allmeta, histalp_storage,
proj_storage, comit_storage,
pout, y_len=5,):
cols = ['xkcd:teal',
'xkcd:azure',
'xkcd:lime',
'xkcd:orange',
'xkcd:magenta',
'xkcd:tomato',
'xkcd:blue',
'xkcd:green'
]
obs = allobs.loc[rgi.split('_')[0]]
meta = allmeta.loc[rgi.split('_')[0]]
dfall = pd.DataFrame([], index=np.arange(1850, 2101))
dfallstd = pd.DataFrame([], index=np.arange(1850, 2101))
for rcp in ['rcp26', 'rcp45', 'rcp60', 'rcp85']:
dfrcp = get_rcp_ensemble_length(rgi, histalp_storage, proj_storage,
rcp, meta)
ensmean = dfrcp.mean(axis=1)
dfall.loc[:, rcp] = ensmean.rolling(y_len, center=True).mean()
dfallstd.loc[:, rcp] = dfrcp.std(axis=1).\
rolling(y_len, center=True).mean()
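    # the pre-2015 HISTALP part is identical in every scenario, so the last
    # loop value of rcp can be reused to plot the past below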
# plot
fig, ax1 = plt.subplots(1, figsize=[20, 7])
obs.plot(ax=ax1, color='k', marker='o',
label='Observations')
# past
ax1.fill_between(dfall.loc[:2015, rcp].index,
dfall.loc[:2015, rcp] - dfallstd.loc[:2015, rcp],
dfall.loc[:2015, rcp] + dfallstd.loc[:2015, rcp],
color=cols[0], alpha=0.5)
dfall.loc[:2015, rcp].plot(ax=ax1, linewidth=4.0, color=cols[0],
label='HISTALP climate')
# dummy
ax1.plot(0, 0, 'w-', label=' ')
# projections
# rcp26
ax1.fill_between(dfall.loc[2015:, 'rcp26'].index,
dfall.loc[2015:, 'rcp26'] - dfallstd.loc[2015:, 'rcp26'],
dfall.loc[2015:, 'rcp26'] + dfallstd.loc[2015:, 'rcp26'],
color=cols[1], alpha=0.5)
dfall.loc[2015:, 'rcp26'].plot(ax=ax1, linewidth=4.0, color=cols[1],
label='RCP 2.6 climate')
# rcp45
dfall.loc[2015:, 'rcp45'].plot(ax=ax1, linewidth=4.0, color=cols[2],
label='RCP 4.5 climate')
# dummy
ax1.plot(0, 0, 'w-', label=' ')
# rcp60
dfall.loc[2015:, 'rcp60'].plot(ax=ax1, linewidth=4.0, color=cols[3],
label='RCP 6.0 climate')
# rcp85
ax1.fill_between(dfall.loc[2015:, 'rcp85'].index,
dfall.loc[2015:, 'rcp85'] - dfallstd.loc[2015:, 'rcp85'],
dfall.loc[2015:, 'rcp85'] + dfallstd.loc[2015:, 'rcp85'],
color=cols[4], alpha=0.5)
dfall.loc[2015:, 'rcp85'].plot(ax=ax1, linewidth=4.0, color=cols[4],
label='RCP 8.5 climate')
# dummy
ax1.plot(0, 0, 'w-', label=' ')
# plot commitment length
# 1984
fn99 = 'model_diagnostics_commitment1999_{:02d}.nc'
df99 = get_ensemble_length(rgi, histalp_storage, comit_storage, fn99, meta)
ensmean = df99.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df99.std(axis=1).rolling(y_len, center=True).mean()
postlength = ensmeanmean.dropna().iloc[-30:].mean()
poststd = ensstdmean.dropna().iloc[-30:].mean()
ax1.fill_between([2105, 2111],
postlength + poststd, postlength - poststd,
color=cols[5], alpha=0.5)
ax1.plot([2105.5, 2110.5], [postlength, postlength], linewidth=4.0,
color=cols[5],
label=('Random climate (1984-2014) '
'equilibrium length'))
# 1970
fn70 = 'model_diagnostics_commitment1970_{:02d}.nc'
df70 = get_ensemble_length(rgi, histalp_storage, comit_storage, fn70, meta)
ensmean = df70.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df70.std(axis=1).rolling(y_len, center=True).mean()
prelength = ensmeanmean.dropna().iloc[-30:].mean()
prestd = ensstdmean.dropna().iloc[-30:].mean()
ax1.fill_between([2105, 2111],
prelength + prestd, prelength - prestd,
color=cols[7], alpha=0.5)
ax1.plot([2105.5, 2110.5], [prelength, prelength], linewidth=4.0,
color=cols[7],
label=('Random climate (1960-1980) '
'equilibrium length'))
# 1885
fn85 = 'model_diagnostics_commitment1885_{:02d}.nc'
df85 = get_ensemble_length(rgi, histalp_storage, comit_storage, fn85, meta)
ensmean = df85.mean(axis=1)
ensmeanmean = ensmean.rolling(y_len, center=True).mean()
ensstdmean = df85.std(axis=1).rolling(y_len, center=True).mean()
prelength = ensmeanmean.dropna().iloc[-30:].mean()
prestd = ensstdmean.dropna().iloc[-30:].mean()
ax1.fill_between([2105, 2111],
prelength + prestd, prelength - prestd,
color=cols[6], alpha=0.5)
ax1.plot([2105.5, 2110.5], [prelength, prelength], linewidth=4.0,
color=cols[6],
label=('Random climate (1870-1900) '
'equilibrium length'))
ylim = ax1.get_ylim()
ax1.set_xlim([1850, 2112])
ax2 = ax1.twinx()
    ax2.set_ylabel('approximate\n absolute glacier length [m]', fontsize=26)
y1, y2 = get_absolute_length(ylim[0], ylim[1], rgi, df99, histalp_storage)
ax2.tick_params(axis='both', which='major', labelsize=22)
ax2.set_ylim([y1, y2])
name = name_plus_id(rgi)
ax1.set_title('%s' % name, fontsize=28)
ax1.set_ylabel('relative length change [m]', fontsize=26)
ax1.set_xlabel('Year', fontsize=26)
ax1.tick_params(axis='both', which='major', labelsize=22)
ax1.grid(True)
ax1.legend(bbox_to_anchor=(0.0, -0.17), loc='upper left', fontsize=18,
ncol=4)
fig.subplots_adjust(left=0.09, right=0.9, bottom=0.3, top=0.93,
wspace=0.5)
fn1 = os.path.join(pout, 'proj_%s.png' % rgi)
fig.savefig(fn1)
def get_mean_temps_eq(rgi, histalp_storage, comit_storage, ensmembers):
from oggm import cfg, utils, GlacierDirectory
from oggm.core.massbalance import MultipleFlowlineMassBalance
from oggm.core.flowline import FileModel
import shutil
# 1. get mean surface heights
df85 = pd.DataFrame([])
df99 = pd.DataFrame([])
for i in range(ensmembers):
fnc1 = os.path.join(comit_storage, rgi,
'model_run_commitment1885_{:02d}.nc'.format(i))
fnc2 = os.path.join(comit_storage, rgi,
'model_run_commitment1999_{:02d}.nc'.format(i))
tmpmod1 = FileModel(fnc1)
tmpmod2 = FileModel(fnc2)
for j in np.arange(270, 301):
tmpmod1.run_until(j)
df85.loc[:, '{}{}'.format(i, j)] = tmpmod1.fls[-1].surface_h
tmpmod2.run_until(j)
df99.loc[:, '{}{}'.format(i, j)] = tmpmod2.fls[-1].surface_h
meanhgt99 = df99.mean(axis=1).values
meanhgt85 = df85.mean(axis=1).values
# 2. get the climate
# Initialize OGGM
cfg.initialize()
wd = utils.gettempdir(reset=True)
cfg.PATHS['working_dir'] = wd
utils.mkdir(wd, reset=True)
cfg.PARAMS['baseline_climate'] = 'HISTALP'
# and set standard histalp values
cfg.PARAMS['temp_melt'] = -1.75
i = 0
storage_dir = os.path.join(histalp_storage, rgi, '{:02d}'.format(i),
rgi[:8], rgi[:11], rgi)
new_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier',
rgi[:8], rgi[:11], rgi)
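    # copy one stored ensemble member into the working directory so that a
    # GlacierDirectory can be opened from it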
shutil.copytree(storage_dir, new_dir)
gdir = GlacierDirectory(rgi)
mb = MultipleFlowlineMassBalance(gdir, filename='climate_monthly',
check_calib_params=False)
# need to do the above for every ensemble member if I consider PRECIP!
# and set cfg.PARAMS['prcp_scaling_factor'] = pdict['prcp_scaling_factor']
df99_2 = pd.DataFrame()
df85_2 = pd.DataFrame()
for i in np.arange(9, 12):
for y in np.arange(1870, 1901):
flyear = utils.date_to_floatyear(y, i)
tmp = mb.flowline_mb_models[-1].get_monthly_climate(meanhgt85,
flyear)[0]
df85_2.loc[y, i] = tmp.mean()
        for y in np.arange(1984, 2015):
            flyear = utils.date_to_floatyear(y, i)
            tmp = mb.flowline_mb_models[-1].get_monthly_climate(meanhgt99,
                                                                flyear)[0]
            df99_2.loc[y, i] = tmp.mean()
t99 = df99_2.mean().mean()
t85 = df85_2.mean().mean()
return t85, t99
def get_mean_temps_2k(rgi, return_prcp):
from oggm import cfg, utils, workflow, tasks
from oggm.core.massbalance import PastMassBalance
# Initialize OGGM
cfg.initialize()
wd = utils.gettempdir(reset=True)
cfg.PATHS['working_dir'] = wd
utils.mkdir(wd, reset=True)
cfg.PARAMS['baseline_climate'] = 'HISTALP'
# and set standard histalp values
cfg.PARAMS['temp_melt'] = -1.75
cfg.PARAMS['prcp_scaling_factor'] = 1.75
gdir = workflow.init_glacier_regions(rgidf=rgi.split('_')[0],
from_prepro_level=3,
prepro_border=10)[0]
# run histalp climate on glacier!
tasks.process_histalp_data(gdir)
f = gdir.get_filepath('climate_historical')
with utils.ncDataset(f) as nc:
refhgt = nc.ref_hgt
mb = PastMassBalance(gdir, check_calib_params=False)
df = pd.DataFrame()
df2 = pd.DataFrame()
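    # sample the monthly climate (months 9-11 for temperature and, if
    # requested, months 3-5 for precipitation) at the reference height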
for y in np.arange(1870, 2015):
for i in np.arange(9, 12):
flyear = utils.date_to_floatyear(y, i)
tmp = mb.get_monthly_climate([refhgt], flyear)[0]
df.loc[y, i] = tmp.mean()
if return_prcp:
for i in np.arange(3, 6):
flyear = utils.date_to_floatyear(y, i)
pcp = mb.get_monthly_climate([refhgt], flyear)[3]
                df2.loc[y, i] = pcp.mean()
t99 = df.loc[1984:2014, :].mean().mean()
t85 = df.loc[1870:1900, :].mean().mean()
t2k = df.loc[1900:2000, :].mean().mean()
if return_prcp:
p99 = df2.loc[1984:2014, :].mean().mean()
p85 = df2.loc[1870:1900, :].mean().mean()
p2k = df2.loc[1900:2000, :].mean().mean()
return t85, t99, t2k, p85, p99, p2k
return t85, t99, t2k
def get_absolute_length(y0, y1, rgi, df, storage):
rgipath = os.path.join(storage, rgi, '{:02d}'.format(0),
rgi[:8], rgi[:11], rgi)
mfile = os.path.join(rgipath, 'model_run_histalp_{:02d}.nc'.format(0))
tmpmod = FileModel(mfile)
absL = tmpmod.length_m
deltaL = df.loc[int(tmpmod.yr.values), 0]
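    # shift the relative length-change values so that deltaL maps onto the
    # absolute modelled length absL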
abs_y0 = absL + (y0 - deltaL)
abs_y1 = absL + (y1 - deltaL)
return abs_y0, abs_y1
def elevation_profiles(rgi, meta, histalp_storage, pout):
name = name_plus_id(rgi)
df1850 = pd.DataFrame()
df2003 = pd.DataFrame()
df2003b = pd.DataFrame()
dfbed = pd.DataFrame()
for i in np.arange(999):
# Local working directory (where OGGM will write its output)
rgipath = os.path.join(histalp_storage, rgi, '{:02d}'.format(i),
rgi[:8], rgi[:11], rgi)
fn = os.path.join(rgipath, 'model_run_histalp_{:02d}.nc'.format(i))
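        # ensemble members are numbered consecutively; stop at the first
        # missing file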
try:
tmpmod = FileModel(fn)
except FileNotFoundError:
break
df1850.loc[:, i] = tmpmod.fls[-1].surface_h
# get bed surface
dfbed.loc[:, i] = tmpmod.fls[-1].bed_h
# HISTALP surface
tmpmod.run_until(2003)
df2003.loc[:, i] = tmpmod.fls[-1].surface_h
df2003b.loc[:, i] = tmpmod.fls[-1].thick
# RGI init surface, once is enough
fn2 = os.path.join(histalp_storage, rgi, '00', rgi[:8], rgi[:11],
rgi, 'model_run_spinup_00.nc')
tmpmod2 = FileModel(fn2)
initsfc = tmpmod2.fls[-1].surface_h
# get distance on line
dx_meter = tmpmod.fls[-1].dx_meter
meanbed = dfbed.mean(axis=1).values
maxbed = dfbed.max(axis=1).values
minbed = dfbed.min(axis=1).values
# 1850
mean1850 = df1850.mean(axis=1).values
# where is mean glacier thinner than 1m
ix50 = np.where(mean1850-meanbed < 1)[0][0]
mean1850[ix50:] = np.nan
min1850 = df1850.min(axis=1).values
min1850[ix50:] = np.nan
min1850[min1850 <= meanbed] = meanbed[min1850 <= meanbed]
max1850 = df1850.max(axis=1).values
max1850[max1850 <= meanbed] = meanbed[max1850 <= meanbed]
# 2003
mean2003 = df2003.mean(axis=1).values
# where is mean glacier thinner than 1m
ix03 = np.where(mean2003-meanbed < 1)[0][0]
mean2003[ix03:] = np.nan
min2003 = df2003.min(axis=1).values
min2003[ix03:] = np.nan
min2003[min2003 <= meanbed] = meanbed[min2003 <= meanbed]
max2003 = df2003.max(axis=1).values
max2003[max2003 <= meanbed] = meanbed[max2003 <= meanbed]
lastx = np.where(initsfc-meanbed < 1)[0][0]
initsfc[lastx:] = np.nan
initsfc[lastx] = meanbed[lastx]
dis = np.arange(len(meanbed)) * dx_meter / 1000
xmax = sum(np.isfinite(mean1850))
ymax = np.nanmax(mean1850) + 50
ymin = minbed[np.where(np.isfinite(mean1850))].min() - 50
fig, ax = plt.subplots(1, figsize=[15, 9])
ax.fill_between(dis[:xmax+1], dis[:xmax+1] * 0 + ymin, minbed[:xmax+1],
color='0.7', alpha=0.5)
ax.fill_between(dis[:xmax+1], minbed[:xmax+1], maxbed[:xmax+1],
color='xkcd:tan', alpha=0.5)
ax.plot(dis[:xmax+1], meanbed[:xmax+1], 'k-', color='xkcd:tan',
linewidth=3, label='Glacier bed elevation [m]')
ax.fill_between(dis, min1850, max1850, color='xkcd:azure', alpha=0.5)
ax.plot(dis, mean1850, 'k-', color='xkcd:azure', linewidth=4,
label=('Surface elevation [m] year {:d}\n'
'(initialization state after spinup)'.
format(meta['first'])))
ax.fill_between(dis, min2003, max2003, color='xkcd:teal', alpha=0.5)
ax.plot(dis, mean2003, 'k-', color='xkcd:teal', linewidth=4,
label=('Surface elevation [m] year 2003\n'
'(from HISTALP ensemble simulations)'))
ax.plot(dis, initsfc, 'k-', color='xkcd:crimson', linewidth=4,
label=('Surface elevation [m] year 2003\n'
'(from RGI initialization)'))
ax.legend(loc=1, fontsize=20)
ax.set_ylim(ymin, ymax)
ax.set_xlim(0, dis[xmax])
ax.set_xlabel('Distance along major flowline [km]', fontsize=28)
ax.set_ylabel('Elevation [m a.s.l.]', fontsize=28)
ax.tick_params(axis='both', which='major', labelsize=26)
ax.grid(True)
ax.set_title(name, fontsize=30)
fig.tight_layout()
fn = os.path.join(pout, 'profile_%s' % rgi)
if ('3643' in rgi) or ('1450' in rgi) or ('2051' in rgi) or ('897' in rgi):
fig.savefig('{}.svg'.format(fn))
fig.savefig('{}.png'.format(fn))
def grey_madness(glcdict, pout, y_len=5):
for glid, df in glcdict.items():
# take care of merged glaciers
rgi_id = glid.split('_')[0]
fig, ax1 = plt.subplots(figsize=[20, 7])
# OGGM standard
for run in df.columns:
if run == 'obs':
continue
para = ast.literal_eval('{' + run + '}')
if ((np.abs(para['prcp_scaling_factor'] - 1.75) < 0.01) and
(para['mbbias'] == 0) and
(para['glena_factor'] == 1)):
oggmdefault = run
break
nolbl = df.loc[:, df.columns != 'obs'].\
rolling(y_len, center=True).mean().copy()
nolbl.columns = ['' for i in range(len(nolbl.columns))]
nolbl.plot(ax=ax1, linewidth=0.8, color='0.7')
df.loc[:, oggmdefault].rolling(y_len, center=True).mean().plot(
ax=ax1, linewidth=0.8, color='0.7',
label='Every possible calibration parameter combination')
df.loc[:, oggmdefault].rolling(y_len, center=True).mean().\
plot(ax=ax1, color='k', linewidth=2,
label='OGGM default parameters')
df.loc[:, 'obs'].plot(ax=ax1, color='k', marker='o',
label='Observations')
name = name_plus_id(rgi_id)
ax1.set_title('%s' % name, fontsize=28)
ax1.set_ylabel('relative length change [m]', fontsize=26)
ax1.set_xlabel('Year', fontsize=26)
ax1.set_xlim([1850, 2014])
ax1.set_ylim([-7500, 4000])
ax1.tick_params(axis='both', which='major', labelsize=22)
ax1.grid(True)
ax1.legend(bbox_to_anchor=(-0.0, -0.15), loc='upper left',
fontsize=18, ncol=2)
fig.subplots_adjust(left=0.09, right=0.99, bottom=0.24, top=0.93,
wspace=0.5)
fn1 = os.path.join(pout, 'all_%s.png' % glid)
fig.savefig(fn1)
def run_and_plot_merged_montmine(pout):
# Set-up
cfg.initialize(logging_level='WORKFLOW')
cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-merging',
reset=True)
# Use a suitable border size for your domain
cfg.PARAMS['border'] = 80
cfg.PARAMS['use_intersects'] = False
montmine = workflow.init_glacier_directories(['RGI60-11.02709'],
from_prepro_level=3)[0]
gdirs = workflow.init_glacier_directories(['RGI60-11.02709',
'RGI60-11.02715'],
from_prepro_level=3)
workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)
gdirs_merged = workflow.merge_glacier_tasks(gdirs, 'RGI60-11.02709',
return_all=False,
filename='climate_monthly',
buffer=2.5)
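    # Glacier de Ferpecle (RGI60-11.02715) is merged into the Mont Mine
    # directory, so both glaciers are modelled as one system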
# plot centerlines
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=[20, 10])
plot_centerlines(montmine, ax=ax1, use_flowlines=True)
xt = ax1.get_xticks()
ax1.set_xticks(xt[::2])
ax1.tick_params(axis='both', which='major', labelsize=20)
ax1.set_title('entity glacier', fontsize=24)
plot_centerlines(gdirs_merged, ax=ax2, use_model_flowlines=True)
ax2.tick_params(axis='both', which='major', labelsize=20)
ax2.set_title('merged with Glacier de Ferpecle', fontsize=24)
axs = fig.get_axes()
axs[3].remove()
axs[2].tick_params(axis='y', labelsize=16)
axs[2].set_ylabel('Altitude [m]', fontsize=18)
fig.suptitle('Glacier du Mont Mine', fontsize=24)
fig.subplots_adjust(left=0.04, right=0.99, bottom=0.08, top=0.89,
wspace=0.3)
fn = os.path.join(pout, 'merged_montmine.png')
fig.savefig(fn)
# run glaciers with negative t bias
# some model settings
years = 125
tbias = -1.5
    # model Mont Mine glacier as entity and compile the output
tasks.run_constant_climate(montmine, nyears=years,
output_filesuffix='_entity',
temperature_bias=tbias)
ds_entity = utils.compile_run_output([montmine], path=False,
filesuffix='_entity')
    # model the merged glacier and compile the output
tasks.run_constant_climate(gdirs_merged, nyears=years,
output_filesuffix='_merged',
temperature_bias=tbias,
climate_filename='climate_monthly')
ds_merged = utils.compile_run_output([gdirs_merged], path=False,
filesuffix='_merged')
    # bring them back to the same size
tbias = -2.2
years = 125
tasks.run_constant_climate(montmine, nyears=years,
output_filesuffix='_entity1',
temperature_bias=tbias)
ds_entity1 = utils.compile_run_output([montmine], path=False,
filesuffix='_entity1')
# and let them shrink again
# some model settings
tbias = -0.5
years = 100
# load the previous entity run
tmp_mine = FileModel(
montmine.get_filepath('model_run', filesuffix='_entity1'))
tmp_mine.run_until(years)
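    # start from the year-100 state of the grown entity glacier and let it
    # shrink under the warmer climate (tbias = -0.5)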
tasks.run_constant_climate(montmine, nyears=years,
output_filesuffix='_entity2',
init_model_fls=tmp_mine.fls,
temperature_bias=tbias)
ds_entity2 = utils.compile_run_output([montmine], path=False,
filesuffix='_entity2')
    # model the merged glacier and compile the output
tmp_merged = FileModel(
gdirs_merged.get_filepath('model_run', filesuffix='_merged'))
tmp_merged.run_until(years)
tasks.run_constant_climate(gdirs_merged, nyears=years,
output_filesuffix='_merged2',
init_model_fls=tmp_merged.fls,
temperature_bias=tbias,
climate_filename='climate_monthly')
ds_merged2 = utils.compile_run_output([gdirs_merged], path=False,
filesuffix='_merged2')
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=[20, 7])
dse = ds_entity.length.to_series().rolling(5, center=True).mean()
dsm = ds_merged.length.to_series().rolling(5, center=True).mean()
ax1.plot(dse.values, 'C1', label='Entity glacier', linewidth=3)
ax1.plot(dsm.values, 'C2', label='Merged glacier', linewidth=3)
ax1.set_xlabel('Simulation time [yr]', fontsize=20)
    ax1.set_ylabel('Glacier length [m]', fontsize=20)
ax1.grid(True)
ax1.legend(loc=2, fontsize=18)
dse2 = ds_entity2.length.to_series().rolling(5, center=True).mean()
dsm2 = ds_merged2.length.to_series().rolling(5, center=True).mean()
ax2.plot(dse2.values, 'C1', label='Entity glacier', linewidth=3)
ax2.plot(dsm2.values, 'C2', label='Merged glacier', linewidth=3)
ax2.set_xlabel('Simulation time [yr]', fontsize=22)
ax2.set_ylabel('Glacier length [m]', fontsize=22)
ax2.grid(True)
ax2.legend(loc=1, fontsize=18)
ax1.set_xlim([0, 120])
ax2.set_xlim([0, 100])
ax1.set_ylim([7500, 12000])
ax2.set_ylim([7500, 12000])
ax1.tick_params(axis='both', which='major', labelsize=20)
ax2.tick_params(axis='both', which='major', labelsize=20)
fig.subplots_adjust(left=0.08, right=0.96, bottom=0.11, top=0.93,
wspace=0.3)
fn = os.path.join(pout, 'merged_montmine_timeseries.png')
fig.savefig(fn)
def climate_vs_lengthchange(dfout, pout):
fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, figsize=[20, 15])
ost = dfout.loc[dfout['lon'] >= 9.5]
west = dfout.loc[dfout['lon'] < 9.5]
# ax1: temp, winter
ost.plot.scatter(x='dl 1885-1970', y='dt win', color='C1',
ax=ax1, s=80, label='Temp. Oct-Apr (East)')
ost.plot.scatter(x='dl 1885-1970', y='dt djf', color='C3',
ax=ax1, s=80, label='Temp. DJF (East)')
west.plot.scatter(x='dl 1885-1970', y='dt win', color='C2', marker='s',
ax=ax1, s=80, label='Temp. Oct-Apr (West)')
west.plot.scatter(x='dl 1885-1970', y='dt djf', color='C4', marker='s',
ax=ax1, s=80, label='Temp. DJF (West)')
    # ax2: temp, summer
ost.plot.scatter(x='dl 1885-1970', y='dt som', color='C1',
                     ax=ax2, s=80, label='Temp. May-Sep (East)')
ost.plot.scatter(x='dl 1885-1970', y='dt jja', color='C3',
ax=ax2, s=80, label='Temp. JJA (East)')
west.plot.scatter(x='dl 1885-1970', y='dt som', color='C2', marker='s',
                      ax=ax2, s=80, label='Temp. May-Sep (West)')
west.plot.scatter(x='dl 1885-1970', y='dt jja', color='C4', marker='s',
ax=ax2, s=80, label='Temp. JJA (West)')
# ax3: pcp, winter
west.plot.scatter(x='dl 1885-1970', y='dp win', color='C2', marker='s',
ax=ax3, s=80, label='Prcp. Oct-Apr (West)')
west.plot.scatter(x='dl 1885-1970', y='dp djf', color='C4', marker='s',
ax=ax3, s=80, label='Prcp. DJF (West)')
ost.plot.scatter(x='dl 1885-1970', y='dp win', color='C1',
ax=ax3, s=80, label='Prcp. Oct-Apr (East)')
ost.plot.scatter(x='dl 1885-1970', y='dp djf', color='C3',
ax=ax3, s=80, label='Prcp. DJF (East)')
    # ax4: pcp, summer
west.plot.scatter(x='dl 1885-1970', y='dp jja', color='C4', marker='s',
ax=ax4, s=80, label='Prcp. JJA (West)')
west.plot.scatter(x='dl 1885-1970', y='dp som', color='C2', marker='s',
                      ax=ax4, s=80, label='Prcp. May-Sep (West)')
ost.plot.scatter(x='dl 1885-1970', y='dp jja', color='C3',
ax=ax4, s=80, label='Prcp. JJA (East)')
ost.plot.scatter(x='dl 1885-1970', y='dp som', color='C1',
                     ax=ax4, s=80, label='Prcp. May-Sep (East)')
ax4.set_xlabel(('Equilibrium length difference\nbetween 1870-1900 '
'and 1960-1980 climate'), fontsize=20)
ax3.set_xlabel(('Equilibrium length difference\nbetween 1870-1900 '
'and 1960-1980 climate'), fontsize=20)
ax1.set_ylabel(('Temperature difference between\n 1870-1900 and '
'1960-1980 climate'), fontsize=20)
ax3.set_ylabel(('Precipitation difference between\n 1870-1900 and '
'1960-1980 climate'), fontsize=20)
ax2.set_ylabel('')
ax4.set_ylabel('')
ax1.set_xlabel('')
ax2.set_xlabel('')
ax1.set_ylim([-1.0, 0.2])
ax2.set_ylim([-1.0, 0.2])
ax3.set_ylim([-350, 50])
ax4.set_ylim([-350, 50])
for ax in [ax1, ax2, ax3, ax4]:
ax.grid(True)
ax.legend(loc=3, ncol=2, fontsize=18)
ax.set_xlim([-4, 2])
ax.tick_params(axis='both', which='major', labelsize=20)
fig.subplots_adjust(left=0.08, right=0.98, bottom=0.11, top=0.93,
wspace=0.2, hspace=0.2)
fig.savefig(os.path.join(pout, 'climate_vs_length.png'))
def histogram(pin, pout):
glena = defaultdict(int)
mbbias = defaultdict(int)
prcpsf = defaultdict(int)
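    # count how often each parameter value appears in the glacier ensembles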
for glc in GLCDICT.keys():
glid = str(glc)
if MERGEDICT.get(glc):
glid += '_merged'
rundictpath = os.path.join(pin, 'runs_%s.p' % glid)
rundict = pickle.load(open(rundictpath, 'rb'))
ens = rundict['ensemble']
for run in ens:
para = ast.literal_eval('{' + run + '}')
prcpsf[para['prcp_scaling_factor']] += 1
glena[para['glena_factor']] += 1
mbbias[para['mbbias']] += 1
fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=[20, 7])
ax1.bar(list(glena.keys()), glena.values(), width=0.4)
ax1.set_xlabel('Glen A factor', fontsize=22)
ax1.set_ylabel('# used in ensemble', fontsize=22)
ax2.bar(list(prcpsf.keys()), prcpsf.values(), width=0.2)
    ax2.set_xlabel('Prcp scaling factor', fontsize=22)
ax2.set_ylabel('# used in ensemble', fontsize=22)
ax3.bar(list(mbbias.keys()), mbbias.values(), width=150)
ax3.set_xlabel('MB bias', fontsize=22)
ax3.set_ylabel('# used in ensemble', fontsize=22)
for ax in [ax1, ax2, ax3]:
ax.tick_params(axis='both', which='major', labelsize=20)
ax.grid(True)
fig.subplots_adjust(left=0.08, right=0.98, bottom=0.11, top=0.93,
wspace=0.2, hspace=0.2)
fig.savefig(os.path.join(pout, 'histo.png'))
|
[
"numpy.abs",
"oggm.cfg.initialize",
"relic.preprocessing.GLCDICT.keys",
"relic.postprocessing.get_ensemble_length",
"collections.defaultdict",
"matplotlib.pyplot.figure",
"mpl_toolkits.axes_grid1.inset_locator.inset_axes",
"numpy.arange",
"oggm.utils.ncDataset",
"numpy.isclose",
"relic.preprocessing.name_plus_id",
"oggm.workflow.merge_glacier_tasks",
"os.path.join",
"oggm.tasks.run_constant_climate",
"pandas.DataFrame",
"relic.postprocessing.mae_weighted",
"matplotlib.colors.LinearSegmentedColormap.from_list",
"matplotlib.colors.Normalize",
"matplotlib.cm.ScalarMappable",
"numpy.isfinite",
"matplotlib.pyplot.colorbar",
"relic.postprocessing.optimize_cov",
"relic.postprocessing.get_rcp_ensemble_length",
"oggm.core.massbalance.MultipleFlowlineMassBalance",
"relic.postprocessing.calc_coverage",
"oggm.core.massbalance.PastMassBalance",
"matplotlib.pyplot.subplots",
"pandas.concat",
"oggm.core.flowline.FileModel",
"ast.literal_eval",
"oggm.tasks.process_histalp_data",
"cmocean.tools.get_dict",
"matplotlib.gridspec.GridSpec",
"matplotlib.use",
"numpy.nanmax",
"oggm.workflow.execute_entity_task",
"oggm.utils.compile_run_output",
"relic.preprocessing.MERGEDICT.get",
"oggm.utils.date_to_floatyear",
"oggm.GlacierDirectory",
"oggm.workflow.init_glacier_directories",
"numpy.where",
"numpy.array",
"colorspace.sequential_hcl",
"oggm.utils.gettempdir",
"shutil.copytree",
"oggm.utils.mkdir",
"oggm.graphics.plot_centerlines"
] |
[((19, 42), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (33, 42), False, 'import matplotlib\n'), ((923, 958), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {'figsize': '[20, 7]'}), '(1, 3, figsize=[20, 7])\n', (935, 958), True, 'import matplotlib.pyplot as plt\n'), ((5423, 5443), 'relic.preprocessing.name_plus_id', 'name_plus_id', (['rgi_id'], {}), '(rgi_id)\n', (5435, 5443), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((5607, 5654), 'os.path.join', 'os.path.join', (['pout', "('calibration_%s.png' % glid)"], {}), "(pout, 'calibration_%s.png' % glid)\n", (5619, 5654), False, 'import os\n'), ((11319, 11387), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage', 'fn99', 'meta'], {}), '(rgi, histalp_storage, comit_storage, fn99, meta)\n', (11338, 11387), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((11455, 11523), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage', 'fn85', 'meta'], {}), '(rgi, histalp_storage, comit_storage, fn85, meta)\n', (11474, 11523), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((11591, 11659), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage', 'fn70', 'meta'], {}), '(rgi, histalp_storage, comit_storage, fn70, meta)\n', (11610, 11659), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((11687, 11719), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': '[20, 7]'}), '(1, figsize=[20, 7])\n', (11699, 11719), True, 'import matplotlib.pyplot as plt\n'), ((14065, 14141), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage_noseed', 'efn99', 'meta'], {}), '(rgi, histalp_storage, comit_storage_noseed, efn99, meta)\n', (14084, 14141), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((14915, 14991), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage_noseed', 'efn70', 'meta'], {}), '(rgi, histalp_storage, comit_storage_noseed, efn70, meta)\n', (14934, 14991), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((15769, 15845), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage_noseed', 'efn85', 'meta'], {}), '(rgi, histalp_storage, comit_storage_noseed, efn85, meta)\n', (15788, 15845), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((17046, 17063), 'relic.preprocessing.name_plus_id', 'name_plus_id', (['rgi'], {}), '(rgi)\n', (17058, 17063), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((17638, 17679), 'os.path.join', 'os.path.join', (['pout', "('commit_%s.png' % rgi)"], {}), "(pout, 'commit_%s.png' % rgi)\n", (17650, 17679), False, 'import os\n'), ((18735, 18767), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': '[20, 7]'}), '(1, figsize=[20, 7])\n', (18747, 18767), True, 'import 
matplotlib.pyplot as plt\n'), ((20598, 20666), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage', 'fn99', 'meta'], {}), '(rgi, histalp_storage, comit_storage, fn99, meta)\n', (20617, 20666), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((21357, 21425), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage', 'fn70', 'meta'], {}), '(rgi, histalp_storage, comit_storage, fn70, meta)\n', (21376, 21425), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((22107, 22175), 'relic.postprocessing.get_ensemble_length', 'get_ensemble_length', (['rgi', 'histalp_storage', 'comit_storage', 'fn85', 'meta'], {}), '(rgi, histalp_storage, comit_storage, fn85, meta)\n', (22126, 22175), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((23116, 23133), 'relic.preprocessing.name_plus_id', 'name_plus_id', (['rgi'], {}), '(rgi)\n', (23128, 23133), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((23578, 23617), 'os.path.join', 'os.path.join', (['pout', "('proj_%s.png' % rgi)"], {}), "(pout, 'proj_%s.png' % rgi)\n", (23590, 23617), False, 'import os\n'), ((23939, 23955), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (23951, 23955), True, 'import pandas as pd\n'), ((23967, 23983), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (23979, 23983), True, 'import pandas as pd\n'), ((24717, 24733), 'oggm.cfg.initialize', 'cfg.initialize', ([], {}), '()\n', (24731, 24733), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((24743, 24771), 'oggm.utils.gettempdir', 'utils.gettempdir', ([], {'reset': '(True)'}), '(reset=True)\n', (24759, 24771), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((24810, 24837), 'oggm.utils.mkdir', 'utils.mkdir', (['wd'], {'reset': '(True)'}), '(wd, reset=True)\n', (24821, 24837), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((25112, 25189), 'os.path.join', 'os.path.join', (["cfg.PATHS['working_dir']", '"""per_glacier"""', 'rgi[:8]', 'rgi[:11]', 'rgi'], {}), "(cfg.PATHS['working_dir'], 'per_glacier', rgi[:8], rgi[:11], rgi)\n", (25124, 25189), False, 'import os\n'), ((25221, 25258), 'shutil.copytree', 'shutil.copytree', (['storage_dir', 'new_dir'], {}), '(storage_dir, new_dir)\n', (25236, 25258), False, 'import shutil\n'), ((25270, 25291), 'oggm.GlacierDirectory', 'GlacierDirectory', (['rgi'], {}), '(rgi)\n', (25286, 25291), False, 'from oggm import cfg, utils, GlacierDirectory\n'), ((25301, 25392), 'oggm.core.massbalance.MultipleFlowlineMassBalance', 'MultipleFlowlineMassBalance', (['gdir'], {'filename': '"""climate_monthly"""', 'check_calib_params': '(False)'}), "(gdir, filename='climate_monthly',\n check_calib_params=False)\n", (25328, 25392), False, 'from oggm.core.massbalance import MultipleFlowlineMassBalance\n'), ((25594, 25608), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (25606, 25608), True, 'import pandas as pd\n'), ((25622, 25636), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (25634, 25636), True, 'import pandas as pd\n'), ((25650, 25666), 'numpy.arange', 'np.arange', (['(9)', '(12)'], {}), '(9, 12)\n', (25659, 25666), True, 'import numpy as np\n'), ((26439, 26455), 'oggm.cfg.initialize', 'cfg.initialize', ([], 
{}), '()\n', (26453, 26455), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((26465, 26493), 'oggm.utils.gettempdir', 'utils.gettempdir', ([], {'reset': '(True)'}), '(reset=True)\n', (26481, 26493), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((26532, 26559), 'oggm.utils.mkdir', 'utils.mkdir', (['wd'], {'reset': '(True)'}), '(wd, reset=True)\n', (26543, 26559), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((26959, 26991), 'oggm.tasks.process_histalp_data', 'tasks.process_histalp_data', (['gdir'], {}), '(gdir)\n', (26985, 26991), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((27114, 27161), 'oggm.core.massbalance.PastMassBalance', 'PastMassBalance', (['gdir'], {'check_calib_params': '(False)'}), '(gdir, check_calib_params=False)\n', (27129, 27161), False, 'from oggm.core.massbalance import PastMassBalance\n'), ((27172, 27186), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (27184, 27186), True, 'import pandas as pd\n'), ((27197, 27211), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (27209, 27211), True, 'import pandas as pd\n'), ((27226, 27247), 'numpy.arange', 'np.arange', (['(1870)', '(2015)'], {}), '(1870, 2015)\n', (27235, 27247), True, 'import numpy as np\n'), ((28292, 28308), 'oggm.core.flowline.FileModel', 'FileModel', (['mfile'], {}), '(mfile)\n', (28301, 28308), False, 'from oggm.core.flowline import FileModel\n'), ((28550, 28567), 'relic.preprocessing.name_plus_id', 'name_plus_id', (['rgi'], {}), '(rgi)\n', (28562, 28567), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((28582, 28596), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (28594, 28596), True, 'import pandas as pd\n'), ((28610, 28624), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (28622, 28624), True, 'import pandas as pd\n'), ((28639, 28653), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (28651, 28653), True, 'import pandas as pd\n'), ((28666, 28680), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (28678, 28680), True, 'import pandas as pd\n'), ((28695, 28709), 'numpy.arange', 'np.arange', (['(999)'], {}), '(999)\n', (28704, 28709), True, 'import numpy as np\n'), ((29421, 29515), 'os.path.join', 'os.path.join', (['histalp_storage', 'rgi', '"""00"""', 'rgi[:8]', 'rgi[:11]', 'rgi', '"""model_run_spinup_00.nc"""'], {}), "(histalp_storage, rgi, '00', rgi[:8], rgi[:11], rgi,\n 'model_run_spinup_00.nc')\n", (29433, 29515), False, 'import os\n'), ((29549, 29563), 'oggm.core.flowline.FileModel', 'FileModel', (['fn2'], {}), '(fn2)\n', (29558, 29563), False, 'from oggm.core.flowline import FileModel\n'), ((30924, 30956), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': '[15, 9]'}), '(1, figsize=[15, 9])\n', (30936, 30956), True, 'import matplotlib.pyplot as plt\n'), ((32410, 32448), 'os.path.join', 'os.path.join', (['pout', "('profile_%s' % rgi)"], {}), "(pout, 'profile_%s' % rgi)\n", (32422, 32448), False, 'import os\n'), ((34616, 34656), 'oggm.cfg.initialize', 'cfg.initialize', ([], {'logging_level': '"""WORKFLOW"""'}), "(logging_level='WORKFLOW')\n", (34630, 34656), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((34688, 34740), 'oggm.utils.gettempdir', 'utils.gettempdir', ([], {'dirname': '"""OGGM-merging"""', 'reset': '(True)'}), "(dirname='OGGM-merging', reset=True)\n", (34704, 34740), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((35065, 35161), 'oggm.workflow.init_glacier_directories', 'workflow.init_glacier_directories', (["['RGI60-11.02709', 'RGI60-11.02715']"], {'from_prepro_level': '(3)'}), "(['RGI60-11.02709', 'RGI60-11.02715'],\n from_prepro_level=3)\n", (35098, 35161), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((35255, 35323), 'oggm.workflow.execute_entity_task', 'workflow.execute_entity_task', (['tasks.init_present_time_glacier', 'gdirs'], {}), '(tasks.init_present_time_glacier, gdirs)\n', (35283, 35323), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((35343, 35458), 'oggm.workflow.merge_glacier_tasks', 'workflow.merge_glacier_tasks', (['gdirs', '"""RGI60-11.02709"""'], {'return_all': '(False)', 'filename': '"""climate_monthly"""', 'buffer': '(2.5)'}), "(gdirs, 'RGI60-11.02709', return_all=False,\n filename='climate_monthly', buffer=2.5)\n", (35371, 35458), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((35645, 35681), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '[20, 10]'}), '(1, 2, figsize=[20, 10])\n', (35657, 35681), True, 'import matplotlib.pyplot as plt\n'), ((35686, 35740), 'oggm.graphics.plot_centerlines', 'plot_centerlines', (['montmine'], {'ax': 'ax1', 'use_flowlines': '(True)'}), '(montmine, ax=ax1, use_flowlines=True)\n', (35702, 35740), False, 'from oggm.graphics import plot_centerlines\n'), ((35912, 35976), 'oggm.graphics.plot_centerlines', 'plot_centerlines', (['gdirs_merged'], {'ax': 'ax2', 'use_model_flowlines': '(True)'}), '(gdirs_merged, ax=ax2, use_model_flowlines=True)\n', (35928, 35976), False, 'from oggm.graphics import plot_centerlines\n'), ((36420, 36461), 'os.path.join', 'os.path.join', (['pout', '"""merged_montmine.png"""'], {}), "(pout, 'merged_montmine.png')\n", (36432, 36461), False, 'import os\n'), ((36651, 36759), 'oggm.tasks.run_constant_climate', 'tasks.run_constant_climate', (['montmine'], {'nyears': 'years', 'output_filesuffix': '"""_entity"""', 'temperature_bias': 'tbias'}), "(montmine, nyears=years, output_filesuffix=\n '_entity', temperature_bias=tbias)\n", (36677, 36759), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((36833, 36903), 'oggm.utils.compile_run_output', 'utils.compile_run_output', (['[montmine]'], {'path': '(False)', 'filesuffix': '"""_entity"""'}), "([montmine], path=False, filesuffix='_entity')\n", (36857, 36903), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((37005, 37153), 'oggm.tasks.run_constant_climate', 'tasks.run_constant_climate', (['gdirs_merged'], {'nyears': 'years', 'output_filesuffix': '"""_merged"""', 'temperature_bias': 'tbias', 'climate_filename': '"""climate_monthly"""'}), "(gdirs_merged, nyears=years, output_filesuffix=\n '_merged', temperature_bias=tbias, climate_filename='climate_monthly')\n", (37031, 37153), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((37258, 37332), 'oggm.utils.compile_run_output', 'utils.compile_run_output', (['[gdirs_merged]'], {'path': '(False)', 'filesuffix': '"""_merged"""'}), "([gdirs_merged], path=False, filesuffix='_merged')\n", (37282, 37332), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((37455, 37564), 'oggm.tasks.run_constant_climate', 'tasks.run_constant_climate', (['montmine'], {'nyears': 'years', 'output_filesuffix': '"""_entity1"""', 'temperature_bias': 'tbias'}), "(montmine, nyears=years, output_filesuffix=\n '_entity1', temperature_bias=tbias)\n", (37481, 37564), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((37639, 37710), 'oggm.utils.compile_run_output', 'utils.compile_run_output', (['[montmine]'], {'path': '(False)', 'filesuffix': '"""_entity1"""'}), "([montmine], path=False, filesuffix='_entity1')\n", (37663, 37710), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((38009, 38147), 'oggm.tasks.run_constant_climate', 'tasks.run_constant_climate', (['montmine'], {'nyears': 'years', 'output_filesuffix': '"""_entity2"""', 'init_model_fls': 'tmp_mine.fls', 'temperature_bias': 'tbias'}), "(montmine, nyears=years, output_filesuffix=\n '_entity2', init_model_fls=tmp_mine.fls, temperature_bias=tbias)\n", (38035, 38147), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((38253, 38324), 'oggm.utils.compile_run_output', 'utils.compile_run_output', (['[montmine]'], {'path': '(False)', 'filesuffix': '"""_entity2"""'}), "([montmine], path=False, filesuffix='_entity2')\n", (38277, 38324), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((38558, 38742), 'oggm.tasks.run_constant_climate', 'tasks.run_constant_climate', (['gdirs_merged'], {'nyears': 'years', 'output_filesuffix': '"""_merged2"""', 'init_model_fls': 'tmp_merged.fls', 'temperature_bias': 'tbias', 'climate_filename': '"""climate_monthly"""'}), "(gdirs_merged, nyears=years, output_filesuffix=\n '_merged2', init_model_fls=tmp_merged.fls, temperature_bias=tbias,\n climate_filename='climate_monthly')\n", (38584, 38742), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((38875, 38950), 'oggm.utils.compile_run_output', 'utils.compile_run_output', (['[gdirs_merged]'], {'path': '(False)', 'filesuffix': '"""_merged2"""'}), "([gdirs_merged], path=False, filesuffix='_merged2')\n", (38899, 38950), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((39016, 39051), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '[20, 7]'}), '(1, 2, figsize=[20, 7])\n', (39028, 39051), True, 'import matplotlib.pyplot as plt\n'), ((40299, 40351), 'os.path.join', 'os.path.join', (['pout', '"""merged_montmine_timeseries.png"""'], {}), "(pout, 'merged_montmine_timeseries.png')\n", (40311, 40351), False, 'import os\n'), ((40452, 40488), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {'figsize': '[20, 15]'}), '(2, 2, figsize=[20, 15])\n', (40464, 40488), True, 'import matplotlib.pyplot as plt\n'), ((43948, 43964), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (43959, 43964), False, 'from collections import defaultdict\n'), ((43978, 43994), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (43989, 43994), False, 'from collections import defaultdict\n'), ((44008, 44024), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (44019, 44024), False, 'from collections import defaultdict\n'), ((44041, 44055), 'relic.preprocessing.GLCDICT.keys', 'GLCDICT.keys', ([], {}), '()\n', (44053, 44055), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((44534, 44569), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {'figsize': '[20, 7]'}), '(1, 3, figsize=[20, 7])\n', (44546, 44569), True, 'import matplotlib.pyplot as plt\n'), ((1049, 1149), 'numpy.array', 'np.array', (['[-1400, -1200, -1000, -800, -600, -400, -200, -100, 0, 100, 200, 400, 600, \n 800, 1000]'], {}), '([-1400, -1200, -1000, -800, -600, -400, -200, -100, 0, 100, 200, \n 400, 600, 800, 1000])\n', (1057, 1149), True, 'import numpy as np\n'), ((1219, 1244), 'numpy.arange', 'np.arange', (['(0.5)', '(4.1)', '(0.25)'], {}), '(0.5, 4.1, 0.25)\n', (1228, 1244), True, 'import numpy as np\n'), ((1277, 1299), 'numpy.arange', 'np.arange', (['(1)', '(4.1)', '(0.5)'], {}), '(1, 4.1, 0.5)\n', (1286, 1299), True, 'import numpy as np\n'), ((1581, 1635), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {'columns': 'varcols[var]', 'index': 'df.index'}), '([], columns=varcols[var], index=df.index)\n', (1593, 1635), True, 'import pandas as pd\n'), ((3748, 3792), 'matplotlib.cm.ScalarMappable', 'cm.ScalarMappable', ([], {'norm': 'normalize', 'cmap': 'cmap'}), '(norm=normalize, cmap=cmap)\n', (3765, 3792), True, 'import matplotlib.cm as cm\n'), ((3810, 3857), 'mpl_toolkits.axes_grid1.inset_locator.inset_axes', 'inset_axes', (['ax'], {'width': '"""3%"""', 'height': '"""40%"""', 'loc': '(3)'}), "(ax, width='3%', height='40%', loc=3)\n", (3820, 3857), False, 'from mpl_toolkits.axes_grid1.inset_locator import inset_axes\n'), ((3873, 3943), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['scalarmappaple'], {'cax': 'cbaxes', 'label': 'lbl', 'boundaries': 'bounds'}), '(scalarmappaple, cax=cbaxes, label=lbl, boundaries=bounds)\n', (3885, 3943), True, 'import matplotlib.pyplot as plt\n'), ((5863, 5890), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[20, 7]'}), '(figsize=[20, 7])\n', (5873, 5890), True, 'import matplotlib.pyplot as plt\n'), ((5905, 5919), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(1)', '(4)'], {}), '(1, 4)\n', (5913, 5919), False, 'from matplotlib.gridspec import GridSpec\n'), ((6764, 6839), 'relic.postprocessing.optimize_cov', 'optimize_cov', (['df.loc[:, maes.index[:150]]', "df.loc[:, 'obs']", 'glid'], {'minuse': '(5)'}), "(df.loc[:, maes.index[:150]], df.loc[:, 'obs'], glid, minuse=5)\n", (6776, 6839), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((7173, 7211), 'relic.postprocessing.calc_coverage', 'calc_coverage', (['df', 'idx2plot', "df['obs']"], {}), "(df, idx2plot, df['obs'])\n", (7186, 7211), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((8019, 8039), 'relic.preprocessing.name_plus_id', 'name_plus_id', (['rgi_id'], {}), '(rgi_id)\n', (8031, 8039), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((10468, 10511), 'os.path.join', 'os.path.join', (['pout', "('histalp_%s.png' % glid)"], {}), "(pout, 'histalp_%s.png' % glid)\n", (10480, 10511), False, 'import os\n'), ((18390, 18460), 'relic.postprocessing.get_rcp_ensemble_length', 'get_rcp_ensemble_length', (['rgi', 'histalp_storage', 'proj_storage', 'rcp', 'meta'], {}), '(rgi, histalp_storage, proj_storage, rcp, meta)\n', (18413, 18460), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((24282, 24297), 'oggm.core.flowline.FileModel', 'FileModel', (['fnc1'], {}), '(fnc1)\n', (24291, 24297), False, 'from oggm.core.flowline import FileModel\n'), ((24316, 24331), 'oggm.core.flowline.FileModel', 'FileModel', (['fnc2'], {}), '(fnc2)\n', (24325, 24331), False, 'from oggm.core.flowline import FileModel\n'), ((24349, 24368), 'numpy.arange', 'np.arange', (['(270)', '(301)'], {}), '(270, 301)\n', (24358, 24368), True, 'import numpy as np\n'), ((25685, 25706), 'numpy.arange', 'np.arange', (['(1870)', '(1901)'], {}), '(1870, 1901)\n', (25694, 25706), True, 'import numpy as np\n'), ((25968, 25989), 'numpy.arange', 'np.arange', (['(1984)', '(2015)'], {}), '(1984, 2015)\n', (25977, 25989), True, 'import numpy as np\n'), ((27050, 27068), 'oggm.utils.ncDataset', 'utils.ncDataset', (['f'], {}), '(f)\n', (27065, 27068), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((27266, 27282), 'numpy.arange', 'np.arange', (['(9)', '(12)'], {}), '(9, 12)\n', (27275, 27282), True, 'import numpy as np\n'), ((30788, 30809), 'numpy.isfinite', 'np.isfinite', (['mean1850'], {}), '(mean1850)\n', (30799, 30809), True, 'import numpy as np\n'), ((30822, 30841), 'numpy.nanmax', 'np.nanmax', (['mean1850'], {}), '(mean1850)\n', (30831, 30841), True, 'import numpy as np\n'), ((32784, 32813), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '[20, 7]'}), '(figsize=[20, 7])\n', (32796, 32813), True, 'import matplotlib.pyplot as plt\n'), ((33913, 33933), 'relic.preprocessing.name_plus_id', 'name_plus_id', (['rgi_id'], {}), '(rgi_id)\n', (33925, 33933), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((34492, 34531), 'os.path.join', 'os.path.join', (['pout', "('all_%s.png' % glid)"], {}), "(pout, 'all_%s.png' % glid)\n", (34504, 34531), False, 'import os\n'), ((34925, 34999), 'oggm.workflow.init_glacier_directories', 'workflow.init_glacier_directories', (["['RGI60-11.02709']"], {'from_prepro_level': '(3)'}), "(['RGI60-11.02709'], from_prepro_level=3)\n", (34958, 34999), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((43863, 43906), 'os.path.join', 'os.path.join', (['pout', '"""climate_vs_length.png"""'], {}), "(pout, 'climate_vs_length.png')\n", (43875, 43906), False, 'import os\n'), ((44092, 44110), 'relic.preprocessing.MERGEDICT.get', 'MERGEDICT.get', (['glc'], {}), '(glc)\n', (44105, 44110), False, 'from relic.preprocessing import name_plus_id, GLCDICT, MERGEDICT\n'), ((44164, 44201), 'os.path.join', 'os.path.join', (['pin', "('runs_%s.p' % glid)"], {}), "(pin, 'runs_%s.p' % glid)\n", (44176, 44201), False, 'import os\n'), ((45312, 45343), 'os.path.join', 'os.path.join', (['pout', '"""histo.png"""'], {}), "(pout, 'histo.png')\n", (45324, 45343), False, 'import os\n'), ((1765, 1798), 'ast.literal_eval', 'ast.literal_eval', (["('{' + run + '}')"], {}), "('{' + run + '}')\n", (1781, 1798), False, 'import ast\n'), ((2299, 2334), 'matplotlib.colors.Normalize', 'mcolors.Normalize', ([], {'vmin': '(0)', 'vmax': '(4.5)'}), '(vmin=0, vmax=4.5)\n', (2316, 2334), True, 'import matplotlib.colors as mcolors\n'), ((2398, 2425), 'numpy.arange', 'np.arange', (['(0.375)', '(4.2)', '(0.25)'], {}), '(0.375, 4.2, 0.25)\n', (2407, 2425), True, 'import numpy as np\n'), ((2450, 2470), 'numpy.arange', 'np.arange', (['(1)', '(4.1)', '(1)'], {}), '(1, 4.1, 1)\n', (2459, 2470), True, 'import numpy as np\n'), ((5283, 5308), 'numpy.arange', 'np.arange', (['(1880)', '(2010)', '(40)'], {}), '(1880, 2010, 40)\n', (5292, 5308), True, 'import numpy as np\n'), ((6267, 6300), 'ast.literal_eval', 'ast.literal_eval', (["('{' + run + '}')"], {}), "('{' + run + '}')\n", (6283, 6300), False, 'import ast\n'), ((9153, 9186), 'ast.literal_eval', 'ast.literal_eval', (["('{' + run + '}')"], {}), "('{' + run + '}')\n", (9169, 9186), False, 'import ast\n'), ((10131, 10158), 'numpy.arange', 'np.arange', (['(-1400)', '(1100)', '(400)'], {}), '(-1400, 1100, 400)\n', (10140, 10158), True, 'import numpy as np\n'), ((18235, 18256), 'numpy.arange', 'np.arange', (['(1850)', '(2101)'], {}), '(1850, 2101)\n', (18244, 18256), True, 'import numpy as np\n'), ((18296, 18317), 'numpy.arange', 'np.arange', (['(1850)', '(2101)'], {}), '(1850, 2101)\n', (18305, 18317), True, 'import numpy as np\n'), ((25729, 25758), 'oggm.utils.date_to_floatyear', 'utils.date_to_floatyear', (['y', 'i'], {}), '(y, i)\n', (25752, 25758), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((27305, 27334), 'oggm.utils.date_to_floatyear', 'utils.date_to_floatyear', (['y', 'i'], {}), '(y, i)\n', (27328, 27334), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((27481, 27496), 'numpy.arange', 'np.arange', (['(3)', '(6)'], {}), '(3, 6)\n', (27490, 27496), True, 'import numpy as np\n'), ((29019, 29032), 'oggm.core.flowline.FileModel', 'FileModel', (['fn'], {}), '(fn)\n', (29028, 29032), False, 'from oggm.core.flowline import FileModel\n'), ((29897, 29929), 'numpy.where', 'np.where', (['(mean1850 - meanbed < 1)'], {}), '(mean1850 - meanbed < 1)\n', (29905, 29929), True, 'import numpy as np\n'), ((30306, 30338), 'numpy.where', 'np.where', (['(mean2003 - meanbed < 1)'], {}), '(mean2003 - meanbed < 1)\n', (30314, 30338), True, 'import numpy as np\n'), ((30619, 30650), 'numpy.where', 'np.where', (['(initsfc - meanbed < 1)'], {}), '(initsfc - meanbed < 1)\n', (30627, 30650), True, 'import numpy as np\n'), ((32943, 32976), 'ast.literal_eval', 'ast.literal_eval', (["('{' + run + '}')"], {}), "('{' + run + '}')\n", (32959, 32976), False, 'import ast\n'), ((44334, 44367), 'ast.literal_eval', 'ast.literal_eval', (["('{' + run + '}')"], {}), "('{' + run + '}')\n", (44350, 44367), False, 'import ast\n'), ((1817, 1875), 'numpy.isclose', 'np.isclose', (['para[notvars[0]]', 'papar[notvars[0]]'], {'atol': '(0.01)'}), '(para[notvars[0]], papar[notvars[0]], atol=0.01)\n', (1827, 1875), True, 'import numpy as np\n'), ((1930, 1988), 'numpy.isclose', 'np.isclose', (['para[notvars[1]]', 'papar[notvars[1]]'], {'atol': '(0.01)'}), '(para[notvars[1]], papar[notvars[1]], atol=0.01)\n', (1940, 1988), True, 'import numpy as np\n'), ((2217, 2256), 'cmocean.tools.get_dict', 'cmocean.tools.get_dict', (['cmocean.cm.deep'], {}), '(cmocean.cm.deep)\n', (2239, 2256), False, 'import cmocean\n'), ((2678, 2713), 'matplotlib.colors.Normalize', 'mcolors.Normalize', ([], {'vmin': '(0)', 'vmax': '(4.5)'}), '(vmin=0, vmax=4.5)\n', (2695, 2713), True, 'import matplotlib.colors as mcolors\n'), ((2777, 2802), 'numpy.arange', 'np.arange', (['(0.75)', '(4.3)', '(0.5)'], {}), '(0.75, 4.3, 0.5)\n', (2786, 2802), True, 'import numpy as np\n'), ((2827, 2847), 'numpy.arange', 'np.arange', (['(1)', '(4.1)', '(1)'], {}), '(1, 4.1, 1)\n', (2836, 2847), True, 'import numpy as np\n'), ((4465, 4502), 'numpy.where', 'np.where', (['(dfvar.columns == papar[var])'], {}), '(dfvar.columns == papar[var])\n', (4473, 4502), True, 'import numpy as np\n'), ((6713, 6729), 'relic.postprocessing.mae_weighted', 'mae_weighted', (['df'], {}), '(df)\n', (6725, 6729), False, 'from relic.postprocessing import mae_weighted, optimize_cov, calc_coverage, get_ensemble_length, get_rcp_ensemble_length\n'), ((8072, 8111), 'pandas.concat', 'pd.concat', (["[ensmean, df['obs']]"], {'axis': '(1)'}), "([ensmean, df['obs']], axis=1)\n", (8081, 8111), True, 'import pandas as pd\n'), ((9035, 9064), 'colorspace.sequential_hcl', 'sequential_hcl', (['"""Blue-Yellow"""'], {}), "('Blue-Yellow')\n", (9049, 9064), False, 'from colorspace import sequential_hcl\n'), ((10707, 10745), 'os.path.join', 'os.path.join', (['pout', "('runs_%s.p' % glid)"], {}), "(pout, 'runs_%s.p' % glid)\n", (10719, 10745), False, 'import os\n'), ((27523, 27552), 'oggm.utils.date_to_floatyear', 'utils.date_to_floatyear', (['y', 'i'], {}), '(y, i)\n', (27546, 27552), False, 'from oggm import cfg, utils, workflow, tasks\n'), ((2594, 2635), 'cmocean.tools.get_dict', 'cmocean.tools.get_dict', (['cmocean.cm.matter'], {}), '(cmocean.cm.matter)\n', (2616, 2635), False, 'import cmocean\n'), ((3121, 3187), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'mcolors.LinearSegmentedColormap.from_list', (['"""mcm"""', 'cmaplist', 'cmap.N'], {}), "('mcm', cmaplist, cmap.N)\n", (3162, 3187), True, 'import matplotlib.colors as mcolors\n'), ((3273, 3328), 'numpy.array', 'np.array', (['[-1400, -1000, -600, -200, 0, 200, 600, 1000]'], {}), '([-1400, -1000, -600, -200, 0, 200, 600, 1000])\n', (3281, 3328), True, 'import numpy as np\n'), ((3384, 3490), 'numpy.array', 'np.array', (['[-1500, -1300, -1100, -900, -700, -500, -300, -150, -50, 50, 100, 300, 500,\n 700, 900, 1100]'], {}), '([-1500, -1300, -1100, -900, -700, -500, -300, -150, -50, 50, 100, \n 300, 500, 700, 900, 1100])\n', (3392, 3490), True, 'import numpy as np\n'), ((3541, 3581), 'matplotlib.colors.Normalize', 'mcolors.Normalize', ([], {'vmin': '(-1600)', 'vmax': '(1600)'}), '(vmin=-1600, vmax=1600)\n', (3558, 3581), True, 'import matplotlib.colors as mcolors\n'), ((6318, 6360), 'numpy.abs', 'np.abs', (["(para['prcp_scaling_factor'] - 1.75)"], {}), "(para['prcp_scaling_factor'] - 1.75)\n", (6324, 6360), True, 'import numpy as np\n'), ((32994, 33036), 'numpy.abs', 'np.abs', (["(para['prcp_scaling_factor'] - 1.75)"], {}), "(para['prcp_scaling_factor'] - 1.75)\n", (33000, 33036), True, 'import numpy as np\n'), ((2930, 2972), 'cmocean.tools.get_dict', 'cmocean.tools.get_dict', (['cmocean.cm.balance'], {}), '(cmocean.cm.balance)\n', (2952, 2972), False, 'import cmocean\n'), ((30874, 30895), 'numpy.isfinite', 'np.isfinite', (['mean1850'], {}), '(mean1850)\n', (30885, 30895), True, 'import numpy as np\n')]
|
import os
import sys
from typing import List, Set, Tuple
import unittest
sys.path.append(os.path.join('..', 'filmatyk'))
import containers
import database
import filmweb
class DatabaseDifference():
"""Represents a difference between two DBs.
Can be constructed using the "compute" @staticmethod, which can be used to
replace the __ne__ (!=) operator on the Database class. This way, comparing
(db1 != db2) returns an instance of this class, which:
  * holds detailed information on the difference, specifically two sets of IDs
    (one for objects present in db1 but not in db2, the other for the reverse)
    and a list of all differing Items,
* is bool-convertible, allowing its usage in if clauses,
* has a __repr__ so can be pretty printed.
Example usage:
db1:database.Database
db2:database.Database
diff = db1 != db2
print(diff)
"""
@staticmethod
def ne_to_eq(a, b):
"""Since overriding __ne__ by "compute" makes more sense than __eq__,
we invert != to obtain ==, not the other way around.
"""
return not (a != b)
@staticmethod
def compute(db1, db2):
"""Finds the difference between the two objects."""
# Work with IDs only
ids1 = set(item.getRawProperty('id') for item in db1)
ids2 = set(item.getRawProperty('id') for item in db2)
# Compute differences
common_ids = ids1.intersection(ids2)
only_in_1 = ids1.difference(common_ids)
only_in_2 = ids2.difference(common_ids)
# Extract Item instances for pretty printing
items_1 = [item for item in db1 if item.getRawProperty('id') in only_in_1]
items_2 = [item for item in db2 if item.getRawProperty('id') in only_in_2]
return DatabaseDifference(only_in_1, only_in_2, items_1+items_2)
def __init__(self, ids1:Set[int], ids2:Set[int], items:List[containers.Item]):
self.ids1 = ids1
self.ids2 = ids2
self.items = {item.getRawProperty('id'): item for item in items}
self.equal = len(self.ids1) == 0 and len(self.ids2) == 0
def __str__(self):
if self.equal:
return 'These databases are equal!'
else:
lines = []
if self.ids1:
lines.append('These {} IDs were present only in DB1:'.format(len(self.ids1)))
lines.extend('\t{} ({})'.format(i, self.items[i]['title']) for i in self.ids1)
if self.ids2:
lines.append('These {} IDs were present only in DB2:'.format(len(self.ids2)))
lines.extend('\t{} ({})'.format(i, self.items[i]['title']) for i in self.ids2)
return '\n'.join(lines)
  def __repr__(self):
    return str(self)
def __bool__(self):
return not self.equal
class FakeAPI(filmweb.FilmwebAPI):
"""Loads cached data instead of connecting online.
When initializing, will look for HTML files in the given directory and treat
them as "pages" to load data from, later when emulating "getItemsPage".
"""
def __init__(self, src_path:str='', itemtype:str='Movie'):
super(FakeAPI, self).__init__(None)
self.src_path = src_path
self.page_paths = self.initPages()
self.item_count, self.items_per_page = self.initAnalyze(itemtype)
def initPages(self):
"""Finds HTML files with movie ratings cached by the API tests."""
if not os.path.exists(self.src_path):
return []
    # Sort so that page N reliably maps to the N-th cached file;
    # os.scandir yields entries in arbitrary order.
    pages = sorted(
      item.path for item in os.scandir(self.src_path)
      if item.name.endswith('.html') and item.name.startswith('movies_')
    )
    return pages
def initAnalyze(self, itemtype:str):
"""Checks how many items are in the stored files, and how many per page."""
counts = []
for path in self.page_paths:
page = self.fetchPage(path)
items = self.parsePage(page, itemtype)
counts.append(len(items))
# Return in the same format as getNumOf.
# The first page will either have exactly as many items as any other page,
# or will contain all items - in either case its length being the count of
# items per page.
return sum(counts), counts[0]
def checkSession(self):
"""First part of the hack - don't bother with the session at all."""
return True
def fetchPage(self, path:str):
"""Load HTML from file instead of URL."""
with open(path, 'r', encoding='utf-8') as html:
page = filmweb.BS(html.read(), features='lxml')
return page
def getItemsPage(self, itemtype:str, page:int=1):
"""Hack to use cached HTMLs instead of online session."""
path = self.page_paths[page - 1]
page = self.fetchPage(path)
items = self.parsePage(page, itemtype)
return items
def getNumOf(self, itemtype:str):
"""Simply return the values we have computed earlier (initAnalyze)."""
return self.item_count, self.items_per_page
class UpdateScenario():
"""Database modification scenario to obtain a simulated previous state.
Contains:
* a list of Item indices to remove from the Database - a new Database created
via this removal will look like these items were yet to be added,
* a list of tuples of Item indices and IDs to add to the Database - simulates
removal of items in the same manner.
"""
  def __init__(self, removals:List[int]=None, additions:List[Tuple[int,int]]=None):
    # Use None defaults to avoid shared mutable default arguments.
    self.removals = removals if removals is not None else []
    self.additions = additions if additions is not None else []
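# Illustrative scenario (hypothetical indices/IDs): removing indices 0-1
# simulates a past state from before those items were added, while inserting
# a fake item with ID 123 at index 5 simulates one that has since been removed:
#   UpdateScenario(removals=[0, 1], additions=[(5, 123)])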
class TestDatabaseCreation(unittest.TestCase):
"""Basic test for Database loading data from scratch using the API."""
@classmethod
def setUpClass(self):
self.api = FakeAPI('assets')
def test_creation(self):
"""Create a new Database and fill it with items using (Fake)API.
Basically checks whether a new instance has as many items in it as the API
says there are available, and whether these items are actually instances of
the Item class.
"""
db = database.Database(
itemtype='Movie',
api=self.api,
callback=lambda x: x,
)
db.hardUpdate()
known_count, _ = self.api.getNumOf('Movie')
self.assertEqual(len(db.items), known_count)
self.assertIsInstance(db.items[0], containers.Item)
class TestDatabaseSerialization(unittest.TestCase):
"""Test Database serialization and deserialization.
The only test in this case validates the whole serialization-deserialization
cycle, so if anything goes wrong, it will be hard to say which functionality
is actually broken.
"""
@classmethod
def setUpClass(self):
self.api = FakeAPI('assets')
def test_serialization(self):
"""Serialize and deserialize a Database, check if they look the same."""
original = database.Database(
itemtype='Movie',
api=self.api,
callback=lambda x: x,
)
# Load some initial data
original.hardUpdate()
# Serialize/deserialize cycle
string = original.storeToString()
restored = database.Database.restoreFromString(
itemtype='Movie',
string=string,
api=self.api,
callback=lambda x: x,
)
self.assertEqual(original, restored)
class TestDatabaseUpdates(unittest.TestCase):
"""Test Database updates capability in different initial conditions.
Each test consists of the following 3 steps:
* load an original Database,
* perform some change to its content, simulating some earlier point in time
(e.g. where some Items were not yet present),
* call a soft update.
The desired result is a Database back in the original state. Any differences
are considered failures.
The update itself is performed via a proxy, which loads data cached from
earlier tests instead of requiring a live and authenticated session.
"""
@classmethod
def setUpClass(self):
self.api = FakeAPI('assets')
# Create the original database
self.orig_db = database.Database(
itemtype='Movie', api=self.api, callback=lambda x: x
)
# Fill it with available cached data
for i in range(len(self.api.page_paths)):
self.orig_db.items += self.api.getItemsPage('Movie', page=i+1)
@classmethod
def makeModifiedDatabase(self, scenario:UpdateScenario):
"""Creates a new DB by modifying the copy according to the scenario."""
# Create a bare new instance
new_db = database.Database(
itemtype=self.orig_db.itemtype,
api=self.orig_db.api,
callback=self.orig_db.callback,
)
# Remove items according to the scenario
new_db.items = [
item for i, item in enumerate(self.orig_db.items)
if i not in scenario.removals
]
# Add new items according to the scenario
# The items are all clones of the last available item, with changed ID
template = new_db.items[-1].asDict()
template.pop('id') # that will be replaced
item_cls = containers.classByString[new_db.itemtype]
# Create items and insert on their respective places
for index, item_id in scenario.additions:
new_item = item_cls(id=item_id, **template)
new_db.items.insert(index, new_item)
return new_db
def __test_body(self, scenario):
"""Since they all look the same..."""
alter_db = self.makeModifiedDatabase(scenario)
# Make sure the databases are actually different!
self.assertNotEqual(alter_db, self.orig_db)
# Call update and check difference
alter_db.softUpdate()
self.assertEqual(alter_db, self.orig_db)
# Addition tests
def test_singleAddition(self):
"""Add a single missing item."""
scenario = UpdateScenario(removals=[0])
self.__test_body(scenario)
def test_simpleAddition(self):
"""Add a few items missing from the first page."""
scenario = UpdateScenario(removals=[0, 1, 2])
self.__test_body(scenario)
def test_massiveAddition(self):
"""Add over one full page of new items."""
scenario = UpdateScenario(removals=list(range(37)))
self.__test_body(scenario)
def test_randomAddition(self):
"""Add an item missing from somewhere on the first page."""
scenario = UpdateScenario(removals=[4])
self.__test_body(scenario)
def test_nonContinuousAddition(self):
"""Add a few items non-continuously missing from the first page."""
scenario = UpdateScenario(removals=[0, 1, 2, 3, 6])
self.__test_body(scenario)
def test_multipageAddition(self):
"""Add a few items non-continuously missing from multiple pages."""
scenario = UpdateScenario(removals=[0, 1, 2, 16, 30, 32])
self.__test_body(scenario)
  # Removal tests - all of these are expected to fail at the moment.
def test_singleRemoval(self):
"""Remove a single item from the first page."""
scenario = UpdateScenario(additions=[(0, 666)])
self.__test_body(scenario)
def test_simpleRemoval(self):
"""Remove a few items from the first page."""
scenario = UpdateScenario(additions=[(0, 666), (1, 4270)])
self.__test_body(scenario)
def test_randomRemoval(self):
"""Remove an item from somewhere on the first page."""
scenario = UpdateScenario(additions=[(4, 420)])
self.__test_body(scenario)
def test_nonContinuousRemoval(self):
"""Remove a few items non-continuously from the first page."""
scenario = UpdateScenario(
additions=[(0, 666), (1, 4270), (2, 2137), (5, 61504)]
)
self.__test_body(scenario)
def test_multipageRemoval(self):
"""Remove a few items non-continuously from multiple pages."""
scenario = UpdateScenario(
additions=[(3, 666), (4, 4270), (15, 2137), (35, 61504)]
)
self.__test_body(scenario)
# Other tests - for future features.
def test_additionRemoval(self):
"""Add and remove a few items at once, but only from the first page."""
scenario = UpdateScenario(
removals=[0, 1, 2, 9, 13],
additions=[(3, 1991), (4, 37132)]
)
self.__test_body(scenario)
def test_complexAdditionRemoval(self):
"""Add and remove a few items at once from multiple pages."""
scenario = UpdateScenario(
removals=[0, 1, 2, 9, 23, 35, 36],
additions=[(3, 1991), (4, 37132), (28, 628)]
)
self.__test_body(scenario)
@unittest.skip('Relevant feature not implemented yet.')
def test_difficultAdditionRemoval(self):
"""Add and remove a few items at once from multiple pages WITH BALANCE.
This test is extremely difficult because it is impossible to recognize such
scenario in real usage (online), by looking at getNumOf alone. That number
only shows the total balance of added/removed items. If that balance evens
out on any page further than 1st (like in the case of removing some items
and adding the same number of items), it is impossible to spot to any fast
and simple algorithm (i.e. one that does not deeply inspect all pages).
"""
scenario = UpdateScenario(
removals=[0, 1, 2, 9, 33],
additions=[(3, 1991), (34, 37132)]
)
self.__test_body(scenario)
def test_hardUpdate(self):
"""Make "random" removals and additions, then hard update."""
scenario = UpdateScenario(
removals=[1, 5, 6, 7, 40],
additions=[(0, 666), (13, 667)]
)
alter_db = self.makeModifiedDatabase(scenario)
self.assertNotEqual(alter_db, self.orig_db)
alter_db.hardUpdate()
self.assertEqual(alter_db, self.orig_db)
if __name__ == "__main__":
database.Database.__ne__ = DatabaseDifference.compute
database.Database.__eq__ = DatabaseDifference.ne_to_eq
unittest.main()
|
[
"unittest.main",
"os.path.exists",
"database.Database.restoreFromString",
"unittest.skip",
"database.Database",
"os.path.join",
"os.scandir"
] |
[((90, 120), 'os.path.join', 'os.path.join', (['""".."""', '"""filmatyk"""'], {}), "('..', 'filmatyk')\n", (102, 120), False, 'import os\n'), ((11925, 11979), 'unittest.skip', 'unittest.skip', (['"""Relevant feature not implemented yet."""'], {}), "('Relevant feature not implemented yet.')\n", (11938, 11979), False, 'import unittest\n'), ((13239, 13254), 'unittest.main', 'unittest.main', ([], {}), '()\n', (13252, 13254), False, 'import unittest\n'), ((5764, 5835), 'database.Database', 'database.Database', ([], {'itemtype': '"""Movie"""', 'api': 'self.api', 'callback': '(lambda x: x)'}), "(itemtype='Movie', api=self.api, callback=lambda x: x)\n", (5781, 5835), False, 'import database\n'), ((6526, 6597), 'database.Database', 'database.Database', ([], {'itemtype': '"""Movie"""', 'api': 'self.api', 'callback': '(lambda x: x)'}), "(itemtype='Movie', api=self.api, callback=lambda x: x)\n", (6543, 6597), False, 'import database\n'), ((6765, 6874), 'database.Database.restoreFromString', 'database.Database.restoreFromString', ([], {'itemtype': '"""Movie"""', 'string': 'string', 'api': 'self.api', 'callback': '(lambda x: x)'}), "(itemtype='Movie', string=string, api=\n self.api, callback=lambda x: x)\n", (6800, 6874), False, 'import database\n'), ((7677, 7748), 'database.Database', 'database.Database', ([], {'itemtype': '"""Movie"""', 'api': 'self.api', 'callback': '(lambda x: x)'}), "(itemtype='Movie', api=self.api, callback=lambda x: x)\n", (7694, 7748), False, 'import database\n'), ((8114, 8221), 'database.Database', 'database.Database', ([], {'itemtype': 'self.orig_db.itemtype', 'api': 'self.orig_db.api', 'callback': 'self.orig_db.callback'}), '(itemtype=self.orig_db.itemtype, api=self.orig_db.api,\n callback=self.orig_db.callback)\n', (8131, 8221), False, 'import database\n'), ((3203, 3232), 'os.path.exists', 'os.path.exists', (['self.src_path'], {}), '(self.src_path)\n', (3217, 3232), False, 'import os\n'), ((3292, 3317), 'os.scandir', 'os.scandir', (['self.src_path'], {}), '(self.src_path)\n', (3302, 3317), False, 'import os\n')]
|
from django.contrib import admin
from .models import Company, DPEF, Sentence, ActivitySector
admin.site.register(Company)
admin.site.register(DPEF)
admin.site.register(Sentence)
admin.site.register(ActivitySector)
|
[
"django.contrib.admin.site.register"
] |
[((95, 123), 'django.contrib.admin.site.register', 'admin.site.register', (['Company'], {}), '(Company)\n', (114, 123), False, 'from django.contrib import admin\n'), ((124, 149), 'django.contrib.admin.site.register', 'admin.site.register', (['DPEF'], {}), '(DPEF)\n', (143, 149), False, 'from django.contrib import admin\n'), ((150, 179), 'django.contrib.admin.site.register', 'admin.site.register', (['Sentence'], {}), '(Sentence)\n', (169, 179), False, 'from django.contrib import admin\n'), ((180, 215), 'django.contrib.admin.site.register', 'admin.site.register', (['ActivitySector'], {}), '(ActivitySector)\n', (199, 215), False, 'from django.contrib import admin\n')]
|
"""tests"""
import pytest
from shapely.geometry import Point, Polygon, LineString
from pyiem import wellknowntext
def test_parse_coordinate_lists():
    """An invalid coordinate list should raise ValueError."""
with pytest.raises(ValueError):
wellknowntext.parse_coordinate_lists(" ")
def test_unknown():
"""Test an emptry string."""
with pytest.raises(ValueError):
wellknowntext.convert_well_known_text("")
def test_wkt():
"""Try the properties function"""
wkt = "SRID=4326;POINT(-99 43)"
geom = wellknowntext.convert_well_known_text(wkt)
assert Point(geom) == Point([-99, 43])
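    # As the assertion above shows, an optional EWKT "SRID=...;" prefix is
    # accepted and only the bare coordinates are returned.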
wkt = """MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),
((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),
(30 20, 20 15, 20 25, 30 20)))"""
geom = wellknowntext.convert_well_known_text(wkt)
assert abs(Polygon(geom[0]).area - 87.5) < 0.1
wkt = """MULTILINESTRING ((10 10, 20 20, 10 40),
(40 40, 30 30, 40 20, 30 10))"""
geom = wellknowntext.convert_well_known_text(wkt)
assert abs(LineString(geom[0]).length - 36.5) < 0.1
wkt = """LINESTRING (30 10, 10 30, 40 40)"""
geom = wellknowntext.convert_well_known_text(wkt)
assert abs(LineString(geom).length - 59.9) < 0.1
wkt = """POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"""
geom = wellknowntext.convert_well_known_text(wkt)
assert abs(Polygon(geom[0]).area - 550.0) < 0.1
wkt = """POLYGON q((30 10, 40 40, 20 40, 10 20, 30 10))q"""
with pytest.raises(ValueError):
wellknowntext.convert_well_known_text(wkt)
wkt = """RARRR q((30 10, 40 40, 20 40, 10 20, 30 10))q"""
with pytest.raises(ValueError):
wellknowntext.convert_well_known_text(wkt)
with pytest.raises(ValueError):
wellknowntext.convert_well_known_text("")
|
[
"pyiem.wellknowntext.convert_well_known_text",
"shapely.geometry.Point",
"shapely.geometry.Polygon",
"pyiem.wellknowntext.parse_coordinate_lists",
"shapely.geometry.LineString",
"pytest.raises"
] |
[((499, 541), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (536, 541), False, 'from pyiem import wellknowntext\n'), ((750, 792), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (787, 792), False, 'from pyiem import wellknowntext\n'), ((946, 988), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (983, 988), False, 'from pyiem import wellknowntext\n'), ((1106, 1148), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (1143, 1148), False, 'from pyiem import wellknowntext\n'), ((1276, 1318), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (1313, 1318), False, 'from pyiem import wellknowntext\n'), ((177, 202), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (190, 202), False, 'import pytest\n'), ((212, 254), 'pyiem.wellknowntext.parse_coordinate_lists', 'wellknowntext.parse_coordinate_lists', (['""" """'], {}), "(' ')\n", (248, 254), False, 'from pyiem import wellknowntext\n'), ((319, 344), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (332, 344), False, 'import pytest\n'), ((354, 395), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['""""""'], {}), "('')\n", (391, 395), False, 'from pyiem import wellknowntext\n'), ((553, 564), 'shapely.geometry.Point', 'Point', (['geom'], {}), '(geom)\n', (558, 564), False, 'from shapely.geometry import Point, Polygon, LineString\n'), ((568, 584), 'shapely.geometry.Point', 'Point', (['[-99, 43]'], {}), '([-99, 43])\n', (573, 584), False, 'from shapely.geometry import Point, Polygon, LineString\n'), ((1445, 1470), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1458, 1470), False, 'import pytest\n'), ((1480, 1522), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (1517, 1522), False, 'from pyiem import wellknowntext\n'), ((1595, 1620), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1608, 1620), False, 'import pytest\n'), ((1630, 1672), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['wkt'], {}), '(wkt)\n', (1667, 1672), False, 'from pyiem import wellknowntext\n'), ((1683, 1708), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1696, 1708), False, 'import pytest\n'), ((1718, 1759), 'pyiem.wellknowntext.convert_well_known_text', 'wellknowntext.convert_well_known_text', (['""""""'], {}), "('')\n", (1755, 1759), False, 'from pyiem import wellknowntext\n'), ((808, 824), 'shapely.geometry.Polygon', 'Polygon', (['geom[0]'], {}), '(geom[0])\n', (815, 824), False, 'from shapely.geometry import Point, Polygon, LineString\n'), ((1004, 1023), 'shapely.geometry.LineString', 'LineString', (['geom[0]'], {}), '(geom[0])\n', (1014, 1023), False, 'from shapely.geometry import Point, Polygon, LineString\n'), ((1164, 1180), 'shapely.geometry.LineString', 'LineString', (['geom'], {}), '(geom)\n', (1174, 1180), False, 'from shapely.geometry import Point, Polygon, LineString\n'), ((1334, 1350), 'shapely.geometry.Polygon', 'Polygon', (['geom[0]'], {}), '(geom[0])\n', (1341, 1350), False, 'from shapely.geometry import Point, Polygon, LineString\n')]
|
from sklearn.preprocessing import OrdinalEncoder
from typing import List, Union
import pandas as pd
import numpy as np
from ._base_transform import BaseTransform
##############################################################################
class CategoricalEncoder(BaseTransform):
""" Categorical encoder
Parameters
----------
columns: List [str]
Columns that encode
"""
def __init__(self, columns:List[str]):
super().__init__({'columns':columns})
self.encoder = {column: OrdinalEncoder(handle_unknown='use_encoded_value', unknown_value = np.nan) for column in columns}
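        # One OrdinalEncoder per column; categories unseen at fit time are
        # mapped to np.nan via handle_unknown='use_encoded_value'.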
    def fit(self, X:pd.DataFrame, Y:Union[pd.DataFrame, pd.Series]):
for column in self.encoder.copy():
if column in X.columns:
X_fit = pd.DataFrame(X[column].loc[~X[column].isnull()])
if len(X_fit) > 0:
self.encoder[column].fit(X_fit)
else:
self.encoder[column] = False
return self
    def transform(self, X:pd.DataFrame, Y:Union[pd.DataFrame, pd.Series] = None):
for column in self.encoder:
if column in X.columns:
if self.encoder[column]:
X[column] = self.encoder[column].transform(pd.DataFrame(X[column].fillna('NAN')))
else:
del X[column]
return X
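# Minimal usage sketch (illustrative column names/values, not from the
# original source):
#   enc = CategoricalEncoder(columns=['color'])
#   df = pd.DataFrame({'color': ['red', 'blue', None]})
#   enc.fit(df, None)
#   df = enc.transform(df)  # 'color' becomes ordinal codes; unseen values -> NaN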
|
[
"sklearn.preprocessing.OrdinalEncoder"
] |
[((521, 593), 'sklearn.preprocessing.OrdinalEncoder', 'OrdinalEncoder', ([], {'handle_unknown': '"""use_encoded_value"""', 'unknown_value': 'np.nan'}), "(handle_unknown='use_encoded_value', unknown_value=np.nan)\n", (535, 593), False, 'from sklearn.preprocessing import OrdinalEncoder\n')]
|
"""
Makes Puddleworld tasks.
Tasks are (gridworld, text instruction) -> goal coordinate.
Credit: tasks are taken from: https://github.com/JannerM/spatial-reasoning
"""
from puddleworldPrimitives import *
from utilities import *
from task import *
from type import *
OBJECT_NAMES = ["NULL", "puddle", "star", "circle", "triangle", "heart", "spade", "diamond", "rock", "tree", "house", "horse"]
def loadPuddleWorldTasks(datafile='data/puddleworld/puddleworld.json'):
"""
Loads a pre-processed version of the Puddleworld tasks.
"""
import json
with open(datafile) as f:
result = json.load(f)
return result
def makePuddleworldTask(raw_task):
"""
Converts a raw task with
layouts (NxN array),
Objects (NxN array of object locations),
Instructions (string) and
Goals ((X, Y) coordinate)
into a task.
"""
layouts, objects, instructions, goals = raw_task
task = Task(name=instructions,
request=(arrow(tpair(tLayoutMap, tObjectMap), tLocation)),
examples=[((layouts, objects), goals)],
features=instructions)
return task
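# Illustrative raw_task (hypothetical values; layouts and objects are the NxN
# arrays described above):
#   raw_task = (layouts, objects, 'reach left of the rock', (2, 3))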
def makeTasks(train_key, test_key):
data = loadPuddleWorldTasks()
raw_train, raw_test = data[train_key], data[test_key]
train, test = [makePuddleworldTask(task) for task in raw_train], [makePuddleworldTask(task) for task in raw_test]
print(train[0].name)
print(train[0].examples)
print(train[0].features)
return train, test
def makeLocalTasks():
return makeTasks('local_train', 'local_test')
def makeGlobalTasks():
return makeTasks('global_train', 'global_test')
|
[
"json.load"
] |
[((587, 599), 'json.load', 'json.load', (['f'], {}), '(f)\n', (596, 599), False, 'import json\n')]
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from . import common
def main(debug=False):
name = ['I', 'A', 'S', 'C']
suffix = ['', '', '', '']
df0 = []
for n, s in zip(name, suffix):
prec = pd.read_csv(f'results/logk_prec_{n}{s}.csv')
prec = prec.groupby(['v', 'x'])['log_err'].mean()
time = pd.read_csv(f'results/logk_time_{n}{s}.csv')
time = time.groupby(['v', 'x'])['time'].mean()
tmp = pd.concat([prec, time], axis=1)
tmp['time'] = np.where(tmp['log_err'] < 3, 1000 * tmp['time'], np.nan)
tmp = tmp['time']
tmp.name = n
df0.append(tmp)
df0 = pd.concat(df0, axis=1)
name = [['I', 'A'], ['S', 'C']]
pos = [[[0.1, 0.85], [0.85, 0.1]], [[0.1, 0.1], [0.1, 0.85]]]
fig = common.figure(figsize=(5.5, 4), box=debug)
ax = fig.subplots(
2, 3, sharex=True, sharey=True,
gridspec_kw=dict(width_ratios=(1,1,0.15)),
)
ax[0, 2].set_visible(False)
ax[1, 2].set_visible(False)
cbar = fig.add_axes([0.93, 0.1, 0.02, 0.85])
xticks = [0, 1, 5, 10, 50]
yticks = [0.1, 0.5, 1, 5, 10, 50]
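    # Judging by the tick math below, the heatmap sits on a log grid: a value v
    # maps to column 40*log10(v+1) and a value x to row 40*(log10(x)+1), i.e.
    # 40 cells per decade with x starting at 0.1.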
cmap = plt.get_cmap('Greys').copy()
cmap.set_bad(color='gray')
for i in range(2):
for j in range(2):
hm = df0[name[i][j]].unstack(0)
if i == j == 0:
args = dict(cbar_ax=cbar)
else:
args = dict(cbar=False)
sns.heatmap(hm, vmin=0, vmax=28, cmap=cmap, ax=ax[i, j], **args)
ax[i, j].invert_yaxis()
ax[i, j].text(*pos[i][j], name[i][j], transform=ax[i, j].transAxes)
ax[i, j].set_xticks([40*np.log10(x+1) for x in xticks])
ax[i, j].set_xticklabels([f"${k}$" for k in xticks], rotation=0)
ax[i, j].xaxis.set_ticks_position('both')
ax[i, j].set_yticks([40*(np.log10(x)+1) for x in yticks])
ax[i, j].set_yticklabels([f"${k}$" for k in yticks])
ax[i, j].yaxis.set_ticks_position('both')
if i == 1:
ax[i, j].set_xlabel('$v$')
else:
ax[i, j].set_xlabel('')
if j == 0:
ax[i, j].set_ylabel('$x$')
else:
ax[i, j].set_ylabel('')
cbar = ax[0, 0].collections[0].colorbar
cbar.set_ticks([0, 10, 20])
cbar.set_ticklabels([f'${{{l}}}$' for l in [0, 10, 20]])
fig.savefig('figs/fig3.pdf')
if __name__ == '__main__':
main(debug=False)
|
[
"seaborn.heatmap",
"matplotlib.pyplot.get_cmap",
"pandas.read_csv",
"numpy.where",
"numpy.log10",
"pandas.concat"
] |
[((690, 712), 'pandas.concat', 'pd.concat', (['df0'], {'axis': '(1)'}), '(df0, axis=1)\n', (699, 712), True, 'import pandas as pd\n'), ((266, 310), 'pandas.read_csv', 'pd.read_csv', (['f"""results/logk_prec_{n}{s}.csv"""'], {}), "(f'results/logk_prec_{n}{s}.csv')\n", (277, 310), True, 'import pandas as pd\n'), ((384, 428), 'pandas.read_csv', 'pd.read_csv', (['f"""results/logk_time_{n}{s}.csv"""'], {}), "(f'results/logk_time_{n}{s}.csv')\n", (395, 428), True, 'import pandas as pd\n'), ((498, 529), 'pandas.concat', 'pd.concat', (['[prec, time]'], {'axis': '(1)'}), '([prec, time], axis=1)\n', (507, 529), True, 'import pandas as pd\n'), ((552, 608), 'numpy.where', 'np.where', (["(tmp['log_err'] < 3)", "(1000 * tmp['time'])", 'np.nan'], {}), "(tmp['log_err'] < 3, 1000 * tmp['time'], np.nan)\n", (560, 608), True, 'import numpy as np\n'), ((1184, 1205), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""Greys"""'], {}), "('Greys')\n", (1196, 1205), True, 'import matplotlib.pyplot as plt\n'), ((1478, 1542), 'seaborn.heatmap', 'sns.heatmap', (['hm'], {'vmin': '(0)', 'vmax': '(28)', 'cmap': 'cmap', 'ax': 'ax[i, j]'}), '(hm, vmin=0, vmax=28, cmap=cmap, ax=ax[i, j], **args)\n', (1489, 1542), True, 'import seaborn as sns\n'), ((1695, 1710), 'numpy.log10', 'np.log10', (['(x + 1)'], {}), '(x + 1)\n', (1703, 1710), True, 'import numpy as np\n'), ((1895, 1906), 'numpy.log10', 'np.log10', (['x'], {}), '(x)\n', (1903, 1906), True, 'import numpy as np\n')]
|
"""Tests for loading and saving pickled files."""
from pytype import file_utils
from pytype.tests import test_base
class PickleTest(test_base.TargetPython3BasicTest):
"""Tests for loading and saving pickled files."""
def testContainer(self):
pickled = self.Infer("""
import collections, json
def f() -> collections.OrderedDict[int, int]:
return collections.OrderedDict({1: 1})
def g() -> json.JSONDecoder:
return json.JSONDecoder()
""", pickle=True, module_name="foo")
with file_utils.Tempdir() as d:
u = d.create_file("u.pickled", pickled)
ty = self.Infer("""
import u
r = u.f()
""", deep=False, pythonpath=[""], imports_map={"u": u})
self.assertTypesMatchPytd(ty, """
import collections
u = ... # type: module
r = ... # type: collections.OrderedDict[int, int]
""")
test_base.main(globals(), __name__ == "__main__")
|
[
"pytype.file_utils.Tempdir"
] |
[((528, 548), 'pytype.file_utils.Tempdir', 'file_utils.Tempdir', ([], {}), '()\n', (546, 548), False, 'from pytype import file_utils\n')]
|
import argparse
from datetime import datetime
import torch
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
import numpy as np
from torch_model import SizedGenerator
import os
from tqdm import trange
from torchvision.utils import save_image, make_grid
import params as P
from utils import save_img_tensorboard, load_trained_generator, load_target_image, psnr
def output_to_imshow(v):
return v.squeeze(0).detach().to('cpu').numpy().transpose(1, 2, 0)
def main(args):
logdir = f'tensorboard_logs/search/{args.run_name}'
os.makedirs(logdir,
exist_ok=True) # TODO - decide whether to clobber or what?
writer = SummaryWriter(logdir)
device = 'cuda:0'
x = load_target_image(args.image).to(device)
save_img_tensorboard(x.squeeze(0).detach().cpu(), writer, f'original')
g = load_trained_generator(SizedGenerator,
args.generator_checkpoint,
latent_dim=64,
num_filters=P.num_filters,
image_size=P.size,
num_ups=P.num_ups).to(device)
g.eval()
if args.latent_dim != g.latent_dim:
args.skip_linear_layer = True
else:
args.skip_linear_layer = False
if args.skip_linear_layer and args.latent_dim < 8192:
# Then we need a new linear layer to map to dimension 8192
linear_layer = torch.nn.Linear(args.latent_dim, 8192).to(device)
else:
linear_layer = lambda x: x
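    # Note: when a fresh nn.Linear is created above, its random weights are not
    # handed to the optimizer (which below receives only z), so it acts as a
    # fixed random projection from latent_dim up to the generator's 8192-dim
    # input.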
save_every_n = 50
for i in trange(args.n_restarts):
seed = i
torch.manual_seed(seed)
np.random.seed(seed)
# NOTE - based on quick experiments:
# - std=1.0 better than std=0.1 or std=0.01
# - uniform and normal performed nearly identical
if args.initialization == 'uniform':
z = (2 * args.std) * torch.rand(args.latent_dim,
device=device) - args.std
elif args.initialization == 'normal':
z = torch.randn(args.latent_dim, device=device) * args.std
elif args.initialization == 'ones':
            mask = torch.rand(args.latent_dim, device=device) < 0.5  # keep the mask on the same device as z
z = torch.ones(args.latent_dim, device=device)
z[mask] = -1
else:
raise NotImplementedError(args.initialization)
# network only saw [-1, 1] during training
z = torch.nn.Parameter(torch.clamp(z, -1, 1))
z_initial = z.data.clone()
optimizer = torch.optim.Adam([z], lr=0.05, betas=(0.5, 0.999))
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
optimizer, args.n_steps)
with torch.no_grad():
model_input = linear_layer(z_initial)
save_img_tensorboard(
g(model_input,
skip_linear_layer=args.skip_linear_layer).squeeze(
0).detach().cpu(), writer, f'restart_{i}/beginning')
for j in trange(args.n_steps, leave=False):
optimizer.zero_grad()
model_input = linear_layer(z)
x_hat = g(model_input,
skip_linear_layer=args.skip_linear_layer).squeeze(0)
mse = F.mse_loss(x_hat, x)
mse.backward()
optimizer.step()
scheduler.step()
writer.add_scalar(f'MSE/{i}', mse, j)
writer.add_scalar(f'PSNR/{i}', psnr(x, x_hat), j)
if j % save_every_n == 0:
save_img_tensorboard(
x_hat.squeeze(0).detach().cpu(), writer,
f'restart_{i}/reconstruction', j)
save_img_tensorboard(
x_hat.squeeze(0).detach().cpu(), writer, f'restart_{i}/final')
save_image(make_grid([x, x_hat.squeeze(0)], nrow=2),
f'{args.run_name}.png')
def get_latent_dims(x):
    x = int(x)
    if not 1 <= x <= 8192:
        raise ValueError('give a latent_dim between [1, 8192]')
    return x
if __name__ == '__main__':
p = argparse.ArgumentParser()
p.add_argument('--generator_checkpoint',
default='./checkpoints/celeba_cropped/gen_ckpt.49.pt',
help="Path to generator checkpoint")
p.add_argument('--image', required=True)
p.add_argument('--run_name', default=datetime.now().isoformat())
p.add_argument('--n_restarts', type=int, default=3)
p.add_argument('--n_steps', type=int, default=3000)
p.add_argument('--initialization',
choices=['uniform', 'normal', 'ones'],
default='normal')
p.add_argument(
'--std',
type=float,
default=1.0,
help='for normal dist, the std. for uniform, the min and max val')
p.add_argument('--latent_dim',
type=get_latent_dims,
default=4096,
help='int between [1, 8192]')
args = p.parse_args()
    # TODO - if the model used latent_dim=64 and you also want to reconstruct
    # from 64, does it hurt to just skip the linear layer?
main(args)
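    # Example invocation (script and image file names are placeholders):
    #   python search.py --image target.png --latent_dim 64 --n_restarts 3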
|
[
"numpy.random.seed",
"argparse.ArgumentParser",
"torch.randn",
"utils.psnr",
"torch.no_grad",
"torch.ones",
"torch.optim.lr_scheduler.CosineAnnealingLR",
"torch.utils.tensorboard.SummaryWriter",
"torch.nn.Linear",
"datetime.datetime.now",
"utils.load_trained_generator",
"tqdm.trange",
"torch.manual_seed",
"torch.nn.functional.mse_loss",
"torch.optim.Adam",
"torch.clamp",
"torch.rand",
"os.makedirs",
"utils.load_target_image"
] |
[((568, 602), 'os.makedirs', 'os.makedirs', (['logdir'], {'exist_ok': '(True)'}), '(logdir, exist_ok=True)\n', (579, 602), False, 'import os\n'), ((678, 699), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', (['logdir'], {}), '(logdir)\n', (691, 699), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((1591, 1614), 'tqdm.trange', 'trange', (['args.n_restarts'], {}), '(args.n_restarts)\n', (1597, 1614), False, 'from tqdm import trange\n'), ((4051, 4076), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4074, 4076), False, 'import argparse\n'), ((1641, 1664), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (1658, 1664), False, 'import torch\n'), ((1673, 1693), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1687, 1693), True, 'import numpy as np\n'), ((2559, 2609), 'torch.optim.Adam', 'torch.optim.Adam', (['[z]'], {'lr': '(0.05)', 'betas': '(0.5, 0.999)'}), '([z], lr=0.05, betas=(0.5, 0.999))\n', (2575, 2609), False, 'import torch\n'), ((2630, 2697), 'torch.optim.lr_scheduler.CosineAnnealingLR', 'torch.optim.lr_scheduler.CosineAnnealingLR', (['optimizer', 'args.n_steps'], {}), '(optimizer, args.n_steps)\n', (2672, 2697), False, 'import torch\n'), ((3019, 3052), 'tqdm.trange', 'trange', (['args.n_steps'], {'leave': '(False)'}), '(args.n_steps, leave=False)\n', (3025, 3052), False, 'from tqdm import trange\n'), ((732, 761), 'utils.load_target_image', 'load_target_image', (['args.image'], {}), '(args.image)\n', (749, 761), False, 'from utils import save_img_tensorboard, load_trained_generator, load_target_image, psnr\n'), ((857, 1011), 'utils.load_trained_generator', 'load_trained_generator', (['SizedGenerator', 'args.generator_checkpoint'], {'latent_dim': '(64)', 'num_filters': 'P.num_filters', 'image_size': 'P.size', 'num_ups': 'P.num_ups'}), '(SizedGenerator, args.generator_checkpoint,\n latent_dim=64, num_filters=P.num_filters, image_size=P.size, num_ups=P.\n num_ups)\n', (879, 1011), False, 'from utils import save_img_tensorboard, load_trained_generator, load_target_image, psnr\n'), ((2480, 2501), 'torch.clamp', 'torch.clamp', (['z', '(-1)', '(1)'], {}), '(z, -1, 1)\n', (2491, 2501), False, 'import torch\n'), ((2725, 2740), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2738, 2740), False, 'import torch\n'), ((3258, 3278), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['x_hat', 'x'], {}), '(x_hat, x)\n', (3268, 3278), True, 'import torch.nn.functional as F\n'), ((1459, 1497), 'torch.nn.Linear', 'torch.nn.Linear', (['args.latent_dim', '(8192)'], {}), '(args.latent_dim, 8192)\n', (1474, 1497), False, 'import torch\n'), ((3458, 3472), 'utils.psnr', 'psnr', (['x', 'x_hat'], {}), '(x, x_hat)\n', (3462, 3472), False, 'from utils import save_img_tensorboard, load_trained_generator, load_target_image, psnr\n'), ((1928, 1970), 'torch.rand', 'torch.rand', (['args.latent_dim'], {'device': 'device'}), '(args.latent_dim, device=device)\n', (1938, 1970), False, 'import torch\n'), ((2088, 2131), 'torch.randn', 'torch.randn', (['args.latent_dim'], {'device': 'device'}), '(args.latent_dim, device=device)\n', (2099, 2131), False, 'import torch\n'), ((2256, 2298), 'torch.ones', 'torch.ones', (['args.latent_dim'], {'device': 'device'}), '(args.latent_dim, device=device)\n', (2266, 2298), False, 'import torch\n'), ((4338, 4352), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4350, 4352), False, 'from datetime import datetime\n'), ((2206, 2233), 'torch.rand', 'torch.rand', (['args.latent_dim'], {}), '(args.latent_dim)\n', (2216, 2233), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
# @Time : 2021/11/13 1:47 PM
# @Author : xujunpeng
from app import app
from confluent_kafka import Producer
KafkaProducer = Producer({'bootstrap.servers': app.config["KAFKA_SERVERS"]})
|
[
"confluent_kafka.Producer"
] |
[((154, 214), 'confluent_kafka.Producer', 'Producer', (["{'bootstrap.servers': app.config['KAFKA_SERVERS']}"], {}), "({'bootstrap.servers': app.config['KAFKA_SERVERS']})\n", (162, 214), False, 'from confluent_kafka import Producer\n')]
|
import django.core.validators
from django.db import migrations, models
def clear_gen8(apps, schema_editor):
Update = apps.get_model("pokemongo", "Update")
Update.objects.update(badge_pokedex_entries_gen8=None)
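    # Presumably needed because the field's definition changes in this
    # migration; the RunPython operation below pairs this with a no-op reverse.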
class Migration(migrations.Migration):
dependencies = [
("pokemongo", "0032_remove_trainer_leaderboard_region"),
]
operations = [
migrations.RenameField(
model_name="update",
old_name="badge_photobombadge_rocket_grunts_defeated",
new_name="badge_rocket_grunts_defeated",
),
migrations.AlterField(
model_name="update",
name="badge_pokedex_entries_gen8",
field=models.PositiveIntegerField(
blank=True,
help_text="Register x Pokémon first discovered in the Alola region to the Pokédex.",
null=True,
validators=[django.core.validators.MaxValueValidator(2)],
verbose_name="Galar",
),
),
migrations.RunPython(clear_gen8, migrations.RunPython.noop),
]
|
[
"django.db.migrations.RunPython",
"django.db.migrations.RenameField"
] |
[((382, 531), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""update"""', 'old_name': '"""badge_photobombadge_rocket_grunts_defeated"""', 'new_name': '"""badge_rocket_grunts_defeated"""'}), "(model_name='update', old_name=\n 'badge_photobombadge_rocket_grunts_defeated', new_name=\n 'badge_rocket_grunts_defeated')\n", (404, 531), False, 'from django.db import migrations, models\n'), ((1030, 1089), 'django.db.migrations.RunPython', 'migrations.RunPython', (['clear_gen8', 'migrations.RunPython.noop'], {}), '(clear_gen8, migrations.RunPython.noop)\n', (1050, 1089), False, 'from django.db import migrations, models\n')]
|
__author__ = 'rcj1492'
__created__ = '2016.11'
__license__ = 'MIT'
from labpack.platforms.apscheduler import apschedulerClient
if __name__ == '__main__':
from labpack.records.settings import load_settings
system_config = load_settings('../../cred/system.yaml')
scheduler_url = 'http://%s:%s' % (system_config['system_ip_address'], system_config['scheduler_system_port'])
scheduler_client = apschedulerClient(scheduler_url)
scheduler_info = scheduler_client.get_info()
assert scheduler_info['running']
from time import time, sleep
job_function = 'init:app.logger.debug'
date_kwargs = {
'id': '%s.%s' % (job_function, str(time())),
'function': job_function,
'dt': time() + 2,
'kwargs': { 'msg': 'Add date job is working.'}
}
date_job = scheduler_client.add_date_job(**date_kwargs)
interval_kwargs = {
'id': '%s.%s' % (job_function, str(time())),
'function': job_function,
'interval': 1,
'kwargs': {'msg': 'Add interval job is working.'},
'start': time() + 0.5,
'end': time() + 10.5
}
interval_job = scheduler_client.add_interval_job(**interval_kwargs)
cron_a_kwargs = {
'id': '%s.%s' % (job_function, str(time())),
'function': job_function,
'kwargs': {'msg': 'Add nye cron job is working.'},
'month': 12,
'day': 31,
'hour': 23,
'minute': 59,
'second': 59
}
cron_job_a = scheduler_client.add_cron_job(**cron_a_kwargs)
cron_b_kwargs = {
'id': '%s.%s' % (job_function, str(time())),
'function': job_function,
'kwargs': {'msg': 'Add ny cron job is working.'},
'month': 1,
'day': 1,
'hour': 0,
'minute': 0,
'second': 0
}
cron_job_b = scheduler_client.add_cron_job(**cron_b_kwargs)
cron_c_kwargs = {
'id': '%s.%s' % (job_function, str(time())),
'function': job_function,
'kwargs': {'msg': 'Add weekday cron job is working.'},
'weekday': 5,
'hour': 5,
'minute': 5,
'second': 5
}
cron_job_c = scheduler_client.add_cron_job(**cron_c_kwargs)
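    # Smoke-test list_jobs with argument filters: each criterion is keyed by a
    # '.field' dot-path mapped to an operator dict (must_contain,
    # discrete_values, min_value), as exercised below.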
try:
interval_filter = [{'.id': {'must_contain': [ 'app\\.logger']}, '.function': {'must_contain': [ 'debug' ]}, '.interval': { 'discrete_values': [1] }}]
interval_list = scheduler_client.list_jobs(argument_filters=interval_filter)
dt_filter = [{'.dt': { 'min_value': time() }}]
dt_list = scheduler_client.list_jobs(argument_filters=dt_filter)
cron_filter = [{'.weekday': { 'discrete_values': [ 5 ]}}]
cron_list = scheduler_client.list_jobs(argument_filters=cron_filter)
print(interval_list)
print(dt_list)
print(cron_list)
    except Exception:
        # The filter queries above are only a smoke test; ignore any failure.
        pass
sleep(2)
job_list = scheduler_client.list_jobs()
assert job_list
id_list = [ interval_kwargs['id'], date_kwargs['id'], cron_a_kwargs['id'], cron_b_kwargs['id'], cron_c_kwargs['id'] ]
for job in job_list:
if job['id'] in id_list:
assert scheduler_client.delete_job(job['id']) == 204
|
[
"time.sleep",
"time.time",
"labpack.records.settings.load_settings",
"labpack.platforms.apscheduler.apschedulerClient"
] |
[((239, 278), 'labpack.records.settings.load_settings', 'load_settings', (['"""../../cred/system.yaml"""'], {}), "('../../cred/system.yaml')\n", (252, 278), False, 'from labpack.records.settings import load_settings\n'), ((418, 450), 'labpack.platforms.apscheduler.apschedulerClient', 'apschedulerClient', (['scheduler_url'], {}), '(scheduler_url)\n', (435, 450), False, 'from labpack.platforms.apscheduler import apschedulerClient\n'), ((2892, 2900), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (2897, 2900), False, 'from time import time, sleep\n'), ((742, 748), 'time.time', 'time', ([], {}), '()\n', (746, 748), False, 'from time import time, sleep\n'), ((1094, 1100), 'time.time', 'time', ([], {}), '()\n', (1098, 1100), False, 'from time import time, sleep\n'), ((1124, 1130), 'time.time', 'time', ([], {}), '()\n', (1128, 1130), False, 'from time import time, sleep\n'), ((682, 688), 'time.time', 'time', ([], {}), '()\n', (686, 688), False, 'from time import time, sleep\n'), ((947, 953), 'time.time', 'time', ([], {}), '()\n', (951, 953), False, 'from time import time, sleep\n'), ((1285, 1291), 'time.time', 'time', ([], {}), '()\n', (1289, 1291), False, 'from time import time, sleep\n'), ((1637, 1643), 'time.time', 'time', ([], {}), '()\n', (1641, 1643), False, 'from time import time, sleep\n'), ((1983, 1989), 'time.time', 'time', ([], {}), '()\n', (1987, 1989), False, 'from time import time, sleep\n'), ((2550, 2556), 'time.time', 'time', ([], {}), '()\n', (2554, 2556), False, 'from time import time, sleep\n')]
|
from sqlalchemy import sql
from sqlalchemy.orm import joinedload, subqueryload
from sqlalchemy.inspection import inspect
from mbdata.utils.models import get_entity_type_model, get_link_model, ENTITY_TYPES
from mbdata.models import (
Area,
Artist,
Label,
Link,
LinkAreaArea,
LinkType,
Place,
Release,
Recording,
ReleaseGroup,
Work,
)
def load_areas(session, objs, include):
attrs = []
ids = set()
for obj in objs:
mapper = inspect(obj).mapper
for relationship in mapper.relationships:
if not issubclass(relationship.mapper.class_, Area):
continue
attr = relationship.key
for column in relationship.local_columns:
id = getattr(obj, mapper.get_property_by_column(column).key)
if id is not None:
attrs.append((obj, id, attr))
ids.add(id)
areas = fetch_areas(session, ids, include)
for obj, id, attr in attrs:
setattr(obj, attr, areas[id])
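# A minimal usage sketch (session, objects, and include flags are assumed to
# come from the caller; the 'area' attribute name is an assumption): batch-
# resolve the Area foreign keys on already-fetched objects instead of
# triggering one lazy load per attribute.
def _demo_load_place_areas(session, places, include):
    load_areas(session, places, include)
    return [getattr(place, 'area', None) for place in places]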
def fetch_areas(session, ids, include):
areas = {}
if not ids:
return areas
options = []
if include.iso_3166:
options.append(joinedload('iso_3166_1_codes'))
options.append(joinedload('iso_3166_2_codes'))
options.append(joinedload('iso_3166_3_codes'))
if include.type:
options.append(joinedload('type'))
query = session.query(Area).filter(Area.id.in_(ids)).options(*options)
for area in query:
areas[area.id] = area
if include.part_of and areas:
_fetch_parent_areas(session, areas, options)
return areas
def _fetch_parent_areas(session, areas, options):
for area in areas.values():
area.part_of = None
link_type_id_query = session.query(LinkType.id).\
filter_by(gid='de7cc874-8b1b-3a05-8272-f3834c968fb7').\
as_scalar()
area_parent_query = session.query(
LinkAreaArea.entity1_id.label('child_id'),
LinkAreaArea.entity0_id.label('parent_id'),
).\
select_from(LinkAreaArea).\
join(Link, LinkAreaArea.link_id == Link.id).\
filter(Link.link_type_id == link_type_id_query).\
subquery()
if session.bind.dialect.name == 'postgresql':
_fetch_parent_areas_cte(session, area_parent_query, areas, options)
else:
_fetch_parent_areas_iterate(session, area_parent_query, areas, options)
def _fetch_parent_areas_iterate(session, area_parent_query, areas, options):
while True:
area_ids = [area.id for area in areas.values() if area.part_of is None]
query = session.query(Area, area_parent_query.c.child_id).\
filter(Area.id == area_parent_query.c.parent_id).\
filter(area_parent_query.c.child_id.in_(area_ids)).\
options(*options)
found = False
for area, child_id in query:
area.part_of = None
areas[area.id] = area
areas[child_id].part_of = area
found = True
if not found:
break
def _fetch_parent_areas_cte(session, area_parent_query, areas, options):
area_ids = [area.id for area in areas.values() if area.part_of is None]
area_ancestors_cte = session.query(
area_parent_query.c.child_id,
area_parent_query.c.parent_id,
sql.literal(1).label('depth')
).\
select_from(area_parent_query).\
filter(area_parent_query.c.child_id.in_(area_ids)).\
cte(name='area_ancestors', recursive=True)
area_ancestors_cte = area_ancestors_cte.union_all(
session.query(
area_parent_query.c.child_id,
area_parent_query.c.parent_id,
area_ancestors_cte.c.depth + 1
).
select_from(area_ancestors_cte).
join(area_parent_query, area_ancestors_cte.c.parent_id == area_parent_query.c.child_id)
)
query = session.query(Area, area_ancestors_cte.c.child_id, area_ancestors_cte.c.depth).\
filter(Area.id == area_ancestors_cte.c.parent_id).\
order_by(area_ancestors_cte.c.depth).options(*options)
for area, child_id, depth in query:
area.part_of = None
areas[area.id] = area
areas[child_id].part_of = area
def query_artist(db, include):
return prepare_artist_query(db.query(Artist), include)
def query_label(db, include):
return prepare_label_query(db.query(Label), include)
def query_place(db, include):
return prepare_place_query(db.query(Place), include)
def query_recording(db, include):
return prepare_recording_query(db.query(Recording), include)
def query_release(db, include):
return prepare_release_query(db.query(Release), include)
def query_release_group(db, include):
return prepare_release_group_query(db.query(ReleaseGroup), include)
def query_work(db, include):
return prepare_work_query(db.query(Work), include)
def prepare_artist_query(query, include, prefix=""):
query = query.\
options(joinedload(prefix + "gender")).\
options(joinedload(prefix + "type"))
if include.ipi:
query = query.options(subqueryload(prefix + "ipis"))
if include.isni:
query = query.options(subqueryload(prefix + "isnis"))
return query
def prepare_label_query(query, include, prefix=""):
query = query.options(joinedload(prefix + "type"))
if include.ipi:
query = query.options(subqueryload(prefix + "ipis"))
if include.isni:
query = query.options(subqueryload(prefix + "isnis"))
return query
def prepare_place_query(query, include, prefix=""):
return query.options(joinedload(prefix + "type"))
def prepare_recording_query(query, include, prefix=""):
return prepare_artist_credits_subquery(query, include, prefix)
def prepare_release_query(query, include, prefix=""):
query = query.\
options(joinedload(prefix + "status")).\
options(joinedload(prefix + "packaging")).\
options(joinedload(prefix + "language")).\
options(joinedload(prefix + "script"))
query = prepare_artist_credits_subquery(query, include, prefix)
if include.release_group:
query = prepare_release_group_query(query, include.release_group, prefix + "release_group.")
if include.mediums:
query = query.options(subqueryload(prefix + "mediums"))
query = prepare_medium_query(query, include.mediums, prefix + "mediums.")
return query
def prepare_medium_query(query, include, prefix=""):
query = query.options(joinedload(prefix + "format"))
if include.tracks:
query = query.options(subqueryload(prefix + "tracks"))
query = prepare_track_query(query, include.tracks, prefix + "tracks.")
return query
def prepare_track_query(query, include, prefix=""):
query = prepare_artist_credits_subquery(query, include, prefix)
if include.recording:
query = query.options(subqueryload(prefix + "recording"))
query = prepare_recording_query(query, include, prefix + "recording.")
return query
def prepare_release_group_query(query, include, prefix=""):
query = query.\
options(joinedload(prefix + "type")).\
options(subqueryload(prefix + "secondary_types")).\
options(joinedload(prefix + "secondary_types.secondary_type", innerjoin=True))
query = prepare_artist_credits_subquery(query, include, prefix)
return query
def prepare_artist_credits_subquery(query, include, prefix):
if include.artist or include.artists:
query = query.options(joinedload(prefix + "artist_credit", innerjoin=True))
if include.artists:
query = query.\
options(subqueryload(prefix + "artist_credit.artists")).\
options(joinedload(prefix + "artist_credit.artists.artist", innerjoin=True))
return query
def prepare_url_query(query, include, prefix=""):
return query
def prepare_work_query(query, include, prefix=""):
return query.options(joinedload(prefix + "type"))
def load_links(db, all_objs, include):
for type in ENTITY_TYPES:
type_include = include.check(type)
if type_include:
load_links_by_target_type(db, all_objs, type, type_include)
ENTITY_TYPE_PREPARE_FUNCS = {
'artist': prepare_artist_query,
'label': prepare_label_query,
'place': prepare_place_query,
'recording': prepare_recording_query,
'release': prepare_release_query,
'release_group': prepare_release_group_query,
'url': prepare_url_query,
'work': prepare_work_query,
}
def load_links_by_target_type(db, all_objs, target_type, include):
attr = '{0}_links'.format(target_type)
grouped_objs = {}
for obj in all_objs:
setattr(obj, attr, [])
model = inspect(obj).mapper.class_
grouped_objs.setdefault(model, {})[obj.id] = obj
for model, objs in grouped_objs.items():
_load_links_by_types(db, objs, attr, model, target_type, include)
def _load_links_by_types(db, objs, attr, source_model, target_type, include):
target_model = get_entity_type_model(target_type)
model = get_link_model(source_model, target_model)
query = db.query(model).\
options(joinedload("link", innerjoin=True)).\
options(joinedload("link.link_type", innerjoin=True))
if model.entity0.property.mapper.class_ == model.entity1.property.mapper.class_:
_load_links_by_types_one_side(model, query, objs, attr, include, "entity0", "entity1", target_type)
_load_links_by_types_one_side(model, query, objs, attr, include, "entity1", "entity0", target_type)
else:
if source_model == model.entity0.property.mapper.class_:
_load_links_by_types_one_side(model, query, objs, attr, include, "entity0", "entity1", target_type)
else:
_load_links_by_types_one_side(model, query, objs, attr, include, "entity1", "entity0", target_type)
def _load_links_by_types_one_side(model, query, objs, attr, include, source_attr, target_attr, target_type):
source_id_attr = source_attr + "_id"
query = query.filter(getattr(model, source_id_attr).in_(objs))
query = query.options(joinedload(target_attr, innerjoin=True))
query = ENTITY_TYPE_PREPARE_FUNCS[target_type](query, include, target_attr + ".")
for link in query:
obj = objs.get(getattr(link, source_id_attr))
if obj is not None:
getattr(obj, attr).append(link)
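# A minimal usage sketch (db session, include flags, and the artist MBID are
# assumed to come from the caller): build an eager-loading artist query with
# the helpers above, then fetch a single artist by gid.
def _demo_fetch_artist(db, include, artist_gid):
    query = query_artist(db, include)
    return query.filter(Artist.gid == artist_gid).first()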
|
[
"mbdata.models.LinkAreaArea.entity0_id.label",
"sqlalchemy.sql.literal",
"sqlalchemy.inspection.inspect",
"sqlalchemy.orm.subqueryload",
"sqlalchemy.orm.joinedload",
"mbdata.models.LinkAreaArea.entity1_id.label",
"mbdata.utils.models.get_entity_type_model",
"mbdata.utils.models.get_link_model",
"mbdata.models.Area.id.in_"
] |
[((9108, 9142), 'mbdata.utils.models.get_entity_type_model', 'get_entity_type_model', (['target_type'], {}), '(target_type)\n', (9129, 9142), False, 'from mbdata.utils.models import get_entity_type_model, get_link_model, ENTITY_TYPES\n'), ((9155, 9197), 'mbdata.utils.models.get_link_model', 'get_link_model', (['source_model', 'target_model'], {}), '(source_model, target_model)\n', (9169, 9197), False, 'from mbdata.utils.models import get_entity_type_model, get_link_model, ENTITY_TYPES\n'), ((5092, 5119), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'type')"], {}), "(prefix + 'type')\n", (5102, 5119), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5385, 5412), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'type')"], {}), "(prefix + 'type')\n", (5395, 5412), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5677, 5704), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'type')"], {}), "(prefix + 'type')\n", (5687, 5704), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((6075, 6104), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'script')"], {}), "(prefix + 'script')\n", (6085, 6104), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((6577, 6606), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'format')"], {}), "(prefix + 'format')\n", (6587, 6606), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((7309, 7378), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'secondary_types.secondary_type')"], {'innerjoin': '(True)'}), "(prefix + 'secondary_types.secondary_type', innerjoin=True)\n", (7319, 7378), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((8028, 8055), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'type')"], {}), "(prefix + 'type')\n", (8038, 8055), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((9298, 9342), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""link.link_type"""'], {'innerjoin': '(True)'}), "('link.link_type', innerjoin=True)\n", (9308, 9342), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((10205, 10244), 'sqlalchemy.orm.joinedload', 'joinedload', (['target_attr'], {'innerjoin': '(True)'}), '(target_attr, innerjoin=True)\n', (10215, 10244), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((488, 500), 'sqlalchemy.inspection.inspect', 'inspect', (['obj'], {}), '(obj)\n', (495, 500), False, 'from sqlalchemy.inspection import inspect\n'), ((1211, 1241), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""iso_3166_1_codes"""'], {}), "('iso_3166_1_codes')\n", (1221, 1241), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((1266, 1296), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""iso_3166_2_codes"""'], {}), "('iso_3166_2_codes')\n", (1276, 1296), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((1321, 1351), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""iso_3166_3_codes"""'], {}), "('iso_3166_3_codes')\n", (1331, 1351), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((1398, 1416), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""type"""'], {}), "('type')\n", (1408, 1416), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5172, 5201), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'ipis')"], {}), "(prefix + 'ipis')\n", (5184, 5201), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5255, 5285), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'isnis')"], {}), "(prefix + 'isnis')\n", (5267, 5285), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5465, 5494), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'ipis')"], {}), "(prefix + 'ipis')\n", (5477, 5494), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5548, 5578), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'isnis')"], {}), "(prefix + 'isnis')\n", (5560, 5578), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((6362, 6394), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'mediums')"], {}), "(prefix + 'mediums')\n", (6374, 6394), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((6662, 6693), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'tracks')"], {}), "(prefix + 'tracks')\n", (6674, 6693), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((6971, 7005), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'recording')"], {}), "(prefix + 'recording')\n", (6983, 7005), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((7602, 7654), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'artist_credit')"], {'innerjoin': '(True)'}), "(prefix + 'artist_credit', innerjoin=True)\n", (7612, 7654), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((7794, 7861), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'artist_credit.artists.artist')"], {'innerjoin': '(True)'}), "(prefix + 'artist_credit.artists.artist', innerjoin=True)\n", (7804, 7861), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((1458, 1474), 'mbdata.models.Area.id.in_', 'Area.id.in_', (['ids'], {}), '(ids)\n', (1469, 1474), False, 'from mbdata.models import Area, Artist, Label, Link, LinkAreaArea, LinkType, Place, Release, Recording, ReleaseGroup, Work\n'), ((5043, 5072), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'gender')"], {}), "(prefix + 'gender')\n", (5053, 5072), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((6024, 6055), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'language')"], {}), "(prefix + 'language')\n", (6034, 6055), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((7249, 7289), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'secondary_types')"], {}), "(prefix + 'secondary_types')\n", (7261, 7289), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((8805, 8817), 'sqlalchemy.inspection.inspect', 'inspect', (['obj'], {}), '(obj)\n', (8812, 8817), False, 'from sqlalchemy.inspection import inspect\n'), ((9244, 9278), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""link"""'], {'innerjoin': '(True)'}), "('link', innerjoin=True)\n", (9254, 9278), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((7724, 7770), 'sqlalchemy.orm.subqueryload', 'subqueryload', (["(prefix + 'artist_credit.artists')"], {}), "(prefix + 'artist_credit.artists')\n", (7736, 7770), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5972, 6004), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'packaging')"], {}), "(prefix + 'packaging')\n", (5982, 6004), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((7202, 7229), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'type')"], {}), "(prefix + 'type')\n", (7212, 7229), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((5923, 5952), 'sqlalchemy.orm.joinedload', 'joinedload', (["(prefix + 'status')"], {}), "(prefix + 'status')\n", (5933, 5952), False, 'from sqlalchemy.orm import joinedload, subqueryload\n'), ((1956, 1997), 'mbdata.models.LinkAreaArea.entity1_id.label', 'LinkAreaArea.entity1_id.label', (['"""child_id"""'], {}), "('child_id')\n", (1985, 1997), False, 'from mbdata.models import Area, Artist, Label, Link, LinkAreaArea, LinkType, Place, Release, Recording, ReleaseGroup, Work\n'), ((2011, 2053), 'mbdata.models.LinkAreaArea.entity0_id.label', 'LinkAreaArea.entity0_id.label', (['"""parent_id"""'], {}), "('parent_id')\n", (2040, 2053), False, 'from mbdata.models import Area, Artist, Label, Link, LinkAreaArea, LinkType, Place, Release, Recording, ReleaseGroup, Work\n'), ((3377, 3391), 'sqlalchemy.sql.literal', 'sql.literal', (['(1)'], {}), '(1)\n', (3388, 3391), False, 'from sqlalchemy import sql\n')]
|
import torch
import numpy as np
import os
import sys
from shark_runner import shark_inference
class ResNest50(torch.nn.Module):
def __init__(self):
super().__init__()
self.model = torch.hub.load(
"zhanghang1989/ResNeSt", "resnest50", pretrained=True
)
self.train(False)
def forward(self, input):
return self.model.forward(input)
input = torch.randn(1, 3, 224, 224)
results = shark_inference(
ResNest50(),
input,
device="cpu",
dynamic=False,
jit_trace=True,
)
|
[
"torch.hub.load",
"torch.randn"
] |
[((402, 429), 'torch.randn', 'torch.randn', (['(1)', '(3)', '(224)', '(224)'], {}), '(1, 3, 224, 224)\n', (413, 429), False, 'import torch\n'), ((202, 271), 'torch.hub.load', 'torch.hub.load', (['"""zhanghang1989/ResNeSt"""', '"""resnest50"""'], {'pretrained': '(True)'}), "('zhanghang1989/ResNeSt', 'resnest50', pretrained=True)\n", (216, 271), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
import pathlib as _pl
import pandas as _pd
import s3fs as _s3fs
# import urllib as _urllib
# import html2text as _html2text
import psutil as _psutil
import numpy as _np
# import xarray as _xr
def readme():
url = 'https://docs.opendata.aws/noaa-goes16/cics-readme.html'
# html = _urllib.request.urlopen(url).read().decode("utf-8")
# out = _html2text.html2text(html)
# print(out)
print(f'follow link for readme: {url}')
def available_products():
aws = _s3fs.S3FileSystem(anon=True)
df = _pd.DataFrame()
for satellite in [16,17]:
# satellite = 16#16 (east) or 17(west)
base_folder = _pl.Path(f'noaa-goes{satellite}')
products_available = aws.glob(base_folder.joinpath('*').as_posix())
df[satellite] = [p.split('/')[-1] for p in products_available if '.pdf' not in p]
if _np.all(df[16] == df[17]):
ins = ''
else:
ins = ' !!_NOT_!!'
print(f'goes 16 and 17 products are{ins} identical')
return df
class AwsQuery(object):
def __init__(self,
path2folder_local = '/mnt/telg/tmp/aws_tmp/',
satellite = '16',
product = 'ABI-L2-AOD',
scan_sector = 'C',
start = '2020-08-08 20:00:00',
end = '2020-08-09 18:00:00',
process = None,
keep_files = None,
# check_if_file_exist = True,
# no_of_days = None,
# last_x_days = None,
# max_no_of_files = 100,#10*24*7,
):
"""
This will initialize a search on AWS.
Parameters
----------
path2folder_local : TYPE, optional
DESCRIPTION. The default is '/mnt/telg/tmp/aws_tmp/'.
satellite : TYPE, optional
DESCRIPTION. The default is '16'.
product : str, optional
Note this is the product name described at
https://docs.opendata.aws/noaa-goes16/cics-readme.html
but without the scan sector. The default is 'ABI-L2-AOD'.
scan_sector : str, optional
(C)onus, (F)ull_disk, (M)eso. The default is 'C'.
start : TYPE, optional
DESCRIPTION. The default is '2020-08-08 20:00:00'.
end : TYPE, optional
DESCRIPTION. The default is '2020-08-09 18:00:00'.
process: dict,
This is still in development and might be buggy.
Example:
dict(concatenate = 'daily',
function = lambda row: some_function(row, *args, **kwargs),
prefix = 'ABI_L2_AOD_processed',
path2processed = '/path2processed/')
keep_files: bool, optional
            Defaults to True unless process is given, which changes the
            default to False.
Returns
-------
None.
"""
self.satellite = satellite
self.path2folder_aws = _pl.Path(f'noaa-goes{self.satellite}')
self.scan_sector = scan_sector
self.product = product
self.start = _pd.to_datetime(start)
self.end = _pd.to_datetime(end)
self.path2folder_local = _pl.Path(path2folder_local)
if isinstance(process, dict):
self._process = True
# self._process_concatenate = process['concatenate']
self._process_function = process['function']
self._process_name_prefix = process['prefix']
self._process_path2processed = _pl.Path(process['path2processed'])
# self._process_path2processed_tmp = self._process_path2processed.joinpath('tmp')
# self._process_path2processed_tmp.mkdir(exist_ok=True)
self.keep_files = False
# self.check_if_file_exist = False
else:
self._process = False
self.aws = _s3fs.S3FileSystem(anon=True)
        self.aws.clear_instance_cache() # strange things happen if this is not the only query being made during a session
# properties
self._workplan = None
@property
def product(self):
return self._product
@product.setter
def product(self, value):
if value[-1] == self.scan_sector:
value = value[:-1]
self._product = value
return
def info_on_current_query(self):
nooffiles = self.workplan.shape[0]
if nooffiles == 0:
info = 'no file found or all files already on disk.'
else:
du = self.estimate_disk_usage()
disk_space_needed = du['disk_space_needed'] * 1e-6
disk_space_free_after_download = du['disk_space_free_after_download']
info = (f'no of files: {nooffiles}\n'
f'estimated disk usage: {disk_space_needed:0.0f} mb\n'
f'remaining disk space after download: {disk_space_free_after_download:0.0f} %\n')
return info
# def print_readme(self):
# url = 'https://docs.opendata.aws/noaa-goes16/cics-readme.html'
# html = _urllib.request.urlopen(url).read().decode("utf-8")
# out = _html2text.html2text(html)
# print(out)
    def estimate_disk_usage(self, sample_size = 10): # sizes in bytes
step_size = int(self.workplan.shape[0]/sample_size)
if step_size < 1:
step_size = 1
sizes = self.workplan.iloc[::step_size].apply(lambda row: self.aws.disk_usage(row.path2file_aws), axis = 1)
# sizes = self.workplan.iloc[::int(self.workplan.shape[0]/sample_size)].apply(lambda row: self.aws.disk_usage(row.path2file_aws), axis = 1)
disk_space_needed = sizes.mean() * self.workplan.shape[0]
# get remaining disk space after download
du = _psutil.disk_usage(self.path2folder_local)
disk_space_free_after_download = 100 - (100* (du.used + disk_space_needed)/du.total )
out = {}
out['disk_space_needed'] = disk_space_needed
out['disk_space_free_after_download'] = disk_space_free_after_download
return out
@property
def workplan(self):
if isinstance(self._workplan, type(None)):
# #### bug: problem below is that time ranges that span over multiple years will not work!
# # get the julian days (thus folders on aws) needed
# start_julian = int(_pd.to_datetime(self.start.date()).to_julian_date() - _pd.to_datetime(f'{self.start.year:04d}-01-01').to_julian_date()) + 1
# end_julian = int(_pd.to_datetime(self.end.date()).to_julian_date() - _pd.to_datetime(f'{self.end.year:04d}-01-01').to_julian_date()) + 1
# days = list(range(start_julian, end_julian+1))
# # get all the files available
# # base_folder = pl.Path(f'noaa-goes{self.satellite}')
# base_folder = self.path2folder_aws
# product_folder = base_folder.joinpath(f'{self.product}{self.scan_sector}')
# files_available = []
# year_folder = product_folder.joinpath(f'{self.start.year}')
# for day in days:
# day_folder = year_folder.joinpath(f'{day:03d}')
# hours_available = self.aws.glob(day_folder.joinpath('*').as_posix())
# hours_available = [h.split('/')[-1] for h in hours_available]
# for hour in hours_available:
# hour_folder = day_folder.joinpath(f'{hour}')
# glob_this = hour_folder.joinpath('*').as_posix()
# last_glob = self.aws.glob(glob_this)
# files_available += last_glob
#### make a data frame to all the available files in the time range
# create a dataframe with all hours in the time range
df = _pd.DataFrame(index = _pd.date_range(self.start, self.end, freq='h'), columns=['path'])
            # create the path to the directory of each row above (one per hour)
product_folder = self.path2folder_aws.joinpath(f'{self.product}{self.scan_sector}')
df['path'] = df.apply(lambda row: product_folder.joinpath(str(row.name.year)).joinpath(f'{row.name.day_of_year:03d}').joinpath(f'{row.name.hour:02d}').joinpath('*'), axis= 1)
# get the path to each file in all the folders
files_available = []
for idx,row in df.iterrows():
files_available += self.aws.glob(row.path.as_posix())
#### Make workplan
workplan = _pd.DataFrame([_pl.Path(f) for f in files_available], columns=['path2file_aws'])
workplan['path2file_local'] = workplan.apply(lambda row: self.path2folder_local.joinpath(row.path2file_aws.name), axis = 1)
#### remove if local file exists
if not self._process:
workplan = workplan[~workplan.apply(lambda row: row.path2file_local.is_file(), axis = 1)]
# get file sizes ... takes to long to do for each file
# workplan['file_size_mb'] = workplan.apply(lambda row: self.aws.disk_usage(row.path2file_aws)/1e6, axis = 1)
#### get the timestamp
def row2timestamp(row):
sos = row.path2file_aws.name.split('_')[-3]
assert(sos[0] == 's'), f'Something needs fixing, this string ({sos}) should start with s.'
ts = _pd.to_datetime(sos[1:-1],format = '%Y%j%H%M%S')
return ts
workplan.index = workplan.apply(lambda row: row2timestamp(row), axis = 1)
            #### truncate ... remember, so far we did not consider the times in start and end, only whole days
workplan = workplan.sort_index()
workplan = workplan.truncate(self.start, self.end)
#### processing additions
if self._process:
### add path to processed file names
workplan["path2file_local_processed"] = workplan.apply(lambda row: self._process_path2processed.joinpath(f'{self._process_name_prefix}_{row.name.year}{row.name.month:02d}{row.name.day:02d}_{row.name.hour:02d}{row.name.minute:02d}{row.name.second:02d}.nc'), axis = 1)
### remove if file exists
                workplan = workplan[~workplan.apply(lambda row: row.path2file_local_processed.is_file(), axis = 1)]
# workplan['path2file_tmp'] = workplan.apply(lambda row: self._process_path2processed_tmp.joinpath(row.name.__str__()), axis = 1)
self._workplan = workplan
return self._workplan
@workplan.setter
def workplan(self, new_workplan):
self._workplan = new_workplan
@property
def product_available_since(self):
product_folder = self.path2folder_aws.joinpath(f'{self.product}{self.scan_sector}')
years = self.aws.glob(product_folder.joinpath('*').as_posix())
years.sort()
is2000 = True
while is2000:
yearfolder = years.pop(0)
firstyear = yearfolder.split('/')[-1]
# print(firstyear)
if firstyear != '2000':
is2000 = False
yearfolder = _pl.Path(yearfolder)
days = self.aws.glob(yearfolder.joinpath('*').as_posix())
days.sort()
firstday = int(days[0].split('/')[-1])
firstday_ts = _pd.to_datetime(firstyear) + _pd.to_timedelta(firstday, "D")
return firstday_ts
def download(self, test = False, overwrite = False, alternative_workplan = False,
error_if_low_disk_space = True):
"""
Parameters
----------
test : TYPE, optional
DESCRIPTION. The default is False.
overwrite : TYPE, optional
DESCRIPTION. The default is False.
alternative_workplan : pandas.Dataframe, optional
This will ignore the instance workplan and use the provided one
instead. The default is False.
error_if_low_disk_space : TYPE, optional
DESCRIPTION. The default is True.
Returns
-------
out : TYPE
DESCRIPTION.
"""
if isinstance(alternative_workplan, _pd.DataFrame):
workplan = alternative_workplan
else:
workplan = self.workplan
if error_if_low_disk_space:
disk_space_free_after_download = self.estimate_disk_usage()['disk_space_free_after_download']
            assert disk_space_free_after_download > 10, f"This download would bring the disk usage above 90% (only {disk_space_free_after_download:0.0f}% would remain free). Turn off this error by setting error_if_low_disk_space to False."
for idx, row in workplan.iterrows():
if not overwrite:
if row.path2file_local.is_file():
continue
out = self.aws.get(row.path2file_aws.as_posix(), row.path2file_local.as_posix())
if test:
break
return out
def process(self):
        # deprecated: grouping by processed file path is no longer required
# group = self.workplan.groupby('path2file_local_processed')
# for p2flp, p2flpgrp in group:
# break
## for each file in group
for dt, row in self.workplan.iterrows():
if row.path2file_local_processed.is_file():
continue
if not row.path2file_local.is_file():
# print('downloading')
#### download
# download_output =
self.aws.get(row.path2file_aws.as_posix(), row.path2file_local.as_posix())
#### process
try:
self._process_function(row)
except:
                print(f'error applying function on one file {row.path2file_local.name}. The raw file will still be removed (unless keep_files is True) to avoid storage issues')
#### remove raw file
if not self.keep_files:
row.path2file_local.unlink()
#### todo: concatenate
# if this is actually desired I would think this should be done seperately, not as part of this package
# try:
# ds = _xr.open_mfdataset(p2flpgrp.path2file_tmp)
# #### save final product
# ds.to_netcdf(p2flp)
# #### remove all tmp files
# if not keep_tmp_files:
# for dt, row in p2flpgrp.iterrows():
# try:
# row.path2file_tmp.unlink()
# except FileNotFoundError:
# pass
# except:
# print('something went wrong with the concatenation. The file will not be removed')
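# A minimal usage sketch (the local path is hypothetical): build a query for
# one day of GOES-16 CONUS AOD files, inspect its size, and download it,
# reusing the parameters documented in AwsQuery.__init__ above.
def _demo_download_aod():
    query = AwsQuery(path2folder_local='/tmp/aws_tmp/',
                     satellite='16',
                     product='ABI-L2-AOD',
                     scan_sector='C',
                     start='2020-08-08 00:00:00',
                     end='2020-08-08 23:59:59')
    print(query.info_on_current_query())
    query.download()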
|
[
"pandas.DataFrame",
"pandas.date_range",
"psutil.disk_usage",
"pathlib.Path",
"s3fs.S3FileSystem",
"pandas.to_datetime",
"pandas.to_timedelta",
"numpy.all"
] |
[((502, 531), 's3fs.S3FileSystem', '_s3fs.S3FileSystem', ([], {'anon': '(True)'}), '(anon=True)\n', (520, 531), True, 'import s3fs as _s3fs\n'), ((542, 557), 'pandas.DataFrame', '_pd.DataFrame', ([], {}), '()\n', (555, 557), True, 'import pandas as _pd\n'), ((865, 890), 'numpy.all', '_np.all', (['(df[16] == df[17])'], {}), '(df[16] == df[17])\n', (872, 890), True, 'import numpy as _np\n'), ((657, 690), 'pathlib.Path', '_pl.Path', (['f"""noaa-goes{satellite}"""'], {}), "(f'noaa-goes{satellite}')\n", (665, 690), True, 'import pathlib as _pl\n'), ((2996, 3034), 'pathlib.Path', '_pl.Path', (['f"""noaa-goes{self.satellite}"""'], {}), "(f'noaa-goes{self.satellite}')\n", (3004, 3034), True, 'import pathlib as _pl\n'), ((3145, 3167), 'pandas.to_datetime', '_pd.to_datetime', (['start'], {}), '(start)\n', (3160, 3167), True, 'import pandas as _pd\n'), ((3188, 3208), 'pandas.to_datetime', '_pd.to_datetime', (['end'], {}), '(end)\n', (3203, 3208), True, 'import pandas as _pd\n'), ((3251, 3278), 'pathlib.Path', '_pl.Path', (['path2folder_local'], {}), '(path2folder_local)\n', (3259, 3278), True, 'import pathlib as _pl\n'), ((3943, 3972), 's3fs.S3FileSystem', '_s3fs.S3FileSystem', ([], {'anon': '(True)'}), '(anon=True)\n', (3961, 3972), True, 'import s3fs as _s3fs\n'), ((5853, 5895), 'psutil.disk_usage', '_psutil.disk_usage', (['self.path2folder_local'], {}), '(self.path2folder_local)\n', (5871, 5895), True, 'import psutil as _psutil\n'), ((11345, 11365), 'pathlib.Path', '_pl.Path', (['yearfolder'], {}), '(yearfolder)\n', (11353, 11365), True, 'import pathlib as _pl\n'), ((3582, 3617), 'pathlib.Path', '_pl.Path', (["process['path2processed']"], {}), "(process['path2processed'])\n", (3590, 3617), True, 'import pathlib as _pl\n'), ((11521, 11547), 'pandas.to_datetime', '_pd.to_datetime', (['firstyear'], {}), '(firstyear)\n', (11536, 11547), True, 'import pandas as _pd\n'), ((11550, 11581), 'pandas.to_timedelta', '_pd.to_timedelta', (['firstday', '"""D"""'], {}), "(firstday, 'D')\n", (11566, 11581), True, 'import pandas as _pd\n'), ((9507, 9554), 'pandas.to_datetime', '_pd.to_datetime', (['sos[1:-1]'], {'format': '"""%Y%j%H%M%S"""'}), "(sos[1:-1], format='%Y%j%H%M%S')\n", (9522, 9554), True, 'import pandas as _pd\n'), ((7913, 7959), 'pandas.date_range', '_pd.date_range', (['self.start', 'self.end'], {'freq': '"""h"""'}), "(self.start, self.end, freq='h')\n", (7927, 7959), True, 'import pandas as _pd\n'), ((8645, 8656), 'pathlib.Path', '_pl.Path', (['f'], {}), '(f)\n', (8653, 8656), True, 'import pathlib as _pl\n')]
|
"""
CanvasItem module contains classes related to canvas items.
"""
from __future__ import annotations
# standard libraries
import collections
import concurrent.futures
import contextlib
import copy
import datetime
import enum
import functools
import imageio
import logging
import operator
import sys
import threading
import types
import typing
import warnings
import weakref
# third party libraries
import numpy
# local libraries
from nion.ui import DrawingContext
from nion.utils import Event
from nion.utils import Geometry
from nion.utils import Observable
from nion.utils import Stream
if typing.TYPE_CHECKING:
from nion.ui import UserInterface
from nion.ui import MouseTrackingCanvasItem
from nion.ui import Widgets
MAX_VALUE = sys.maxsize
class Orientation(enum.Enum):
Vertical = 0
Horizontal = 1
class Constraint:
""" A constraint on an item in a layout. Preferred is only used when free sizing. """
def __init__(self) -> None:
self.minimum: typing.Optional[int] = None
self.maximum: typing.Optional[int] = None
self.preferred: typing.Optional[int] = None
def __repr__(self) -> str:
return "Constraint (min={0}, max={1}, pref={2})".format(self.minimum, self.maximum, self.preferred)
class SolverItem:
def __init__(self, constraint: Constraint) -> None:
self.constraint = constraint
self.size: typing.Optional[int] = None
self.is_constrained = False
ConstraintResultType = typing.Tuple[typing.List[int], typing.List[int]]
def constraint_solve(canvas_origin: int, canvas_size: int, canvas_item_constraints: typing.Sequence[Constraint], spacing: int = 0) -> ConstraintResultType:
"""
Solve the layout by assigning space and enforcing constraints.
Returns origins, sizes tuple.
"""
# setup information from each item
solver_items = [SolverItem(constraint) for constraint in canvas_item_constraints]
# assign preferred size, if any, to each item. items with preferred size are still
# free to change as long as they don't become constrained.
for solver_item in solver_items:
if solver_item.constraint.preferred is not None:
solver_item.size = solver_item.constraint.preferred
assert solver_item.constraint.minimum is not None
assert solver_item.constraint.maximum is not None
if solver_item.size < solver_item.constraint.minimum:
solver_item.size = solver_item.constraint.minimum
if solver_item.size > solver_item.constraint.maximum:
solver_item.size = solver_item.constraint.maximum
solver_item.is_constrained = True
if solver_item.size > solver_item.constraint.maximum:
solver_item.size = solver_item.constraint.maximum
if solver_item.size < solver_item.constraint.minimum:
solver_item.size = solver_item.constraint.minimum
solver_item.is_constrained = True
# put these here to avoid linter warnings
remaining_canvas_size = canvas_size
remaining_count = len(solver_items)
# assign the free space to the remaining items. first figure out how much space is left
# and how many items remain. then divide the space up.
finished = False
while not finished:
finished = True
remaining_canvas_size = canvas_size
remaining_count = len(solver_items)
# reset the items that we can, i.e. those that aren't already constrained and don't have a preferred size
for solver_item in solver_items:
if not solver_item.is_constrained and solver_item.constraint.preferred is None:
solver_item.size = None
# figure out how many free range items there are, i.e. those that don't already have a size assigned
for solver_item in solver_items:
if solver_item.size is not None:
remaining_canvas_size -= solver_item.size
remaining_count -= 1
# again attempt to assign sizes
for solver_item in solver_items:
if solver_item.size is None:
size = remaining_canvas_size // remaining_count
assert solver_item.constraint.minimum is not None
assert solver_item.constraint.maximum is not None
if size < solver_item.constraint.minimum:
size = solver_item.constraint.minimum
solver_item.is_constrained = True
finished = False
if size > solver_item.constraint.maximum:
size = solver_item.constraint.maximum
solver_item.is_constrained = True
finished = False
solver_item.size = size
remaining_canvas_size -= size
remaining_count -= 1
if not finished:
break
# go through again and assign any remaining space
for solver_item in solver_items:
if solver_item.size is None:
solver_item.size = remaining_canvas_size // remaining_count
# check if we're oversized. if so divide among unconstrained items, but honor minimum size.
finished = False
while not finished:
finished = True
actual_canvas_size = sum([solver_item.size for solver_item in solver_items])
assert actual_canvas_size is not None
if actual_canvas_size > canvas_size:
remaining_count = sum([not solver_item.is_constrained for solver_item in solver_items])
remaining_canvas_size = actual_canvas_size - canvas_size
if remaining_count > 0:
for solver_item in solver_items:
if not solver_item.is_constrained:
assert solver_item.size is not None
assert solver_item.constraint.minimum is not None
size = solver_item.size - remaining_canvas_size // remaining_count
if size < solver_item.constraint.minimum:
size = solver_item.constraint.minimum
solver_item.is_constrained = True
finished = False
adjustment = solver_item.size - size
solver_item.size = size
remaining_canvas_size -= adjustment
remaining_count -= 1
if not finished:
break
# check if we're undersized. if so add among unconstrained items, but honor maximum size.
finished = False
while not finished:
finished = True
actual_canvas_size = sum([solver_item.size for solver_item in solver_items])
assert actual_canvas_size is not None
if actual_canvas_size < canvas_size:
remaining_count = sum([not solver_item.is_constrained for solver_item in solver_items])
remaining_canvas_size = canvas_size - actual_canvas_size
if remaining_count > 0:
for solver_item in solver_items:
if not solver_item.is_constrained:
assert solver_item.size is not None
assert solver_item.constraint.maximum is not None
size = solver_item.size + remaining_canvas_size // remaining_count
if size > solver_item.constraint.maximum:
size = solver_item.constraint.maximum
solver_item.is_constrained = True
finished = False
adjustment = size - solver_item.size
solver_item.size = size
remaining_canvas_size -= adjustment
remaining_count -= 1
if not finished:
break
# assign layouts
# TODO: allow for various justification options (start - default, end, center, space-between, space-around)
# see https://css-tricks.com/snippets/css/a-guide-to-flexbox/
sizes = [(solver_item.size or 0) for solver_item in solver_items]
origins = list()
for index in range(len(canvas_item_constraints)):
origins.append(canvas_origin)
canvas_origin += sizes[index] + spacing
return origins, sizes
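# A minimal usage sketch (not part of the module's API): three items share
# 100 pixels; the middle item is pinned to 20 pixels via equal minimum and
# maximum, so the solver marks it constrained and splits the remainder evenly.
def _demo_constraint_solve() -> None:
    fixed = Constraint()
    fixed.minimum = fixed.maximum = 20
    flexible_a = Constraint()
    flexible_a.minimum, flexible_a.maximum = 0, MAX_VALUE
    flexible_b = Constraint()
    flexible_b.minimum, flexible_b.maximum = 0, MAX_VALUE
    origins, sizes = constraint_solve(0, 100, [flexible_a, fixed, flexible_b])
    assert sizes == [40, 20, 40] and origins == [0, 40, 60]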
class Sizing:
"""
Describes the sizing for a particular canvas item.
Aspect ratio, width, and height can each specify minimums, maximums, and preferred values.
Width and height can be integer or floats. If floats, they specify a percentage of their
respective maximum.
Preferred values are only used when free sizing.
Collapsible items collapse to fixed size of 0 if they don't have children.
"""
def __init__(self) -> None:
self.__preferred_width: typing.Optional[typing.Union[int, float]] = None
self.__preferred_height: typing.Optional[typing.Union[int, float]] = None
self.__preferred_aspect_ratio: typing.Optional[float] = None
self.__minimum_width: typing.Optional[typing.Union[int, float]] = None
self.__minimum_height: typing.Optional[typing.Union[int, float]] = None
self.__minimum_aspect_ratio: typing.Optional[float] = None
self.__maximum_width: typing.Optional[typing.Union[int, float]] = None
self.__maximum_height: typing.Optional[typing.Union[int, float]] = None
self.__maximum_aspect_ratio: typing.Optional[float] = None
self.__collapsible: bool = False
def __repr__(self) -> str:
format_str = "Sizing (min_w={0}, max_w={1}, pref_w={2}, min_h={3}, max_h={4}, pref_h={5}, min_a={6}, max_a={7}, pref_a={8}, collapsible={9})"
return format_str.format(self.__minimum_width, self.__maximum_width, self.__preferred_width,
self.__minimum_height, self.__maximum_height, self.__preferred_height,
self.__minimum_aspect_ratio, self.__maximum_aspect_ratio, self.__preferred_aspect_ratio,
self.__collapsible)
def __eq__(self, other: typing.Any) -> bool:
if self.__preferred_width != other.preferred_width:
return False
if self.__preferred_height != other.preferred_height:
return False
if self.__preferred_aspect_ratio != other.preferred_aspect_ratio:
return False
if self.__minimum_width != other.minimum_width:
return False
if self.__minimum_height != other.minimum_height:
return False
if self.__minimum_aspect_ratio != other.minimum_aspect_ratio:
return False
if self.__maximum_width != other.maximum_width:
return False
if self.__maximum_height != other.maximum_height:
return False
if self.__maximum_aspect_ratio != other.maximum_aspect_ratio:
return False
if self.__collapsible != other.collapsible:
return False
return True
def __deepcopy__(self, memo: typing.Dict[typing.Any, typing.Any]) -> Sizing:
deepcopy = Sizing()
deepcopy._copy_from(self)
memo[id(self)] = deepcopy
return deepcopy
@property
def preferred_width(self) -> typing.Optional[typing.Union[int, float]]:
return self.__preferred_width
@property
def preferred_height(self) -> typing.Optional[typing.Union[int, float]]:
return self.__preferred_height
@property
def preferred_aspect_ratio(self) -> typing.Optional[float]:
return self.__preferred_aspect_ratio
@property
def minimum_width(self) -> typing.Optional[typing.Union[int, float]]:
return self.__minimum_width
@property
def minimum_height(self) -> typing.Optional[typing.Union[int, float]]:
return self.__minimum_height
@property
def minimum_aspect_ratio(self) -> typing.Optional[float]:
return self.__minimum_aspect_ratio
@property
def maximum_width(self) -> typing.Optional[typing.Union[int, float]]:
return self.__maximum_width
@property
def maximum_height(self) -> typing.Optional[typing.Union[int, float]]:
return self.__maximum_height
@property
def maximum_aspect_ratio(self) -> typing.Optional[float]:
return self.__maximum_aspect_ratio
@property
def collapsible(self) -> bool:
return self.__collapsible
@property
def _preferred_width(self) -> typing.Optional[typing.Union[int, float]]:
return self.__preferred_width
@_preferred_width.setter
def _preferred_width(self, value: typing.Optional[typing.Union[int, float]]) -> None:
self.__preferred_width = value
def with_preferred_width(self, width: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._preferred_width = width
return sizing
@property
def _preferred_height(self) -> typing.Optional[typing.Union[int, float]]:
return self.__preferred_height
@_preferred_height.setter
def _preferred_height(self, value: typing.Optional[typing.Union[int, float]]) -> None:
self.__preferred_height = value
def with_preferred_height(self, height: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._preferred_height = height
return sizing
@property
def _preferred_aspect_ratio(self) -> typing.Optional[float]:
return self.__preferred_aspect_ratio
@_preferred_aspect_ratio.setter
def _preferred_aspect_ratio(self, value: typing.Optional[float]) -> None:
self.__preferred_aspect_ratio = value
def with_preferred_aspect_ratio(self, aspect_ratio: typing.Optional[float]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._preferred_aspect_ratio = aspect_ratio
return sizing
@property
def _minimum_width(self) -> typing.Optional[typing.Union[int, float]]:
return self.__minimum_width
@_minimum_width.setter
def _minimum_width(self, value: typing.Optional[typing.Union[int, float]]) -> None:
self.__minimum_width = value
def with_minimum_width(self, width: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._minimum_width = width
return sizing
@property
def _minimum_height(self) -> typing.Optional[typing.Union[int, float]]:
return self.__minimum_height
@_minimum_height.setter
def _minimum_height(self, value: typing.Optional[typing.Union[int, float]]) -> None:
self.__minimum_height = value
def with_minimum_height(self, height: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._minimum_height = height
return sizing
@property
def _minimum_aspect_ratio(self) -> typing.Optional[float]:
return self.__minimum_aspect_ratio
@_minimum_aspect_ratio.setter
def _minimum_aspect_ratio(self, value: typing.Optional[float]) -> None:
self.__minimum_aspect_ratio = value
def with_minimum_aspect_ratio(self, aspect_ratio: typing.Optional[float]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._minimum_aspect_ratio = aspect_ratio
return sizing
@property
def _maximum_width(self) -> typing.Optional[typing.Union[int, float]]:
return self.__maximum_width
@_maximum_width.setter
def _maximum_width(self, value: typing.Optional[typing.Union[int, float]]) -> None:
self.__maximum_width = value
def with_maximum_width(self, width: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._maximum_width = width
return sizing
@property
def _maximum_height(self) -> typing.Optional[typing.Union[int, float]]:
return self.__maximum_height
@_maximum_height.setter
def _maximum_height(self, value: typing.Optional[typing.Union[int, float]]) -> None:
self.__maximum_height = value
def with_maximum_height(self, height: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._maximum_height = height
return sizing
@property
def _maximum_aspect_ratio(self) -> typing.Optional[float]:
return self.__maximum_aspect_ratio
@_maximum_aspect_ratio.setter
def _maximum_aspect_ratio(self, value: typing.Optional[float]) -> None:
self.__maximum_aspect_ratio = value
def with_maximum_aspect_ratio(self, aspect_ratio: typing.Optional[float]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._maximum_aspect_ratio = aspect_ratio
return sizing
@property
def _collapsible(self) -> bool:
return self.__collapsible
@_collapsible.setter
def _collapsible(self, value: bool) -> None:
self.__collapsible = value
def with_collapsible(self, collapsible: bool) -> Sizing:
sizing = copy.deepcopy(self)
sizing._collapsible = collapsible
return sizing
def _copy_from(self, other: Sizing) -> None:
self.__preferred_width = other.preferred_width
self.__preferred_height = other.preferred_height
self.__preferred_aspect_ratio = other.preferred_aspect_ratio
self.__minimum_width = other.minimum_width
self.__minimum_height = other.minimum_height
self.__minimum_aspect_ratio = other.minimum_aspect_ratio
self.__maximum_width = other.maximum_width
self.__maximum_height = other.maximum_height
self.__maximum_aspect_ratio = other.maximum_aspect_ratio
self.__collapsible = other.collapsible
def _clear_height_constraint(self) -> None:
self.__preferred_height = None
self.__minimum_height = None
self.__maximum_height = None
def with_unconstrained_height(self) -> Sizing:
sizing = copy.deepcopy(self)
sizing._clear_height_constraint()
return sizing
def _clear_width_constraint(self) -> None:
self.__preferred_width = None
self.__minimum_width = None
self.__maximum_width = None
def with_unconstrained_width(self) -> Sizing:
sizing = copy.deepcopy(self)
sizing._clear_width_constraint()
return sizing
def _set_fixed_height(self, height: typing.Optional[typing.Union[int, float]]) -> None:
self.__preferred_height = height
self.__minimum_height = height
self.__maximum_height = height
def with_fixed_height(self, height: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._set_fixed_height(height)
return sizing
def _set_fixed_width(self, width: typing.Optional[typing.Union[int, float]]) -> None:
self.__preferred_width = width
self.__minimum_width = width
self.__maximum_width = width
def with_fixed_width(self, width: typing.Optional[typing.Union[int, float]]) -> Sizing:
sizing = copy.deepcopy(self)
sizing._set_fixed_width(width)
return sizing
def _set_fixed_size(self, size: Geometry.IntSizeTuple) -> None:
size_ = Geometry.IntSize.make(size)
self._set_fixed_height(size_.height)
self._set_fixed_width(size_.width)
def with_fixed_size(self, size: Geometry.IntSizeTuple) -> Sizing:
sizing = copy.deepcopy(self)
sizing._set_fixed_size(size)
return sizing
def get_width_constraint(self, width: typing.Union[int, float]) -> Constraint:
""" Create and return a new width Constraint object made from this sizing object. """
constraint = Constraint()
if self.minimum_width is not None:
if isinstance(self.minimum_width, float) and self.minimum_width <= 1.0:
constraint.minimum = int(width * self.minimum_width)
else:
constraint.minimum = int(self.minimum_width)
else:
constraint.minimum = 0
if self.maximum_width is not None:
if isinstance(self.maximum_width, float) and self.maximum_width <= 1.0:
constraint.maximum = int(width * self.maximum_width)
else:
constraint.maximum = int(self.maximum_width)
else:
constraint.maximum = MAX_VALUE
if self.preferred_width is not None:
if isinstance(self.preferred_width, float) and self.preferred_width <= 1.0:
constraint.preferred = int(width * self.preferred_width)
else:
constraint.preferred = int(self.preferred_width)
else:
constraint.preferred = None
return constraint
def get_height_constraint(self, height: typing.Union[int, float]) -> Constraint:
""" Create and return a new height Constraint object made from this sizing object. """
constraint = Constraint()
if self.minimum_height is not None:
if isinstance(self.minimum_height, float) and self.minimum_height <= 1.0:
constraint.minimum = int(height * self.minimum_height)
else:
constraint.minimum = int(self.minimum_height)
else:
constraint.minimum = 0
if self.maximum_height is not None:
if isinstance(self.maximum_height, float) and self.maximum_height <= 1.0:
constraint.maximum = int(height * self.maximum_height)
else:
constraint.maximum = int(self.maximum_height)
else:
constraint.maximum = MAX_VALUE
if self.preferred_height is not None:
if isinstance(self.preferred_height, float) and self.preferred_height <= 1.0:
constraint.preferred = int(height * self.preferred_height)
else:
constraint.preferred = int(self.preferred_height)
else:
constraint.preferred = None
return constraint
def get_unrestrained_width(self, maximum_width: typing.Union[int, float]) -> int:
if self.maximum_width is not None:
if isinstance(self.maximum_width, float) and self.maximum_width < 1.0:
return int(self.maximum_width * maximum_width)
return int(min(self.maximum_width, maximum_width))
return int(maximum_width)
def get_unrestrained_height(self, maximum_height: typing.Union[int, float]) -> int:
if self.maximum_height is not None:
if isinstance(self.maximum_height, float) and self.maximum_height < 1.0:
return int(self.maximum_height * maximum_height)
return int(min(self.maximum_height, maximum_height))
return int(maximum_height)
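# A minimal usage sketch: the with_* methods return modified copies, leaving
# the original Sizing untouched, and float values at or below 1.0 are
# interpreted as fractions of the available space when constraints are built.
def _demo_sizing() -> None:
    sizing = Sizing().with_fixed_height(40).with_maximum_width(0.5)
    assert sizing.minimum_height == sizing.maximum_height == 40
    # half of an available width of 200 resolves to a maximum of 100
    assert sizing.get_width_constraint(200).maximum == 100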
class KeyboardModifiers:
def __init__(self, shift: bool = False, control: bool = False, alt: bool = False, meta: bool = False, keypad: bool = False) -> None:
self.__shift = shift
self.__control = control
self.__alt = alt
self.__meta = meta
self.__keypad = keypad
@property
def any_modifier(self) -> bool:
return self.shift or self.control or self.alt or self.meta
# shift
@property
def shift(self) -> bool:
return self.__shift
@property
def only_shift(self) -> bool:
return self.__shift and not self.__control and not self.__alt and not self.__meta
# control (command key on mac)
@property
def control(self) -> bool:
return self.__control
@property
def only_control(self) -> bool:
return self.__control and not self.__shift and not self.__alt and not self.__meta
# alt (option key on mac)
@property
def alt(self) -> bool:
return self.__alt
@property
def only_alt(self) -> bool:
return self.__alt and not self.__control and not self.__shift and not self.__meta
# option (alt key on windows)
@property
def option(self) -> bool:
return self.__alt
@property
def only_option(self) -> bool:
return self.__alt and not self.__control and not self.__shift and not self.__meta
# meta (control key on mac)
@property
def meta(self) -> bool:
return self.__meta
@property
def only_meta(self) -> bool:
return self.__meta and not self.__control and not self.__shift and not self.__alt
# keypad
@property
def keypad(self) -> bool:
return self.__keypad
@property
def only_keypad(self) -> bool:
return self.__keypad
@property
def native_control(self) -> bool:
return self.control
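# A minimal usage sketch: the only_* properties test for exact modifier
# combinations, while any_modifier reports whether any modifier is down.
def _demo_keyboard_modifiers() -> None:
    mods = KeyboardModifiers(shift=True)
    assert mods.shift and mods.only_shift
    assert mods.any_modifier and not mods.control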
def visible_canvas_item(canvas_item: typing.Optional[AbstractCanvasItem]) -> typing.Optional[AbstractCanvasItem]:
return canvas_item if canvas_item and canvas_item.visible else None
class AbstractCanvasItem:
"""An item drawn on a canvas supporting mouse and keyboard actions.
CONTAINERS
A canvas item should be added to a container. It is an error to add a particular canvas item to more than one
container. The container in which the canvas item resides is accessible via the ``container`` property.
LAYOUT
The container is responsible for layout and will set the canvas bounds of this canvas item as a function of the
container layout algorithm and this canvas item's sizing information.
The ``sizing`` property is the intrinsic sizing constraints of this canvas item.
    The ``layout_sizing`` property is the sizing information used by the container layout algorithm.
If this canvas item is non-composite, then ``layout_sizing`` will be identical to this canvas item's ``sizing``.
However, if this canvas item is composite, then ``layout_sizing`` is determined by the layout algorithm and then
additionally constrained by this canvas item's ``sizing``. In this way, by leaving ``sizing`` unconstrained, the
layout can determine the sizing of this canvas item. Alternatively, by adding a constraint to ``sizing``, the layout
can be constrained. This corresponds to the contents determining the size of the container vs. the container
determining the size of the layout.
    Unpredictable layout may occur if an unconstrained item is placed into an unconstrained container. Be sure to
    constrain (implicitly or explicitly) either the content or the container.
Layout occurs when the structure of the item hierarchy changes, such as when a new canvas item is added to a
container. Clients can also call ``refresh_layout`` explicitly as needed.
UPDATES AND DRAWING
Update is the mechanism by which the container is notified that one of its child canvas items needs updating.
The update message will ultimately end up at the root container at which point the root container will trigger a
repaint on a thread.
Subclasses should override _repaint or _repaint_visible to implement drawing. Drawing should take place within the
canvas bounds.
"""
def __init__(self) -> None:
super().__init__()
self.__container: typing.Optional[CanvasItemComposition] = None
self.__canvas_size: typing.Optional[Geometry.IntSize] = None
self.__canvas_origin: typing.Optional[Geometry.IntPoint] = None
self.__sizing = Sizing()
self.__focused = False
self.__focusable = False
self.wants_mouse_events = False
self.wants_drag_events = False
self.on_focus_changed: typing.Optional[typing.Callable[[bool], None]] = None
self.on_layout_updated: typing.Optional[typing.Callable[[typing.Optional[Geometry.IntPoint], typing.Optional[Geometry.IntSize], bool], None]] = None
self.__cursor_shape: typing.Optional[str] = None
self.__tool_tip: typing.Optional[str] = None
self.__background_color: typing.Optional[str] = None
self.__border_color: typing.Optional[str] = None
self.__visible = True
self._has_layout = False
self.__thread = threading.current_thread()
self.__pending_update = True
self.__repaint_drawing_context: typing.Optional[DrawingContext.DrawingContext] = None
# stats for testing
self._update_count = 0
self._repaint_count = 0
self.is_root_opaque = False
def close(self) -> None:
""" Close the canvas object. """
if threading.current_thread() != self.__thread:
warnings.warn('CanvasItem closed on different thread')
import traceback
traceback.print_stack()
self.__container = None
self.on_focus_changed = None
self.on_layout_updated = None
@property
def is_ui_interaction_active(self) -> bool:
root_container = self.root_container
if root_container:
return root_container.is_ui_interaction_active
return False
@property
def canvas_size(self) -> typing.Optional[Geometry.IntSize]:
""" Returns size of canvas_rect (external coordinates). """
return self.__canvas_size
def _set_canvas_size(self, canvas_size: typing.Optional[Geometry.IntSizeTuple]) -> None:
canvas_size_ = Geometry.IntSize.make(canvas_size) if canvas_size is not None else None
if ((self.__canvas_size is None) != (canvas_size_ is None)) or (self.__canvas_size != canvas_size_):
self.__canvas_size = canvas_size_
self.update()
@property
def canvas_origin(self) -> typing.Optional[Geometry.IntPoint]:
""" Returns origin of canvas_rect (external coordinates). """
return self.__canvas_origin
def _set_canvas_origin(self, canvas_origin: typing.Optional[Geometry.IntPointTuple]) -> None:
canvas_origin_ = Geometry.IntPoint.make(canvas_origin) if canvas_origin is not None else None
if ((self.__canvas_origin is None) != (canvas_origin_ is None)) or (self.__canvas_origin != canvas_origin_):
self.__canvas_origin = canvas_origin_
self.update()
def _begin_container_layout_changed(self) -> None:
pass
def _finish_container_layout_changed(self) -> None:
pass
def _container_layout_changed(self) -> None:
pass
@property
def canvas_widget(self) -> typing.Optional[UserInterface.CanvasWidget]:
return self.container.canvas_widget if self.container else None
@property
def canvas_bounds(self) -> typing.Optional[Geometry.IntRect]:
""" Returns a rect of the internal coordinates. """
if self.canvas_size is not None:
return Geometry.IntRect((0, 0), self.canvas_size)
return None
@property
def canvas_rect(self) -> typing.Optional[Geometry.IntRect]:
""" Returns a rect of the external coordinates. """
if self.canvas_origin is not None and self.canvas_size is not None:
return Geometry.IntRect(self.canvas_origin, self.canvas_size)
return None
@property
def container(self) -> typing.Optional[CanvasItemComposition]:
""" Return the container, if any. """
return self.__container
@container.setter
def container(self, container: typing.Optional[CanvasItemComposition]) -> None:
""" Set container. """
assert self.__container is None or container is None
self.__container = container
@property
def layer_container(self) -> typing.Optional[CanvasItemComposition]:
""" Return the root container, if any. """
return self.__container.layer_container if self.__container else None
@property
def root_container(self) -> typing.Optional[RootCanvasItem]:
""" Return the root container, if any. """
return self.__container.root_container if self.__container else None
@property
def background_color(self) -> typing.Optional[str]:
return self.__background_color
@background_color.setter
def background_color(self, background_color: typing.Optional[str]) -> None:
self.__background_color = background_color
self.update()
@property
def border_color(self) -> typing.Optional[str]:
return self.__border_color
@border_color.setter
def border_color(self, border_color: typing.Optional[str]) -> None:
self.__border_color = border_color
self.update()
@property
def focusable(self) -> bool:
""" Return whether the canvas item is focusable. """
return self.__focusable
@focusable.setter
def focusable(self, focusable: bool) -> None:
"""
Set whether the canvas item is focusable.
If this canvas item is focusable and contains other canvas items, they should
not be focusable.
"""
self.__focusable = focusable
@property
def focused(self) -> bool:
""" Return whether the canvas item is focused. """
return self.__focused
def _set_focused(self, focused: bool) -> None:
""" Set whether the canvas item is focused. Only called from container. """
if focused != self.__focused:
self.__focused = focused
self.update()
if callable(self.on_focus_changed):
self.on_focus_changed(focused)
def _request_focus(self, p: typing.Optional[Geometry.IntPoint] = None,
modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
# protected method
if not self.focused:
root_container = self.root_container
if root_container:
root_container._request_root_focus(self, p, modifiers)
def request_focus(self) -> None:
"""Request focus.
Subclasses should not override. Override _request_focus instead."""
self._request_focus()
def adjust_secondary_focus(self, p: Geometry.IntPoint, modifiers: UserInterface.KeyboardModifiers) -> None:
"""Adjust secondary focus. Default does nothing."""
pass
def clear_focus(self) -> None:
""" Relinquish focus. """
if self.focused:
root_container = self.root_container
if root_container:
root_container._set_focused_item(None)
def drag(self, mime_data: UserInterface.MimeData, thumbnail: typing.Optional[DrawingContext.RGBA32Type] = None,
hot_spot_x: typing.Optional[int] = None, hot_spot_y: typing.Optional[int] = None,
drag_finished_fn: typing.Optional[typing.Callable[[str], None]] = None) -> None:
root_container = self.root_container
if root_container:
root_container.drag(mime_data, thumbnail, hot_spot_x, hot_spot_y, drag_finished_fn)
def show_tool_tip_text(self, text: str, gx: int, gy: int) -> None:
root_container = self.root_container
if root_container:
root_container.show_tool_tip_text(text, gx, gy)
@property
def tool_tip(self) -> typing.Optional[str]:
return self.__tool_tip
@tool_tip.setter
def tool_tip(self, value: typing.Optional[str]) -> None:
self.__tool_tip = value
@property
def cursor_shape(self) -> typing.Optional[str]:
return self.__cursor_shape
@cursor_shape.setter
def cursor_shape(self, cursor_shape: typing.Optional[str]) -> None:
self.__cursor_shape = cursor_shape
root_container = self.root_container
if root_container:
root_container._cursor_shape_changed(self)
def map_to_canvas_item(self, p: Geometry.IntPointTuple, canvas_item: AbstractCanvasItem) -> Geometry.IntPoint:
""" Map the point to the local coordinates of canvas_item. """
o1 = self.map_to_root_container(Geometry.IntPoint())
o2 = canvas_item.map_to_root_container(Geometry.IntPoint())
return Geometry.IntPoint.make(p) + o1 - o2
def map_to_root_container(self, p: Geometry.IntPoint) -> Geometry.IntPoint:
""" Map the point to the coordinates of the root container. """
canvas_item: typing.Optional[AbstractCanvasItem] = self
while canvas_item: # handle case where last canvas item was root
canvas_item_origin = canvas_item.canvas_origin
if canvas_item_origin is not None: # handle case where canvas item is not root but has no parent
p = p + canvas_item_origin
canvas_item = canvas_item.container
else:
break
return p
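# Illustrative sketch (item_a and item_b are hypothetical sibling canvas
# items): convert a point from item_a's coordinates to item_b's.
#
#     p_in_b = item_a.map_to_canvas_item(Geometry.IntPoint(x=10, y=5), item_b)
#
# map_to_canvas_item maps both items to root container coordinates and takes
# the difference of their origins.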
def map_to_container(self, p: Geometry.IntPoint) -> Geometry.IntPoint:
""" Map the point to the coordinates of the container. """
canvas_origin = self.canvas_origin
assert canvas_origin
return p + canvas_origin
def map_to_global(self, p: Geometry.IntPoint) -> Geometry.IntPoint:
root_container = self.root_container
assert root_container
return root_container.map_to_global(self.map_to_root_container(p))
def _inserted(self, container: typing.Optional[AbstractCanvasItem]) -> None:
"""Subclasses may override to know when inserted into a container."""
pass
def _removed(self, container: typing.Optional[AbstractCanvasItem]) -> None:
"""Subclasses may override to know when removed from a container."""
pass
def prepare_render(self) -> None:
"""Subclasses may override to prepare for layout and repaint. DEPRECATED see _prepare_render."""
pass
def _prepare_render(self) -> None:
"""Subclasses may override to prepare for layout and repaint."""
self._prepare_render_self()
def _prepare_render_self(self) -> None:
"""Subclasses may override to prepare for layout and repaint."""
pass
def update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
"""Update the layout with a new canvas_origin and canvas_size.
canvas_origin and canvas_size are the external bounds.
This method will be called on the render thread.
Subclasses can override this method to take action when the size of the canvas item changes, but they should
typically call super to do the actual layout.
The on_layout_updated callable will be called with the new canvas_origin and canvas_size.
After calling this method, the canvas_origin and canvas_size properties are valid and _has_layout is True.
"""
self._update_self_layout(canvas_origin, canvas_size, immediate=immediate)
self._has_layout = self.canvas_origin is not None and self.canvas_size is not None
def _update_self_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
"""Update the canvas origin and size and call notification methods."""
self._set_canvas_origin(canvas_origin)
self._set_canvas_size(canvas_size)
if callable(self.on_layout_updated):
self.on_layout_updated(self.canvas_origin, self.canvas_size, immediate)
self._has_layout = self.canvas_origin is not None and self.canvas_size is not None
def refresh_layout_immediate(self) -> None:
"""Immediate re-layout the item."""
self.refresh_layout()
self.update_layout(self.canvas_origin, self.canvas_size, immediate=True)
def refresh_layout(self) -> None:
"""Invalidate the layout and trigger layout.
Items get layout from their container, so the default implementation asks the container to layout.
"""
if self.__container:
self.__container._needs_layout(self)
def _needs_layout(self, canvas_item: AbstractCanvasItem) -> None:
# pass the needs layout up the chain.
if self.__container:
self.__container._needs_layout(canvas_item)
@property
def visible(self) -> bool:
return self.__visible
@visible.setter
def visible(self, value: bool) -> None:
if self.__visible != value:
self.__visible = value
if self.__container:
self.__container.refresh_layout()
@property
def sizing(self) -> Sizing:
"""
Return sizing information for this canvas item.
The sizing property returns a copy; the copy can be modified and
then applied with ``update_sizing``.
"""
return copy.deepcopy(self.__sizing)
@property
def layout_sizing(self) -> Sizing:
"""
Return layout sizing information for this canvas item.
The layout sizing is read only and cannot be modified. It is
used by the layout engine.
"""
return copy.deepcopy(self.sizing)
def copy_sizing(self) -> Sizing:
return self.sizing
def update_sizing(self, new_sizing: Sizing) -> None:
if new_sizing != self.sizing:
self.__sizing._copy_from(new_sizing)
self.refresh_layout()
def update(self) -> None:
"""Mark canvas item as needing a display update.
The canvas item will be repainted by the root canvas item.
"""
self._update_with_items()
def _update_with_items(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> None:
self._update_count += 1
self._updated(canvas_items)
def _updated(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> None:
# Notify this canvas item that a child has been updated, repaint if needed at next opportunity.
self.__pending_update = True
self._update_container(canvas_items)
def _update_container(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> None:
# if this canvas item has a container and a layout, update the container.
container = self.__container
if container and self._has_layout:
canvas_items = list(canvas_items) if canvas_items else list()
canvas_items.append(self)
container._update_with_items(canvas_items)
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
"""Repaint the canvas item to the drawing context.
Subclasses should override this method to paint.
This method will be called on a thread.
The drawing should take place within the canvas_bounds.
"""
assert self.canvas_size is not None
self._repaint_count += 1
def _repaint_template(self, drawing_context: DrawingContext.DrawingContext, immediate: bool) -> None:
"""A wrapper method for _repaint.
Callers should always call this method instead of _repaint directly. This helps keep the _repaint
implementations simple and easy to understand.
"""
self._repaint(drawing_context)
def _repaint_if_needed(self, drawing_context: DrawingContext.DrawingContext, *, immediate: bool = False) -> None:
# Repaint if no cached version of the last paint is available.
# If no cached drawing context is available, regular _repaint is used to make a new one which is then cached.
# The cached drawing context is typically cleared during the update method.
# Subclasses will typically not need to override this method, except in special cases.
pending_update, self.__pending_update = self.__pending_update, False
if pending_update:
repaint_drawing_context = DrawingContext.DrawingContext()
self._repaint_template(repaint_drawing_context, immediate)
self.__repaint_drawing_context = repaint_drawing_context
if self.__repaint_drawing_context:
drawing_context.add(self.__repaint_drawing_context)
def _repaint_finished(self, drawing_context: DrawingContext.DrawingContext) -> None:
# when the thread finishes the repaint, this method gets called. the normal container update
# has not been called yet since the repaint wasn't finished until now. this method performs
# the container update.
self._update_container()
def repaint_immediate(self, drawing_context: DrawingContext.DrawingContext, canvas_size: Geometry.IntSize) -> None:
self.update_layout(Geometry.IntPoint(), canvas_size)
self._repaint_template(drawing_context, immediate=True)
def _draw_background(self, drawing_context: DrawingContext.DrawingContext) -> None:
"""Draw the background. Subclasses can call this."""
background_color = self.__background_color
if background_color:
rect = self.canvas_bounds
if rect:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.rect(rect.left, rect.top, rect.width, rect.height)
drawing_context.fill_style = background_color
drawing_context.fill()
def _draw_border(self, drawing_context: DrawingContext.DrawingContext) -> None:
"""Draw the border. Subclasses can call this."""
border_color = self.__border_color
if border_color:
rect = self.canvas_bounds
if rect:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.rect(rect.left, rect.top, rect.width, rect.height)
drawing_context.stroke_style = border_color
drawing_context.stroke()
def _repaint_visible(self, drawing_context: DrawingContext.DrawingContext, visible_rect: Geometry.IntRect) -> None:
"""
Repaint the canvas item to the drawing context within the visible area.
Subclasses can override this method to paint.
This method will be called on a thread.
The drawing should take place within the canvas_bounds.
The default implementation calls _repaint_if_needed(drawing_context)
"""
self._repaint_if_needed(drawing_context)
def canvas_item_at_point(self, x: int, y: int) -> typing.Optional[AbstractCanvasItem]:
canvas_items = self.canvas_items_at_point(x, y)
return canvas_items[0] if len(canvas_items) > 0 else None
def canvas_items_at_point(self, x: int, y: int) -> typing.List[AbstractCanvasItem]:
""" Return the canvas item at the point. May return None. """
canvas_bounds = self.canvas_bounds
if canvas_bounds and canvas_bounds.contains_point(Geometry.IntPoint(x=x, y=y)):
return [self]
return []
def get_root_opaque_canvas_items(self) -> typing.List[AbstractCanvasItem]:
return [self] if self.is_root_opaque else list()
def mouse_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
""" Handle a mouse click within this canvas item. Return True if handled. """
return False
def mouse_double_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
""" Handle a mouse double click within this canvas item. Return True if handled. """
return False
def mouse_entered(self) -> bool:
""" Handle a mouse entering this canvas item. Return True if handled. """
return False
def mouse_exited(self) -> bool:
""" Handle a mouse exiting this canvas item. Return True if handled. """
return False
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
""" Handle a mouse press within this canvas item. Return True if handled. """
return False
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
""" Handle a mouse release within this canvas item. Return True if handled. """
return False
def mouse_position_changed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
""" Handle a mouse move within this canvas item. Return True if handled. """
return False
def wheel_changed(self, x: int, y: int, dx: int, dy: int, is_horizontal: bool) -> bool:
""" Handle a mouse wheel changed within this canvas item. Return True if handled. """
return False
def context_menu_event(self, x: int, y: int, gx: int, gy: int) -> bool:
""" Handle a context menu event. x, y are local coordinates. gx, gy are global coordinates. """
return False
def key_pressed(self, key: UserInterface.Key) -> bool:
""" Handle a key pressed while this canvas item has focus. Return True if handled. """
return False
def key_released(self, key: UserInterface.Key) -> bool:
""" Handle a key released while this canvas item has focus. Return True if handled. """
return False
def wants_drag_event(self, mime_data: UserInterface.MimeData, x: int, y: int) -> bool:
""" Determines if the item should handle certain mime_data at a certain point. Return True if handled."""
return self.wants_drag_events
def drag_enter(self, mime_data: UserInterface.MimeData) -> str:
""" Handle a drag event entering this canvas item. Return action if handled. """
return "ignore"
def drag_leave(self) -> str:
""" Handle a drag event leaving this canvas item. Return action if handled. """
return "ignore"
def drag_move(self, mime_data: UserInterface.MimeData, x: int, y: int) -> str:
""" Handle a drag event moving within this canvas item. Return action if handled. """
return "ignore"
def drop(self, mime_data: UserInterface.MimeData, x: int, y: int) -> str:
""" Handle a drop event in this canvas item. Return action if handled. """
return "ignore"
def handle_tool_tip(self, x: int, y: int, gx: int, gy: int) -> bool:
return False
def pan_gesture(self, dx: int, dy: int) -> bool:
""" Handle a pan gesture in this canvas item. Return action if handled. """
return False
def _dispatch_any(self, method: str, *args: typing.Any, **kwargs: typing.Any) -> bool:
if hasattr(self, method):
return typing.cast(bool, getattr(self, method)(*args, **kwargs))
return False
def _can_dispatch_any(self, method: str) -> bool:
return hasattr(self, method)
def _get_menu_item_state(self, command_id: str) -> typing.Optional[UserInterface.MenuItemState]:
handle_method = "handle_" + command_id
menu_item_state_method = "get_" + command_id + "_menu_item_state"
if hasattr(self, menu_item_state_method):
menu_item_state = getattr(self, menu_item_state_method)()
if menu_item_state:
return typing.cast(UserInterface.MenuItemState, menu_item_state)
if hasattr(self, handle_method):
return UserInterface.MenuItemState(title=None, enabled=True, checked=False)
return None
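# Illustrative sketch of the naming convention used by _get_menu_item_state
# (the "copy" command is hypothetical). A canvas item supporting a command
# would define:
#
#     def handle_copy(self) -> bool: ...
#     def get_copy_menu_item_state(self) -> UserInterface.MenuItemState: ...
#
# _get_menu_item_state("copy") returns the state from the getter if present,
# or an enabled default state if only the handler exists.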
def simulate_click(self, p: Geometry.IntPointTuple, modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
modifiers_ = modifiers or typing.cast("UserInterface.KeyboardModifiers", KeyboardModifiers())
self.mouse_pressed(p[1], p[0], modifiers_)
self.mouse_released(p[1], p[0], modifiers_)
def simulate_drag(self, p1: Geometry.IntPointTuple, p2: Geometry.IntPointTuple, modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
modifiers_ = modifiers or typing.cast("UserInterface.KeyboardModifiers", KeyboardModifiers())
self.mouse_pressed(p1[1], p1[0], modifiers_)
self.mouse_position_changed(p1[1], p1[0], modifiers_)
midpoint = Geometry.midpoint(Geometry.IntPoint.make(p1).to_float_point(), Geometry.IntPoint.make(p2).to_float_point())
self.mouse_position_changed(round(midpoint[1]), round(midpoint[0]), modifiers_)
self.mouse_position_changed(p2[1], p2[0], modifiers_)
self.mouse_released(p2[1], p2[0], modifiers_)
def simulate_press(self, p: Geometry.IntPointTuple, modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
modifiers_ = modifiers or typing.cast("UserInterface.KeyboardModifiers", KeyboardModifiers())
self.mouse_pressed(p[1], p[0], modifiers_)
def simulate_move(self, p: Geometry.IntPointTuple, modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
modifiers_ = modifiers or typing.cast("UserInterface.KeyboardModifiers", KeyboardModifiers())
self.mouse_position_changed(p[1], p[0], modifiers_)
def simulate_release(self, p: Geometry.IntPointTuple, modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
modifiers_ = modifiers or typing.cast("UserInterface.KeyboardModifiers", KeyboardModifiers())
self.mouse_released(p[1], p[0], modifiers_)
class CanvasItemAbstractLayout:
"""
Layout canvas items within a larger space.
Subclasses must implement layout method.
NOTE: origin=0 is at the top
"""
def __init__(self, margins: typing.Optional[Geometry.Margins] = None, spacing: typing.Optional[int] = None) -> None:
self.margins = margins if margins is not None else Geometry.Margins(0, 0, 0, 0)
self.spacing = spacing if spacing else 0
def calculate_row_layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem]) -> ConstraintResultType:
""" Use constraint_solve to return the positions of canvas items as if they are in a row. """
canvas_item_count = len(canvas_items)
spacing_count = canvas_item_count - 1
content_left = canvas_origin.x + self.margins.left
content_width = canvas_size.width - self.margins.left - self.margins.right - self.spacing * spacing_count
constraints = [canvas_item.layout_sizing.get_width_constraint(content_width) for canvas_item in canvas_items]
return constraint_solve(content_left, content_width, constraints, self.spacing)
def calculate_column_layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem]) -> ConstraintResultType:
""" Use constraint_solve to return the positions of canvas items as if they are in a column. """
canvas_item_count = len(canvas_items)
spacing_count = canvas_item_count - 1
content_top = canvas_origin.y + self.margins.top
content_height = canvas_size.height - self.margins.top - self.margins.bottom - self.spacing * spacing_count
constraints = [canvas_item.layout_sizing.get_height_constraint(content_height) for canvas_item in canvas_items]
return constraint_solve(content_top, content_height, constraints, self.spacing)
def update_canvas_item_layout(self, canvas_item_origin: Geometry.IntPoint, canvas_item_size: Geometry.IntSize,
canvas_item: AbstractCanvasItem, *, immediate: bool = False) -> None:
""" Given a container box, adjust a single canvas item within the box according to aspect_ratio constraints. """
# TODO: Also adjust canvas items for maximums, and positioning
aspect_ratio = canvas_item_size.aspect_ratio
rect = Geometry.IntRect(origin=canvas_item_origin, size=canvas_item_size)
layout_sizing = canvas_item.layout_sizing
if layout_sizing.minimum_aspect_ratio is not None and aspect_ratio < layout_sizing.minimum_aspect_ratio:
rect = Geometry.fit_to_aspect_ratio(rect, layout_sizing.minimum_aspect_ratio).to_int_rect()
elif layout_sizing.maximum_aspect_ratio is not None and aspect_ratio > layout_sizing.maximum_aspect_ratio:
rect = Geometry.fit_to_aspect_ratio(rect, layout_sizing.maximum_aspect_ratio).to_int_rect()
elif layout_sizing.preferred_aspect_ratio is not None:
rect = Geometry.fit_to_aspect_ratio(rect, layout_sizing.preferred_aspect_ratio).to_int_rect()
canvas_item.update_layout(rect.origin, rect.size, immediate=immediate)
def layout_canvas_items(self, x_positions: typing.Sequence[int], y_positions: typing.Sequence[int],
widths: typing.Sequence[int], heights: typing.Sequence[int],
canvas_items: typing.Sequence[AbstractCanvasItem], *, immediate: bool = False) -> None:
""" Set the container boxes for the canvas items using update_canvas_item_layout on the individual items. """
for index, canvas_item in enumerate(canvas_items):
if canvas_item is not None:
canvas_item_origin = Geometry.IntPoint(x=x_positions[index], y=y_positions[index])
canvas_item_size = Geometry.IntSize(width=widths[index], height=heights[index])
self.update_canvas_item_layout(canvas_item_origin, canvas_item_size, canvas_item, immediate=immediate)
def _combine_sizing_property(self, sizing: Sizing, canvas_item_sizing: Sizing, property: str,
combiner: typing.Callable[[typing.Any, typing.Any], typing.Any],
clear_if_missing: bool = False) -> None:
""" Utility method for updating the property of the sizing object using the combiner function and the canvas_item_sizing. """
property = "_" + property
canvas_item_value = getattr(canvas_item_sizing, property)
value = getattr(sizing, property)
if canvas_item_value is not None:
if clear_if_missing:
setattr(sizing, property, combiner(value, canvas_item_value) if value is not None else None)
else:
setattr(sizing, property, combiner(value, canvas_item_value) if value is not None else canvas_item_value)
elif clear_if_missing:
setattr(sizing, property, None)
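# Illustrative example: combining preferred widths with max (as in
# _get_overlap_sizing below) keeps the larger value for stacked items, while
# combining with operator.add (as in _get_row_sizing) sums the widths of
# items placed side by side.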
def _get_overlap_sizing(self, canvas_items: typing.Sequence[typing.Optional[AbstractCanvasItem]]) -> Sizing:
"""
A commonly used sizing method to determine the preferred/min/max assuming everything is stacked/overlapping.
Does not include spacing or margins.
"""
sizing = Sizing()
sizing._maximum_width = 0
sizing._maximum_height = 0
sizing._preferred_width = 0
sizing._preferred_height = 0
for canvas_item in canvas_items:
if canvas_item is not None:
canvas_item_sizing = canvas_item.layout_sizing
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_width", max, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_height", max, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_width", max) # if any minimum_width is present, take the maximum one
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_height", max)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_width", max, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_height", max, True)
if sizing.maximum_width == 0 or len(canvas_items) == 0:
sizing._maximum_width = None
if sizing.maximum_height == 0 or len(canvas_items) == 0:
sizing._maximum_height = None
if sizing.preferred_width == 0 or len(canvas_items) == 0:
sizing._preferred_width = None
if sizing.preferred_height == 0 or len(canvas_items) == 0:
sizing._preferred_height = None
return sizing
def _get_column_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem])-> Sizing:
"""
A commonly used sizing method to determine the preferred/min/max assuming everything is a column.
Does not include spacing or margins.
"""
sizing = Sizing()
sizing._maximum_width = 0
sizing._maximum_height = 0
sizing._preferred_width = 0
for canvas_item in canvas_items:
if canvas_item is not None:
canvas_item_sizing = canvas_item.layout_sizing
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_width", max, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_height", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_width", max)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_height", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_width", max, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_height", operator.add, True)
if sizing.maximum_width == 0 or len(canvas_items) == 0:
sizing._maximum_width = None
if sizing.preferred_width == 0 or len(canvas_items) == 0:
sizing._preferred_width = None
if sizing.maximum_height == MAX_VALUE or len(canvas_items) == 0:
sizing._maximum_height = None
return sizing
def _get_row_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem]) -> Sizing:
"""
A commonly used sizing method to determine the preferred/min/max assuming everything is a row.
Does not include spacing or margins.
"""
sizing = Sizing()
sizing._maximum_width = 0
sizing._maximum_height = 0
sizing._preferred_height = 0
for canvas_item in canvas_items:
if canvas_item is not None:
canvas_item_sizing = canvas_item.layout_sizing
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_width", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_height", max, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_width", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_height", max)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_width", operator.add, True)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_height", max, True)
if sizing.maximum_width == MAX_VALUE or len(canvas_items) == 0:
sizing._maximum_width = None
if sizing.maximum_height == 0 or len(canvas_items) == 0:
sizing._maximum_height = None
if sizing.preferred_height == 0 or len(canvas_items) == 0:
sizing._preferred_height = None
return sizing
def _adjust_sizing(self, sizing: Sizing, x_spacing: int, y_spacing: int) -> None:
""" Adjust the sizing object by adding margins and spacing. Spacing is total, not per item. """
if sizing._minimum_width is not None:
sizing._minimum_width += self.margins.left + self.margins.right + x_spacing
if sizing._maximum_width is not None:
sizing._maximum_width += self.margins.left + self.margins.right + x_spacing
if sizing._preferred_width is not None:
sizing._preferred_width += self.margins.left + self.margins.right + x_spacing
if sizing._minimum_height is not None:
sizing._minimum_height += self.margins.top + self.margins.bottom + y_spacing
if sizing._maximum_height is not None:
sizing._maximum_height += self.margins.top + self.margins.bottom + y_spacing
if sizing._preferred_height is not None:
sizing._preferred_height += self.margins.top + self.margins.bottom + y_spacing
def add_canvas_item(self, canvas_item: AbstractCanvasItem, pos: typing.Optional[Geometry.IntPoint]) -> None:
"""
Subclasses may override this method to get position specific information when a canvas item is added to
the layout.
"""
pass
def remove_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
"""
Subclasses may override this method to clean up position specific information when a canvas item is removed
from the layout.
"""
pass
def layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem], *, immediate: bool = False) -> None:
""" Subclasses must override this method to layout canvas item. """
raise NotImplementedError()
def get_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem]) -> Sizing:
"""
Return the sizing object for this layout. Includes spacing and margins.
Subclasses must implement.
"""
raise NotImplementedError()
def create_spacing_item(self, spacing: int) -> AbstractCanvasItem:
raise NotImplementedError()
def create_stretch_item(self) -> AbstractCanvasItem:
raise NotImplementedError()
class CanvasItemLayout(CanvasItemAbstractLayout):
"""
Default layout which overlays all items on one another.
Pass margins.
"""
def __init__(self, margins: typing.Optional[Geometry.Margins] = None, spacing: typing.Optional[int] = None) -> None:
super().__init__(margins, spacing)
def layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem], *, immediate: bool = False) -> None:
for canvas_item in canvas_items:
self.update_canvas_item_layout(canvas_origin, canvas_size, canvas_item, immediate=immediate)
def get_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem]) -> Sizing:
sizing = self._get_overlap_sizing(canvas_items)
self._adjust_sizing(sizing, 0, 0)
return sizing
def create_spacing_item(self, spacing: int) -> AbstractCanvasItem:
raise NotImplementedError()
def create_stretch_item(self) -> AbstractCanvasItem:
raise NotImplementedError()
class CanvasItemColumnLayout(CanvasItemAbstractLayout):
"""
Layout items in a column.
Pass margins and spacing.
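Example (illustrative sketch):

    column = CanvasItemComposition()
    column.layout = CanvasItemColumnLayout(spacing=4, alignment="start")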
"""
def __init__(self, margins: typing.Optional[Geometry.Margins] = None, spacing: typing.Optional[int] = None,
alignment: typing.Optional[str] = None) -> None:
super().__init__(margins, spacing)
self.alignment = alignment
def layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem], *, immediate: bool = False) -> None:
# calculate the vertical placement
y_positions, heights = self.calculate_column_layout(canvas_origin, canvas_size, canvas_items)
widths = [canvas_item.layout_sizing.get_unrestrained_width(canvas_size.width - self.margins.left - self.margins.right) for canvas_item in canvas_items]
available_width = canvas_size.width - self.margins.left - self.margins.right
if self.alignment == "start":
x_positions = [canvas_origin.x + self.margins.left for width in widths]
elif self.alignment == "end":
x_positions = [canvas_origin.x + self.margins.left + (available_width - width) for width in widths]
else:
x_positions = [round(canvas_origin.x + self.margins.left + (available_width - width) * 0.5) for width in widths]
self.layout_canvas_items(x_positions, y_positions, widths, heights, canvas_items, immediate=immediate)
def get_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem]) -> Sizing:
sizing = self._get_column_sizing(canvas_items)
self._adjust_sizing(sizing, 0, self.spacing * (len(canvas_items) - 1))
return sizing
def create_spacing_item(self, spacing: int) -> AbstractCanvasItem:
spacing_item = EmptyCanvasItem()
spacing_item.update_sizing(spacing_item.sizing.with_fixed_height(spacing).with_fixed_width(0))
return spacing_item
def create_stretch_item(self) -> AbstractCanvasItem:
spacing_item = EmptyCanvasItem()
spacing_item.update_sizing(spacing_item.sizing.with_fixed_width(0))
return spacing_item
class CanvasItemRowLayout(CanvasItemAbstractLayout):
"""
Layout items in a row.
Pass margins and spacing.
"""
def __init__(self, margins: typing.Optional[Geometry.Margins] = None, spacing: typing.Optional[int] = None,
alignment: typing.Optional[str] = None) -> None:
super().__init__(margins, spacing)
self.alignment = alignment
def layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem], *, immediate: bool = False) -> None:
# calculate the horizontal placement
x_positions, widths = self.calculate_row_layout(canvas_origin, canvas_size, canvas_items)
heights = [canvas_item.layout_sizing.get_unrestrained_height(canvas_size.height - self.margins.top - self.margins.bottom) for canvas_item in canvas_items]
available_height = canvas_size.height - self.margins.top - self.margins.bottom
if self.alignment == "start":
y_positions = [canvas_origin.y + self.margins.top for _ in heights]
elif self.alignment == "end":
y_positions = [canvas_origin.y + self.margins.top + (available_height - height) for height in heights]
else:
y_positions = [round(canvas_origin.y + self.margins.top + (available_height - height) // 2) for height in heights]
self.layout_canvas_items(x_positions, y_positions, widths, heights, canvas_items, immediate=immediate)
def get_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem]) -> Sizing:
sizing = self._get_row_sizing(canvas_items)
self._adjust_sizing(sizing, self.spacing * (len(canvas_items) - 1), 0)
return sizing
def create_spacing_item(self, spacing: int) -> AbstractCanvasItem:
spacing_item = EmptyCanvasItem()
spacing_item.update_sizing(spacing_item.sizing.with_fixed_width(spacing).with_fixed_height(0))
return spacing_item
def create_stretch_item(self) -> AbstractCanvasItem:
spacing_item = EmptyCanvasItem()
spacing_item.update_sizing(spacing_item.sizing.with_fixed_height(0))
return spacing_item
class CanvasItemGridLayout(CanvasItemAbstractLayout):
"""
Layout items in a grid specified by size (IntSize).
Pass margins and spacing.
Canvas items must be added to container canvas item using
add_canvas_item with the position (IntPoint) passed as pos
parameter.
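Example (illustrative sketch; ``child`` stands for an existing canvas item):

    grid = CanvasItemComposition()
    grid.layout = CanvasItemGridLayout(Geometry.IntSize(width=2, height=2))
    grid.add_canvas_item(child, Geometry.IntPoint(x=0, y=1))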
"""
def __init__(self, size: Geometry.IntSize, margins: typing.Optional[Geometry.Margins] = None, spacing: typing.Optional[int] = None) -> None:
super().__init__(margins, spacing)
assert size.width > 0 and size.height > 0
self.__size = size
self.__columns: typing.List[typing.List[typing.Optional[AbstractCanvasItem]]] = [[None for _ in range(self.__size.height)] for _ in range(self.__size.width)]
def add_canvas_item(self, canvas_item: AbstractCanvasItem, pos: typing.Optional[Geometry.IntPoint]) -> None:
assert pos
assert pos.x >= 0 and pos.x < self.__size.width
assert pos.y >= 0 and pos.y < self.__size.height
self.__columns[pos.x][pos.y] = canvas_item
def remove_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
canvas_item.close()
for x in range(self.__size.width):
for y in range(self.__size.height):
if self.__columns[x][y] == canvas_item:
self.__columns[x][y] = None
def layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[AbstractCanvasItem], *, immediate: bool = False) -> None:
# calculate the horizontal placement
# calculate the sizing (x, width) for each column
canvas_item_count = self.__size.width
spacing_count = canvas_item_count - 1
content_left = canvas_origin.x + self.margins.left
content_width = canvas_size.width - self.margins.left - self.margins.right - self.spacing * spacing_count
constraints = list()
for x in range(self.__size.width):
sizing = self._get_overlap_sizing([visible_canvas_item(self.__columns[x][y]) for y in range(self.__size.height)])
constraints.append(sizing.get_width_constraint(content_width))
# run the layout engine
x_positions, widths = constraint_solve(content_left, content_width, constraints, self.spacing)
# calculate the vertical placement
# calculate the sizing (y, height) for each row
canvas_item_count = self.__size.height
spacing_count = canvas_item_count - 1
content_top = canvas_origin.y + self.margins.top
content_height = canvas_size.height - self.margins.top - self.margins.bottom - self.spacing * spacing_count
constraints = list()
for y in range(self.__size.height):
sizing = self._get_overlap_sizing([visible_canvas_item(self.__columns[x][y]) for x in range(self.__size.width)])
constraints.append(sizing.get_height_constraint(content_height))
# run the layout engine
y_positions, heights = constraint_solve(content_top, content_height, constraints, self.spacing)
# do the layout
combined_xs = list()
combined_ys = list()
combined_widths = list()
combined_heights = list()
combined_canvas_items = list()
for x in range(self.__size.width):
for y in range(self.__size.height):
canvas_item = visible_canvas_item(self.__columns[x][y])
if canvas_item is not None:
combined_xs.append(x_positions[x])
combined_ys.append(y_positions[y])
combined_widths.append(widths[x])
combined_heights.append(heights[y])
combined_canvas_items.append(canvas_item)
self.layout_canvas_items(combined_xs, combined_ys, combined_widths, combined_heights, combined_canvas_items, immediate=immediate)
def get_sizing(self, canvas_items: typing.Sequence[AbstractCanvasItem]) -> Sizing:
"""
Calculate the sizing for the grid. Treat columns and rows independently.
Override from abstract layout.
"""
sizing = Sizing().with_maximum_width(0).with_maximum_height(0).with_preferred_height(0)
# the widths
canvas_item_sizings = list()
for x in range(self.__size.width):
canvas_items_ = [visible_canvas_item(self.__columns[x][y]) for y in range(self.__size.height)]
canvas_item_sizings.append(self._get_overlap_sizing(canvas_items_))
for canvas_item_sizing in canvas_item_sizings:
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_width", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_width", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_width", operator.add, True)
# the heights
canvas_item_sizings = list()
for y in range(self.__size.height):
canvas_items_ = [visible_canvas_item(self.__columns[x][y]) for x in range(self.__size.width)]
canvas_item_sizings.append(self._get_overlap_sizing(canvas_items_))
for canvas_item_sizing in canvas_item_sizings:
self._combine_sizing_property(sizing, canvas_item_sizing, "preferred_height", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "minimum_height", operator.add)
self._combine_sizing_property(sizing, canvas_item_sizing, "maximum_height", operator.add, True)
if sizing.maximum_width == MAX_VALUE or len(canvas_items_) == 0:
sizing._maximum_width = None
if sizing.maximum_height == MAX_VALUE or len(canvas_items_) == 0:
sizing._maximum_height = None
if sizing.maximum_width == 0 or len(canvas_items_) == 0:
sizing._maximum_width = None
if sizing.preferred_width == 0 or len(canvas_items_) == 0:
sizing._preferred_width = None
if sizing.maximum_height == 0 or len(canvas_items_) == 0:
sizing._maximum_height = None
if sizing.preferred_height == 0 or len(canvas_items_) == 0:
sizing._preferred_height = None
self._adjust_sizing(sizing, self.spacing * (self.__size.width - 1), self.spacing * (self.__size.height - 1))
return sizing
class CompositionLayoutRenderTrait:
"""A trait (a set of methods for extending a class) allow customization of composition layout/rendering.
Since traits aren't supported directly in Python, this works by having associated methods in the
CanvasItemComposition class directly invoke the methods of this or a subclass of this object.
"""
def __init__(self, canvas_item_composition: CanvasItemComposition):
self._canvas_item_composition = canvas_item_composition
def close(self) -> None:
self._stop_render_behavior()
self._canvas_item_composition = None # type: ignore
def _stop_render_behavior(self) -> None:
pass
@property
def _needs_layout_for_testing(self) -> bool:
return False
@property
def is_layer_container(self) -> bool:
return False
def register_prepare_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
pass
def unregister_prepare_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
pass
def _container_layout_changed(self) -> None:
pass
def _try_update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint], canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> bool:
return False
def _try_needs_layout(self, canvas_item: AbstractCanvasItem) -> bool:
return False
def _try_update_with_items(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> bool:
return False
def _try_updated(self) -> bool:
return False
def _try_repaint_template(self, drawing_context: DrawingContext.DrawingContext, immediate: bool) -> bool:
return False
def _try_repaint_if_needed(self, drawing_context: DrawingContext.DrawingContext, *, immediate: bool = False) -> bool:
return False
def layout_immediate(self, canvas_size: Geometry.IntSize, force: bool = True) -> None:
self._canvas_item_composition._prepare_render()
self._canvas_item_composition._update_self_layout(Geometry.IntPoint(), canvas_size, immediate=True)
self._canvas_item_composition._update_child_layouts(canvas_size, immediate=True)
def _try_repaint_immediate(self, drawing_context: DrawingContext.DrawingContext, canvas_size: Geometry.IntSize) -> bool:
return False
class CanvasItemComposition(AbstractCanvasItem):
"""A composite canvas item comprised of other canvas items.
Optionally includes a layout. Compositions without an explicit layout are stacked to fit this container.
Access child canvas items using canvas_items.
Child canvas items with higher indexes are considered to be foremost.
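Example (illustrative sketch): a column of items with a trailing stretch.

    composition = CanvasItemComposition()
    composition.layout = CanvasItemColumnLayout(spacing=4)
    composition.add_canvas_item(EmptyCanvasItem())
    composition.add_stretch()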
"""
def __init__(self, layout_render_trait: typing.Optional[CompositionLayoutRenderTrait] = None) -> None:
super().__init__()
self.__canvas_items: typing.List[AbstractCanvasItem] = list()
self.layout: CanvasItemAbstractLayout = CanvasItemLayout()
self.__layout_lock = threading.RLock()
self.__layout_render_trait = layout_render_trait or CompositionLayoutRenderTrait(self)
self.__container_layout_changed_count = 0
def close(self) -> None:
self.__layout_render_trait.close()
self.__layout_render_trait = typing.cast(typing.Any, None)
with self.__layout_lock:
canvas_items = self.canvas_items
for canvas_item in canvas_items:
canvas_item.close()
# this goes after closing; if this goes before closing, threaded canvas items don't get closed properly
# since they notify their container (to cull). to reproduce the bug, create a 1x2, then a 4x3 in the bottom.
# then close several panels and undo. not sure if this is the permanent fix or not.
self.__canvas_items = typing.cast(typing.Any, None)
super().close()
def _stop_render_behavior(self) -> None:
if self.__layout_render_trait:
self.__layout_render_trait._stop_render_behavior()
@property
def _needs_layout_for_testing(self) -> bool:
return self.__layout_render_trait._needs_layout_for_testing
@property
def layer_container(self) -> typing.Optional[CanvasItemComposition]:
return self if self.__layout_render_trait.is_layer_container else super().layer_container
def register_prepare_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
"""DEPRECATED see _prepare_render."""
self.__layout_render_trait.register_prepare_canvas_item(canvas_item)
def unregister_prepare_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
"""DEPRECATED see _prepare_render."""
self.__layout_render_trait.unregister_prepare_canvas_item(canvas_item)
def _begin_container_layout_changed(self) -> None:
# recursively increase the changed count
self.__container_layout_changed_count += 1
for canvas_item in self.canvas_items:
canvas_item._begin_container_layout_changed()
def _finish_container_layout_changed(self) -> None:
# recursively decrease the changed count
self.__container_layout_changed_count -= 1
for canvas_item in self.canvas_items:
canvas_item._finish_container_layout_changed()
# when the change count is zero, call container layout changed.
# the effect is that this will occur once per composite item. only
# layers will actually do something (re-render with new layout).
if self.__container_layout_changed_count == 0:
self._container_layout_changed()
def _redraw_container(self) -> None:
self.__layout_render_trait._container_layout_changed()
def _prepare_render(self) -> None:
for canvas_item in self.__canvas_items:
canvas_item._prepare_render()
super()._prepare_render()
@property
def canvas_items_count(self) -> int:
"""Return count of canvas items managed by this composition."""
return len(self.__canvas_items)
@property
def canvas_items(self) -> typing.List[AbstractCanvasItem]:
""" Return a copy of the canvas items managed by this composition. """
return copy.copy(self.__canvas_items)
@property
def visible_canvas_items(self) -> typing.List[AbstractCanvasItem]:
with self.__layout_lock:
if self.__canvas_items is not None:
return [canvas_item for canvas_item in self.__canvas_items if canvas_item and canvas_item.visible]
return list()
def update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
"""Override from abstract canvas item."""
if immediate or not self.__layout_render_trait._try_update_layout(canvas_origin, canvas_size, immediate=immediate):
self._update_layout(canvas_origin, canvas_size, immediate=immediate)
def layout_immediate(self, canvas_size: Geometry.IntSize, force: bool = True) -> None:
# useful for tests
self.__layout_render_trait.layout_immediate(canvas_size, force)
def _update_with_items(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> None:
# extra check for behavior during closing
if self.__layout_render_trait and not self.__layout_render_trait._try_update_with_items(canvas_items):
super()._update_with_items(canvas_items)
def _updated(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> None:
# extra check for behavior during closing
if self.__layout_render_trait and not self.__layout_render_trait._try_updated():
super()._updated(canvas_items)
def _update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
"""Private method, but available to tests."""
with self.__layout_lock:
if self.__canvas_items is not None:
assert canvas_origin is not None
assert canvas_size is not None
canvas_origin_ = Geometry.IntPoint.make(canvas_origin)
canvas_size_ = Geometry.IntSize.make(canvas_size)
self._update_self_layout(canvas_origin_, canvas_size_, immediate=immediate)
self._update_child_layouts(canvas_size_, immediate=immediate)
def _update_child_layouts(self, canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
with self.__layout_lock:
if self.__canvas_items is not None:
assert canvas_size is not None
canvas_size = Geometry.IntSize.make(canvas_size)
self.layout.layout(Geometry.IntPoint(), canvas_size, self.visible_canvas_items, immediate=immediate)
def _needs_layout(self, canvas_item: AbstractCanvasItem) -> None:
# extra check for behavior during closing
if self.__layout_render_trait and not self.__layout_render_trait._try_needs_layout(canvas_item):
super()._needs_layout(canvas_item)
# override sizing information. let layout provide it.
@property
def layout_sizing(self) -> Sizing:
sizing = self.sizing
layout_sizing = self.layout.get_sizing(self.visible_canvas_items)
if sizing.minimum_width is not None:
layout_sizing._minimum_width = sizing.minimum_width
if sizing.maximum_width is not None:
layout_sizing._maximum_width = sizing.maximum_width
if sizing.preferred_width is not None:
layout_sizing._preferred_width = sizing.preferred_width
if sizing.minimum_height is not None:
layout_sizing._minimum_height = sizing.minimum_height
if sizing.maximum_height is not None:
layout_sizing._maximum_height = sizing.maximum_height
if sizing.preferred_height is not None:
layout_sizing._preferred_height = sizing.preferred_height
if sizing.minimum_aspect_ratio is not None:
layout_sizing._minimum_aspect_ratio = sizing.minimum_aspect_ratio
if sizing.maximum_aspect_ratio is not None:
layout_sizing._maximum_aspect_ratio = sizing.maximum_aspect_ratio
if sizing.preferred_aspect_ratio is not None:
layout_sizing._preferred_aspect_ratio = sizing.preferred_aspect_ratio
if len(self.visible_canvas_items) == 0 and sizing.collapsible:
layout_sizing._minimum_width = 0
layout_sizing._preferred_width = 0
layout_sizing._maximum_width = 0
layout_sizing._minimum_height = 0
layout_sizing._preferred_height = 0
layout_sizing._maximum_height = 0
return layout_sizing
def canvas_item_layout_sizing_changed(self, canvas_item: AbstractCanvasItem) -> None:
""" Contained canvas items call this when their layout_sizing changes. """
self.refresh_layout()
def _insert_canvas_item_direct(self, before_index: int, canvas_item: AbstractCanvasItem,
pos: typing.Optional[Geometry.IntPoint] = None) -> None:
self.insert_canvas_item(before_index, canvas_item, pos)
def insert_canvas_item(self, before_index: int, canvas_item: AbstractCanvasItem,
pos: typing.Optional[typing.Any] = None) -> AbstractCanvasItem:
""" Insert canvas item into layout. pos parameter is layout specific. """
self.__canvas_items.insert(before_index, canvas_item)
canvas_item.container = self
canvas_item._inserted(self)
self.layout.add_canvas_item(canvas_item, pos)
self.refresh_layout()
self.update()
return canvas_item
def insert_spacing(self, before_index: int, spacing: int) -> AbstractCanvasItem:
spacing_item = self.layout.create_spacing_item(spacing)
return self.insert_canvas_item(before_index, spacing_item)
def insert_stretch(self, before_index: int) -> AbstractCanvasItem:
stretch_item = self.layout.create_stretch_item()
return self.insert_canvas_item(before_index, stretch_item)
def add_canvas_item(self, canvas_item: AbstractCanvasItem, pos: typing.Optional[typing.Any] = None) -> AbstractCanvasItem:
""" Add canvas item to layout. pos parameter is layout specific. """
return self.insert_canvas_item(len(self.__canvas_items), canvas_item, pos)
def add_spacing(self, spacing: int) -> AbstractCanvasItem:
return self.insert_spacing(len(self.__canvas_items), spacing)
def add_stretch(self) -> AbstractCanvasItem:
return self.insert_stretch(len(self.__canvas_items))
def _remove_canvas_item_direct(self, canvas_item: AbstractCanvasItem) -> None:
self.__canvas_items.remove(canvas_item)
def _remove_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
canvas_item._removed(self)
canvas_item.close()
self.layout.remove_canvas_item(canvas_item)
canvas_item.container = None
self.__canvas_items.remove(canvas_item)
self.refresh_layout()
self.update()
def remove_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
""" Remove canvas item from layout. Canvas item is closed. """
self._remove_canvas_item(canvas_item)
def remove_all_canvas_items(self) -> None:
""" Remove all canvas items from layout. Canvas items are closed. """
for canvas_item in reversed(copy.copy(self.__canvas_items)):
self._remove_canvas_item(canvas_item)
def replace_canvas_item(self, old_canvas_item: AbstractCanvasItem, new_canvas_item: AbstractCanvasItem) -> None:
""" Replace the given canvas item with the new one. Canvas item is closed. """
index = self.__canvas_items.index(old_canvas_item)
self.remove_canvas_item(old_canvas_item)
self.insert_canvas_item(index, new_canvas_item)
def wrap_canvas_item(self, canvas_item: AbstractCanvasItem, canvas_item_container: CanvasItemComposition) -> None:
""" Replace the given canvas item with the container and move the canvas item into the container. """
canvas_origin = canvas_item.canvas_origin
canvas_size = canvas_item.canvas_size
index = self.__canvas_items.index(canvas_item)
# remove the existing canvas item, but without closing it.
self.layout.remove_canvas_item(canvas_item)
canvas_item.container = None
self._remove_canvas_item_direct(canvas_item)
# insert the canvas item container
# self.insert_canvas_item(index, canvas_item_container) # this would adjust splitters. don't do it.
self._insert_canvas_item_direct(index, canvas_item_container)
# insert the canvas item into the container
canvas_item_container.add_canvas_item(canvas_item)
# perform the layout using existing origin/size.
if canvas_origin is not None and canvas_size is not None:
canvas_item_container._set_canvas_origin(canvas_origin)
canvas_item_container._set_canvas_size(canvas_size)
canvas_item._set_canvas_origin(Geometry.IntPoint())
self.refresh_layout()
def unwrap_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
""" Replace the canvas item container with the canvas item. """
container = canvas_item.container
assert container
assert len(container.canvas_items) == 1
assert container.canvas_items[0] == canvas_item
enclosing_container = container.container
assert enclosing_container
index = enclosing_container.canvas_items.index(container)
# remove the existing canvas item from the container, but without closing it.
container.layout.remove_canvas_item(canvas_item)
canvas_item.container = None
container._remove_canvas_item_direct(canvas_item)
# remove container from enclosing container
enclosing_container._remove_canvas_item_direct(container)
# insert canvas item into the enclosing container
# enclosing_container.insert_canvas_item(index, canvas_item) # this would adjust splitters. don't do it.
enclosing_container._insert_canvas_item_direct(index, canvas_item)
# update the layout if origin and size already known
self.refresh_layout()
def _repaint_template(self, drawing_context: DrawingContext.DrawingContext, immediate: bool) -> None:
if not self.__layout_render_trait._try_repaint_template(drawing_context, immediate):
self._repaint_children(drawing_context, immediate=immediate)
self._repaint(drawing_context)
def _repaint_if_needed(self, drawing_context: DrawingContext.DrawingContext, *, immediate: bool = False) -> None:
if self.__layout_render_trait:
if not self.__layout_render_trait._try_repaint_if_needed(drawing_context, immediate=immediate):
super()._repaint_if_needed(drawing_context, immediate=immediate)
def repaint_immediate(self, drawing_context: DrawingContext.DrawingContext, canvas_size: Geometry.IntSize) -> None:
if not self.__layout_render_trait._try_repaint_immediate(drawing_context, canvas_size):
super().repaint_immediate(drawing_context, canvas_size)
def _repaint_children(self, drawing_context: DrawingContext.DrawingContext, *, immediate: bool = False) -> None:
"""Paint items from back to front."""
self._draw_background(drawing_context)
for canvas_item in self.visible_canvas_items:
if canvas_item._has_layout:
with drawing_context.saver():
canvas_item_rect = canvas_item.canvas_rect
if canvas_item_rect:
drawing_context.translate(canvas_item_rect.left, canvas_item_rect.top)
canvas_item._repaint_if_needed(drawing_context, immediate=immediate)
self._draw_border(drawing_context)
def _canvas_items_at_point(self, visible_canvas_items: typing.Sequence[AbstractCanvasItem], x: int, y: int) -> typing.List[AbstractCanvasItem]:
"""Returns list of canvas items under x, y, ordered from back to front."""
canvas_items: typing.List[AbstractCanvasItem] = []
point = Geometry.IntPoint(x=x, y=y)
for canvas_item in reversed(visible_canvas_items):
# the visible items can be changed while this method is running from the layout thread.
# and yet we don't want to allow this to occur; maybe the layout thread should have some
# sort of pending system, where once methods like this exit, they're allowed to update...?
canvas_item_rect = canvas_item.canvas_rect
if canvas_item_rect and canvas_item_rect.contains_point(point):
canvas_origin = typing.cast(Geometry.IntPoint, canvas_item.canvas_origin)
canvas_point = point - canvas_origin
canvas_items.extend(canvas_item.canvas_items_at_point(canvas_point.x, canvas_point.y))
canvas_items.extend(super().canvas_items_at_point(x, y))
return canvas_items
def canvas_items_at_point(self, x: int, y: int) -> typing.List[AbstractCanvasItem]:
"""Returns list of canvas items under x, y, ordered from back to front."""
return self._canvas_items_at_point(self.visible_canvas_items, x, y)
def get_root_opaque_canvas_items(self) -> typing.List[AbstractCanvasItem]:
if self.is_root_opaque:
return [self]
canvas_items = list()
for canvas_item in self.canvas_items:
canvas_items.extend(canvas_item.get_root_opaque_canvas_items())
return canvas_items
def pan_gesture(self, dx: int, dy: int) -> bool:
for canvas_item in reversed(self.visible_canvas_items):
if canvas_item.pan_gesture(dx, dy):
return True
return False
_threaded_rendering_enabled = True
class LayerLayoutRenderTrait(CompositionLayoutRenderTrait):
_layer_id = 0
_executor = concurrent.futures.ThreadPoolExecutor()
def __init__(self, canvas_item_composition: CanvasItemComposition):
super().__init__(canvas_item_composition)
LayerLayoutRenderTrait._layer_id += 1
self.__layer_id = LayerLayoutRenderTrait._layer_id
self.__layer_lock = threading.RLock()
self.__layer_drawing_context: typing.Optional[DrawingContext.DrawingContext] = None
self.__layer_seed = 0
self.__executing = False
self.__cancel = False
self.__needs_layout = False
self.__needs_repaint = False
self.__prepare_canvas_items: typing.List[AbstractCanvasItem] = list()
self._layer_thread_suppress = not _threaded_rendering_enabled # for testing
self.__layer_thread_condition = threading.Condition()
# Python 3.9+: Optional[concurrent.futures.Future[Any]]
self.__repaint_one_future: typing.Optional[typing.Any] = None
def close(self) -> None:
self._sync_repaint()
super().close()
def _stop_render_behavior(self) -> None:
self.__cancel = True
self._sync_repaint()
self.__layer_drawing_context = None
@property
def _needs_layout_for_testing(self) -> bool:
return self.__needs_layout
@property
def is_layer_container(self) -> bool:
return True
def register_prepare_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
assert canvas_item not in self.__prepare_canvas_items
self.__prepare_canvas_items.append(canvas_item)
def unregister_prepare_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
assert canvas_item in self.__prepare_canvas_items
self.__prepare_canvas_items.remove(canvas_item)
def _container_layout_changed(self) -> None:
# the section drawing code has no layout information; so it's possible for the sections to
# overlap, particularly during resizing, resulting in one layer drawing, only to be overwritten
# by an older layer whose size hasn't been updated. this method is called quickly when the
# enclosing container changes layout and helps ensure that all layers in the container are drawn
# with the correct size.
if self.__layer_drawing_context:
self._canvas_item_composition._repaint_finished(self.__layer_drawing_context)
def _try_update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint], canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> bool:
# layout self, but not the children. layout for children goes to thread.
self._canvas_item_composition._update_self_layout(canvas_origin, canvas_size)
self.__trigger_layout()
return True
def _try_needs_layout(self, canvas_item: AbstractCanvasItem) -> bool:
self.__trigger_layout()
return True
def _sync_repaint(self) -> None:
done_event = threading.Event()
with self.__layer_thread_condition:
if self.__repaint_one_future:
# Python 3.9: Optional[concurrent.futures.Future[Any]]
def repaint_done(future: typing.Any) -> None:
done_event.set()
self.__repaint_one_future.add_done_callback(repaint_done)
else:
done_event.set()
done_event.wait()
# Python 3.9: Optional[concurrent.futures.Future[Any]]
def __repaint_done(self, future: typing.Any) -> None:
with self.__layer_thread_condition:
self.__repaint_one_future = None
if self.__needs_layout or self.__needs_repaint:
self.__queue_repaint()
def __queue_repaint(self) -> None:
with self.__layer_thread_condition:
if not self.__cancel and not self.__repaint_one_future:
self.__repaint_one_future = LayerLayoutRenderTrait._executor.submit(self.__repaint_layer)
self.__repaint_one_future.add_done_callback(self.__repaint_done)
def _try_updated(self) -> bool:
with self.__layer_thread_condition:
self.__needs_repaint = True
if not self._layer_thread_suppress:
self.__queue_repaint()
# normally, this method would mark a pending update and forward the update to the container;
# however with the layer, since drawing occurs on a thread, this must occur after the thread
# is finished. if the thread is suppressed (typically during testing), use the regular flow.
if self._layer_thread_suppress:
# pass through updates if the thread is suppressed, so that updates actually occur.
return False
return True
def _try_repaint_template(self, drawing_context: DrawingContext.DrawingContext, immediate: bool) -> bool:
if immediate:
canvas_size = self._canvas_item_composition.canvas_size
if canvas_size:
self._canvas_item_composition.repaint_immediate(drawing_context, canvas_size)
else:
with self.__layer_lock:
layer_drawing_context = self.__layer_drawing_context
layer_seed = self.__layer_seed
canvas_size = self._canvas_item_composition.canvas_size
if canvas_size:
drawing_context.begin_layer(self.__layer_id, layer_seed, 0, 0, *tuple(canvas_size))
if layer_drawing_context:
drawing_context.add(layer_drawing_context)
drawing_context.end_layer(self.__layer_id, layer_seed, 0, 0, *tuple(canvas_size))
return True
def _try_repaint_if_needed(self, drawing_context: DrawingContext.DrawingContext, *, immediate: bool = False) -> bool:
# If the render behavior is a layer, it will have its own cached drawing context. Use it.
self._canvas_item_composition._repaint_template(drawing_context, immediate)
return True
def layout_immediate(self, canvas_size: Geometry.IntSize, force: bool = True) -> None:
# used for testing
orphan = len(self.__prepare_canvas_items) == 0
if orphan:
self._canvas_item_composition._inserted(None)
if force or self.__needs_layout:
self.__needs_layout = False
layer_thread_suppress, self._layer_thread_suppress = self._layer_thread_suppress, True
for canvas_item in copy.copy(self.__prepare_canvas_items):
canvas_item.prepare_render()
self._canvas_item_composition._prepare_render()
self._canvas_item_composition._update_self_layout(Geometry.IntPoint(), canvas_size, immediate=True)
self._canvas_item_composition._update_child_layouts(canvas_size, immediate=True)
self._layer_thread_suppress = layer_thread_suppress
if orphan:
self._canvas_item_composition._removed(None)
def _try_repaint_immediate(self, drawing_context: DrawingContext.DrawingContext, canvas_size: Geometry.IntSize) -> bool:
orphan = len(self.__prepare_canvas_items) == 0
if orphan:
self._canvas_item_composition._inserted(None)
layer_thread_suppress, self._layer_thread_suppress = self._layer_thread_suppress, True
for canvas_item in copy.copy(self.__prepare_canvas_items):
canvas_item.prepare_render()
self._canvas_item_composition._update_self_layout(Geometry.IntPoint(), canvas_size, immediate=True)
self._canvas_item_composition._update_child_layouts(canvas_size, immediate=True)
self._canvas_item_composition._repaint_children(drawing_context, immediate=True)
self._canvas_item_composition._repaint(drawing_context)
self._layer_thread_suppress = layer_thread_suppress
if orphan:
self._canvas_item_composition._removed(None)
return True
def __repaint_layer(self) -> None:
with self.__layer_thread_condition:
needs_layout = self.__needs_layout
needs_repaint = self.__needs_repaint
self.__needs_layout = False
self.__needs_repaint = False
if not self.__cancel and (needs_repaint or needs_layout):
if self._canvas_item_composition._has_layout:
try:
for canvas_item in copy.copy(self.__prepare_canvas_items):
canvas_item.prepare_render()
self._canvas_item_composition._prepare_render()
# layout or repaint that occurs during prepare render should be handled
# but not trigger another repaint after this one.
with self.__layer_thread_condition:
needs_layout = needs_layout or self.__needs_layout
self.__needs_layout = False
self.__needs_repaint = False
if needs_layout:
assert self._canvas_item_composition.canvas_size is not None
self._canvas_item_composition._update_child_layouts(
self._canvas_item_composition.canvas_size)
drawing_context = DrawingContext.DrawingContext()
self._canvas_item_composition._repaint_children(drawing_context)
self._canvas_item_composition._repaint(drawing_context)
with self.__layer_lock:
self.__layer_seed += 1
self.__layer_drawing_context = drawing_context
if not self.__cancel:
self._canvas_item_composition._repaint_finished(self.__layer_drawing_context)
except Exception as e:
import traceback
logging.debug("CanvasItem Render Error: %s", e)
traceback.print_exc()
traceback.print_stack()
def __trigger_layout(self) -> None:
with self.__layer_thread_condition:
self.__needs_layout = True
if not self._layer_thread_suppress:
self.__queue_repaint()
class LayerCanvasItem(CanvasItemComposition):
"""A composite canvas item that does layout and repainting in a thread."""
def __init__(self) -> None:
super().__init__(LayerLayoutRenderTrait(self))
def _container_layout_changed(self) -> None:
# override. a layer needs to redraw in the user interface.
self._redraw_container()
class ScrollAreaCanvasItem(AbstractCanvasItem):
"""
A scroll area canvas item with content.
The content property holds the content of the scroll area.
This scroll area controls the canvas_origin of the content, but not the
size. When the scroll area is resized, update_layout will be called on
the content, during which the content is free to adjust its canvas size.
When the call to update_layout returns, this scroll area will adjust
the canvas origin separately.
The content canvas_rect property describes the position at which the content
is drawn within the scroll area. This means that content items must
already have a layout when they're added to this scroll area.
The content canvas_origin will typically be negative if the content
canvas_size is larger than the scroll area canvas size.
The content canvas_origin will typically be positive (or zero) if the
content canvas_size is smaller than the scroll area canvas size.
"""
def __init__(self, content: typing.Optional[AbstractCanvasItem] = None) -> None:
super().__init__()
self.__content: typing.Optional[AbstractCanvasItem] = None
if content:
self.content = content
self.auto_resize_contents = False
self._constrain_position = True
self.content_updated_event = Event.Event()
def close(self) -> None:
content = self.__content
self.__content = None
if content:
content.close()
super().close()
@property
def content(self) -> typing.Optional[AbstractCanvasItem]:
""" Return the content of the scroll area. """
return self.__content
@content.setter
def content(self, content: AbstractCanvasItem) -> None:
""" Set the content of the scroll area. """
# remove the old content
if self.__content:
self.__content.container = None
self.__content.on_layout_updated = None
# add the new content
self.__content = content
content.container = typing.cast(CanvasItemComposition, self) # argh
content.on_layout_updated = self.__content_layout_updated
self.update()
@property
def visible_rect(self) -> Geometry.IntRect:
content = self.__content
if content:
content_canvas_origin = content.canvas_origin
canvas_size = self.canvas_size
if content_canvas_origin and canvas_size:
return Geometry.IntRect(origin=-content_canvas_origin, size=canvas_size)
return Geometry.IntRect(origin=Geometry.IntPoint(), size=Geometry.IntSize())
def update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
"""Override from abstract canvas item.
After setting the canvas origin and canvas size, like the abstract canvas item,
update the layout of the content if it has no assigned layout yet. Whether it has
an assigned layout is determined by whether the canvas origin and canvas size are
None or not.
"""
self._set_canvas_origin(canvas_origin)
self._set_canvas_size(canvas_size)
content = self.__content
if content:
canvas_origin = content.canvas_origin
canvas_size = content.canvas_size
if canvas_origin is None or canvas_size is None:
# if content has no assigned layout, update its layout relative to this object.
# it will get a 0,0 origin but the same size as this scroll area.
content.update_layout(Geometry.IntPoint(), self.canvas_size, immediate=immediate)
elif self.auto_resize_contents:
# if auto-resize is enabled, the content keeps its existing origin and is
# resized to match the size of this scroll area.
content.update_layout(canvas_origin, self.canvas_size, immediate=immediate)
# validate the content origin. this is used for the scroll bar canvas item to ensure that the content is
# consistent with the scroll bar.
self.__content_layout_updated(canvas_origin, canvas_size, immediate=immediate)
# NOTE: super is never called for this implementation
# call on_layout_updated, just like the super implementation.
if callable(self.on_layout_updated):
self.on_layout_updated(self.canvas_origin, self.canvas_size, immediate)
self._has_layout = self.canvas_origin is not None and self.canvas_size is not None
def __content_layout_updated(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], immediate: bool = False) -> None:
# whenever the content layout changes, this method gets called.
# adjust the canvas_origin of the content if necessary. pass the canvas_origin, canvas_size of the content.
# this method is used in the scroll bar canvas item to ensure that the content stays within view and
# consistent with the scroll bar when the scroll area gets a new layout.
if self._constrain_position and canvas_origin is not None and canvas_size is not None and self.canvas_origin is not None and self.canvas_size is not None:
# ensure that the content matches the scroll position.
visible_size = self.canvas_size
content = self.__content
if content:
content_size = content.canvas_size
if content_size:
scroll_range_h = max(content_size.width - visible_size.width, 0)
scroll_range_v = max(content_size.height - visible_size.height, 0)
canvas_origin = Geometry.IntPoint(x=canvas_origin.x, y=max(min(canvas_origin.y, 0), -scroll_range_v))
canvas_origin = Geometry.IntPoint(x=max(min(canvas_origin.x, 0), -scroll_range_h), y=canvas_origin.y)
content._set_canvas_origin(canvas_origin)
self.content_updated_event.fire()
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
super()._repaint(drawing_context)
with drawing_context.saver():
canvas_origin = self.canvas_origin
canvas_size = self.canvas_size
if canvas_origin and canvas_size:
drawing_context.clip_rect(canvas_origin.x, canvas_origin.y, canvas_size.width, canvas_size.height)
content = self.__content
if content:
content_canvas_origin = content.canvas_origin
if content_canvas_origin:
drawing_context.translate(content_canvas_origin.x, content_canvas_origin.y)
visible_rect = Geometry.IntRect(origin=-content_canvas_origin, size=canvas_size)
content._repaint_visible(drawing_context, visible_rect)
def canvas_items_at_point(self, x: int, y: int) -> typing.List[AbstractCanvasItem]:
canvas_items: typing.List[AbstractCanvasItem] = []
point = Geometry.IntPoint(x=x, y=y)
content = self.__content
if content and content.canvas_rect and content.canvas_rect.contains_point(point):
content_canvas_origin = content.canvas_origin
if content_canvas_origin:
canvas_point = point - content_canvas_origin
canvas_items.extend(content.canvas_items_at_point(canvas_point.x, canvas_point.y))
canvas_items.extend(super().canvas_items_at_point(x, y))
return canvas_items
def wheel_changed(self, x: int, y: int, dx: int, dy: int, is_horizontal: bool) -> bool:
canvas_origin = self.canvas_origin
if canvas_origin:
x -= canvas_origin.x
y -= canvas_origin.y
content = self.__content
if content:
return content.wheel_changed(x, y, dx, dy, is_horizontal)
return False
def pan_gesture(self, dx: int, dy: int) -> bool:
content = self.__content
if content:
return content.pan_gesture(dx, dy)
return False
class SplitterCanvasItem(CanvasItemComposition):
def __init__(self, orientation: typing.Optional[str] = None) -> None:
super().__init__()
self.orientation = orientation if orientation else "vertical"
self.wants_mouse_events = True
self.__lock = threading.RLock()
self.__sizings: typing.List[Sizing] = []
self.__shadow_canvas_items: typing.List[AbstractCanvasItem] = []
self.__actual_sizings: typing.List[Sizing] = []
self.__tracking = False
self.on_splits_will_change: typing.Optional[typing.Callable[[], None]] = None
self.on_splits_changed: typing.Optional[typing.Callable[[], None]] = None
@classmethod
def __calculate_layout(cls, orientation: str, canvas_size: Geometry.IntSize, sizings: typing.Sequence[Sizing]) -> ConstraintResultType:
if orientation == "horizontal":
content_origin = 0
content_size = canvas_size.height
constraints = [sizing.get_height_constraint(content_size) for sizing in sizings]
else:
content_origin = 0
content_size = canvas_size.width
constraints = [sizing.get_width_constraint(content_size) for sizing in sizings]
return constraint_solve(content_origin, content_size, constraints)
@property
def splits(self) -> typing.Sequence[float]:
""" Return the canvas item splits, which represent the relative size of each child. """
if self.canvas_size:
canvas_size = self.canvas_size
else:
canvas_size = Geometry.IntSize(w=640, h=480)
if self.orientation == "horizontal":
content_size = canvas_size.height
else:
content_size = canvas_size.width
with self.__lock:
sizings = copy.deepcopy(self.__sizings)
_, sizes = SplitterCanvasItem.__calculate_layout(self.orientation, canvas_size, sizings)
return [float(size) / content_size for size in sizes]
@splits.setter
def splits(self, splits: typing.Sequence[float]) -> None:
with self.__lock:
sizings = copy.deepcopy(self.__sizings)
assert len(splits) == len(sizings)
for split, sizing in zip(splits, sizings):
if self.orientation == "horizontal":
sizing._preferred_height = split
else:
sizing._preferred_width = split
with self.__lock:
self.__sizings = sizings
self.refresh_layout()
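# Usage sketch (hypothetical; `left_item` and `right_item` are assumed canvas
# items): a two-way splitter divided 30%/70%.
#
#   splitter = SplitterCanvasItem()  # orientation defaults to "vertical"
#   splitter.insert_canvas_item(0, left_item)
#   splitter.insert_canvas_item(1, right_item)
#   splitter.splits = [0.3, 0.7]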
def _insert_canvas_item_direct(self, before_index: int, canvas_item: AbstractCanvasItem,
pos: typing.Optional[Geometry.IntPoint] = None) -> None:
super().insert_canvas_item(before_index, canvas_item)
def insert_canvas_item(self, before_index: int, canvas_item: AbstractCanvasItem,
sizing: typing.Optional[typing.Any] = None) -> AbstractCanvasItem:
sizing = copy.copy(sizing) if sizing else Sizing()
if self.orientation == "horizontal":
sizing._preferred_height = None
if sizing._minimum_height is None:
sizing._minimum_height = 0.1
else:
sizing._preferred_width = None
if sizing._minimum_width is None:
sizing._minimum_width = 0.1
with self.__lock:
self.__sizings.insert(before_index, sizing)
return super().insert_canvas_item(before_index, canvas_item)
def remove_canvas_item(self, canvas_item: AbstractCanvasItem) -> None:
with self.__lock:
del self.__sizings[self.canvas_items.index(canvas_item)]
super().remove_canvas_item(canvas_item)
def update_layout(self, canvas_origin: typing.Optional[Geometry.IntPoint],
canvas_size: typing.Optional[Geometry.IntSize], *, immediate: bool = False) -> None:
"""
wrap the updates in container layout changes to avoid a waterfall of
change messages. this is specific to splitter for now, but it's a general
behavior that should eventually wrap all update layout calls.
canvas items that cache their drawing bitmap (layers) need to know as quickly as possible
that their layout has changed to a new size to avoid partially updated situations where
their bitmaps overlap and a newer bitmap gets overwritten by an older overlapping bitmap,
resulting in drawing anomaly. request a repaint for each canvas item at its new size here.
this can also be tested by doing a 1x2 split; then 5x4 on the bottom; adding some images
to the bottom; resizing the 1x2 split; then undo/redo. it helps to run on a slower machine.
"""
self._begin_container_layout_changed()
try:
with self.__lock:
canvas_items = copy.copy(self.canvas_items)
sizings = copy.deepcopy(self.__sizings)
assert len(canvas_items) == len(sizings)
if canvas_size:
origins, sizes = SplitterCanvasItem.__calculate_layout(self.orientation, canvas_size, sizings)
if self.orientation == "horizontal":
for canvas_item, (origin, size) in zip(canvas_items, zip(origins, sizes)):
canvas_item_origin = Geometry.IntPoint(y=origin, x=0) # origin within the splitter
canvas_item_size = Geometry.IntSize(height=size, width=canvas_size.width)
canvas_item.update_layout(canvas_item_origin, canvas_item_size, immediate=immediate)
assert canvas_item._has_layout
for sizing, size in zip(sizings, sizes):
sizing._preferred_height = size
else:
for canvas_item, (origin, size) in zip(canvas_items, zip(origins, sizes)):
canvas_item_origin = Geometry.IntPoint(y=0, x=origin) # origin within the splitter
canvas_item_size = Geometry.IntSize(height=canvas_size.height, width=size)
canvas_item.update_layout(canvas_item_origin, canvas_item_size, immediate=immediate)
assert canvas_item._has_layout
for sizing, size in zip(sizings, sizes):
sizing._preferred_width = size
with self.__lock:
self.__actual_sizings = sizings
self.__shadow_canvas_items = canvas_items
# instead of calling the canvas item composition, call the one for abstract canvas item.
self._update_self_layout(canvas_origin, canvas_size, immediate=immediate)
self._has_layout = self.canvas_origin is not None and self.canvas_size is not None
# the next update is required because the children will trigger updates; but the updates
# might not go all the way up the chain if this splitter has no layout. by now, it will
# have a layout, so force an update.
self.update()
finally:
self._finish_container_layout_changed()
def canvas_items_at_point(self, x: int, y: int) -> typing.List[AbstractCanvasItem]:
assert self.canvas_origin is not None and self.canvas_size is not None
with self.__lock:
canvas_items = copy.copy(self.__shadow_canvas_items)
sizings = copy.deepcopy(self.__actual_sizings)
origins, _ = SplitterCanvasItem.__calculate_layout(self.orientation, self.canvas_size, sizings)
if self.orientation == "horizontal":
for origin in origins[1:]: # don't check the '0' origin
if abs(y - origin) < 6:
return [self]
else:
for origin in origins[1:]: # don't check the '0' origin
if abs(x - origin) < 6:
return [self]
return self._canvas_items_at_point(canvas_items, x, y)
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
super()._repaint(drawing_context)
assert self.canvas_origin is not None and self.canvas_size is not None
with self.__lock:
sizings = copy.deepcopy(self.__actual_sizings)
origins, _ = SplitterCanvasItem.__calculate_layout(self.orientation, self.canvas_size, sizings)
with drawing_context.saver():
drawing_context.begin_path()
for origin in origins[1:]: # don't paint the '0' origin
canvas_bounds = self.canvas_bounds
if canvas_bounds:
if self.orientation == "horizontal":
drawing_context.move_to(canvas_bounds.left, origin)
drawing_context.line_to(canvas_bounds.right, origin)
else:
drawing_context.move_to(origin, canvas_bounds.top)
drawing_context.line_to(origin, canvas_bounds.bottom)
drawing_context.line_width = 0.5
drawing_context.stroke_style = "#666"
drawing_context.stroke()
def __hit_test(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> str:
with self.__lock:
sizings = copy.deepcopy(self.__actual_sizings)
canvas_size = self.canvas_size
if canvas_size:
origins, _ = SplitterCanvasItem.__calculate_layout(self.orientation, canvas_size, sizings)
if self.orientation == "horizontal":
for index, origin in enumerate(origins[1:]): # don't check the '0' origin
if abs(y - origin) < 6:
return "horizontal"
else:
for index, origin in enumerate(origins[1:]): # don't check the '0' origin
if abs(x - origin) < 6:
return "vertical"
return "horizontal"
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
assert self.canvas_origin is not None and self.canvas_size is not None
with self.__lock:
sizings = copy.deepcopy(self.__actual_sizings)
origins, _ = SplitterCanvasItem.__calculate_layout(self.orientation, self.canvas_size, sizings)
if self.orientation == "horizontal":
for index, origin in enumerate(origins[1:]): # don't check the '0' origin
if abs(y - origin) < 6:
self.__tracking = True
self.__tracking_start_pos = Geometry.IntPoint(y=y, x=x)
self.__tracking_start_adjust = y - origin
self.__tracking_start_index = index
self.__tracking_start_preferred = int(sizings[index].preferred_height or 0)
self.__tracking_start_preferred_next = int(sizings[index + 1].preferred_height or 0)
if callable(self.on_splits_will_change):
self.on_splits_will_change()
return True
else:
for index, origin in enumerate(origins[1:]): # don't check the '0' origin
if abs(x - origin) < 6:
self.__tracking = True
self.__tracking_start_pos = Geometry.IntPoint(y=y, x=x)
self.__tracking_start_adjust = x - origin
self.__tracking_start_index = index
self.__tracking_start_preferred = int(sizings[index].preferred_width or 0)
self.__tracking_start_preferred_next = int(sizings[index + 1].preferred_width or 0)
if callable(self.on_splits_will_change):
self.on_splits_will_change()
return True
return super().mouse_pressed(x, y, modifiers)
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__tracking = False
if callable(self.on_splits_changed):
self.on_splits_changed()
return True
def mouse_position_changed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.__tracking:
with self.__lock:
old_sizings = copy.deepcopy(self.__sizings)
temp_sizings = copy.deepcopy(self.__actual_sizings)
tracking_start_preferred_next = self.__tracking_start_preferred_next or 0
tracking_start_preferred = self.__tracking_start_preferred or 0
snaps: typing.List[int] = list()
canvas_bounds = self.canvas_bounds
if canvas_bounds:
if self.orientation == "horizontal":
offset = y - self.__tracking_start_pos.y
if not modifiers.shift:
snaps.append((tracking_start_preferred_next - tracking_start_preferred) // 2)
snaps.append(canvas_bounds.height // 3 - self.__tracking_start_pos.y - self.__tracking_start_adjust)
snaps.append(2 * canvas_bounds.height // 3 - self.__tracking_start_pos.y - self.__tracking_start_adjust)
for snap in snaps:
if abs(offset - snap) < 12:
offset = snap
break
temp_sizings[self.__tracking_start_index]._preferred_height = tracking_start_preferred + offset
temp_sizings[self.__tracking_start_index + 1]._preferred_height = tracking_start_preferred_next - offset
else:
offset = x - self.__tracking_start_pos.x
if not modifiers.shift:
snaps.append((tracking_start_preferred_next - tracking_start_preferred) // 2)
snaps.append(canvas_bounds.width // 3 - self.__tracking_start_pos.x - self.__tracking_start_adjust)
snaps.append(2 * canvas_bounds.width // 3 - self.__tracking_start_pos.x - self.__tracking_start_adjust)
for snap in snaps:
if abs(offset - snap) < 12:
offset = snap
break
temp_sizings[self.__tracking_start_index]._preferred_width = tracking_start_preferred + offset
temp_sizings[self.__tracking_start_index + 1]._preferred_width = tracking_start_preferred_next - offset
# fix the size of all children except for the two in question
for index, sizing in enumerate(temp_sizings):
if index != self.__tracking_start_index and index != self.__tracking_start_index + 1:
if self.orientation == "horizontal":
sizing._set_fixed_height(sizing.preferred_height)
else:
sizing._set_fixed_width(sizing.preferred_width)
# update the layout
with self.__lock:
self.__sizings = temp_sizings
self.refresh_layout()
self.update_layout(self.canvas_origin, self.canvas_size)
# restore the freedom of the others
new_sizings = list()
for index, (old_sizing, temp_sizing) in enumerate(zip(old_sizings, temp_sizings)):
sizing = Sizing()
sizing._copy_from(old_sizing)
if index == self.__tracking_start_index or index == self.__tracking_start_index + 1:
if self.orientation == "horizontal":
sizing._preferred_height = temp_sizing.preferred_height
else:
sizing._preferred_width = temp_sizing.preferred_width
new_sizings.append(sizing)
with self.__lock:
self.__sizings = new_sizings
# update once more with restored sizings. addresses issue nionswift/605
self.refresh_layout()
return True
else:
control = self.__hit_test(x, y, modifiers)
if control == "horizontal":
self.cursor_shape = "split_vertical"
elif control == "vertical":
self.cursor_shape = "split_horizontal"
else:
self.cursor_shape = None
return super().mouse_position_changed(x, y, modifiers)
class SliderCanvasItem(AbstractCanvasItem, Observable.Observable):
"""Slider."""
thumb_width = 8
thumb_height = 16
bar_offset = 1
bar_height = 4
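# Usage sketch (hypothetical): the slider is Observable and notifies "value",
# so changes can be observed via property_changed_event (assumed to be
# provided by the Observable base class).
#
#   slider = SliderCanvasItem()
#   listener = slider.property_changed_event.listen(
#       lambda name: print(slider.value) if name == "value" else None)
#   slider.value = 0.5  # clamped to [0.0, 1.0]; fires the listener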
def __init__(self) -> None:
super().__init__()
self.wants_mouse_events = True
self.__tracking = False
self.__tracking_start = Geometry.IntPoint()
self.__tracking_value = 0.0
self.update_sizing(self.sizing.with_fixed_height(20))
self.value_stream = Stream.ValueStream[float]().add_ref()
self.value_change_stream = Stream.ValueChangeStream(self.value_stream).add_ref()
def close(self) -> None:
self.value_change_stream.remove_ref()
self.value_change_stream = typing.cast(typing.Any, None)
self.value_stream.remove_ref()
self.value_stream = typing.cast(typing.Any, None)
super().close()
@property
def value(self) -> float:
return self.value_stream.value or 0.0
@value.setter
def value(self, value: float) -> None:
if self.value != value:
self.value_stream.value = max(0.0, min(1.0, value))
self.update()
self.notify_property_changed("value")
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
thumb_rect = self.__get_thumb_rect()
bar_rect = self.__get_bar_rect()
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.rect(bar_rect.left, bar_rect.top, bar_rect.width, bar_rect.height)
drawing_context.fill_style = "#CCC"
drawing_context.fill()
drawing_context.stroke_style = "#888"
drawing_context.stroke()
drawing_context.begin_path()
drawing_context.rect(thumb_rect.left, thumb_rect.top, thumb_rect.width, thumb_rect.height)
drawing_context.fill_style = "#007AD8"
drawing_context.fill()
def __get_bar_rect(self) -> Geometry.FloatRect:
canvas_size = self.canvas_size
if canvas_size:
thumb_width = self.thumb_width
bar_offset = self.bar_offset
bar_width = canvas_size.width - thumb_width - bar_offset * 2
bar_height = self.bar_height
return Geometry.FloatRect.from_tlhw(canvas_size.height / 2 - bar_height / 2, bar_offset + thumb_width / 2, bar_height, bar_width)
return Geometry.FloatRect.empty_rect()
def __get_thumb_rect(self) -> Geometry.IntRect:
canvas_size = self.canvas_size
if canvas_size:
thumb_width = self.thumb_width
thumb_height = self.thumb_height
bar_offset = self.bar_offset
bar_width = canvas_size.width - thumb_width - bar_offset * 2
# use tracking value to avoid thumb jumping around while dragging, which occurs when value gets integerized and set.
value = self.value if not self.__tracking else self.__tracking_value
return Geometry.FloatRect.from_tlhw(canvas_size.height / 2 - thumb_height / 2, value * bar_width + bar_offset, thumb_height, thumb_width).to_int_rect()
return Geometry.IntRect.empty_rect()
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
thumb_rect = self.__get_thumb_rect()
pos = Geometry.IntPoint(x=x, y=y)
if thumb_rect.inset(-2, -2).contains_point(pos):
self.__tracking = True
self.__tracking_start = pos
self.__tracking_value = self.value
self.value_change_stream.begin()
self.update()
return True
elif x < thumb_rect.left:
self.__adjust_thumb(-1)
return True
elif x > thumb_rect.right:
self.__adjust_thumb(1)
return True
return super().mouse_pressed(x, y, modifiers)
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.__tracking:
self.__tracking = False
self.value_change_stream.end()
self.update()
return True
return super().mouse_released(x, y, modifiers)
def mouse_position_changed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.__tracking:
pos = Geometry.FloatPoint(x=x, y=y)
bar_rect = self.__get_bar_rect()
value = (pos.x - bar_rect.left) / bar_rect.width
self.__tracking_value = max(0.0, min(1.0, value))
self.value = value
return super().mouse_position_changed(x, y, modifiers)
def __adjust_thumb(self, amount: float) -> None:
self.value_change_stream.begin()
self.value = max(0.0, min(1.0, self.value + amount * 0.1))
self.value_change_stream.end()
PositionLength = collections.namedtuple("PositionLength", ["position", "length"])
class ScrollBarCanvasItem(AbstractCanvasItem):
""" A scroll bar for a scroll area. """
def __init__(self, scroll_area_canvas_item: ScrollAreaCanvasItem, orientation: typing.Optional[Orientation] = None) -> None:
super().__init__()
orientation = orientation if orientation is not None else Orientation.Vertical
self.wants_mouse_events = True
self.__scroll_area_canvas_item = scroll_area_canvas_item
self.__scroll_area_canvas_item_content_updated_listener = self.__scroll_area_canvas_item.content_updated_event.listen(self.update)
self.__tracking = False
self.__orientation = orientation
if self.__orientation == Orientation.Vertical:
self.update_sizing(self.sizing.with_fixed_width(16))
else:
self.update_sizing(self.sizing.with_fixed_height(16))
def close(self) -> None:
self.__scroll_area_canvas_item_content_updated_listener.close()
self.__scroll_area_canvas_item_content_updated_listener = typing.cast(typing.Any, None)
super().close()
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
# canvas size, thumb rect
canvas_size = self.canvas_size
thumb_rect = self.thumb_rect
if canvas_size:
# draw it
with drawing_context.saver():
# draw the border of the scroll bar
drawing_context.begin_path()
drawing_context.rect(0, 0, canvas_size.width, canvas_size.height)
if self.__orientation == Orientation.Vertical:
gradient = drawing_context.create_linear_gradient(canvas_size.width, canvas_size.height, 0, 0, canvas_size.width, 0)
else:
gradient = drawing_context.create_linear_gradient(canvas_size.width, canvas_size.height, 0, 0, 0, canvas_size.height)
gradient.add_color_stop(0.0, "#F2F2F2")
gradient.add_color_stop(0.35, "#FDFDFD")
gradient.add_color_stop(0.65, "#FDFDFD")
gradient.add_color_stop(1.0, "#F2F2F2")
drawing_context.fill_style = gradient
drawing_context.fill()
# draw the thumb, if any
if thumb_rect.height > 0 and thumb_rect.width > 0:
with drawing_context.saver():
drawing_context.begin_path()
if self.__orientation == Orientation.Vertical:
drawing_context.move_to(thumb_rect.width - 8, thumb_rect.top + 6)
drawing_context.line_to(thumb_rect.width - 8, thumb_rect.bottom - 6)
else:
drawing_context.move_to(thumb_rect.left + 6, thumb_rect.height - 8)
drawing_context.line_to(thumb_rect.right - 6, thumb_rect.height - 8)
drawing_context.line_width = 8.0
drawing_context.line_cap = "round"
drawing_context.stroke_style = "#888" if self.__tracking else "#CCC"
drawing_context.stroke()
# draw inside edge
drawing_context.begin_path()
drawing_context.move_to(0, 0)
if self.__orientation == Orientation.Vertical:
drawing_context.line_to(0, canvas_size.height)
else:
drawing_context.line_to(canvas_size.width, 0)
drawing_context.line_width = 0.5
drawing_context.stroke_style = "#E3E3E3"
drawing_context.stroke()
# draw outside
drawing_context.begin_path()
if self.__orientation == Orientation.Vertical:
drawing_context.move_to(canvas_size.width, 0)
else:
drawing_context.move_to(0, canvas_size.height)
drawing_context.line_to(canvas_size.width, canvas_size.height)
drawing_context.line_width = 0.5
drawing_context.stroke_style = "#999999"
drawing_context.stroke()
def get_thumb_position_and_length(self, canvas_length: int, visible_length: int, content_length: int, content_offset: int) -> PositionLength:
"""
Return the thumb position and length as a tuple of ints.
The canvas_length is the size of the canvas of the scroll bar.
The visible_length is the size of the visible area of the scroll area.
The content_length is the size of the content of the scroll area.
The content_offset is the position of the content within the scroll area. It
will always be negative or zero.
"""
# the scroll_range defines the maximum negative value of the content_offset.
scroll_range = max(content_length - visible_length, 0)
# content_offset should be negative, but not more negative than the scroll_range.
content_offset = max(-scroll_range, min(0, content_offset))
# assert content_offset <= 0 and content_offset >= -scroll_range
# the length of the thumb is the visible_length multiplied by the ratio of
# visible_length to the content_length. however, a minimum height is enforced
# so that the user can always grab it. if the thumb is invisible (the content_length
# is less than or equal to the visible_length) then the thumb will have a length of zero.
if content_length > visible_length:
thumb_length = int(canvas_length * (float(visible_length) / content_length))
thumb_length = max(thumb_length, 32)
# the position of the thumb is the content_offset over the content_length multiplied by
# the free range of the thumb which is the canvas_length minus the thumb_length.
thumb_position = int((canvas_length - thumb_length) * (float(-content_offset) / scroll_range))
else:
thumb_length = 0
thumb_position = 0
return PositionLength(thumb_position, thumb_length)
@property
def thumb_rect(self) -> Geometry.IntRect:
# return the thumb rect for the given canvas_size
canvas_size = self.canvas_size
if canvas_size:
index = 0 if self.__orientation == Orientation.Vertical else 1
scroll_area_canvas_size = self.__scroll_area_canvas_item.canvas_size
scroll_area_content = self.__scroll_area_canvas_item.content
if scroll_area_content and scroll_area_canvas_size:
visible_length = scroll_area_canvas_size[index]
scroll_area_rect = scroll_area_content.canvas_rect
if scroll_area_rect:
content_length = scroll_area_rect.size[index]
content_offset = scroll_area_rect.origin[index]
thumb_position, thumb_length = self.get_thumb_position_and_length(canvas_size[index], visible_length, content_length, content_offset)
if self.__orientation == Orientation.Vertical:
thumb_origin = Geometry.IntPoint(x=0, y=thumb_position)
thumb_size = Geometry.IntSize(width=canvas_size.width, height=thumb_length)
else:
thumb_origin = Geometry.IntPoint(x=thumb_position, y=0)
thumb_size = Geometry.IntSize(width=thumb_length, height=canvas_size.height)
return Geometry.IntRect(origin=thumb_origin, size=thumb_size)
return Geometry.IntRect.empty_rect()
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
thumb_rect = self.thumb_rect
pos = Geometry.IntPoint(x=x, y=y)
if thumb_rect.contains_point(pos):
self.__tracking = True
self.__tracking_start = pos
scroll_area_content = self.__scroll_area_canvas_item.content
self.__tracking_content_offset = scroll_area_content.canvas_origin if scroll_area_content else Geometry.IntPoint()
self.update()
return True
elif self.__orientation == Orientation.Vertical and y < thumb_rect.top:
self.__adjust_thumb(-1)
return True
elif self.__orientation == Orientation.Vertical and y > thumb_rect.bottom:
self.__adjust_thumb(1)
return True
elif self.__orientation != Orientation.Vertical and x < thumb_rect.left:
self.__adjust_thumb(-1)
return True
elif self.__orientation != Orientation.Vertical and x > thumb_rect.right:
self.__adjust_thumb(1)
return True
return super().mouse_pressed(x, y, modifiers)
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__tracking = False
self.update()
return super().mouse_released(x, y, modifiers)
def __adjust_thumb(self, amount: float) -> None:
# adjust the position up or down one visible screen worth
index = 0 if self.__orientation == Orientation.Vertical else 1
scroll_area_rect = self.__scroll_area_canvas_item.canvas_rect
if scroll_area_rect:
visible_length = scroll_area_rect.size[index]
content = self.__scroll_area_canvas_item.content
if content:
content_canvas_origin = content.canvas_origin
if content_canvas_origin:
if self.__orientation == Orientation.Vertical:
new_content_offset = Geometry.IntPoint(y=round(content_canvas_origin[0] - visible_length * amount), x=content_canvas_origin[1])
else:
new_content_offset = Geometry.IntPoint(y=content_canvas_origin[0], x=round(content_canvas_origin[1] - visible_length * amount))
content.update_layout(new_content_offset, content.canvas_size)
content.update()
def adjust_content_offset(self, canvas_length: int, visible_length: int, content_length: int, content_offset: int, mouse_offset: int) -> int:
"""
Return the adjusted content offset.
The canvas_length is the size of the canvas of the scroll bar.
The visible_length is the size of the visible area of the scroll area.
The content_length is the size of the content of the scroll area.
The content_offset is the position of the content within the scroll area. It
will always be negative or zero.
The mouse_offset is the offset of the mouse.
"""
scroll_range = max(content_length - visible_length, 0)
_, thumb_length = self.get_thumb_position_and_length(canvas_length, visible_length, content_length, content_offset)
offset_rel = int(scroll_range * float(mouse_offset) / (canvas_length - thumb_length))
return max(min(content_offset - offset_rel, 0), -scroll_range)
def mouse_position_changed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.__tracking:
pos = Geometry.IntPoint(x=x, y=y)
canvas_size = self.canvas_size
scroll_area_canvas_size = self.__scroll_area_canvas_item.canvas_size
if canvas_size and scroll_area_canvas_size:
scroll_area_content = self.__scroll_area_canvas_item.content
if scroll_area_content:
tracking_content_offset = self.__tracking_content_offset
scroll_area_content_canvas_size = scroll_area_content.canvas_size
if tracking_content_offset and scroll_area_content_canvas_size:
if self.__orientation == Orientation.Vertical:
mouse_offset_v = pos.y - self.__tracking_start.y
visible_height = scroll_area_canvas_size[0]
content_height = scroll_area_content_canvas_size[0]
new_content_offset_v = self.adjust_content_offset(canvas_size[0], visible_height, content_height, tracking_content_offset[0], mouse_offset_v)
new_content_offset = Geometry.IntPoint(x=tracking_content_offset[1], y=new_content_offset_v)
else:
mouse_offset_h = pos.x - self.__tracking_start.x
visible_width = scroll_area_canvas_size[1]
content_width = scroll_area_content_canvas_size[1]
new_content_offset_h = self.adjust_content_offset(canvas_size[1], visible_width, content_width, tracking_content_offset[1], mouse_offset_h)
new_content_offset = Geometry.IntPoint(x=new_content_offset_h, y=tracking_content_offset[0])
scroll_area_content._set_canvas_origin(new_content_offset)
scroll_area_content.update()
self.update()
return super().mouse_position_changed(x, y, modifiers)
class RootLayoutRenderTrait(CompositionLayoutRenderTrait):
next_section_id = 0
def __init__(self, canvas_item_composition: CanvasItemComposition) -> None:
super().__init__(canvas_item_composition)
self.__needs_repaint = False
self.__section_ids_lock = threading.RLock()
self.__section_map: typing.Dict[AbstractCanvasItem, int] = dict()
def close(self) -> None:
with self.__section_ids_lock:
section_map = self.__section_map
self.__section_map = dict()
for section_id in section_map.values():
canvas_widget = self._canvas_item_composition.canvas_widget
if canvas_widget:
canvas_widget.remove_section(section_id)
super().close()
@property
def is_layer_container(self) -> bool:
return True
def _try_needs_layout(self, canvas_item: AbstractCanvasItem) -> bool:
if self._canvas_item_composition.canvas_size:
# if this is a normal canvas item, tell its container to layout again.
# otherwise, if this is the root, just layout the root.
container = canvas_item.container if canvas_item != self._canvas_item_composition else canvas_item
if container and container.canvas_size:
container.update_layout(container.canvas_origin, container.canvas_size)
if container == self._canvas_item_composition:
# when the root is resized, be sure to update all of the opaque items since layout
# doesn't do it automatically right now.
for canvas_item in self._canvas_item_composition.get_root_opaque_canvas_items():
canvas_item.update()
return True
def _try_update_with_items(self, canvas_items: typing.Optional[typing.Sequence[AbstractCanvasItem]] = None) -> bool:
drawing_context = DrawingContext.DrawingContext()
if self._canvas_item_composition._has_layout and self._canvas_item_composition.canvas_widget and canvas_items:
for canvas_item in canvas_items:
if canvas_item.is_root_opaque:
self._canvas_item_composition._update_count += 1
canvas_size = canvas_item.canvas_size
if canvas_size:
canvas_rect = Geometry.IntRect(canvas_item.map_to_root_container(Geometry.IntPoint(0, 0)), canvas_size)
canvas_item._repaint_template(drawing_context, immediate=False)
drawing_context.translate(-canvas_rect.left, -canvas_rect.top)
with self.__section_ids_lock:
section_id = self.__section_map.get(canvas_item, None)
if not section_id:
RootLayoutRenderTrait.next_section_id += 1
section_id = RootLayoutRenderTrait.next_section_id
self.__section_map[canvas_item] = section_id
self._canvas_item_composition.canvas_widget.draw_section(section_id, drawing_context, canvas_rect)
# break
self.__cull_unused_sections()
return True
def __cull_unused_sections(self) -> None:
canvas_items = self._canvas_item_composition.get_root_opaque_canvas_items()
with self.__section_ids_lock:
section_map = self.__section_map
self.__section_map = dict()
for canvas_item in canvas_items:
section_id = section_map.pop(canvas_item, None)
if section_id:
self.__section_map[canvas_item] = section_id
for section_id in section_map.values():
canvas_widget = self._canvas_item_composition.canvas_widget
if canvas_widget:
canvas_widget.remove_section(section_id)
RootLayoutRender = "root"
DefaultLayoutRender: typing.Optional[str] = None
class RootCanvasItem(CanvasItemComposition):
"""A root layer to interface to the widget world.
The root canvas item acts as a bridge between the higher level ui widget and a canvas hierarchy. It connects size
notifications, mouse activity, keyboard activity, focus activity, and drag and drop actions to the canvas item.
The root canvas item provides a canvas_widget property which is the canvas widget associated with this root item.
The root canvas may be focusable or not. There are two focus states that this root canvas item handles: the widget
focus and the canvas item focus. The widget focus comes from the enclosing widget. If this root canvas item has a
widget focus, then it can also have a canvas item focus to specify which specific canvas item is the focus in this
root canvas item's hierarchy.
"""
def __init__(self, canvas_widget: UserInterface.CanvasWidget, *, layout_render: typing.Optional[str] = DefaultLayoutRender) -> None:
super().__init__(RootLayoutRenderTrait(self) if layout_render == RootLayoutRender and _threaded_rendering_enabled else LayerLayoutRenderTrait(self))
self.__canvas_widget = canvas_widget
self.__canvas_widget.on_size_changed = self.size_changed
self.__canvas_widget.on_mouse_clicked = self.__mouse_clicked
self.__canvas_widget.on_mouse_double_clicked = self.__mouse_double_clicked
self.__canvas_widget.on_mouse_entered = self.__mouse_entered
self.__canvas_widget.on_mouse_exited = self.__mouse_exited
self.__canvas_widget.on_mouse_pressed = self.__mouse_pressed
self.__canvas_widget.on_mouse_released = self.__mouse_released
self.__canvas_widget.on_mouse_position_changed = self.__mouse_position_changed
self.__canvas_widget.on_grabbed_mouse_position_changed = self.__grabbed_mouse_position_changed
self.__canvas_widget.on_wheel_changed = self.wheel_changed
self.__canvas_widget.on_context_menu_event = self.__context_menu_event
self.__canvas_widget.on_key_pressed = self.__key_pressed
self.__canvas_widget.on_key_released = self.__key_released
self.__canvas_widget.on_focus_changed = self.__focus_changed
self.__canvas_widget.on_drag_enter = self.__drag_enter
self.__canvas_widget.on_drag_leave = self.__drag_leave
self.__canvas_widget.on_drag_move = self.__drag_move
self.__canvas_widget.on_drop = self.__drop
self.__canvas_widget.on_tool_tip = self.handle_tool_tip
self.__canvas_widget.on_pan_gesture = self.pan_gesture
self.__canvas_widget.on_dispatch_any = self.__dispatch_any
self.__canvas_widget.on_can_dispatch_any = self.__can_dispatch_any
self.__canvas_widget.on_get_menu_item_state = self.__get_menu_item_state
setattr(self.__canvas_widget, "_root_canvas_item", weakref.ref(self)) # for debugging
self.__drawing_context_updated = False
self.__interaction_count = 0
self.__focused_item: typing.Optional[AbstractCanvasItem] = None
self.__last_focused_item: typing.Optional[AbstractCanvasItem] = None
self.__mouse_canvas_item: typing.Optional[AbstractCanvasItem] = None # not None when the mouse is pressed
self.__mouse_tracking = False
self.__mouse_tracking_canvas_item: typing.Optional[AbstractCanvasItem] = None
self.__drag_tracking = False
self.__drag_tracking_canvas_item: typing.Optional[AbstractCanvasItem] = None
self.__grab_canvas_item: typing.Optional[MouseTrackingCanvasItem.TrackingCanvasItem] = None
self._set_canvas_origin(Geometry.IntPoint())
def close(self) -> None:
# shut down the repaint thread first
self._stop_render_behavior() # call first so that it doesn't use canvas widget
self.__mouse_tracking_canvas_item = None
self.__drag_tracking_canvas_item = None
self.__grab_canvas_item = None
self.__focused_item = None
self.__last_focused_item = None
self.__canvas_widget.on_size_changed = None
self.__canvas_widget.on_mouse_clicked = None
self.__canvas_widget.on_mouse_double_clicked = None
self.__canvas_widget.on_mouse_entered = None
self.__canvas_widget.on_mouse_exited = None
self.__canvas_widget.on_mouse_pressed = None
self.__canvas_widget.on_mouse_released = None
self.__canvas_widget.on_mouse_position_changed = None
self.__canvas_widget.on_grabbed_mouse_position_changed = None
self.__canvas_widget.on_wheel_changed = None
self.__canvas_widget.on_context_menu_event = None
self.__canvas_widget.on_key_pressed = None
self.__canvas_widget.on_key_released = None
self.__canvas_widget.on_focus_changed = None
self.__canvas_widget.on_drag_enter = None
self.__canvas_widget.on_drag_leave = None
self.__canvas_widget.on_drag_move = None
self.__canvas_widget.on_drop = None
self.__canvas_widget.on_tool_tip = None
self.__canvas_widget.on_pan_gesture = None
super().close()
# culling during close requires the canvas widget; clear it here (after close) so that it remains available until then.
self.__canvas_widget = typing.cast(typing.Any, None)
def _repaint_finished(self, drawing_context: DrawingContext.DrawingContext) -> None:
self.__canvas_widget.draw(drawing_context)
def refresh_layout(self) -> None:
self._needs_layout(self)
@property
def root_container(self) -> typing.Optional[RootCanvasItem]:
return self
@property
def canvas_widget(self) -> UserInterface.CanvasWidget:
""" Return the canvas widget. """
return self.__canvas_widget
def map_to_global(self, p: Geometry.IntPoint) -> Geometry.IntPoint:
return self.__canvas_widget.map_to_global(p)
@property
def is_ui_interaction_active(self) -> bool:
return self.__interaction_count > 0
def _adjust_ui_interaction(self, value: int) -> None:
self.__interaction_count += value
class UIInteractionContext:
def __init__(self, root_canvas_item: RootCanvasItem) -> None:
self.__root_canvas_item = root_canvas_item
def close(self) -> None:
self.__root_canvas_item._adjust_ui_interaction(-1)
def __enter__(self) -> RootCanvasItem.UIInteractionContext:
self.__root_canvas_item._adjust_ui_interaction(1)
return self
def __exit__(self, exception_type: typing.Optional[typing.Type[BaseException]],
value: typing.Optional[BaseException], traceback: typing.Optional[types.TracebackType]) -> typing.Optional[bool]:
self.close()
return None
def _ui_interaction(self) -> contextlib.AbstractContextManager[RootCanvasItem.UIInteractionContext]:
return RootCanvasItem.UIInteractionContext(self)
@property
def focusable(self) -> bool:
""" Return whether the canvas widget is focusable. """
return self.canvas_widget.focusable
@focusable.setter
def focusable(self, focusable: bool) -> None:
""" Set whether the canvas widget is focusable. """
self.canvas_widget.focusable = focusable
def size_changed(self, width: int, height: int) -> None:
""" Called when size changes. """
# logging.debug("{} {} x {}".format(id(self), width, height))
if width > 0 and height > 0:
self._set_canvas_origin(Geometry.IntPoint())
self._set_canvas_size(Geometry.IntSize(height=height, width=width))
self._has_layout = self.canvas_origin is not None and self.canvas_size is not None
self.refresh_layout()
@property
def focused_item(self) -> typing.Optional[AbstractCanvasItem]:
"""
Return the canvas focused item. May return None.
The focused item is either this item itself or one of its
children.
"""
return self.__focused_item
def _set_focused_item(self, focused_item: typing.Optional[AbstractCanvasItem], p: typing.Optional[Geometry.IntPoint] = None, modifiers: typing.Optional[UserInterface.KeyboardModifiers] = None) -> None:
""" Set the canvas focused item. This will also update the focused property of both old item (if any) and new item (if any). """
if not modifiers or not modifiers.any_modifier:
if focused_item != self.__focused_item:
if self.__focused_item:
self.__focused_item._set_focused(False)
self.__focused_item = focused_item
if self.__focused_item:
self.__focused_item._set_focused(True)
if self.__focused_item:
self.__last_focused_item = self.__focused_item
elif focused_item:
focused_item.adjust_secondary_focus(p or Geometry.IntPoint(), modifiers)
def __focus_changed(self, focused: bool) -> None:
""" Called when widget focus changes. """
if focused and not self.focused_item:
self._set_focused_item(self.__last_focused_item)
elif not focused and self.focused_item:
self._set_focused_item(None)
def _request_root_focus(self, focused_item: typing.Optional[AbstractCanvasItem], p: typing.Optional[Geometry.IntPoint], modifiers: typing.Optional[UserInterface.KeyboardModifiers]) -> None:
"""Requests that the root widget gets focus.
This focus is different from the focus within the canvas system. This is
the external focus in the widget system.
If the canvas widget is already focused, this simply sets the focused item
to be the requested one. Otherwise, the widget has to request focus. When
it receives focus, the widget triggers __focus_changed, which restores the
last focused item as the new focused canvas item.
"""
if self.__canvas_widget.focused:
self._set_focused_item(focused_item, p, modifiers)
else:
self._set_focused_item(None, p, modifiers)
self.__last_focused_item = focused_item
self.__canvas_widget.focused = True # this will trigger focus changed to set the focus
def wheel_changed(self, x: int, y: int, dx: int, dy: int, is_horizontal: bool) -> bool:
# always give the mouse canvas item priority (for tracking outside bounds)
canvas_items = self.canvas_items_at_point(x, y)
for canvas_item in reversed(canvas_items):
if canvas_item != self:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), canvas_item)
if canvas_item.wheel_changed(canvas_item_point.x, canvas_item_point.y, dx, dy, is_horizontal):
return True
return False
def handle_tool_tip(self, x: int, y: int, gx: int, gy: int) -> bool:
canvas_items = self.canvas_items_at_point(x, y)
for canvas_item in reversed(canvas_items):
if canvas_item != self:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), canvas_item)
if canvas_item.handle_tool_tip(canvas_item_point.x, canvas_item_point.y, gx, gy):
return True
return False
def __dispatch_any(self, method: str, *args: typing.Any, **kwargs: typing.Any) -> bool:
focused_item = self.focused_item
if focused_item:
return focused_item._dispatch_any(method, *args, **kwargs)
return False
def __can_dispatch_any(self, method: str) -> bool:
focused_item = self.focused_item
if focused_item:
return focused_item._can_dispatch_any(method)
return False
def __get_menu_item_state(self, command_id: str) -> typing.Optional[UserInterface.MenuItemState]:
focused_item = self.focused_item
if focused_item:
menu_item_state = focused_item._get_menu_item_state(command_id)
if menu_item_state:
return menu_item_state
return None
def _cursor_shape_changed(self, item: AbstractCanvasItem) -> None:
if item == self.__mouse_tracking_canvas_item and self.__mouse_tracking_canvas_item:
self.__canvas_widget.set_cursor_shape(self.__mouse_tracking_canvas_item.cursor_shape)
def _restore_cursor_shape(self) -> None:
# if self.__mouse_tracking_canvas_item:
# self.__canvas_widget.set_cursor_shape(self.__mouse_tracking_canvas_item.cursor_shape)
# else:
self.__canvas_widget.set_cursor_shape(None)
def __mouse_entered(self) -> None:
self.__mouse_tracking = True
def __mouse_exited(self) -> None:
if self.__mouse_tracking_canvas_item:
self.__mouse_tracking_canvas_item.mouse_exited()
self.__mouse_tracking = False
self.__mouse_tracking_canvas_item = None
self.__canvas_widget.set_cursor_shape(None)
self.__canvas_widget.tool_tip = None
def __mouse_canvas_item_at_point(self, x: int, y: int) -> typing.Optional[AbstractCanvasItem]:
if self.__mouse_canvas_item:
return self.__mouse_canvas_item
canvas_items = self.canvas_items_at_point(x, y)
for canvas_item in canvas_items:
if canvas_item.wants_mouse_events:
return canvas_item
return None
def __request_focus(self, canvas_item: AbstractCanvasItem, p: Geometry.IntPoint, modifiers: UserInterface.KeyboardModifiers) -> None:
canvas_item_: typing.Optional[AbstractCanvasItem] = canvas_item
while canvas_item_:
if canvas_item_.focusable:
canvas_item_._request_focus(p, modifiers)
break
canvas_item_ = canvas_item_.container
def __mouse_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
with self._ui_interaction():
canvas_item = self.__mouse_canvas_item_at_point(x, y)
if canvas_item:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), canvas_item)
return canvas_item.mouse_clicked(canvas_item_point.x, canvas_item_point.y, modifiers)
return False
def __mouse_double_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
with self._ui_interaction():
canvas_item = self.__mouse_canvas_item_at_point(x, y)
if canvas_item:
self.__request_focus(canvas_item, Geometry.IntPoint(x=x, y=y), modifiers)
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), canvas_item)
return canvas_item.mouse_double_clicked(canvas_item_point.x, canvas_item_point.y, modifiers)
return False
def __mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self._adjust_ui_interaction(1)
self.__mouse_position_changed(x, y, modifiers)
if not self.__mouse_tracking_canvas_item:
self.__mouse_tracking_canvas_item = self.__mouse_canvas_item_at_point(x, y)
if self.__mouse_tracking_canvas_item:
self.__mouse_tracking_canvas_item.mouse_entered()
self.__canvas_widget.set_cursor_shape(self.__mouse_tracking_canvas_item.cursor_shape)
self.__canvas_widget.tool_tip = self.__mouse_tracking_canvas_item.tool_tip
if self.__mouse_tracking_canvas_item:
self.__mouse_canvas_item = self.__mouse_tracking_canvas_item
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), self.__mouse_canvas_item)
self.__request_focus_canvas_item = self.__mouse_canvas_item
return self.__mouse_canvas_item.mouse_pressed(canvas_item_point.x, canvas_item_point.y, modifiers)
return False
def __mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
result = False
if self.__mouse_canvas_item:
if self.__request_focus_canvas_item:
self.__request_focus(self.__request_focus_canvas_item, Geometry.IntPoint(x=x, y=y), modifiers)
self.__request_focus_canvas_item = typing.cast(typing.Any, None)
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), self.__mouse_canvas_item)
result = self.__mouse_canvas_item.mouse_released(canvas_item_point.x, canvas_item_point.y, modifiers)
self.__mouse_canvas_item = None
self.__mouse_position_changed(x, y, modifiers)
self._adjust_ui_interaction(-1)
return result
def bypass_request_focus(self) -> None:
self.__request_focus_canvas_item = typing.cast(typing.Any, None)
def __mouse_position_changed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> None:
if not self.__mouse_tracking:
# handle case where mouse is suddenly within this canvas item but it never entered. this can happen when
# the user activates the application.
self.mouse_entered()
if self.__mouse_tracking and not self.__mouse_tracking_canvas_item:
# find the existing canvas item that is or wants to track the mouse. if it's new, call entered and update
# the cursor.
self.__mouse_tracking_canvas_item = self.__mouse_canvas_item_at_point(x, y)
if self.__mouse_tracking_canvas_item:
self.__mouse_tracking_canvas_item.mouse_entered()
self.__canvas_widget.set_cursor_shape(self.__mouse_tracking_canvas_item.cursor_shape)
self.__canvas_widget.tool_tip = self.__mouse_tracking_canvas_item.tool_tip
new_mouse_canvas_item = self.__mouse_canvas_item_at_point(x, y)
if self.__mouse_tracking_canvas_item != new_mouse_canvas_item:
# if the mouse tracking canvas item changes, exit the old one and enter the new one.
if self.__mouse_tracking_canvas_item:
# there may be a case where the mouse has moved outside the canvas item and the canvas
# item has also been closed. for instance, context menu item which closes the canvas item.
# so double check whether the mouse tracking canvas item is still in the hierarchy by checking
                # its container. only call mouse_exited if the item is still in the hierarchy.
if self.__mouse_tracking_canvas_item.container:
self.__mouse_tracking_canvas_item.mouse_exited()
self.__canvas_widget.set_cursor_shape(None)
self.__canvas_widget.tool_tip = None
self.__mouse_tracking_canvas_item = new_mouse_canvas_item
if self.__mouse_tracking_canvas_item:
self.__mouse_tracking_canvas_item.mouse_entered()
self.__canvas_widget.set_cursor_shape(self.__mouse_tracking_canvas_item.cursor_shape)
self.__canvas_widget.tool_tip = self.__mouse_tracking_canvas_item.tool_tip
# finally, send out the actual position changed message to the (possibly new) current mouse tracking canvas
# item. also make note of the last time the cursor changed for tool tip tracking.
if self.__mouse_tracking_canvas_item:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), self.__mouse_tracking_canvas_item)
self.__mouse_tracking_canvas_item.mouse_position_changed(canvas_item_point.x, canvas_item_point.y, modifiers)
def __grabbed_mouse_position_changed(self, dx: int, dy: int, modifiers: UserInterface.KeyboardModifiers) -> None:
if self.__grab_canvas_item:
self.__grab_canvas_item.grabbed_mouse_position_changed(dx, dy, modifiers)
def __context_menu_event(self, x: int, y: int, gx: int, gy: int) -> bool:
with self._ui_interaction():
canvas_items = self.canvas_items_at_point(x, y)
for canvas_item in canvas_items:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), canvas_item)
if canvas_item.context_menu_event(canvas_item_point.x, canvas_item_point.y, gx, gy):
return True
return False
def __key_pressed(self, key: UserInterface.Key) -> bool:
self._adjust_ui_interaction(1)
if self.focused_item:
return self.focused_item.key_pressed(key)
return False
def __key_released(self, key: UserInterface.Key) -> bool:
result = False
if self.focused_item:
result = self.focused_item.key_released(key)
self._adjust_ui_interaction(-1)
return result
def __drag_enter(self, mime_data: UserInterface.MimeData) -> str:
self.__drag_tracking = True
return "accept"
def __drag_leave(self) -> str:
if self.__drag_tracking_canvas_item:
self.__drag_tracking_canvas_item.drag_leave()
self.__drag_tracking = False
self.__drag_tracking_canvas_item = None
return "accept"
def __drag_canvas_item_at_point(self, x: int, y: int, mime_data: UserInterface.MimeData) -> typing.Optional[AbstractCanvasItem]:
canvas_items = self.canvas_items_at_point(x, y)
for canvas_item in canvas_items:
if canvas_item.wants_drag_event(mime_data, x, y):
return canvas_item
return None
def __drag_move(self, mime_data: UserInterface.MimeData, x: int, y: int) -> str:
response = "ignore"
if self.__drag_tracking and not self.__drag_tracking_canvas_item:
self.__drag_tracking_canvas_item = self.__drag_canvas_item_at_point(x, y, mime_data)
if self.__drag_tracking_canvas_item:
self.__drag_tracking_canvas_item.drag_enter(mime_data)
new_drag_canvas_item = self.__drag_canvas_item_at_point(x, y, mime_data)
if self.__drag_tracking_canvas_item != new_drag_canvas_item:
if self.__drag_tracking_canvas_item:
self.__drag_tracking_canvas_item.drag_leave()
self.__drag_tracking_canvas_item = new_drag_canvas_item
if self.__drag_tracking_canvas_item:
self.__drag_tracking_canvas_item.drag_enter(mime_data)
if self.__drag_tracking_canvas_item:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), self.__drag_tracking_canvas_item)
response = self.__drag_tracking_canvas_item.drag_move(mime_data, canvas_item_point.x, canvas_item_point.y)
return response
def __drop(self, mime_data: UserInterface.MimeData, x: int, y: int) -> str:
with self._ui_interaction():
response = "ignore"
if self.__drag_tracking_canvas_item:
canvas_item_point = self.map_to_canvas_item(Geometry.IntPoint(y=y, x=x), self.__drag_tracking_canvas_item)
response = self.__drag_tracking_canvas_item.drop(mime_data, canvas_item_point.x, canvas_item_point.y)
self.__drag_leave()
return response
def drag(self, mime_data: UserInterface.MimeData, thumbnail: typing.Optional[DrawingContext.RGBA32Type] = None,
hot_spot_x: typing.Optional[int] = None, hot_spot_y: typing.Optional[int] = None,
drag_finished_fn: typing.Optional[typing.Callable[[str], None]] = None) -> None:
self.__canvas_widget.drag(mime_data, thumbnail, hot_spot_x, hot_spot_y, drag_finished_fn)
def grab_gesture(self, gesture_type: str) -> None:
""" Grab gesture """
self._adjust_ui_interaction(1)
self.__canvas_widget.grab_gesture(gesture_type)
def release_gesture(self, gesture_type: str) -> None:
""" Ungrab gesture """
self.__canvas_widget.release_gesture(gesture_type)
self._adjust_ui_interaction(-1)
def grab_mouse(self, grabbed_canvas_item: MouseTrackingCanvasItem.TrackingCanvasItem, gx: int, gy: int) -> None:
self._adjust_ui_interaction(1)
self.__canvas_widget.grab_mouse(gx, gy)
self.__grab_canvas_item = grabbed_canvas_item
def release_mouse(self) -> None:
self.__canvas_widget.release_mouse()
self._restore_cursor_shape()
self.__grab_canvas_item = None
self._adjust_ui_interaction(-1)
def show_tool_tip_text(self, text: str, gx: int, gy: int) -> None:
self.__canvas_widget.show_tool_tip_text(text, gx, gy)
class BackgroundCanvasItem(AbstractCanvasItem):
""" Canvas item to draw background_color. """
def __init__(self, background_color: typing.Optional[str] = None) -> None:
super().__init__()
self.background_color = background_color or "#888"
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
# canvas size
canvas_size = self.canvas_size
if canvas_size:
canvas_width = canvas_size[1]
canvas_height = canvas_size[0]
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.rect(0, 0, canvas_width, canvas_height)
drawing_context.fill_style = self.background_color
drawing_context.fill()
class CellCanvasItem(AbstractCanvasItem):
""" Canvas item to draw and respond to user events for a cell.
A cell must implement the following interface:
event: update_event() - fired when the canvas item needs an update
method: paint_cell(drawing_context, rect, style) - called to draw the cell
    The style parameter passed to paint_cell is a set containing zero or one strings from each of the aspects below:
disabled (default is enabled)
checked, partial (default is unchecked)
hover, active (default is none)
"""
def __init__(self, cell: typing.Optional[Widgets.CellLike] = None) -> None:
super().__init__()
self.__enabled = True
self.__check_state = "unchecked"
self.__mouse_inside = False
self.__mouse_pressed = False
self.__cell = None
self.__cell_update_event_listener: typing.Optional[Event.EventListener] = None
self.cell = cell
self.style: typing.Set[str] = set()
def close(self) -> None:
self.cell = None
super().close()
@property
def enabled(self) -> bool:
return self.__enabled
@enabled.setter
def enabled(self, value: bool) -> None:
if self.__enabled != value:
self.__enabled = value
self.__update_style()
@property
def check_state(self) -> str:
return self.__check_state
@check_state.setter
def check_state(self, value: str) -> None:
assert value in ["checked", "unchecked", "partial"]
if self.__check_state != value:
self.__check_state = value
self.__update_style()
@property
def checked(self) -> bool:
return self.check_state == "checked"
@checked.setter
def checked(self, value: bool) -> None:
self.check_state = "checked" if value else "unchecked"
@property
def _mouse_inside(self) -> bool:
return self.__mouse_inside
@_mouse_inside.setter
def _mouse_inside(self, value: bool) -> None:
self.__mouse_inside = value
self.__update_style()
@property
def _mouse_pressed(self) -> bool:
return self.__mouse_pressed
@_mouse_pressed.setter
def _mouse_pressed(self, value: bool) -> None:
self.__mouse_pressed = value
self.__update_style()
def __update_style(self) -> None:
old_style = copy.copy(self.style)
# enabled state
self.style.discard('disabled')
if not self.enabled:
self.style.add('disabled')
# checked state
self.style.discard('checked')
if self.check_state == "checked":
self.style.add('checked')
# hover state
self.style.discard('hover')
self.style.discard('active')
if self._mouse_inside and self._mouse_pressed:
self.style.add('active')
elif self.__mouse_inside:
self.style.add('hover')
if self.style != old_style:
self.update()
@property
def cell(self) -> typing.Optional[Widgets.CellLike]:
return self.__cell
@cell.setter
def cell(self, new_cell: typing.Optional[Widgets.CellLike]) -> None:
if self.__cell_update_event_listener:
self.__cell_update_event_listener.close()
self.__cell_update_event_listener = None
self.__cell = new_cell
if self.__cell:
self.__cell_update_event_listener = self.__cell.update_event.listen(self.update)
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
rect = self.canvas_bounds
if self.__cell and rect is not None:
with drawing_context.saver():
self.__cell.paint_cell(drawing_context, rect, self.style)
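# Illustrative sketch, not part of the original module: a minimal cell
# satisfying the interface documented on CellCanvasItem above. It exposes
# an update_event that fires when the cell needs a repaint and a paint_cell
# method; the class name and its color property are hypothetical.
class _ExampleSolidColorCell:
    def __init__(self, color: str = "#36C") -> None:
        self.update_event = Event.Event()
        self.__color = color
    @property
    def color(self) -> str:
        return self.__color
    @color.setter
    def color(self, value: str) -> None:
        self.__color = value
        self.update_event.fire()  # CellCanvasItem listens and calls update()
    def paint_cell(self, drawing_context: DrawingContext.DrawingContext, rect: Geometry.IntRect, style: typing.Set[str]) -> None:
        # fill the cell rect, dimming to gray when the 'disabled' style is present
        drawing_context.begin_path()
        drawing_context.rect(rect.left, rect.top, rect.width, rect.height)
        drawing_context.fill_style = "#888" if "disabled" in style else self.__color
        drawing_context.fill()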
class TwistDownCell:
def __init__(self) -> None:
super().__init__()
self.update_event = Event.Event()
def paint_cell(self, drawing_context: DrawingContext.DrawingContext, rect: Geometry.IntRect, style: typing.Set[str]) -> None:
# disabled (default is enabled)
# checked, partial (default is unchecked)
# hover, active (default is none)
if "checked" in style:
drawing_context.begin_path()
drawing_context.move_to(rect.center.x, rect.center.y + 4)
drawing_context.line_to(rect.center.x + 4.5, rect.center.y - 4)
drawing_context.line_to(rect.center.x - 4.5, rect.center.y - 4)
drawing_context.close_path()
else:
drawing_context.begin_path()
drawing_context.move_to(rect.center.x + 4, rect.center.y)
drawing_context.line_to(rect.center.x - 4, rect.center.y + 4.5)
drawing_context.line_to(rect.center.x - 4, rect.center.y - 4.5)
drawing_context.close_path()
overlay_color = None
if "disabled" in style:
overlay_color = "rgba(255, 255, 255, 0.5)"
else:
if "active" in style:
overlay_color = "rgba(128, 128, 128, 0.5)"
elif "hover" in style:
overlay_color = "rgba(128, 128, 128, 0.1)"
drawing_context.fill_style = "#444"
drawing_context.fill()
drawing_context.stroke_style = "#444"
drawing_context.stroke()
if overlay_color:
rect_args = rect.left, rect.top, rect.width, rect.height
drawing_context.begin_path()
drawing_context.rect(*rect_args)
drawing_context.fill_style = overlay_color
drawing_context.fill()
class TwistDownCanvasItem(CellCanvasItem):
def __init__(self) -> None:
super().__init__()
self.cell = TwistDownCell()
self.wants_mouse_events = True
self.on_button_clicked: typing.Optional[typing.Callable[[], None]] = None
def close(self) -> None:
self.on_button_clicked = None
super().close()
def mouse_entered(self) -> bool:
self._mouse_inside = True
return True
def mouse_exited(self) -> bool:
self._mouse_inside = False
return True
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self._mouse_pressed = True
return True
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self._mouse_pressed = False
return True
def mouse_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.enabled:
if callable(self.on_button_clicked):
self.on_button_clicked()
return True
class BitmapCell:
def __init__(self, rgba_bitmap_data: typing.Optional[DrawingContext.RGBA32Type] = None,
background_color: typing.Optional[str] = None, border_color: typing.Optional[str] = None) -> None:
super().__init__()
self.__rgba_bitmap_data = rgba_bitmap_data
self.__data: typing.Optional[DrawingContext.GrayscaleF32Type] = None
self.__display_limits: typing.Optional[typing.Tuple[float, float]] = None
self.__color_map_data: typing.Optional[DrawingContext.RGBA32Type] = None
self.__background_color = background_color
self.__border_color = border_color
self.update_event = Event.Event()
def set_rgba_bitmap_data(self, rgba_bitmap_data: typing.Optional[DrawingContext.RGBA32Type], trigger_update: bool = True) -> None:
self.__rgba_bitmap_data = rgba_bitmap_data
self.__data = None
self.__display_limits = None
self.__color_map_data = None
if trigger_update:
self.update_event.fire()
def set_data(self, data: typing.Optional[DrawingContext.GrayscaleF32Type],
display_limits: typing.Optional[typing.Tuple[float, float]],
color_map_data: typing.Optional[DrawingContext.RGBA32Type], trigger_update: bool = True) -> None:
self.__rgba_bitmap_data = None
self.__data = data
self.__display_limits = display_limits
self.__color_map_data = color_map_data
if trigger_update:
self.update_event.fire()
@property
def data(self) -> typing.Optional[DrawingContext.GrayscaleF32Type]:
return self.__data
@property
def rgba_bitmap_data(self) -> typing.Optional[DrawingContext.RGBA32Type]:
return self.__rgba_bitmap_data
@rgba_bitmap_data.setter
def rgba_bitmap_data(self, value: typing.Optional[DrawingContext.RGBA32Type]) -> None:
self.set_rgba_bitmap_data(value, trigger_update=True)
@property
def background_color(self) -> typing.Optional[str]:
return self.__background_color
@background_color.setter
def background_color(self, background_color: typing.Optional[str]) -> None:
self.__background_color = background_color
self.update_event.fire()
@property
def border_color(self) -> typing.Optional[str]:
return self.__border_color
@border_color.setter
def border_color(self, border_color: typing.Optional[str]) -> None:
self.__border_color = border_color
self.update_event.fire()
def paint_cell(self, drawing_context: DrawingContext.DrawingContext, rect: Geometry.IntRect, style: typing.Set[str]) -> None:
# set up the defaults
background_color = self.__background_color
border_color = self.__border_color
overlay_color = None
# configure based on style
if "disabled" in style:
overlay_color = "rgba(255, 255, 255, 0.5)"
if "checked" in style:
background_color = "rgb(64, 64, 64)"
else:
if "checked" in style:
background_color = "rgb(192, 192, 192)"
if "active" in style:
overlay_color = "rgba(128, 128, 128, 0.5)"
elif "hover" in style:
overlay_color = "rgba(128, 128, 128, 0.1)"
rect_args = rect.left, rect.top, rect.width, rect.height
bitmap_data = self.rgba_bitmap_data
raw_data = self.__data
# draw the background
if background_color:
drawing_context.begin_path()
drawing_context.rect(*rect_args)
drawing_context.fill_style = background_color
drawing_context.fill()
# draw the bitmap
if bitmap_data is not None:
image_size = typing.cast(Geometry.IntSizeTuple, bitmap_data.shape)
if image_size[0] > 0 and image_size[1] > 0:
display_rect = Geometry.fit_to_size(rect, image_size)
display_height = display_rect.height
display_width = display_rect.width
if display_rect and display_width > 0 and display_height > 0:
display_top = display_rect.top
display_left = display_rect.left
drawing_context.draw_image(bitmap_data, display_left, display_top, display_width, display_height)
if raw_data is not None:
image_size = typing.cast(Geometry.IntSizeTuple, raw_data.shape)
if image_size[0] > 0 and image_size[1] > 0:
display_rect = Geometry.fit_to_size(rect, image_size)
display_height = display_rect.height
display_width = display_rect.width
if display_rect and display_width > 0 and display_height > 0:
display_top = display_rect.top
display_left = display_rect.left
display_limits = self.__display_limits or (0.0, 0.0)
color_map_data = self.__color_map_data
drawing_context.draw_data(raw_data, display_left, display_top, display_width, display_height, display_limits[0], display_limits[1], color_map_data)
# draw the overlay style
if overlay_color:
drawing_context.begin_path()
drawing_context.rect(*rect_args)
drawing_context.fill_style = overlay_color
drawing_context.fill()
# draw the border
if border_color:
drawing_context.begin_path()
drawing_context.rect(*rect_args)
drawing_context.stroke_style = border_color
drawing_context.stroke()
class BitmapCanvasItem(CellCanvasItem):
""" Canvas item to draw rgba bitmap in bgra uint32 ndarray format. """
def __init__(self, rgba_bitmap_data: typing.Optional[DrawingContext.RGBA32Type] = None,
background_color: typing.Optional[str] = None, border_color: typing.Optional[str] = None) -> None:
super().__init__()
self.__bitmap_cell = BitmapCell(rgba_bitmap_data, background_color, border_color)
self.cell = self.__bitmap_cell
def set_rgba_bitmap_data(self, rgba_bitmap_data: typing.Optional[DrawingContext.RGBA32Type],
trigger_update: bool = True) -> None:
self.__bitmap_cell.set_rgba_bitmap_data(rgba_bitmap_data, trigger_update)
def set_data(self, data: typing.Optional[DrawingContext.GrayscaleF32Type],
display_limits: typing.Optional[typing.Tuple[float, float]],
color_map_data: typing.Optional[DrawingContext.RGBA32Type], trigger_update: bool = True) -> None:
self.__bitmap_cell.set_data(data, display_limits, color_map_data, trigger_update)
@property
def data(self) -> typing.Optional[DrawingContext.GrayscaleF32Type]:
return self.__bitmap_cell.data
@property
def rgba_bitmap_data(self) -> typing.Optional[DrawingContext.RGBA32Type]:
return self.__bitmap_cell.rgba_bitmap_data
@rgba_bitmap_data.setter
def rgba_bitmap_data(self, rgb_bitmap_data: typing.Optional[DrawingContext.RGBA32Type]) -> None:
self.__bitmap_cell.rgba_bitmap_data = rgb_bitmap_data
@property
def background_color(self) -> typing.Optional[str]:
return self.__bitmap_cell.background_color
@background_color.setter
def background_color(self, background_color: typing.Optional[str]) -> None:
self.__bitmap_cell.background_color = background_color
@property
def border_color(self) -> typing.Optional[str]:
return self.__bitmap_cell.border_color
@border_color.setter
def border_color(self, border_color: typing.Optional[str]) -> None:
self.__bitmap_cell.border_color = border_color
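# Illustrative usage sketch, not part of the original module. A
# BitmapCanvasItem accepts either packed uint32 bitmap data or grayscale
# float data with display limits and an optional color map; grayscale_f32
# is a hypothetical float32 array, and the constant below assumes
# little-endian uint32 packing, giving opaque mid-gray.
#     gray = numpy.full((16, 16), 0xFF808080, dtype=numpy.uint32)
#     bitmap_item = BitmapCanvasItem(rgba_bitmap_data=gray, border_color="#000")
#     bitmap_item.set_data(grayscale_f32, display_limits=(0.0, 1.0), color_map_data=None)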
class BitmapButtonCanvasItem(BitmapCanvasItem):
""" Canvas item button to draw rgba bitmap in bgra uint32 ndarray format. """
def __init__(self, rgba_bitmap_data: typing.Optional[DrawingContext.RGBA32Type] = None,
background_color: typing.Optional[str] = None, border_color: typing.Optional[str] = None) -> None:
super().__init__(rgba_bitmap_data, background_color, border_color)
self.wants_mouse_events = True
self.on_button_clicked: typing.Optional[typing.Callable[[], None]] = None
def close(self) -> None:
self.on_button_clicked = None
super().close()
def mouse_entered(self) -> bool:
self._mouse_inside = True
return True
def mouse_exited(self) -> bool:
self._mouse_inside = False
return True
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self._mouse_pressed = True
return True
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self._mouse_pressed = False
return True
def mouse_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.enabled:
if callable(self.on_button_clicked):
self.on_button_clicked()
return True
class StaticTextCanvasItem(AbstractCanvasItem):
def __init__(self, text: typing.Optional[str] = None) -> None:
super().__init__()
self.__text = text if text is not None else str()
self.__text_color = "#000"
self.__text_disabled_color = "#888"
self.__enabled = True
self.__font = "12px"
@property
def text(self) -> str:
return self.__text
@text.setter
def text(self, text: typing.Optional[str]) -> None:
text = text if text is not None else str()
if self.__text != text:
self.__text = text
self.update()
@property
def enabled(self) -> bool:
return self.__enabled
@enabled.setter
def enabled(self, value: bool) -> None:
if self.__enabled != value:
self.__enabled = value
self.update()
@property
def text_color(self) -> str:
return self.__text_color
@text_color.setter
def text_color(self, value: str) -> None:
if self.__text_color != value:
self.__text_color = value
self.update()
@property
def text_disabled_color(self) -> str:
return self.__text_disabled_color
@text_disabled_color.setter
def text_disabled_color(self, value: str) -> None:
if self.__text_disabled_color != value:
self.__text_disabled_color = value
self.update()
@property
def font(self) -> str:
return self.__font
@font.setter
def font(self, value: str) -> None:
if self.__font != value:
self.__font = value
self.update()
def size_to_content(self, get_font_metrics_fn: typing.Callable[[str, str], UserInterface.FontMetrics],
horizontal_padding: typing.Optional[int] = None,
vertical_padding: typing.Optional[int] = None) -> None:
""" Size the canvas item to the text content. """
if horizontal_padding is None:
horizontal_padding = 4
if vertical_padding is None:
vertical_padding = 4
font_metrics = get_font_metrics_fn(self.__font, self.__text)
new_sizing = self.copy_sizing()
new_sizing._set_fixed_width(font_metrics.width + 2 * horizontal_padding)
new_sizing._set_fixed_height(font_metrics.height + 2 * vertical_padding)
self.update_sizing(new_sizing)
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
canvas_bounds = self.canvas_bounds
if canvas_bounds:
canvas_bounds_center = canvas_bounds.center
with drawing_context.saver():
drawing_context.font = self.__font
drawing_context.text_align = 'center'
drawing_context.text_baseline = 'middle'
drawing_context.fill_style = self.__text_color if self.__enabled else self.__text_disabled_color
drawing_context.fill_text(self.__text, canvas_bounds_center.x, canvas_bounds_center.y + 1)
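# Illustrative usage sketch, not part of the original module. size_to_content
# needs a (font, text) -> FontMetrics callable; here get_font_metrics_fn is
# assumed to be supplied by the hosting user interface object.
#     label = StaticTextCanvasItem("Exposure")
#     label.size_to_content(get_font_metrics_fn)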
class TextButtonCanvasItem(StaticTextCanvasItem):
def __init__(self, text: typing.Optional[str] = None) -> None:
super().__init__(text)
self.wants_mouse_events = True
self.__border_enabled = True
self.__mouse_inside = False
self.__mouse_pressed = False
self.on_button_clicked: typing.Optional[typing.Callable[[], None]] = None
def close(self) -> None:
self.on_button_clicked = None
super().close()
@property
def border_enabled(self) -> bool:
return self.__border_enabled
@border_enabled.setter
def border_enabled(self, value: bool) -> None:
if self.__border_enabled != value:
self.__border_enabled = value
self.update()
def mouse_entered(self) -> bool:
self.__mouse_inside = True
self.update()
return True
def mouse_exited(self) -> bool:
self.__mouse_inside = False
self.update()
return True
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__mouse_pressed = True
self.update()
return True
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__mouse_pressed = False
self.update()
return True
def mouse_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.enabled:
if callable(self.on_button_clicked):
self.on_button_clicked()
return True
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
canvas_size = self.canvas_size
if canvas_size:
with drawing_context.saver():
drawing_context.begin_path()
# drawing_context.rect(0, 0, canvas_size.width, canvas_size.height)
drawing_context.round_rect(1.0, 1.0, canvas_size.width - 2.0, canvas_size.height - 2.0, 4)
if self.enabled and self.__mouse_inside and self.__mouse_pressed:
drawing_context.fill_style = "rgba(128, 128, 128, 0.5)"
drawing_context.fill()
elif self.enabled and self.__mouse_inside:
drawing_context.fill_style = "rgba(128, 128, 128, 0.1)"
drawing_context.fill()
if self.border_enabled:
drawing_context.stroke_style = "#000"
drawing_context.line_width = 1.0
drawing_context.stroke()
super()._repaint(drawing_context)
class CheckBoxCanvasItem(AbstractCanvasItem):
def __init__(self, text: typing.Optional[str] = None) -> None:
super().__init__()
self.wants_mouse_events = True
self.__enabled = True
self.__mouse_inside = False
self.__mouse_pressed = False
self.__check_state = "unchecked"
self.__tristate = False
self.__text = text if text is not None else str()
self.__text_color = "#000"
self.__text_disabled_color = "#888"
self.__font = "12px"
self.on_checked_changed: typing.Optional[typing.Callable[[bool], None]] = None
self.on_check_state_changed: typing.Optional[typing.Callable[[str], None]] = None
def close(self) -> None:
self.on_checked_changed = None
self.on_check_state_changed = None
super().close()
@property
def enabled(self) -> bool:
return self.__enabled
@enabled.setter
def enabled(self, value: bool) -> None:
self.__enabled = value
self.update()
@property
def tristate(self) -> bool:
return self.__tristate
@tristate.setter
def tristate(self, value: bool) -> None:
self.__tristate = value
if not self.__tristate:
self.checked = self.check_state == "checked"
self.update()
@property
def check_state(self) -> str:
return self.__check_state
@check_state.setter
def check_state(self, value: str) -> None:
if self.tristate and value not in ("unchecked", "checked", "partial"):
value = "unchecked"
elif not self.tristate and value not in ("unchecked", "checked"):
value = "unchecked"
self.__check_state = value
self.update()
@property
def checked(self) -> bool:
return self.check_state == "checked"
@checked.setter
def checked(self, value: bool) -> None:
self.check_state = "checked" if value else "unchecked"
@property
def text(self) -> str:
return self.__text
@text.setter
def text(self, text: typing.Optional[str]) -> None:
text = text if text is not None else str()
if self.__text != text:
self.__text = text
self.update()
@property
def text_color(self) -> str:
return self.__text_color
@text_color.setter
def text_color(self, value: str) -> None:
if self.__text_color != value:
self.__text_color = value
self.update()
@property
def text_disabled_color(self) -> str:
return self.__text_disabled_color
@text_disabled_color.setter
def text_disabled_color(self, value: str) -> None:
if self.__text_disabled_color != value:
self.__text_disabled_color = value
self.update()
@property
def font(self) -> str:
return self.__font
@font.setter
def font(self, value: str) -> None:
if self.__font != value:
self.__font = value
self.update()
def mouse_entered(self) -> bool:
self.__mouse_inside = True
self.update()
return True
def mouse_exited(self) -> bool:
self.__mouse_inside = False
self.update()
return True
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__mouse_pressed = True
self.update()
return True
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__mouse_pressed = False
self.update()
return True
def mouse_clicked(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self._toggle_checked()
return True
def _toggle_checked(self) -> None:
if self.enabled:
if self.check_state == "checked":
self.check_state = "unchecked"
else:
self.check_state = "checked"
if callable(self.on_checked_changed):
self.on_checked_changed(self.check_state == "checked")
if callable(self.on_check_state_changed):
self.on_check_state_changed(self.check_state)
@property
def _mouse_inside(self) -> bool:
return self.__mouse_inside
@property
def _mouse_pressed(self) -> bool:
return self.__mouse_pressed
def size_to_content(self, get_font_metrics_fn: typing.Callable[[str, str], UserInterface.FontMetrics]) -> None:
""" Size the canvas item to the text content. """
horizontal_padding = 4
vertical_padding = 3
font_metrics = get_font_metrics_fn(self.__font, self.__text)
new_sizing = self.copy_sizing()
new_sizing._set_fixed_width(font_metrics.width + 2 * horizontal_padding + 14 + 4)
new_sizing._set_fixed_height(font_metrics.height + 2 * vertical_padding)
self.update_sizing(new_sizing)
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
canvas_size = self.canvas_size
if canvas_size:
with drawing_context.saver():
drawing_context.begin_path()
                tx = 4 + 14 + 4  # text x: left padding + 14px check box + 4px gap
                cx = 4 + 7  # horizontal center of the 14px check box
                cy = canvas_size.height * 0.5  # vertical center of the canvas
                size = 14  # check box edge length in pixels
                size_half = 7
drawing_context.round_rect(4, cy - size_half, size, size, 4.0)
if self.check_state in ("checked", "partial"):
drawing_context.fill_style = "#FFF"
drawing_context.fill()
if self.enabled and self.__mouse_inside and self.__mouse_pressed:
drawing_context.fill_style = "rgba(128, 128, 128, 0.5)"
drawing_context.fill()
elif self.enabled and self.__mouse_inside:
drawing_context.fill_style = "rgba(128, 128, 128, 0.1)"
drawing_context.fill()
drawing_context.stroke_style = "#000"
drawing_context.line_width = 1.0
drawing_context.stroke()
if self.check_state == "checked":
drawing_context.begin_path()
drawing_context.move_to(cx - 3, cy - 2)
drawing_context.line_to(cx + 0, cy + 2)
drawing_context.line_to(cx + 8, cy - 9)
drawing_context.stroke_style = "#000"
drawing_context.line_width = 2.0
drawing_context.stroke()
elif self.check_state == "partial":
drawing_context.begin_path()
drawing_context.move_to(cx - 5, cy)
drawing_context.line_to(cx + 5, cy)
drawing_context.stroke_style = "#000"
drawing_context.line_width = 2.0
drawing_context.stroke()
drawing_context.font = self.__font
drawing_context.text_align = 'left'
drawing_context.text_baseline = 'middle'
drawing_context.fill_style = self.__text_color if self.__enabled else self.__text_disabled_color
drawing_context.fill_text(self.__text, tx, cy + 1)
super()._repaint(drawing_context)
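# Illustrative usage sketch, not part of the original module. Clicking
# toggles between "checked" and "unchecked" ("partial" is only reachable
# through the tristate check_state setter) and fires both callbacks.
#     check_box = CheckBoxCanvasItem("Auto Update")
#     check_box.on_checked_changed = lambda checked: print("checked:", checked)
#     check_box.on_check_state_changed = lambda state: print("state:", state)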
class EmptyCanvasItem(AbstractCanvasItem):
""" Canvas item to act as a placeholder (spacer or stretch). """
def __init__(self) -> None:
super().__init__()
class RadioButtonGroup:
def __init__(self, buttons: typing.Sequence[BitmapButtonCanvasItem]) -> None:
self.__buttons = copy.copy(buttons)
self.__current_index = 0
self.on_current_index_changed: typing.Optional[typing.Callable[[int], None]] = None
for index, button in enumerate(self.__buttons):
button.checked = index == self.__current_index
for index, button in enumerate(self.__buttons):
def current_index_changed(index: int) -> None:
self.__current_index = index
for index, button in enumerate(self.__buttons):
button.checked = index == self.__current_index
if callable(self.on_current_index_changed):
self.on_current_index_changed(self.__current_index)
button.on_button_clicked = functools.partial(current_index_changed, index)
def close(self) -> None:
for button in self.__buttons:
button.on_button_clicked = None
self.on_current_index_changed = None
@property
def current_index(self) -> int:
return self.__current_index
@current_index.setter
def current_index(self, value: int) -> None:
self.__current_index = value
for index, button in enumerate(self.__buttons):
button.checked = index == self.__current_index
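# Illustrative usage sketch, not part of the original module; the icon
# arrays are hypothetical. Clicking any button re-checks the whole group
# and reports the newly selected index.
#     buttons = [BitmapButtonCanvasItem(icon) for icon in (icon_a, icon_b, icon_c)]
#     group = RadioButtonGroup(buttons)
#     group.on_current_index_changed = lambda index: print("selected", index)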
class DrawCanvasItem(AbstractCanvasItem):
def __init__(self, drawing_fn: typing.Callable[[DrawingContext.DrawingContext, Geometry.IntSize], None]) -> None:
super().__init__()
self.__drawing_fn = drawing_fn
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
canvas_size = self.canvas_size
if canvas_size:
self.__drawing_fn(drawing_context, canvas_size)
super()._repaint(drawing_context)
class DividerCanvasItem(AbstractCanvasItem):
def __init__(self, *, orientation: typing.Optional[str] = None, color: typing.Optional[str] = None):
super().__init__()
self.__orientation = orientation or "vertical"
if orientation == "vertical":
self.update_sizing(self.sizing.with_fixed_width(2))
else:
self.update_sizing(self.sizing.with_fixed_height(2))
self.__color = color or "#CCC"
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
canvas_size = self.canvas_size
if canvas_size:
with drawing_context.saver():
if self.__orientation == "vertical":
drawing_context.move_to(1, 0)
drawing_context.line_to(1, canvas_size.height)
else:
drawing_context.move_to(0, 1)
drawing_context.line_to(canvas_size.width, 1)
drawing_context.stroke_style = self.__color
drawing_context.stroke()
super()._repaint(drawing_context)
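# Illustrative usage sketch, not part of the original module. Both
# arguments are keyword-only; the divider fixes its own width (or height)
# to 2 pixels according to the orientation.
#     divider = DividerCanvasItem(orientation="vertical", color="#888")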
class ProgressBarCanvasItem(AbstractCanvasItem):
def __init__(self) -> None:
super().__init__()
self.__enabled = True
self.__progress = 0.0 # 0.0 to 1.0
self.update_sizing(self.sizing.with_fixed_height(4))
@property
def enabled(self) -> bool:
return self.__enabled
@enabled.setter
def enabled(self, value: bool) -> None:
self.__enabled = value
self.update()
@property
def progress(self) -> float:
return self.__progress
@progress.setter
def progress(self, value: float) -> None:
self.__progress = min(max(value, 0.0), 1.0)
self.update()
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
canvas_bounds = self.canvas_bounds
if canvas_bounds:
canvas_size = canvas_bounds.size
canvas_bounds_center = canvas_bounds.center
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.rect(0, 0, canvas_size.width, canvas_size.height)
drawing_context.close_path()
drawing_context.stroke_style = "#CCC"
drawing_context.fill_style = "#CCC"
drawing_context.fill()
drawing_context.stroke()
if canvas_size.width * self.progress >= 1:
drawing_context.begin_path()
drawing_context.rect(0, 0, canvas_size.width * self.progress, canvas_size.height)
drawing_context.close_path()
drawing_context.stroke_style = "#6AB"
drawing_context.fill_style = "#6AB"
drawing_context.fill()
drawing_context.stroke()
if canvas_size.height >= 16 and canvas_size.width * self.progress >= 50: # TODO: use font metrics to find length of text
progress_text = str(round(self.progress * 100)) + "%"
drawing_context.begin_path()
drawing_context.font = "12px sans-serif"
drawing_context.text_align = 'center'
drawing_context.text_baseline = 'middle'
drawing_context.fill_style = "#fff"
drawing_context.line_width = 2
drawing_context.fill_text(progress_text, (canvas_size.width - 6) * self.progress - 19, canvas_bounds_center.y + 1)
drawing_context.fill()
drawing_context.close_path()
super()._repaint(drawing_context)
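# Illustrative usage sketch, not part of the original module. The progress
# setter clamps to [0.0, 1.0], and the bar renders a percentage label once
# the canvas is large enough to hold it.
#     progress_bar = ProgressBarCanvasItem()
#     progress_bar.progress = 0.37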
class TimestampCanvasItem(AbstractCanvasItem):
def __init__(self) -> None:
super().__init__()
self.__timestamp: typing.Optional[datetime.datetime] = None
@property
def timestamp(self) -> typing.Optional[datetime.datetime]:
return self.__timestamp
@timestamp.setter
def timestamp(self, value: typing.Optional[datetime.datetime]) -> None:
self.__timestamp = value
# self.update()
def _repaint_if_needed(self, drawing_context: DrawingContext.DrawingContext, *, immediate: bool = False) -> None:
if self.__timestamp:
drawing_context.timestamp(self.__timestamp.isoformat())
super()._repaint(drawing_context)
def load_rgba_data_from_bytes(b: typing.ByteString, format: typing.Optional[str] = None) -> typing.Optional[DrawingContext.RGBA32Type]:
    image_rgba = None
    image_argb = imageio.imread(b, format)
    if image_argb is not None:
        # swap the first and third channels, keeping the second and fourth in place
        image_rgba = numpy.zeros_like(image_argb)
        image_rgba[:, :, 0] = image_argb[:, :, 2]
        image_rgba[:, :, 1] = image_argb[:, :, 1]
        image_rgba[:, :, 2] = image_argb[:, :, 0]
        image_rgba[:, :, 3] = image_argb[:, :, 3]
        # view the four uint8 channels of each pixel as a single packed uint32
        image_rgba = image_rgba.view(numpy.uint32).reshape(image_rgba.shape[:-1])
    return image_rgba
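# Illustrative usage sketch, not part of the original module (the file name
# is hypothetical). The returned packed uint32 data is suitable for
# BitmapCell, BitmapCanvasItem, or BitmapButtonCanvasItem.
#     with open("icon.png", "rb") as f:
#         rgba = load_rgba_data_from_bytes(f.read(), "png")
#     button = BitmapButtonCanvasItem(rgba)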
145247), 'nion.utils.Geometry.IntSize', 'Geometry.IntSize', ([], {'width': 'thumb_length', 'height': 'canvas_size.height'}), '(width=thumb_length, height=canvas_size.height)\n', (145200, 145247), False, 'from nion.utils import Geometry\n'), ((163811, 163830), 'nion.utils.Geometry.IntPoint', 'Geometry.IntPoint', ([], {}), '()\n', (163828, 163830), False, 'from nion.utils import Geometry\n'), ((56231, 56303), 'nion.utils.Geometry.fit_to_aspect_ratio', 'Geometry.fit_to_aspect_ratio', (['rect', 'layout_sizing.preferred_aspect_ratio'], {}), '(rect, layout_sizing.preferred_aspect_ratio)\n', (56259, 56303), False, 'from nion.utils import Geometry\n'), ((150047, 150118), 'nion.utils.Geometry.IntPoint', 'Geometry.IntPoint', ([], {'x': 'tracking_content_offset[1]', 'y': 'new_content_offset_v'}), '(x=tracking_content_offset[1], y=new_content_offset_v)\n', (150064, 150118), False, 'from nion.utils import Geometry\n'), ((150593, 150664), 'nion.utils.Geometry.IntPoint', 'Geometry.IntPoint', ([], {'x': 'new_content_offset_h', 'y': 'tracking_content_offset[0]'}), '(x=new_content_offset_h, y=tracking_content_offset[0])\n', (150610, 150664), False, 'from nion.utils import Geometry\n'), ((153307, 153330), 'nion.utils.Geometry.IntPoint', 'Geometry.IntPoint', (['(0)', '(0)'], {}), '(0, 0)\n', (153324, 153330), False, 'from nion.utils import Geometry\n')]
|
from aoc_wim.aoc2019 import q12
test10 = """\
<x=-1, y=0, z=2>
<x=2, y=-10, z=-7>
<x=4, y=-8, z=8>
<x=3, y=5, z=-1>"""
test100 = """\
<x=-8, y=-10, z=0>
<x=5, y=5, z=10>
<x=2, y=-7, z=3>
<x=9, y=-8, z=-3>"""
def test_total_energy_after_10_steps():
assert q12.simulate(test10, n=10) == 179
def test_total_energy_after_100_steps():
assert q12.simulate(test100, n=100) == 1940
|
[
"aoc_wim.aoc2019.q12.simulate"
] |
[((264, 290), 'aoc_wim.aoc2019.q12.simulate', 'q12.simulate', (['test10'], {'n': '(10)'}), '(test10, n=10)\n', (276, 290), False, 'from aoc_wim.aoc2019 import q12\n'), ((352, 380), 'aoc_wim.aoc2019.q12.simulate', 'q12.simulate', (['test100'], {'n': '(100)'}), '(test100, n=100)\n', (364, 380), False, 'from aoc_wim.aoc2019 import q12\n')]
|
import requests
import smtplib
import time
from bs4 import BeautifulSoup
URL = 'https://www.amazon.de/PowerColor-Radeon-5700-8192MB-PCI/dp/B07WT15P2P/ref=sr_1_8?__mk_de_DE=%C3%85M%C3%85%C5%BD%C3%95%C3%91&keywords=PowerColor+Radeon+RX+5700+Red+Dragon+8GB&qid=1582975984&sr=8-8#customerReviews'
def send_mail():
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login('<EMAIL>', '<PASSWORD>')
subject = 'Price alert'
body = 'Price Alert. Check the Amazon link: https://www.amazon.de/PowerColor-Radeon-5700-8192MB-PCI/dp/B07WT15P2P/ref=sr_1_8?__mk_de_DE=%C3%85M%C3%85%C5%BD%C3%95%C3%91&keywords=PowerColor+Radeon+RX+5700+Red+Dragon+8GB&qid=1582975984&sr=8-8#customerReviews'
msg = f'Subject: {subject} \n\n{body}'
server.sendmail(
'<EMAIL>',
'<EMAIL>',
msg
)
print('Email Alert has been send.')
server.quit()
def check_price(url, prefered_price):
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36'}
    page = requests.get(url, headers=headers)
soup = BeautifulSoup(page.content, 'html.parser')
title = soup.find(id='productTitle').get_text()
price = soup.find(id='priceblock_ourprice').get_text()
    converted_price = float(price[0:3])  # assumes a three-digit euro price such as "379,00 €"
if converted_price < prefered_price:
send_mail()
while True:
check_price(URL, 350)
time.sleep(6)
|
[
"requests.get",
"time.sleep",
"smtplib.SMTP",
"bs4.BeautifulSoup"
] |
[((325, 360), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.gmail.com"""', '(587)'], {}), "('smtp.gmail.com', 587)\n", (337, 360), False, 'import smtplib\n'), ((1130, 1164), 'requests.get', 'requests.get', (['URL'], {'headers': 'headers'}), '(URL, headers=headers)\n', (1142, 1164), False, 'import requests\n'), ((1176, 1218), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page.content', '"""html.parser"""'], {}), "(page.content, 'html.parser')\n", (1189, 1218), False, 'from bs4 import BeautifulSoup\n'), ((1475, 1488), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (1485, 1488), False, 'import time\n')]
|
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.decomposition import PCA, IncrementalPCA
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC
from sklearn.grid_search import GridSearchCV
from sklearn import cross_validation as cv
from sklearn import metrics
from time import time
import pickle
import warnings
from text_utilities import *
# Ignore deprecation warnings.
warnings.filterwarnings("ignore")
# Script Precondition:
# Need to have data.pkl and labels.pkl available.
# Run load_data.py to generate these files if haven't already done so.
# Load data.
data = pickle.load(open('data_all.pkl', "rb"))
labels = pickle.load(open('labels_all.pkl', "rb"))
# Transform the data - vectorise and apply tf-idf.
data = CountVectorizer().fit_transform(data)
tf_idf_transform = TfidfTransformer(use_idf=True).fit(data)
data = tf_idf_transform.transform(data)
print("\nData frame shape:")
print(data.shape)
# Dimensionality reduction
print("\nRunning dimensionality reduction (custom)")
data = feature_reduction(data, labels, 0.85)
print("\nData frame shape after dimensionality reduction (custom):")
print(data.shape)
# Dimensionality reduction
print("\nRunning dimensionality reduction (PCA)")
pca = IncrementalPCA(n_components=50, batch_size=100)
pca.fit(data.toarray())
data = pca.transform(data.toarray())
print("\nPCA explained variance:")
print(pca.explained_variance_ratio_)
print(sum(pca.explained_variance_ratio_))
print("\nData frame shape after dimensionality reduction (custom):")
print(data.shape)
# Splitting the data up into 70% training and 30% testing sets.
x_train, x_test, y_train, y_test = cv.train_test_split(data, labels, test_size=0.30, random_state=1)
# Test Logistical Regression classifier
print("\nRunning Logistic Regression classifier and tuning using grid search..")
t0 = time()
# Grid search for best LR parameters
cost_range = [1e-3, 0.1, 1, 100]
parameters = dict(C=cost_range)
grid = GridSearchCV(LogisticRegression(), param_grid=parameters, cv=2, n_jobs=7, verbose=3)
grid.fit(x_train, y_train)
print("\nThe best LR parameters are %s with a score of %0.2f"
% (grid.best_params_, grid.best_score_))
predicted = grid.predict(x_test)
accuracy = np.mean(predicted == y_test)
print(metrics.classification_report(y_test, predicted))
print(metrics.confusion_matrix(y_test, predicted))
t1 = time()
print("\nLR classification time: {} sec".format(round((t1-t0), 2)))
print("\nRunning SVM classifier and tuning using grid search..\n")
t0 = time()
# Grid search for best SVM parameters
cost_range = [0.1, 1, 10, 100, 1000]
gamma_range = [1e-5, 0.1, 1, 10, 100]
parameters = dict(gamma=gamma_range, C=cost_range)
grid = GridSearchCV(SVC(), param_grid=parameters, cv=2, n_jobs=7, verbose=3)
grid.fit(x_train, y_train)
print("\nThe best SVM parameters are %s with a score of %0.2f"
% (grid.best_params_, grid.best_score_))
print("\nClassification time: {} sec".format(round((t1-t0), 2)))
predicted = grid.predict(x_test)
accuracy = np.mean(predicted == y_test)
print(metrics.classification_report(y_test, predicted))
print(metrics.confusion_matrix(y_test, predicted))
t1 = time()
print("\nSVM classification time: {} sec".format(round((t1-t0), 2)))
print("\nRunning MLP classifier and tuning using grid search..\n")
t0 = time()
# Grid search for best MLP parameters
alpha_range = [1e-5, 1e-3, 0.1, 10, 100]
layer1_range = [5, 10, 30, 40, 50]
layer2_range = [5, 10, 30, 40, 50]
layer3_range = [5, 10, 30, 40, 50]
hidden_layer_range = np.vstack(np.meshgrid(layer1_range, layer2_range, layer3_range)).reshape(3, -1).T
hidden_layer_range = [tuple(i) for i in hidden_layer_range]
parameters = dict(solver=['lbfgs'], alpha=alpha_range,
hidden_layer_sizes=hidden_layer_range, random_state=[1])
grid = GridSearchCV(MLPClassifier(), param_grid=parameters, cv=2, n_jobs=7, verbose=3)
grid.fit(x_train, y_train)
print("\nThe best MLP parameters are %s with a score of %0.2f"
% (grid.best_params_, grid.best_score_))
print("\nClassification time: {} sec".format(round((t1-t0), 2)))
predicted = grid.predict(x_test)
accuracy = np.mean(predicted == y_test)
print(metrics.classification_report(y_test, predicted))
print(metrics.confusion_matrix(y_test, predicted))
t1 = time()
print("\nMLP classification time: {} sec".format(round((t1-t0), 2)))
|
[
"sklearn.cross_validation.train_test_split",
"sklearn.feature_extraction.text.CountVectorizer",
"warnings.filterwarnings",
"sklearn.decomposition.IncrementalPCA",
"sklearn.metrics.classification_report",
"time.time",
"sklearn.linear_model.LogisticRegression",
"sklearn.neural_network.MLPClassifier",
"sklearn.svm.SVC",
"sklearn.metrics.confusion_matrix",
"sklearn.feature_extraction.text.TfidfTransformer"
] |
[((533, 566), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (556, 566), False, 'import warnings\n'), ((1367, 1414), 'sklearn.decomposition.IncrementalPCA', 'IncrementalPCA', ([], {'n_components': '(50)', 'batch_size': '(100)'}), '(n_components=50, batch_size=100)\n', (1381, 1414), False, 'from sklearn.decomposition import PCA, IncrementalPCA\n'), ((1805, 1869), 'sklearn.cross_validation.train_test_split', 'cv.train_test_split', (['data', 'labels'], {'test_size': '(0.3)', 'random_state': '(1)'}), '(data, labels, test_size=0.3, random_state=1)\n', (1824, 1869), True, 'from sklearn import cross_validation as cv\n'), ((1998, 2004), 'time.time', 'time', ([], {}), '()\n', (2002, 2004), False, 'from time import time\n'), ((2525, 2531), 'time.time', 'time', ([], {}), '()\n', (2529, 2531), False, 'from time import time\n'), ((2673, 2679), 'time.time', 'time', ([], {}), '()\n', (2677, 2679), False, 'from time import time\n'), ((3313, 3319), 'time.time', 'time', ([], {}), '()\n', (3317, 3319), False, 'from time import time\n'), ((3462, 3468), 'time.time', 'time', ([], {}), '()\n', (3466, 3468), False, 'from time import time\n'), ((4426, 4432), 'time.time', 'time', ([], {}), '()\n', (4430, 4432), False, 'from time import time\n'), ((2128, 2148), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (2146, 2148), False, 'from sklearn.linear_model import LogisticRegression\n'), ((2418, 2466), 'sklearn.metrics.classification_report', 'metrics.classification_report', (['y_test', 'predicted'], {}), '(y_test, predicted)\n', (2447, 2466), False, 'from sklearn import metrics\n'), ((2474, 2517), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_test', 'predicted'], {}), '(y_test, predicted)\n', (2498, 2517), False, 'from sklearn import metrics\n'), ((2865, 2870), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (2868, 2870), False, 'from sklearn.svm import SVC\n'), ((3206, 3254), 'sklearn.metrics.classification_report', 'metrics.classification_report', (['y_test', 'predicted'], {}), '(y_test, predicted)\n', (3235, 3254), False, 'from sklearn import metrics\n'), ((3262, 3305), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_test', 'predicted'], {}), '(y_test, predicted)\n', (3286, 3305), False, 'from sklearn import metrics\n'), ((3968, 3983), 'sklearn.neural_network.MLPClassifier', 'MLPClassifier', ([], {}), '()\n', (3981, 3983), False, 'from sklearn.neural_network import MLPClassifier\n'), ((4319, 4367), 'sklearn.metrics.classification_report', 'metrics.classification_report', (['y_test', 'predicted'], {}), '(y_test, predicted)\n', (4348, 4367), False, 'from sklearn import metrics\n'), ((4375, 4418), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_test', 'predicted'], {}), '(y_test, predicted)\n', (4399, 4418), False, 'from sklearn import metrics\n'), ((883, 900), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {}), '()\n', (898, 900), False, 'from sklearn.feature_extraction.text import CountVectorizer\n'), ((940, 970), 'sklearn.feature_extraction.text.TfidfTransformer', 'TfidfTransformer', ([], {'use_idf': '(True)'}), '(use_idf=True)\n', (956, 970), False, 'from sklearn.feature_extraction.text import TfidfTransformer\n')]
|
# String pattern matches used in Functional Owl
# The following productions are taken from ShExJ.py from the ShExJSG project
from typing import Union, Any
from funowl.terminals.Patterns import String, Pattern
class HEX(String):
pattern = Pattern(r'[0-9]|[A-F]|[a-f]')
python_type = Union[int, str]
class UCHAR(String):
pattern = Pattern(r'\\\\u({HEX})({HEX})({HEX})({HEX})|\\\\U({HEX})({HEX})({HEX})({HEX})({HEX})({HEX})({HEX})({HEX})'.format(HEX=HEX.pattern))
class IRIREF(String):
pattern = Pattern(r'([^\u0000-\u0020\u005C\u007B\u007D<>"|^`]|({UCHAR}))*'.format(UCHAR=UCHAR.pattern))
class PN_CHARS_BASE(String):
pattern = Pattern(r'[A-Z]|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|[\U00010000-\U000EFFFF]')
class PN_CHARS_U(String):
pattern = Pattern(r'({PN_CHARS_BASE})|_'.format(PN_CHARS_BASE=PN_CHARS_BASE.pattern))
class PN_CHARS(String):
pattern = Pattern(r'({PN_CHARS_U})|\-|[0-9]|\\u00B7|[\u0300-\u036F]|[\u203F-\u2040]'.format(PN_CHARS_U=PN_CHARS_U.pattern))
class PNAME_NS(String):
pattern = Pattern(r'({PN_CHARS_BASE})((({PN_CHARS})|\.)*({PN_CHARS}))?'
.format(PN_CHARS=PN_CHARS.pattern, PN_CHARS_BASE=PN_CHARS_BASE.pattern))
class OPT_PNAME_NS(String):
pattern = Pattern(r'(({PN_CHARS_BASE})((({PN_CHARS})|\.)*({PN_CHARS}))?)?'
.format(PN_CHARS=PN_CHARS.pattern, PN_CHARS_BASE=PN_CHARS_BASE.pattern))
class PNAME_LOCAL(String):
pattern = Pattern(r'(({PN_CHARS_U})|[0-9])((({PN_CHARS})|\.)*({PN_CHARS}))?'.format(PN_CHARS_U=PN_CHARS_U.pattern, PN_CHARS=PN_CHARS.pattern))
class BLANK_NODE_LABEL(String):
pattern = Pattern(r'_:(({PN_CHARS_U})|[0-9])((({PN_CHARS})|\.)*({PN_CHARS}))?'.format(PN_CHARS=PN_CHARS.pattern, PN_CHARS_U=PN_CHARS_U.pattern))
class PNAME_LN(String):
pattern = Pattern(r'({PNAME_NS})?:{PNAME_LOCAL}'.format(PNAME_NS=PNAME_NS.pattern, PNAME_LOCAL=PNAME_LOCAL.pattern))
class QUOTED_STRING(String):
pattern = Pattern(r'^".*"$|.*')
python_type = Any
|
[
"funowl.terminals.Patterns.Pattern"
] |
[((246, 274), 'funowl.terminals.Patterns.Pattern', 'Pattern', (['"""[0-9]|[A-F]|[a-f]"""'], {}), "('[0-9]|[A-F]|[a-f]')\n", (253, 274), False, 'from funowl.terminals.Patterns import String, Pattern\n'), ((656, 912), 'funowl.terminals.Patterns.Pattern', 'Pattern', (['"""[A-Z]|[a-z]|[\\\\u00C0-\\\\u00D6]|[\\\\u00D8-\\\\u00F6]|[\\\\u00F8-\\\\u02FF]|[\\\\u0370-\\\\u037D]|[\\\\u037F-\\\\u1FFF]|[\\\\u200C-\\\\u200D]|[\\\\u2070-\\\\u218F]|[\\\\u2C00-\\\\u2FEF]|[\\\\u3001-\\\\uD7FF]|[\\\\uF900-\\\\uFDCF]|[\\\\uFDF0-\\\\uFFFD]|[\\\\U00010000-\\\\U000EFFFF]"""'], {}), "(\n '[A-Z]|[a-z]|[\\\\u00C0-\\\\u00D6]|[\\\\u00D8-\\\\u00F6]|[\\\\u00F8-\\\\u02FF]|[\\\\u0370-\\\\u037D]|[\\\\u037F-\\\\u1FFF]|[\\\\u200C-\\\\u200D]|[\\\\u2070-\\\\u218F]|[\\\\u2C00-\\\\u2FEF]|[\\\\u3001-\\\\uD7FF]|[\\\\uF900-\\\\uFDCF]|[\\\\uFDF0-\\\\uFFFD]|[\\\\U00010000-\\\\U000EFFFF]'\n )\n", (663, 912), False, 'from funowl.terminals.Patterns import String, Pattern\n'), ((2102, 2122), 'funowl.terminals.Patterns.Pattern', 'Pattern', (['"""^".*"$|.*"""'], {}), '(\'^".*"$|.*\')\n', (2109, 2122), False, 'from funowl.terminals.Patterns import String, Pattern\n')]
|
import pcapkit
import json
from pymongofunct import insert_data
def pcaptojson(file) -> dict:
    return pcapkit.extract(fin=file, nofile=True, format='json', auto=False,
                           engine='default', extension=False, layer='Transport',
                           tcp=True, ip=True, strict=True, store=False)
def pcapparse(obj) -> dict:
main = {}
data = {}
pcap_dict = obj.info.info2dict()
try:
time = (pcap_dict['time_epoch'])
main["time_epoc"] = time
except KeyError:
pass
try:
        macdstdirt = pcap_dict['ethernet']['dst']
        # strip list-formatting characters from the stringified MAC address
        data["macdst"] = ''.join(c for c in macdstdirt if c not in "'[], ")
except KeyError:
pass
try:
connecttype = (pcap_dict['ethernet']['type'])
data["type"] = str(connecttype)
except KeyError:
pass
try:
        macsrcdirt = pcap_dict['ethernet']['src']
        # strip list-formatting characters from the stringified MAC address
        data["macsrc"] = ''.join(c for c in macsrcdirt if c not in "'[], ")
except KeyError:
pass
try:
tcpdstport = (pcap_dict['ethernet']['ipv4']['tcp']['dstport'])
data["tcpdstport"] = tcpdstport
except KeyError:
pass
try:
tcpsrcport = (pcap_dict['ethernet']['ipv4']['tcp']['srcport'])
data["tcpsrcport"] = tcpsrcport
except KeyError:
pass
try:
udpdstport = (pcap_dict['ethernet']['ipv4']['udp']['dstport'])
data["udpdstport"] = udpdstport
except KeyError:
pass
try:
udpsrcport = (pcap_dict['ethernet']['ipv4']['udp']['srcport'])
data["udpsrcport"] = udpsrcport
except KeyError:
pass
try:
ipv4proto = (pcap_dict['ethernet']['ipv4']['proto'])
data["ipv4proto"] = str(ipv4proto)
except KeyError:
pass
try:
ipv4src = (pcap_dict['ethernet']['ipv4']['src'])
data["ipv4src"] = str(ipv4src)
except KeyError:
pass
try:
ipv4dst = (pcap_dict['ethernet']['ipv4']['dst'])
data["ipv4dst"] = str(ipv4dst)
except KeyError:
pass
try:
ipv6proto = (pcap_dict['ethernet']['ipv6']['proto'])
data["ipv6proto"] = str(ipv6proto)
except KeyError:
pass
try:
ipv6src = (pcap_dict['ethernet']['ipv6']['src'])
data["ipv6src"] = str(ipv6src)
except KeyError:
pass
try:
ipv6dst = (pcap_dict['ethernet']['ipv6']['dst'])
data["ipv6dst"] = str(ipv6dst)
except KeyError:
pass
try:
ipv6tcpdstport = (pcap_dict['ethernet']['ipv6']['tcp']['dstport'])
data["ipv6tcpdstport"] = ipv6tcpdstport
except KeyError:
pass
try:
ipv6tcpsrcport = (pcap_dict['ethernet']['ipv6']['tcp']['srcport'])
data["ipv6tcpsrcport"] = ipv6tcpsrcport
except KeyError:
pass
try:
ipv6udpdstport = (pcap_dict['ethernet']['ipv6']['udp']['dstport'])
data["ipv6udpdstport"] = ipv6udpdstport
except KeyError:
pass
try:
ipv6udpsrcport = (pcap_dict['ethernet']['ipv6']['udp']['srcport'])
data["ipv6udpsrcport"] = ipv6udpsrcport
except KeyError:
pass
main["data"] = data
insert_data('localhost','oracl','pcaps',main)
return main
def pcaplist(jsondict) -> list:
final = []
for obj in jsondict:
final.append(pcapparse(obj))
return final
def pcapwork(filename):
jsondict = pcaptojson(filename)
return pcaplist(jsondict)
|
[
"pymongofunct.insert_data",
"pcapkit.extract"
] |
[((106, 279), 'pcapkit.extract', 'pcapkit.extract', ([], {'fin': 'file', 'nofile': '(True)', 'format': '"""json"""', 'auto': '(False)', 'engine': '"""deafult"""', 'extension': '(False)', 'layer': '"""Transport"""', 'tcp': '(True)', 'ip': '(True)', 'strict': '(True)', 'store': '(False)'}), "(fin=file, nofile=True, format='json', auto=False, engine=\n 'deafult', extension=False, layer='Transport', tcp=True, ip=True,\n strict=True, store=False)\n", (121, 279), False, 'import pcapkit\n'), ((3508, 3556), 'pymongofunct.insert_data', 'insert_data', (['"""localhost"""', '"""oracl"""', '"""pcaps"""', 'main'], {}), "('localhost', 'oracl', 'pcaps', main)\n", (3519, 3556), False, 'from pymongofunct import insert_data\n')]
|
import torch
import numpy as np
from torchwi.utils.ctensor import ca2rt, rt2ca
class FreqL2Loss(torch.autograd.Function):
@staticmethod
def forward(ctx, frd, true):
# resid: (nrhs, 2*nx) 2 for real and imaginary
resid = frd - true
resid_c = rt2ca(resid)
l2 = np.real(0.5*np.sum(resid_c*np.conjugate(resid_c)))
ctx.save_for_backward(resid)
return torch.tensor(l2)
@staticmethod
def backward(ctx, grad_output):
resid, = ctx.saved_tensors
grad_input = ca2rt(np.conjugate(rt2ca(resid)))
return grad_input, None
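# Usage sketch (hypothetical shapes): frd and true are (nrhs, 2*nx) real tensors
# whose real/imaginary layout is handled by rt2ca/ca2rt; custom autograd
# Functions are invoked via .apply(), which wires up backward() for gradients.
if __name__ == "__main__":
    frd = torch.randn(4, 8, requires_grad=True)
    true = torch.randn(4, 8)
    loss = FreqL2Loss.apply(frd, true)
    loss.backward()
    print(loss.item(), frd.grad.shape)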
|
[
"numpy.conjugate",
"torchwi.utils.ctensor.rt2ca",
"torch.tensor"
] |
[((275, 287), 'torchwi.utils.ctensor.rt2ca', 'rt2ca', (['resid'], {}), '(resid)\n', (280, 287), False, 'from torchwi.utils.ctensor import ca2rt, rt2ca\n'), ((404, 420), 'torch.tensor', 'torch.tensor', (['l2'], {}), '(l2)\n', (416, 420), False, 'import torch\n'), ((551, 563), 'torchwi.utils.ctensor.rt2ca', 'rt2ca', (['resid'], {}), '(resid)\n', (556, 563), False, 'from torchwi.utils.ctensor import ca2rt, rt2ca\n'), ((328, 349), 'numpy.conjugate', 'np.conjugate', (['resid_c'], {}), '(resid_c)\n', (340, 349), True, 'import numpy as np\n')]
|
from asyncio import sleep, wait, get_event_loop, ensure_future
async def work(t):
await sleep(t)
print('time {}'.format(t))
return t
def on_done(t):
print(t.result())
async def main():
    # coroutines
coroutines = []
for i in range(2):
c = work(i)
print(type(c))
coroutines.append(c)
await wait(coroutines)
    # tasks
tasks = []
for i in range(10):
c = work(i)
t = ensure_future(c)
t.add_done_callback(on_done)
print(type(t))
tasks.append(t)
await wait(tasks)
loop = get_event_loop()
loop.run_until_complete(main())
loop.close()
|
[
"asyncio.ensure_future",
"asyncio.get_event_loop",
"asyncio.sleep",
"asyncio.wait"
] |
[((569, 585), 'asyncio.get_event_loop', 'get_event_loop', ([], {}), '()\n', (583, 585), False, 'from asyncio import sleep, wait, get_event_loop, ensure_future\n'), ((93, 101), 'asyncio.sleep', 'sleep', (['t'], {}), '(t)\n', (98, 101), False, 'from asyncio import sleep, wait, get_event_loop, ensure_future\n'), ((338, 354), 'asyncio.wait', 'wait', (['coroutines'], {}), '(coroutines)\n', (342, 354), False, 'from asyncio import sleep, wait, get_event_loop, ensure_future\n'), ((436, 452), 'asyncio.ensure_future', 'ensure_future', (['c'], {}), '(c)\n', (449, 452), False, 'from asyncio import sleep, wait, get_event_loop, ensure_future\n'), ((547, 558), 'asyncio.wait', 'wait', (['tasks'], {}), '(tasks)\n', (551, 558), False, 'from asyncio import sleep, wait, get_event_loop, ensure_future\n')]
|
import csv
from datetime import datetime
aday,bday=[],[]
today = datetime.today().strftime('%m/%d')
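# Expected data.csv layout (hypothetical values); Birthday and Anniversary are
# compared against today's '%m/%d' string:
#   Name,E-Mail,Birthday,Anniversary
#   Ada,ada@example.com,12/10,06/01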
with open('data.csv', newline='') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
if today in row['Birthday']:
bday.append([row['Name'], row['E-Mail'],row['Birthday']])
if today in row['Anniversary']:
aday.append([row['Name'], row['E-Mail'],row['Anniversary']])
for i in aday:
print(('Hi! %s')%(i[0]))
|
[
"csv.DictReader",
"datetime.datetime.today"
] |
[((158, 181), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {}), '(csvfile)\n', (172, 181), False, 'import csv\n'), ((66, 82), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (80, 82), False, 'from datetime import datetime\n')]
|
import pytest
from pytest_lazyfixture import lazy_fixture
# Fixtures must be visible for lazy_fixture() calls.
from .fixtures import * # noqa
@pytest.fixture(
params=(
lazy_fixture('random_building_block'),
lazy_fixture('random_topology_graph'),
lazy_fixture('similar_building_block'),
lazy_fixture('random_mutator'),
),
)
def case_data(request):
return request.param
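# Hypothetical consumer: a test that accepts ``case_data`` runs once per lazy
# fixture listed in the params above.
def test_case_data_exists(case_data):
    assert case_data is not None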
|
[
"pytest_lazyfixture.lazy_fixture"
] |
[((184, 221), 'pytest_lazyfixture.lazy_fixture', 'lazy_fixture', (['"""random_building_block"""'], {}), "('random_building_block')\n", (196, 221), False, 'from pytest_lazyfixture import lazy_fixture\n'), ((231, 268), 'pytest_lazyfixture.lazy_fixture', 'lazy_fixture', (['"""random_topology_graph"""'], {}), "('random_topology_graph')\n", (243, 268), False, 'from pytest_lazyfixture import lazy_fixture\n'), ((278, 316), 'pytest_lazyfixture.lazy_fixture', 'lazy_fixture', (['"""similar_building_block"""'], {}), "('similar_building_block')\n", (290, 316), False, 'from pytest_lazyfixture import lazy_fixture\n'), ((326, 356), 'pytest_lazyfixture.lazy_fixture', 'lazy_fixture', (['"""random_mutator"""'], {}), "('random_mutator')\n", (338, 356), False, 'from pytest_lazyfixture import lazy_fixture\n')]
|
import unittest
import sys
import os
import logging
from dotenv import load_dotenv
# load env-vars from .env file if there is one
basedir = os.path.abspath(os.path.dirname(__file__))
test_env = os.path.join(basedir, '.env')
if os.path.isfile(test_env):
load_dotenv(dotenv_path=os.path.join(basedir, '.env'), verbose=True)
import server.test.modelstest as models
test_classes = [
models.ModelsTest,
]
# set up all logging to DEBUG (cause we're running tests here!)
logging.basicConfig(level=logging.DEBUG)
log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log_handler = logging.FileHandler(os.path.join('logs', 'test.log'))
log_handler.setFormatter(log_formatter)
# now run all the tests
suites = [unittest.TestLoader().loadTestsFromTestCase(test_class) for test_class in test_classes]
if __name__ == "__main__":
suite = unittest.TestSuite(suites)
test_result = unittest.TextTestRunner(verbosity=2).run(suite)
if not test_result.wasSuccessful():
sys.exit(1)
|
[
"unittest.TextTestRunner",
"logging.basicConfig",
"unittest.TestSuite",
"os.path.dirname",
"logging.Formatter",
"os.path.isfile",
"unittest.TestLoader",
"os.path.join",
"sys.exit"
] |
[((195, 224), 'os.path.join', 'os.path.join', (['basedir', '""".env"""'], {}), "(basedir, '.env')\n", (207, 224), False, 'import os\n'), ((228, 252), 'os.path.isfile', 'os.path.isfile', (['test_env'], {}), '(test_env)\n', (242, 252), False, 'import os\n'), ((476, 516), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (495, 516), False, 'import logging\n'), ((533, 606), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (550, 606), False, 'import logging\n'), ((157, 182), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (172, 182), False, 'import os\n'), ((641, 673), 'os.path.join', 'os.path.join', (['"""logs"""', '"""test.log"""'], {}), "('logs', 'test.log')\n", (653, 673), False, 'import os\n'), ((878, 904), 'unittest.TestSuite', 'unittest.TestSuite', (['suites'], {}), '(suites)\n', (896, 904), False, 'import unittest\n'), ((1019, 1030), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1027, 1030), False, 'import sys\n'), ((282, 311), 'os.path.join', 'os.path.join', (['basedir', '""".env"""'], {}), "(basedir, '.env')\n", (294, 311), False, 'import os\n'), ((750, 771), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (769, 771), False, 'import unittest\n'), ((923, 959), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (946, 959), False, 'import unittest\n')]
|
# -*- coding: utf-8 -*-
"""Encapsulates functions which handle credentials.
"""
from blrequests.data_definitions import Credentials
import subprocess
import configparser
import os.path
CONFIG_FILE = ".blrequestsrc"
CONFIG_FILE_EXISTS = os.path.exists(CONFIG_FILE)
def fetch_credentials() -> Credentials:
"""Produces a Credentials object based on the contents of the
CONFIG_FILE or, alternatively, interactively.
"""
if CONFIG_FILE_EXISTS:
return parse_config_file(CONFIG_FILE)
else:
return get_credentials_interactively()
def get_pass_output(parameter: str) -> str:
"""consumes a parameter for the GNU password manager PASS and
produces the corresponding output of that program.
"""
return subprocess.run(
["pass", parameter], capture_output=True, text=True
).stdout.strip()
def parse_config_file(filepath: str) -> Credentials:
"""Produces a Credentials object based on the contents of a config
file.
"""
config = configparser.ConfigParser()
config.read(filepath)
print(config)
print([config["Authentication"][option] for option in config["Authentication"]])
username, password, passeval = [
config["Authentication"][option] for option in config["Authentication"]
]
if passeval:
password = get_pass_output(passeval)
return (username, password)
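# Example CONFIG_FILE contents this parser expects (hypothetical values; the
# three options are unpacked in file order, and a non-empty passeval replaces
# password with the output of the ``pass`` password manager):
#   [Authentication]
#   username = alice
#   password =
#   passeval = bugzilla/alice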
def get_credentials_interactively() -> Credentials:
""" Gets credentials for the bl interactively
"""
return ("placeholder-user", "placeholder-pass")
|
[
"subprocess.run",
"configparser.ConfigParser"
] |
[((993, 1020), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1018, 1020), False, 'import configparser\n'), ((742, 809), 'subprocess.run', 'subprocess.run', (["['pass', parameter]"], {'capture_output': '(True)', 'text': '(True)'}), "(['pass', parameter], capture_output=True, text=True)\n", (756, 809), False, 'import subprocess\n')]
|
# coding: utf-8
import redis
from models.singleton import Singleton
from common.config import *
class DataManager:
__metaclass__ = Singleton
    redis_instance = None  # redis instance
def __init__(self):
self.redis_instance = redis.StrictRedis(REDIS_HOST, REDIS_PORT, REDIS_DB, REDIS_PASSWORD)
def save_room_info(self, room):
self.redis_instance.set("room:{0}".format(room.room_id), room.toJson())
def get_room_info(self, room_id):
return self.redis_instance.get("room:{0}".format(room_id))
def save_user_info(self, user):
self.redis_instance.set("user:{0}".format(user.userId), user.toJson())
def get_user_info(self, uuid):
return self.redis_instance.get("user:{0}".format(uuid))
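# NOTE: ``__metaclass__ = Singleton`` above is Python 2 syntax and has no effect
# on Python 3; a Py3-compatible sketch (assuming the same Singleton metaclass):
#   class DataManager(metaclass=Singleton):
#       ...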
data_manager = DataManager()
|
[
"redis.StrictRedis"
] |
[((241, 308), 'redis.StrictRedis', 'redis.StrictRedis', (['REDIS_HOST', 'REDIS_PORT', 'REDIS_DB', 'REDIS_PASSWORD'], {}), '(REDIS_HOST, REDIS_PORT, REDIS_DB, REDIS_PASSWORD)\n', (258, 308), False, 'import redis\n')]
|
""" Preprocess the ISBI data set.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2015, JHU/APL"
__license__ = "Apache 2.0"
import argparse, os.path
import numpy as np
from scipy.stats.mstats import mquantiles
import scipy.io
import emlib
def get_args():
"""Command line parameters for the 'deploy' procedure.
You will probably want to override the train/valid/test split
to better suit your problem of interest...
"""
parser = argparse.ArgumentParser()
parser.add_argument('-X', dest='dataFileName', type=str, required=True,
help='EM data file')
parser.add_argument('-Y', dest='labelsFileName', type=str, required=True,
help='Ground truth labels for X')
parser.add_argument('--train-slices', dest='trainSlices',
type=str, default='range(10)',
help='which slices to use for training')
parser.add_argument('--valid-slices', dest='validSlices',
type=str, default='range(10,20)',
help='which slices to use for validation')
parser.add_argument('--test-slices', dest='testSlices',
type=str, default='range(20,30)',
help='which slices to use for test')
parser.add_argument('--brightness-quantile', dest='brightQuant',
type=float, default=0.97,
help='top quantile for non-membrane pixels.')
parser.add_argument('--out-dir', dest='outDir',
type=str, default='./',
help='output directory')
args = parser.parse_args()
assert(args.brightQuant <= 1.0)
assert(args.brightQuant > 0)
# map strings to python objects (XXX: a cleaner way than eval)
args.trainSlices = eval(args.trainSlices)
args.validSlices = eval(args.validSlices)
args.testSlices = eval(args.testSlices)
return args
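# Example invocation (hypothetical script and file names):
#   python preprocess.py -X isbi_volume_file -Y isbi_labels_file --out-dir ./isbi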
if __name__ == "__main__":
args = get_args();
#outDir = os.path.split(args.dataFileName)[0]
if not os.path.isdir(args.outDir):
os.mkdir(args.outDir)
X = emlib.load_cube(args.dataFileName, np.uint8)
Y = emlib.load_cube(args.labelsFileName, np.uint8)
# remap Y labels from ISBI convention to membrane-vs-non-membrane
Y[Y==0] = 1; # membrane
Y[Y==255] = 0; # non-membrane
# change type of Y so can use -1 as a value.
Y = Y.astype(np.int8)
Xtrain = X[args.trainSlices,:,:]; Ytrain = Y[args.trainSlices,:,:]
Xvalid = X[args.validSlices,:,:]; Yvalid = Y[args.validSlices,:,:]
Xtest = X[args.testSlices,:,:]; Ytest = Y[args.testSlices,:,:]
# brightness thresholding
thresh = mquantiles(np.concatenate((Xtrain[Ytrain==1], Xvalid[Yvalid==1])), args.brightQuant)
pctOmitted = 100.0*np.sum(X > thresh) / np.prod(np.size(X))
print('[preprocess]: percent of pixels omitted by brightness filter: %0.2f' % pctOmitted)
Ytrain[Xtrain > thresh] = -1
Yvalid[Xvalid > thresh] = -1
Ytest[Xtest > thresh] = -1
# save results
np.save(os.path.join(args.outDir, 'Xtrain.npy'), Xtrain)
np.save(os.path.join(args.outDir, 'Ytrain.npy'), Ytrain)
np.save(os.path.join(args.outDir, 'Xvalid.npy'), Xvalid)
np.save(os.path.join(args.outDir, 'Yvalid.npy'), Yvalid)
if Xtest.size > 0:
np.save(os.path.join(args.outDir, 'Xtest.npy'), Xtest)
np.save(os.path.join(args.outDir, 'Ytest.npy'), Ytest)
# also a matlab version
scipy.io.savemat(os.path.join(args.outDir, 'Xtrain.mat'), {'Xtrain' : Xtrain})
scipy.io.savemat(os.path.join(args.outDir, 'Ytrain.mat'), {'Ytrain' : Ytrain})
scipy.io.savemat(os.path.join(args.outDir, 'Xvalid.mat'), {'Xvalid' : Xvalid})
scipy.io.savemat(os.path.join(args.outDir, 'Yvalid.mat'), {'Yvalid' : Yvalid})
if Xtest.size > 0:
scipy.io.savemat(os.path.join(args.outDir, 'Xtest.mat'), {'Xtest' : Xtest})
scipy.io.savemat(os.path.join(args.outDir, 'Ytest.mat'), {'Ytest' : Ytest})
print('[preprocess]: done!')
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
[
"numpy.size",
"numpy.sum",
"argparse.ArgumentParser",
"emlib.load_cube",
"numpy.concatenate"
] |
[((462, 487), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (485, 487), False, 'import argparse, os.path\n'), ((1935, 1979), 'emlib.load_cube', 'emlib.load_cube', (['args.dataFileName', 'np.uint8'], {}), '(args.dataFileName, np.uint8)\n', (1950, 1979), False, 'import emlib\n'), ((1988, 2034), 'emlib.load_cube', 'emlib.load_cube', (['args.labelsFileName', 'np.uint8'], {}), '(args.labelsFileName, np.uint8)\n', (2003, 2034), False, 'import emlib\n'), ((2523, 2581), 'numpy.concatenate', 'np.concatenate', (['(Xtrain[Ytrain == 1], Xvalid[Yvalid == 1])'], {}), '((Xtrain[Ytrain == 1], Xvalid[Yvalid == 1]))\n', (2537, 2581), True, 'import numpy as np\n'), ((2620, 2638), 'numpy.sum', 'np.sum', (['(X > thresh)'], {}), '(X > thresh)\n', (2626, 2638), True, 'import numpy as np\n'), ((2649, 2659), 'numpy.size', 'np.size', (['X'], {}), '(X)\n', (2656, 2659), True, 'import numpy as np\n')]
|
import yaml
def get(filename='config/config.yaml'):
with open(filename, 'r') as stream:
data = yaml.safe_load(stream)
return data
if __name__ == '__main__':
print(get())
|
[
"yaml.safe_load"
] |
[((109, 131), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (123, 131), False, 'import yaml\n')]
|
import numpy
import math
from .. import utilities  # required below by utilities.is_lower_triangular
class phase_space(object):
"""Phase space class.
"""
def __init__(self, xs, tau=1, m=2, eps=.001):
self.tau, self.m, self.eps = tau, m, eps
N = int(len(xs)-m*tau+tau)
self.matrix = numpy.empty([N,m],dtype=float)
for i in range(N):
self.matrix[i,:] = xs[i:i+1+int(m*tau-tau):tau]
self.recurrence_matrix = None
return None
def __repr__(self):
return "phase_space()"
def __str__(self):
return "{} with shape {} and (tau, m, eps) = ({}, {}, {})".format(type(self.matrix), self.matrix.shape, self.tau, self.m, self.eps)
def __hash__(self):
return id(self)
def __eq__(self, other):
return id(self) == id(other)
def _Theta(x, y, eps):
    """Neighbourhood indicator used by the recurrence tests.
    Args:
        x: embedded point (sequence of floats).
        y: embedded point (sequence of floats).
        eps: squared-distance threshold.
    Returns:
        int: 1 if the squared distance between x and y is at most eps, else 0.
    """
sm = 0
for k in range(len(x)):
sm += (x[k]-y[k])**2
if sm > eps:
return 0
return 1
_recurrence_matrix_cache = dict()
def recurrence_matrix(xps, yps=None, joint=False):
"""Computes cross-reccurence matrix when two inputs are given and self-reccurence otherwise.
Args:
xps (numpy.array): Phase_space object(s).
yps (numpy.array, optional): Phase_space object for cross reccurence. Defaults to none.
joint (bool, optional): Should joint reccurence be calculated? Defaults to False.
Returns:
numpy.array : A 2D numpy matrix.
"""
if not yps:
yps, cross = xps, False
else:
cross = True
if (xps,yps,joint) in _recurrence_matrix_cache:
return _recurrence_matrix_cache[xps, yps, joint]
if (xps.matrix.shape, xps.tau, xps.m, xps.eps) != (yps.matrix.shape, yps.tau, yps.m, yps.eps):
print("Error: Input phase spaces have different parameters.")
return
if joint:
return numpy.multiply( recurrence_matrix(xps), recurrence_matrix(yps) )
BB, AA, tau, m, eps = yps.matrix, xps.matrix, xps.tau, xps.m, xps.eps
N = AA.shape[0]
    ans = numpy.full([N, N], 0.0)  # float dtype so the distances below are not truncated to int
for i in range(N):
for j in range(N if cross else i+1):
#ans[i][j] = _Theta( AA[i], BB[j], eps)
ans[i][j] = numpy.linalg.norm(AA[i]-BB[j])
_recurrence_matrix_cache[xps,yps,joint] = ans
return _recurrence_matrix_cache[xps, yps, joint]
def cross_recurrence_matrix( xps, yps ):
"""Cross reccurence matrix.
Args:
xps (numpy.array):
yps (numpy.array):
Returns:
numpy.array : A 2D numpy array.
"""
return recurrence_matrix( xps, yps )
def joint_recurrence_matrix( xps, yps ):
"""Joint reccurence matrix.
Args:
xps (numpy.array):
yps (numpy.array):
Returns:
numpy.array : A 2D numpy array.
"""
return recurrence_matrix( xps, yps, joint=True )
def recurrence_rate( AA ):
"""Computes reccurence-rate from reccurence matrix.
Args:
AA (numpy.array): A reccurence matrix.
Returns:
numpy.array : A numpy array.
"""
isLower = utilities.is_lower_triangular(AA)
N = AA.shape[0]
ans = numpy.zeros( N, dtype=float )
for k in range(1,N):
tmp = numpy.sum(AA[:k,:k])
ans[k] += tmp
for i in range(1, N-k):
if isLower:
tmp += numpy.sum(AA[i+k-1,i:i+k]) - numpy.sum(AA[i-1:i-1+k,i-1])
else:
tmp += numpy.sum( AA[i+k-1, i:i+k] ) \
+ numpy.sum( AA[i:i+k-1, i+k-1] ) \
- numpy.sum( AA[i-1:i-1+k, i-1] ) \
- numpy.sum( AA[i-1, i:i-1+k] )
ans[k] += tmp
ans[k] /= 0.5*(N-k)*k**2 if isLower else (N-k)*k**2
return ans
_measures_cache = dict()
def determinism( AA ):
"""Calculates percentage of recurrence points which form diagonal lines.
Args:
        AA (numpy.array): A recurrence matrix.
Returns:
float: The determinism.
"""
if (id(AA),"determinism") in _measures_cache:
return _measures_cache[id(AA),"determinism"]
isLower = utilities.is_lower_triangular(AA)
N = AA.shape[0]
H = dict()
for key in range(N):
H[key] = 0
def lower_DET(x):
for i in range(1, N):
isPrev = False
count = 0
for j in range(i, N):
                #search for consecutive diagonal lines in x[j, j-i]
if x[j, j-i]:
if isPrev:
count += 1
else:
count = 1
isPrev = True
elif isPrev:
isPrev = False
H[count] += 1 if count > 1 else 0
count = 0
H[count] += 1 if count>1 else 0
return
lower_DET(AA)
if not isLower:
lower_DET(numpy.transpose(AA))
num, avg, max_L = 0, 0, 0
for key, val in H.items():
max_L = key if val else max_L
num += key*val
avg += val
dem = numpy.sum(AA)
ENTR = 0
if avg:
for key, val in H.items():
p = val/avg
ENTR -= p*math.log(p) if p else 0
PRED = num/avg
else:
ENTR = None
PRED = 0
DIV = 1/max_L if max_L else float('inf')
_measures_cache[id(AA),"determinism"] = num/dem
_measures_cache[id(AA),"pred"] = PRED
_measures_cache[id(AA),"divergence"] = DIV
_measures_cache[id(AA),"entropy"] = ENTR
return _measures_cache[id(AA),"determinism"]
def divergence( AA ):
"""Divergence
Args:
AA (numpy.array): A numpy array.
Returns:
numpy.array: The answer.
"""
if (id(AA),"divergence") not in _measures_cache:
determinism(AA)
return _measures_cache[id(AA),"divergence"]
def entropy( AA ):
"""Entropy
Args:
AA (numpy.array): A numpy array.
Returns:
numpy.array: The answer.
"""
if (id(AA),"entropy") not in _measures_cache:
determinism(AA)
return _measures_cache[id(AA),"entropy"]
def pred( AA ):
"""Pred
Args:
AA (numpy.array): A numpy array.
Returns:
numpy.array: The answer.
"""
if (id(AA),"pred") not in _measures_cache:
determinism(AA)
return _measures_cache[id(AA),"pred"]
def trend( AA, longterm=False ):
    """Calculates the TREND of a recurrence matrix from its recurrence-rate curve.
    Args:
        AA (numpy.array(float)): A 2D recurrence matrix.
        longterm (bool, optional): Should the long-term trend be calculated? Defaults to False.
    Returns:
        float: The long-range trend if longterm is True, otherwise the medium-range trend.
    """
    N = AA.shape[0]
    R = recurrence_rate(AA)  # 1d recurrence-rate curve derived from the matrix
    R_med = R[:N//2] - numpy.mean(R[:N//2])
    R_long = R[:-1] - numpy.mean(R[:-1])
    coef = numpy.array([i - N//4 + 1 for i in range(N//2)])
    Med = numpy.dot(coef, R_med)/numpy.dot(coef, coef)
    coef = numpy.array([i - N//2 + 1 for i in range(N-1)])
    Long = numpy.dot(coef, R_long)/numpy.dot(coef, coef)
    return Long if longterm else Med
def laminarity( AA ): #+ Trapping
"""Laminarity. Calculates percentage of recurrence points which form verticle lines.
This function calculates Trapping as a side effect.
Args:
AA (numpy.array(float)): A 2D matrix.
Returns:
float: The laminarity
"""
N = AA.shape[0]
H = dict()
for key in range(N):
H[key] = 0
#Lower Lam
for j in range(N):
isPrev, count = False, 0
for i in range(j+1, N):
            #search for consecutive vertical lines in AA[i, j]
if AA[i, j]:
if isPrev:
count += 1
else:
isPrev, count = True, 1
elif isPrev:
H[count] += 1 if count > 1 else 0
isPrev, count = False, 0
H[count] += 1 if count > 1 else 0
#Upper Lam
if not utilities.is_lower_triangular(AA):
for j in range(N):
isPrev, count = False, 0
for i in range(j):
                #search for consecutive vertical lines in AA[i, j]
if AA[i,j]:
if isPrev:
count += 1
else:
isPrev, count = True, 1
elif isPrev:
H[count] += 1 if count > 1 else 0
isPrev, count = False, 0
H[count] += 1 if count > 1 else 0
num, avg= 0, 0
for key, val in H.items():
avg += val
num += key*val
dem = num + numpy.sum(AA)
LAMI = num/dem
TRAP = num/avg if avg else 0
_measures_cache[id(AA),"laminarity"] = LAMI
_measures_cache[id(AA),"trapping"] = TRAP
return _measures_cache[id(AA),"laminarity"]
def trapping( AA ):
"""Trapping. Calculates ...
This function calculates Laminiarity as a side effect.
Args:
AA (numpy.array(float)): A 2D matrix.
Returns:
float: The trapping
"""
if (id(AA),"trapping") not in _measures_cache:
return laminarity(AA)
return _measures_cache[id(AA),"trapping"]
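# Usage sketch: embed a short sine series and inspect its recurrence matrix.
# (Run inside the package, e.g. ``python -m <package>.<module>``, so that the
# relative ``utilities`` import above resolves.)
if __name__ == "__main__":
    xs = [math.sin(0.1 * i) for i in range(100)]
    ps = phase_space(xs, tau=2, m=3, eps=0.01)
    R = recurrence_matrix(ps)
    print(ps)
    print(R.shape)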
|
[
"numpy.full",
"numpy.sum",
"numpy.empty",
"numpy.zeros",
"numpy.transpose",
"numpy.linalg.norm",
"math.log"
] |
[((2196, 2217), 'numpy.full', 'numpy.full', (['[N, N]', '(0)'], {}), '([N, N], 0)\n', (2206, 2217), False, 'import numpy\n'), ((3346, 3373), 'numpy.zeros', 'numpy.zeros', (['N'], {'dtype': 'float'}), '(N, dtype=float)\n', (3357, 3373), False, 'import numpy\n'), ((5302, 5315), 'numpy.sum', 'numpy.sum', (['AA'], {}), '(AA)\n', (5311, 5315), False, 'import numpy\n'), ((279, 311), 'numpy.empty', 'numpy.empty', (['[N, m]'], {'dtype': 'float'}), '([N, m], dtype=float)\n', (290, 311), False, 'import numpy\n'), ((3417, 3438), 'numpy.sum', 'numpy.sum', (['AA[:k, :k]'], {}), '(AA[:k, :k])\n', (3426, 3438), False, 'import numpy\n'), ((8917, 8930), 'numpy.sum', 'numpy.sum', (['AA'], {}), '(AA)\n', (8926, 8930), False, 'import numpy\n'), ((2361, 2393), 'numpy.linalg.norm', 'numpy.linalg.norm', (['(AA[i] - BB[j])'], {}), '(AA[i] - BB[j])\n', (2378, 2393), False, 'import numpy\n'), ((5124, 5143), 'numpy.transpose', 'numpy.transpose', (['AA'], {}), '(AA)\n', (5139, 5143), False, 'import numpy\n'), ((3548, 3581), 'numpy.sum', 'numpy.sum', (['AA[i + k - 1, i:i + k]'], {}), '(AA[i + k - 1, i:i + k])\n', (3557, 3581), False, 'import numpy\n'), ((3577, 3614), 'numpy.sum', 'numpy.sum', (['AA[i - 1:i - 1 + k, i - 1]'], {}), '(AA[i - 1:i - 1 + k, i - 1])\n', (3586, 3614), False, 'import numpy\n'), ((3822, 3855), 'numpy.sum', 'numpy.sum', (['AA[i - 1, i:i - 1 + k]'], {}), '(AA[i - 1, i:i - 1 + k])\n', (3831, 3855), False, 'import numpy\n'), ((5422, 5433), 'math.log', 'math.log', (['p'], {}), '(p)\n', (5430, 5433), False, 'import math\n'), ((3763, 3800), 'numpy.sum', 'numpy.sum', (['AA[i - 1:i - 1 + k, i - 1]'], {}), '(AA[i - 1:i - 1 + k, i - 1])\n', (3772, 3800), False, 'import numpy\n'), ((3647, 3680), 'numpy.sum', 'numpy.sum', (['AA[i + k - 1, i:i + k]'], {}), '(AA[i + k - 1, i:i + k])\n', (3656, 3680), False, 'import numpy\n'), ((3704, 3741), 'numpy.sum', 'numpy.sum', (['AA[i:i + k - 1, i + k - 1]'], {}), '(AA[i:i + k - 1, i + k - 1])\n', (3713, 3741), False, 'import numpy\n')]
|
import traceback
class CallStructureException(Exception):
pass
def must_be_called_from(method):
for frame in traceback.extract_stack():
if frame.name == method.__name__ and frame.filename == method.__globals__['__file__']:
return
raise CallStructureException("Method called incorrectly!")
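# Usage sketch (hypothetical function names): guard() only succeeds while
# allowed() from this same module is somewhere on the call stack. The filename
# comparison is most reliable for imported modules, where ``__file__`` and the
# traceback filenames agree.
def allowed():
    guard()          # passes: 'allowed' is on the stack when guard() runs
def guard():
    must_be_called_from(allowed)
# Calling guard() from anywhere else raises CallStructureException.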
|
[
"traceback.extract_stack"
] |
[((121, 146), 'traceback.extract_stack', 'traceback.extract_stack', ([], {}), '()\n', (144, 146), False, 'import traceback\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('competition', '0005_alter_club_facebook_youtube_position'),
]
operations = [
migrations.AddField(
model_name='division',
name='sportingpulse_url',
field=models.URLField(help_text='Here be dragons! Enter at own risk!', max_length=1024, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='match',
name='external_identifier',
field=models.CharField(db_index=True, max_length=20, unique=True, null=True, blank=True),
preserve_default=True,
),
]
|
[
"django.db.models.CharField",
"django.db.models.URLField"
] |
[((385, 494), 'django.db.models.URLField', 'models.URLField', ([], {'help_text': '"""Here be dragons! Enter at own risk!"""', 'max_length': '(1024)', 'null': '(True)', 'blank': '(True)'}), "(help_text='Here be dragons! Enter at own risk!', max_length\n =1024, null=True, blank=True)\n", (400, 494), False, 'from django.db import migrations, models\n'), ((656, 742), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'unique': '(True)', 'null': '(True)', 'blank': '(True)'}), '(db_index=True, max_length=20, unique=True, null=True,\n blank=True)\n', (672, 742), False, 'from django.db import migrations, models\n')]
|
import math
import pcsg
from exampleimg import runtime
from exampleimg import conf
def _setAttributes2D (attributes):
return attributes.override ({
'camera.view': (0, 0, 0, 0, 0, 0, 8)
})
def _setAttributes (attributes):
return attributes.override ({
'camera.view': (0, 0, 0, 70, 0, 30, 12),
'camera.projection': 'perspective'
})
def _posttransform (item):
return pcsg.transform.Translate (pcsg.solid.LinearExtrude (height = 1, children = (item)), z = 2)
# Examples for transform.Translate
class Translate:
"""
"""
@staticmethod
def example_a (attributes, isThumbnail):
"""
Transform solid on x-axis
import pcsg
body = pcsg.solid.Sphere (radius = 1)
item = pcsg.transform.Translate (body, y = 2)
"""
body = pcsg.solid.Sphere (radius = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Translate (body, y = 2, attributes = {'material': conf.getMaterial (1)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_b (attributes, isThumbnail):
"""
Transform solid on x- and y-axis
import pcsg
body = pcsg.solid.Sphere (radius = 1)
item = pcsg.transform.Translate (body, x = 1, y = 2)
"""
body = pcsg.solid.Sphere (radius = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Translate (body, x = 1, y = 2, attributes = {'material': conf.getMaterial (1)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_c (attributes, isThumbnail):
"""
Transform solid by vector
import pcsg
body = pcsg.solid.Sphere (radius = 1)
item = pcsg.transform.Translate (body, (1, 2, -2))
"""
body = pcsg.solid.Sphere (radius = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Translate (body, (1, 2, -2), attributes = {'material': conf.getMaterial (1)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_d (attributes, isThumbnail):
"""
Transform shape by vector
import pcsg
body = pcsg.shape.Circle (radius = 0.5)
item = pcsg.transform.Translate (body, (0.5, 1))
"""
body = pcsg.shape.Circle (radius = 0.5, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Translate (body, (0.5, 1), attributes = {'material': conf.getMaterial (1)})
a = _setAttributes2D (attributes)
return ((a, body),(a, item))
# Examples for transform.Scale
class Scale:
"""
"""
@staticmethod
def example_a (attributes, isThumbnail):
"""
Scale solid on x-axis
import pcsg
body = pcsg.solid.Sphere (radius = 1)
item = pcsg.transform.Scale (body, sy = 2)
"""
body = pcsg.solid.Sphere (radius = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Scale (body, sy = 2, attributes = {'material': conf.getMaterial (2)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_b (attributes, isThumbnail):
"""
Scale solid on x- and y-axis
import pcsg
body = pcsg.solid.Sphere (radius = 1)
item = pcsg.transform.Scale (body, sx = 0.6, sy = 1.3)
"""
body = pcsg.solid.Sphere (radius = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Scale (body, sx = 0.6, sy = 1.3, attributes = {'material': conf.getMaterial (2)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_c (attributes, isThumbnail):
"""
Scale solid by vector
import pcsg
body = pcsg.solid.Sphere (radius = 1)
item = pcsg.transform.Scale (body, (1.7, 0.9, 1.2))
"""
body = pcsg.solid.Sphere (radius = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Scale (body, (1.7, 0.9, 1.2), attributes = {'material': conf.getMaterial (2)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_d (attributes, isThumbnail):
"""
Scale shape by vector
import pcsg
body = pcsg.shape.Circle (radius = 0.5)
item = pcsg.transform.Scale (body, (0.5, 1))
"""
body = pcsg.shape.Circle (radius = 0.5, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Scale (body, (0.5, 1), attributes = {'material': conf.getMaterial (2)})
a = _setAttributes2D (attributes)
return ((a, body),(a, item))
# Examples for transform.Rotate
class Rotate:
"""
"""
@staticmethod
def example_a (attributes, isThumbnail):
"""
Rotate solid around x-axis
import pcsg
body = pcsg.solid.Cube (size = 1)
item = pcsg.transform.Rotate (body, rx = 25)
"""
body = pcsg.solid.Cube (size = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Rotate (body, rx = 25, attributes = {'material': conf.getMaterial (3)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_b (attributes, isThumbnail):
"""
Rotate solid around x- and y-axis
import pcsg
body = pcsg.solid.Cube (size = 1)
item = pcsg.transform.Rotate (body, rx = 25, ry = 15)
"""
body = pcsg.solid.Cube (size = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Rotate (body, rx = 25, ry = 15, attributes = {'material': conf.getMaterial (3)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_c (attributes, isThumbnail):
"""
Rotate solid around vector
import pcsg
body = pcsg.solid.Cube (size = 1)
item = pcsg.transform.Rotate (body, (10, 20, 35))
"""
body = pcsg.solid.Cube (size = 1, attributes = {'material': conf.getMaterial (0)})
item = pcsg.transform.Rotate (body, (10, 20, 35), attributes = {'material': conf.getMaterial (3)})
a = _setAttributes (attributes)
return ((a, body),(a, item))
@staticmethod
def example_d (attributes, isThumbnail):
"""
Rotate shape around z-axis
import pcsg
body = pcsg.transform.Translate (
pcsg.shape.Square (size = 0.5),
x = 1
)
item = pcsg.transform.Rotate (body, rz = 30)
"""
body = pcsg.transform.Translate (
pcsg.shape.Square (size = 0.5),
x = 1,
attributes = {'material': conf.getMaterial (0)}
)
item = pcsg.transform.Rotate (body, rz = 30, attributes = {'material': conf.getMaterial (3)})
a = _setAttributes2D (attributes)
return ((a, body),(a, item))
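# Pattern note: each example above returns ((attributes, body), (attributes, item))
# pairs, i.e. the untransformed body alongside the transformed item, with the
# attributes produced by _setAttributes/_setAttributes2D; presumably (an
# assumption, the renderer itself is not shown here) these pairs are rasterized
# into the documentation images.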
|
[
"pcsg.shape.Square",
"exampleimg.conf.getMaterial",
"pcsg.solid.LinearExtrude"
] |
[((446, 495), 'pcsg.solid.LinearExtrude', 'pcsg.solid.LinearExtrude', ([], {'height': '(1)', 'children': 'item'}), '(height=1, children=item)\n', (470, 495), False, 'import pcsg\n'), ((6891, 6918), 'pcsg.shape.Square', 'pcsg.shape.Square', ([], {'size': '(0.5)'}), '(size=0.5)\n', (6908, 6918), False, 'import pcsg\n'), ((901, 920), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (917, 920), False, 'from exampleimg import conf\n'), ((1004, 1023), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(1)'], {}), '(1)\n', (1020, 1023), False, 'from exampleimg import conf\n'), ((1437, 1456), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (1453, 1456), False, 'from exampleimg import conf\n'), ((1547, 1566), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(1)'], {}), '(1)\n', (1563, 1566), False, 'from exampleimg import conf\n'), ((1971, 1990), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (1987, 1990), False, 'from exampleimg import conf\n'), ((2079, 2098), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(1)'], {}), '(1)\n', (2095, 2098), False, 'from exampleimg import conf\n'), ((2505, 2524), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (2521, 2524), False, 'from exampleimg import conf\n'), ((2611, 2630), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(1)'], {}), '(1)\n', (2627, 2630), False, 'from exampleimg import conf\n'), ((3088, 3107), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (3104, 3107), False, 'from exampleimg import conf\n'), ((3188, 3207), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(2)'], {}), '(2)\n', (3204, 3207), False, 'from exampleimg import conf\n'), ((3619, 3638), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (3635, 3638), False, 'from exampleimg import conf\n'), ((3731, 3750), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(2)'], {}), '(2)\n', (3747, 3750), False, 'from exampleimg import conf\n'), ((4152, 4171), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (4168, 4171), False, 'from exampleimg import conf\n'), ((4261, 4280), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(2)'], {}), '(2)\n', (4277, 4280), False, 'from exampleimg import conf\n'), ((4679, 4698), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (4695, 4698), False, 'from exampleimg import conf\n'), ((4781, 4800), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(2)'], {}), '(2)\n', (4797, 4800), False, 'from exampleimg import conf\n'), ((5259, 5278), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (5275, 5278), False, 'from exampleimg import conf\n'), ((5361, 5380), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(3)'], {}), '(3)\n', (5377, 5380), False, 'from exampleimg import conf\n'), ((5788, 5807), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (5804, 5807), False, 'from exampleimg import conf\n'), ((5899, 5918), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(3)'], {}), '(3)\n', (5915, 5918), False, 'from exampleimg import conf\n'), ((6315, 6334), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (6331, 6334), False, 'from exampleimg import conf\n'), ((6422, 6441), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(3)'], {}), '(3)\n', (6438, 6441), False, 'from exampleimg import conf\n'), ((6980, 6999), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(0)'], {}), '(0)\n', (6996, 6999), False, 'from exampleimg import conf\n'), ((7091, 7110), 'exampleimg.conf.getMaterial', 'conf.getMaterial', (['(3)'], {}), '(3)\n', (7107, 7110), False, 'from exampleimg import conf\n')]
|
import socket
HOST, PORT = '127.0.0.1', 8000
clientMessage = 'Hello!'
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
client.connect((HOST, PORT))
client.sendall(clientMessage.encode())
serverMessage = str(client.recv(1024), encoding='utf-8')
print('Server:', serverMessage)
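# A minimal companion sketch (not part of the original snippet): one way a
# server listening on HOST/PORT could answer this client. The function name
# and reply text are illustrative assumptions.
def run_demo_server():
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server:
        server.bind((HOST, PORT))
        server.listen(1)
        conn, _addr = server.accept()  # block until the client above connects
        with conn:
            data = conn.recv(1024)  # the client's greeting
            conn.sendall(b'Hello back: ' + data)  # becomes serverMessage above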
|
[
"socket.socket"
] |
[((77, 126), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (90, 126), False, 'import socket\n')]
|
'''
Horoscope Attributes of the Occult Bot
'''
# Imports
import requests, json
# Variables
# Emojis
aries_emoji = '\N{ARIES}'
taurus_emoji = '\N{TAURUS}'
gemini_emoji = '\N{GEMINI}'
cancer_emoji = '\N{CANCER}'
leo_emoji = '\N{LEO}'
virgo_emoji = '\N{VIRGO}'
libra_emoji = '\N{LIBRA}'
scorpio_emoji = '\N{SCORPIUS}'
sagittarius_emoji = '\N{SAGITTARIUS}'
capricorn_emoji = '\N{CAPRICORN}'
aquarius_emoji = '\N{AQUARIUS}'
pisces_emoji = '\N{PISCES}'
# Functions
def month_num_to_name(month_num):
"""
Takes a String or Int input of a Month's Number, and returns
a string of the name of the corresponding Month.
"""
month_names = [
'January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December',
]
# Save Month Index to Determine Month Name
idx = int(month_num) - 1
month = month_names[idx]
return month
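# Hypothetical calls for illustration (not in the original module):
# month_num_to_name(6) -> 'June'; month_num_to_name('11') -> 'November'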
def horo_date(json_data):
'Converts YYYY-MM-DD date to MonthName DD, YYYY'
# Separate Parts of the Date
year = json_data['date'][:4]
month_num = json_data['date'][5:7]
day = json_data['date'][8:]
month = month_num_to_name(month_num)
return f'{month} {day}, {year}'
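# Illustrative input (invented): {'date': '2021-06-01'} -> 'June 01, 2021';
# the day keeps the zero-padding of the source string.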
def get_horoscope(zodiac):
'Retrieves Daily Horoscopes for Specified Zodiac'
horoscope = 'Unknown zodiac sign.'  # default so the final return is always bound
# Aquarius
if zodiac == 'aquarius':
response = requests.get("https://ohmanda.com/api/horoscope/aquarius")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{aquarius_emoji} {json_data['sign'].upper()} {aquarius_emoji}
{date}
{json_data['horoscope']}
'''
# Pisces
elif zodiac == 'pisces':
response = requests.get("https://ohmanda.com/api/horoscope/pisces")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{pisces_emoji} {json_data['sign'].upper()} {pisces_emoji}
{date}
{json_data['horoscope']}
'''
# Aries
elif zodiac == 'aries':
response = requests.get("https://ohmanda.com/api/horoscope/aries")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{aries_emoji} {json_data['sign'].upper()} {aries_emoji}
{date}
{json_data['horoscope']}
'''
# Taurus
elif zodiac == 'taurus':
response = requests.get("https://ohmanda.com/api/horoscope/taurus")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{taurus_emoji} {json_data['sign'].upper()} {taurus_emoji}
{date}
{json_data['horoscope']}
'''
# Gemini
elif zodiac == 'gemini':
response = requests.get("https://ohmanda.com/api/horoscope/gemini")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{gemini_emoji} {json_data['sign'].upper()} {gemini_emoji}
{date}
{json_data['horoscope']}
'''
# Cancer
elif zodiac == 'cancer':
response = requests.get("https://ohmanda.com/api/horoscope/cancer")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{cancer_emoji} {json_data['sign'].upper()} {cancer_emoji}
{date}
{json_data['horoscope']}
'''
# Leo
elif zodiac == 'leo':
response = requests.get("https://ohmanda.com/api/horoscope/leo")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{leo_emoji} {json_data['sign'].upper()} {leo_emoji}
{date}
{json_data['horoscope']}
'''
# Virgo
elif zodiac == 'virgo':
response = requests.get("https://ohmanda.com/api/horoscope/virgo")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{virgo_emoji} {json_data['sign'].upper()} {virgo_emoji}
{date}
{json_data['horoscope']}
'''
# Libra
elif zodiac == 'libra':
response = requests.get("https://ohmanda.com/api/horoscope/libra")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{libra_emoji} {json_data['sign'].upper()} {libra_emoji}
{date}
{json_data['horoscope']}
'''
# Scorpio
elif zodiac == 'scorpio':
response = requests.get("https://ohmanda.com/api/horoscope/scorpio")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{scorpio_emoji} {json_data['sign'].upper()} {scorpio_emoji}
{date}
{json_data['horoscope']}
'''
# Sagittarius
elif zodiac == 'sagittarius':
response = requests.get("https://ohmanda.com/api/horoscope/sagittarius")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{sagittarius_emoji} {json_data['sign'].upper()} {sagittarius_emoji}
{date}
{json_data['horoscope']}
'''
# Capricorn
elif zodiac == 'capricorn':
response = requests.get("https://ohmanda.com/api/horoscope/capricorn")
json_data = json.loads(response.text)
date = horo_date(json_data)
horoscope = f'''
{capricorn_emoji} {json_data['sign'].upper()} {capricorn_emoji}
{date}
{json_data['horoscope']}
'''
return horoscope
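# Illustrative call (not in the original module): print(get_horoscope('leo'))
# fetches today's Leo horoscope from the ohmanda.com API and prints the
# formatted block built above.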
|
[
"json.loads",
"requests.get"
] |
[((1364, 1422), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/aquarius"""'], {}), "('https://ohmanda.com/api/horoscope/aquarius')\n", (1376, 1422), False, 'import requests, json\n'), ((1443, 1468), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1453, 1468), False, 'import requests, json\n'), ((1699, 1755), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/pisces"""'], {}), "('https://ohmanda.com/api/horoscope/pisces')\n", (1711, 1755), False, 'import requests, json\n'), ((1776, 1801), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1786, 1801), False, 'import requests, json\n'), ((2026, 2081), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/aries"""'], {}), "('https://ohmanda.com/api/horoscope/aries')\n", (2038, 2081), False, 'import requests, json\n'), ((2102, 2127), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2112, 2127), False, 'import requests, json\n'), ((2352, 2408), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/taurus"""'], {}), "('https://ohmanda.com/api/horoscope/taurus')\n", (2364, 2408), False, 'import requests, json\n'), ((2429, 2454), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2439, 2454), False, 'import requests, json\n'), ((2677, 2733), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/gemini"""'], {}), "('https://ohmanda.com/api/horoscope/gemini')\n", (2689, 2733), False, 'import requests, json\n'), ((2754, 2779), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2764, 2779), False, 'import requests, json\n'), ((3006, 3062), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/cancer"""'], {}), "('https://ohmanda.com/api/horoscope/cancer')\n", (3018, 3062), False, 'import requests, json\n'), ((3083, 3108), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (3093, 3108), False, 'import requests, json\n'), ((3330, 3383), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/leo"""'], {}), "('https://ohmanda.com/api/horoscope/leo')\n", (3342, 3383), False, 'import requests, json\n'), ((3404, 3429), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (3414, 3429), False, 'import requests, json\n'), ((3648, 3703), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/virgo"""'], {}), "('https://ohmanda.com/api/horoscope/virgo')\n", (3660, 3703), False, 'import requests, json\n'), ((3724, 3749), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (3734, 3749), False, 'import requests, json\n'), ((3972, 4027), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/libra"""'], {}), "('https://ohmanda.com/api/horoscope/libra')\n", (3984, 4027), False, 'import requests, json\n'), ((4048, 4073), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (4058, 4073), False, 'import requests, json\n'), ((4300, 4357), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/scorpio"""'], {}), "('https://ohmanda.com/api/horoscope/scorpio')\n", (4312, 4357), False, 'import requests, json\n'), ((4378, 4403), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (4388, 4403), False, 'import requests, json\n'), ((4642, 4703), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/sagittarius"""'], {}), "('https://ohmanda.com/api/horoscope/sagittarius')\n", (4654, 4703), False, 'import requests, json\n'), ((4724, 4749), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (4734, 4749), False, 'import requests, json\n'), ((4992, 5051), 'requests.get', 'requests.get', (['"""https://ohmanda.com/api/horoscope/capricorn"""'], {}), "('https://ohmanda.com/api/horoscope/capricorn')\n", (5004, 5051), False, 'import requests, json\n'), ((5072, 5097), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (5082, 5097), False, 'import requests, json\n')]
|
import sqlite3
import unittest
from collections import namedtuple
from datetime import datetime
from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning
from dwgenerator.mappings import TableMappings, ColumnMappings, Mappings
from dwgenerator.templates import Templates
TableMapping = namedtuple('TableMapping',
'source_schema source_table source_filter target_schema target_table')
ColumnMapping = namedtuple('ColumnMapping',
'src_schema src_table src_column transformation tgt_schema tgt_table tgt_column')
class TestStandardTemplates(unittest.TestCase):
# Prepare the DB
def setUp(self):
self.dbtype = 'standard'
self.start_ts = datetime.fromisoformat('2021-06-01T12:10:00+00:00').timestamp()
self.templates = Templates(self.dbtype)
self.connection = sqlite3.connect(':memory:')
self.cur = self.connection.cursor()
self.cur.execute("ATTACH DATABASE ':memory:' AS db")
def tearDown(self):
self.connection.close()
# Create table definitions
def create_customer_h(self, **properties):
target_table = create_typed_table(
Table('db', 'customer_h', [
Column('customer_key', 'text'),
Column('ssn', 'text'),
Column('load_dts', 'numeric'),
Column('rec_src', 'text'),
], **properties))
target_table.check()
return target_table
def create_sales_line_customer_l(self, **properties):
target_table = create_typed_table(
Table('db', 'sales_line_customer_l', [
Column('sales_line_customer_l_key', 'text'),
Column('sales_line_key', 'text'),
Column('customer_key', 'text'),
Column('load_dts', 'numeric'),
Column('rec_src', 'text'),
], **properties))
target_table.check()
return target_table
def create_customer_s(self, **properties):
target_table = create_typed_table(
Table('db', 'customer_s', [
Column('customer_key', 'text'),
Column('load_dts', 'numeric'),
Column('ssn', 'text'),
Column('name', 'text'),
Column('rec_src', 'text'),
], **properties))
target_table.check()
return target_table
# Create mappings
def create_customer_h_mappings(self, target_table):
# I use the same source and target database because SQLite cannot create views that reference other databases
table_mappings = TableMappings([t._asdict() for t in [
TableMapping("db", "customers", "", "db", "customer_h"),
TableMapping("db", "sales_lines", "", "db", "customer_h")
]])
column_mappings = ColumnMappings([c._asdict() for c in [
ColumnMapping("db", "customers", "ssn", "", "db", "customer_h", "customer_key"),
ColumnMapping("db", "customers", "ssn", "", "db", "customer_h", "ssn"),
ColumnMapping("db", "customers", "load_dts", "", "db", "customer_h", "load_dts"),
ColumnMapping("db", "customers", "", "'db'", "db", "customer_h", "rec_src"),
ColumnMapping("db", "sales_lines", "ssn", "", "db", "customer_h", "customer_key"),
ColumnMapping("db", "sales_lines", "ssn", "", "db", "customer_h", "ssn"),
ColumnMapping("db", "sales_lines", "load_dts", "", "db", "customer_h", "load_dts"),
ColumnMapping("db", "sales_lines", "", "'db'", "db", "customer_h", "rec_src"),
]])
mappings = Mappings(table_mappings, column_mappings, [target_table] + column_mappings.source_tables())
mappings.check(target_table)
return mappings
def create_sales_line_customer_l_mappings(self, target_table):
# I use the same source and target database because SQLite cannot create views that reference other databases
table_mappings = TableMappings([
TableMapping("db", "sales_lines", "", "db", "sales_line_customer_l")._asdict()
])
column_mappings = ColumnMappings([c._asdict() for c in [
ColumnMapping("db", "sales_lines", "txn_id,ssn", "", "db", "sales_line_customer_l", "sales_line_customer_l_key"),
ColumnMapping("db", "sales_lines", "txn_id", "", "db", "sales_line_customer_l", "sales_line_key"),
ColumnMapping("db", "sales_lines", "ssn", "", "db", "sales_line_customer_l", "customer_key"),
ColumnMapping("db", "sales_lines", "load_dts", "", "db", "sales_line_customer_l", "load_dts"),
ColumnMapping("db", "sales_lines", "", "'db'", "db", "sales_line_customer_l", "rec_src"),
]])
mappings = Mappings(table_mappings, column_mappings, [target_table] + column_mappings.source_tables())
mappings.check(target_table)
return mappings
def create_customer_s_mappings(self, target_table):
# I use the same source and target database because SQLite cannot create views that reference other databases
table_mappings = TableMappings([
TableMapping("db", "customers", "", "db", "customer_s")._asdict()
])
column_mappings = ColumnMappings([c._asdict() for c in [
ColumnMapping("db", "customers", "ssn", "", "db", "customer_s", "customer_key"),
ColumnMapping("db", "customers", "load_dts", "", "db", "customer_s", "load_dts"),
ColumnMapping("db", "customers", "ssn", "", "db", "customer_s", "ssn"),
ColumnMapping("db", "customers", "name", "", "db", "customer_s", "name"),
ColumnMapping("db", "customers", "", "'db'", "db", "customer_s", "rec_src"),
]])
mappings = Mappings(table_mappings, column_mappings, [target_table] + column_mappings.source_tables())
mappings.check(target_table)
return mappings
# Create and put test data in source tables
def create_customers(self):
self.cur.execute('CREATE TABLE db.customers (ssn, name, load_dts)')
ts = self.start_ts
self.cur.executemany('INSERT INTO db.customers VALUES(?, ?, ?)', [
('198001010101', 'Michael', ts),
('199001010101', 'Jessica', ts + 1),
('199201010101', 'Ashley', ts + 2),
])
def create_sales_lines(self):
self.cur.execute('CREATE TABLE db.sales_lines (txn_id, ssn, load_dts)')
ts = self.start_ts
self.cur.executemany('INSERT INTO db.sales_lines VALUES(?, ?, ?)', [
('1234', '198001010101', ts + 20),
('2345', '199001010101', ts + 21),
('2345', '199001010101', ts + 3600),
('3456', '199201010101', ts + 3601),
])
# Utils
def render_view(self, target_table, mappings):
[(_, sql), *rest] = self.templates.render(target_table, mappings)
self.assertTrue(len(rest) == 0)
return sql
def executescript(self, sql, args):
for (key, value) in args.items():
sql = sql.replace(f':{key}', str(value))
self.cur.executescript(sql)
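# Illustrative substitution (invented SQL): executescript('... :start_ts ...',
# {'start_ts': 42}) textually replaces ':start_ts' with '42' before running,
# since sqlite3's executescript() does not support bound parameters itself.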
# Test the data vault objects
## Test views
def test_hub_view(self):
target_table = self.create_customer_h()
mappings = self.create_customer_h_mappings(target_table)
sql = self.render_view(target_table, mappings)
self.create_customers()
self.create_sales_lines()
self.cur.executescript(sql)
result = list(self.cur.execute('SELECT * FROM db.customer_h ORDER BY load_dts'))
expected = [
('198001010101', '198001010101', 1622549400.0, 'db'),
('199001010101', '199001010101', 1622549401.0, 'db'),
('199201010101', '199201010101', 1622549402.0, 'db')
]
self.assertEqual(result, expected)
def test_link_view(self):
target_table = self.create_sales_line_customer_l()
mappings = self.create_sales_line_customer_l_mappings(target_table)
sql = self.render_view(target_table, mappings)
self.create_sales_lines()
self.cur.executescript(sql)
result = list(self.cur.execute('SELECT * FROM db.sales_line_customer_l ORDER BY load_dts'))
expected = [
('1234|198001010101', '1234', '198001010101', 1622549420.0, 'db'),
('2345|199001010101', '2345', '199001010101', 1622549421.0, 'db'),
('3456|199201010101', '3456', '199201010101', 1622553001.0, 'db'),
]
self.assertEqual(result, expected)
def test_satellite_view(self):
target_table = self.create_customer_s()
mappings = self.create_customer_s_mappings(target_table)
sql = self.render_view(target_table, mappings)
self.create_customers()
self.cur.executescript(sql)
result = list(self.cur.execute('SELECT * FROM db.customer_s ORDER BY load_dts'))
expected = [
('198001010101', 1622549400.0, '198001010101', 'Michael', 'db'),
('199001010101', 1622549401.0, '199001010101', 'Jessica', 'db'),
('199201010101', 1622549402.0, '199201010101', 'Ashley', 'db'),
]
self.assertEqual(result, expected)
## Test persisted tables
def test_hub_persisted(self):
target_table = self.create_customer_h(generate_type='table')
mappings = self.create_customer_h_mappings(target_table)
[(ddl_path, ddl), (etl_path, etl)] = self.templates.render(target_table, mappings)
self.assertEqual(ddl_path.as_posix(), 'db/customer_h_t.sql')
self.assertEqual(etl_path.as_posix(), 'db/customer_h_etl.sql')
self.cur.executescript(ddl)
result = list(self.cur.execute("PRAGMA db.table_info('customer_h')"))
expected = [
(0, 'customer_key', 'text', 0, None, 1),
(1, 'ssn', 'text', 0, None, 0),
(2, 'load_dts', 'numeric', 0, None, 0),
(3, 'rec_src', 'text', 0, None, 0)
]
self.assertEqual(result, expected)
self.create_customers()
self.create_sales_lines()
ts = self.start_ts
# print(etl)
self.executescript(etl, {'start_ts': ts, 'end_ts': ts + 2})
result1 = list(self.cur.execute('SELECT * FROM db.customer_h ORDER BY load_dts'))
expected1 = [
('198001010101', '198001010101', 1622549400.0, 'db'),
('199001010101', '199001010101', 1622549401.0, 'db'),
]
self.assertEqual(result1, expected1)
self.executescript(etl, {'start_ts': ts + 2, 'end_ts': ts + 4000})
result2 = list(self.cur.execute('SELECT * FROM db.customer_h ORDER BY load_dts'))
expected2 = expected1 + [
('199201010101', '199201010101', 1622549402.0, 'db')
]
self.assertEqual(result2, expected2)
def test_link_persisted(self):
target_table = self.create_sales_line_customer_l(generate_type='table')
mappings = self.create_sales_line_customer_l_mappings(target_table)
[(ddl_path, ddl), (etl_path, etl)] = self.templates.render(target_table, mappings)
self.assertEqual(ddl_path.as_posix(), 'db/sales_line_customer_l_t.sql')
self.assertEqual(etl_path.as_posix(), 'db/sales_line_customer_l_etl.sql')
self.cur.executescript(ddl)
result = list(self.cur.execute("PRAGMA db.table_info('sales_line_customer_l')"))
expected = [
(0, 'sales_line_customer_l_key', 'text', 0, None, 1),
(1, 'sales_line_key', 'text', 0, None, 0),
(2, 'customer_key', 'text', 0, None, 0),
(3, 'load_dts', 'numeric', 0, None, 0),
(4, 'rec_src', 'text', 0, None, 0)
]
self.assertEqual(result, expected)
self.create_sales_lines()
ts = self.start_ts
# print(etl)
self.executescript(etl, {'start_ts': ts + 0, 'end_ts': ts + 3600})
result1 = list(self.cur.execute('SELECT * FROM db.sales_line_customer_l ORDER BY load_dts'))
expected1 = [
('1234|198001010101', '1234', '198001010101', 1622549420.0, 'db'),
('2345|199001010101', '2345', '199001010101', 1622549421.0, 'db'),
]
self.assertEqual(result1, expected1)
self.executescript(etl, {'start_ts': ts + 3600, 'end_ts': ts + 7200})
result2 = list(self.cur.execute('SELECT * FROM db.sales_line_customer_l ORDER BY load_dts'))
expected2 = expected1 + [
('3456|199201010101', '3456', '199201010101', 1622553001.0, 'db'),
]
self.assertEqual(result2, expected2)
def test_satellite_persisted(self):
target_table = self.create_customer_s(generate_type='table')
mappings = self.create_customer_s_mappings(target_table)
[(ddl_path, ddl), (etl_path, etl)] = self.templates.render(target_table, mappings)
self.assertEqual(ddl_path.as_posix(), 'db/customer_s_t.sql')
self.assertEqual(etl_path.as_posix(), 'db/customer_s_etl.sql')
self.cur.executescript(ddl)
result = list(self.cur.execute("PRAGMA db.table_info('customer_s')"))
expected = [
(0, 'customer_key', 'text', 0, None, 1),
(1, 'load_dts', 'numeric', 0, None, 2),
(2, 'ssn', 'text', 0, None, 0),
(3, 'name', 'text', 0, None, 0),
(4, 'rec_src', 'text', 0, None, 0)
]
self.assertEqual(result, expected)
self.create_customers()
ts = self.start_ts
# print(etl)
self.executescript(etl, {'start_ts': ts, 'end_ts': ts + 2})
result1 = list(self.cur.execute('SELECT * FROM db.customer_s ORDER BY load_dts'))
expected1 = [
('198001010101', 1622549400.0, '198001010101', 'Michael', 'db'),
('199001010101', 1622549401.0, '199001010101', 'Jessica', 'db'),
]
self.assertEqual(result1, expected1)
self.executescript(etl, {'start_ts': ts + 2, 'end_ts': ts + 4})
result2 = list(self.cur.execute('SELECT * FROM db.customer_s ORDER BY load_dts'))
expected2 = expected1 + [
('199201010101', 1622549402.0, '199201010101', 'Ashley', 'db'),
]
self.assertEqual(result2, expected2)
|
[
"datetime.datetime.fromisoformat",
"sqlite3.connect",
"collections.namedtuple",
"dwgenerator.dbobjects.Column",
"dwgenerator.templates.Templates"
] |
[((360, 461), 'collections.namedtuple', 'namedtuple', (['"""TableMapping"""', '"""source_schema source_table source_filter target_schema target_table"""'], {}), "('TableMapping',\n 'source_schema source_table source_filter target_schema target_table')\n", (370, 461), False, 'from collections import namedtuple\n'), ((476, 594), 'collections.namedtuple', 'namedtuple', (['"""ColumnMapping"""', '"""src_schema src_table src_column transformation tgt_schema tgt_table tgt_column"""'], {}), "('ColumnMapping',\n 'src_schema src_table src_column transformation tgt_schema tgt_table tgt_column'\n )\n", (486, 594), False, 'from collections import namedtuple\n'), ((810, 832), 'dwgenerator.templates.Templates', 'Templates', (['self.dbtype'], {}), '(self.dbtype)\n', (819, 832), False, 'from dwgenerator.templates import Templates\n'), ((855, 882), 'sqlite3.connect', 'sqlite3.connect', (['""":memory:"""'], {}), "(':memory:')\n", (870, 882), False, 'import sqlite3\n'), ((725, 776), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['"""2021-06-01T12:10:00+00:00"""'], {}), "('2021-06-01T12:10:00+00:00')\n", (747, 776), False, 'from datetime import datetime\n'), ((1187, 1217), 'dwgenerator.dbobjects.Column', 'Column', (['"""customer_key"""', '"""text"""'], {}), "('customer_key', 'text')\n", (1193, 1217), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1227, 1248), 'dwgenerator.dbobjects.Column', 'Column', (['"""ssn"""', '"""text"""'], {}), "('ssn', 'text')\n", (1233, 1248), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1258, 1287), 'dwgenerator.dbobjects.Column', 'Column', (['"""load_dts"""', '"""numeric"""'], {}), "('load_dts', 'numeric')\n", (1264, 1287), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1297, 1322), 'dwgenerator.dbobjects.Column', 'Column', (['"""rec_src"""', '"""text"""'], {}), "('rec_src', 'text')\n", (1303, 1322), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1546, 1589), 'dwgenerator.dbobjects.Column', 'Column', (['"""sales_line_customer_l_key"""', '"""text"""'], {}), "('sales_line_customer_l_key', 'text')\n", (1552, 1589), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1599, 1631), 'dwgenerator.dbobjects.Column', 'Column', (['"""sales_line_key"""', '"""text"""'], {}), "('sales_line_key', 'text')\n", (1605, 1631), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1641, 1671), 'dwgenerator.dbobjects.Column', 'Column', (['"""customer_key"""', '"""text"""'], {}), "('customer_key', 'text')\n", (1647, 1671), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1681, 1710), 'dwgenerator.dbobjects.Column', 'Column', (['"""load_dts"""', '"""numeric"""'], {}), "('load_dts', 'numeric')\n", (1687, 1710), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1720, 1745), 'dwgenerator.dbobjects.Column', 'Column', (['"""rec_src"""', '"""text"""'], {}), "('rec_src', 'text')\n", (1726, 1745), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1945, 1975), 'dwgenerator.dbobjects.Column', 'Column', (['"""customer_key"""', '"""text"""'], {}), "('customer_key', 'text')\n", (1951, 1975), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((1985, 2014), 'dwgenerator.dbobjects.Column', 'Column', (['"""load_dts"""', '"""numeric"""'], {}), "('load_dts', 'numeric')\n", (1991, 2014), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((2024, 2045), 'dwgenerator.dbobjects.Column', 'Column', (['"""ssn"""', '"""text"""'], {}), "('ssn', 'text')\n", (2030, 2045), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((2055, 2077), 'dwgenerator.dbobjects.Column', 'Column', (['"""name"""', '"""text"""'], {}), "('name', 'text')\n", (2061, 2077), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n'), ((2087, 2112), 'dwgenerator.dbobjects.Column', 'Column', (['"""rec_src"""', '"""text"""'], {}), "('rec_src', 'text')\n", (2093, 2112), False, 'from dwgenerator.dbobjects import Schema, Table, Column, create_typed_table, Hub, Link, Satellite, MetaDataError, MetaDataWarning\n')]
|
#coding: utf-8
from __future__ import unicode_literals
import sys
import webtest
from webtest.debugapp import debug_app
from webob import Request
from webob.response import gzip_app_iter
from webtest.compat import PY3
from tests.compat import unittest
import webbrowser
def links_app(environ, start_response):
req = Request(environ)
status = "200 OK"
responses = {
'/': """
<html>
<head><title>page with links</title></head>
<body>
<a href="/foo/">Foo</a>
<a href='bar'>Bar</a>
<a href='baz/' id='id_baz'>Baz</a>
<a href='#' id='fake_baz'>Baz</a>
<a href='javascript:alert("123")' id='js_baz'>Baz</a>
<script>
var link = "<a href='/boo/'>Boo</a>";
</script>
<a href='/spam/'>Click me!</a>
<a href='/egg/'>Click me!</a>
<button
id="button1"
onclick="location.href='/foo/'"
>Button</button>
<button
id="button2">Button</button>
<button
id="button3"
onclick="lomistakecation.href='/foo/'"
>Button</button>
</body>
</html>
""",
'/foo/': (
'<html><body>This is foo. <a href="bar">Bar</a> '
'</body></html>'
),
'/foo/bar': '<html><body>This is foobar.</body></html>',
'/bar': '<html><body>This is bar.</body></html>',
'/baz/': '<html><body>This is baz.</body></html>',
'/spam/': '<html><body>This is spam.</body></html>',
'/egg/': '<html><body>Just eggs.</body></html>',
'/utf8/': """
<html>
<head><title>Тестовая страница</title></head>
<body>
<a href='/foo/'>Менделеев</a>
<a href='/baz/' title='Поэт'>Пушкин</a>
<img src='/egg/' title='Поэт'>
<script>
var link = "<a href='/boo/'>Злодейская ссылка</a>";
</script>
</body>
</html>
""",
'/no_form/': """
<html>
<head><title>Page without form</title></head>
<body>
<h1>This is not the form you are looking for</h1>
</body>
</html>
""",
'/one_forms/': """
<html>
<head><title>Page with one form</title></head>
<body>
<form method="POST" id="first_form"></form>
</body>
</html>
""",
'/many_forms/': """
<html>
<head><title>Page with many forms</title></head>
<body>
<form method="POST" id="first_form"></form>
<form method="POST" id="second_form"></form>
</body>
</html>
""",
'/html_in_anchor/': """
<html>
<head><title>Page with HTML in an anchor tag</title></head>
<body>
<a href='/foo/'>Foo Bar<span class='baz qux'>Quz</span></a>
</body>
</html>
""",
'/json/': '{"foo": "bar"}',
}
utf8_paths = ['/utf8/']
body = responses[req.path_info]
body = body.encode('utf8')
headers = [
('Content-Type', str('text/html')),
('Content-Length', str(len(body)))
]
if req.path_info in utf8_paths:
headers[0] = ('Content-Type', str('text/html; charset=utf-8'))
# PEP 3333 requires native strings:
headers = [(str(k), str(v)) for k, v in headers]
start_response(str(status), headers)
return [body]
def gzipped_app(environ, start_response):
status = "200 OK"
encoded_body = list(gzip_app_iter([b'test']))
headers = [
('Content-Type', str('text/html')),
('Content-Encoding', str('gzip')),
]
# PEP 3333 requires native strings:
headers = [(str(k), str(v)) for k, v in headers]
start_response(str(status), headers)
return encoded_body
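# gzip_app_iter above compresses the body chunks; webtest is expected to decode
# the gzip Content-Encoding again (test_content_dezips below asserts the body
# comes back as b'test').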
class TestResponse(unittest.TestCase):
def test_repr(self):
def _repr(v):
br = repr(v)
if len(br) > 18:
br = br[:10] + '...' + br[-5:]
br += '/%s' % len(v)
return br
app = webtest.TestApp(debug_app)
res = app.post('/')
self.assertEqual(
repr(res),
'<200 OK text/plain body=%s>' % _repr(res.body)
)
res.content_type = None
self.assertEqual(
repr(res),
'<200 OK body=%s>' % _repr(res.body)
)
res.location = 'http://pylons.org'
self.assertEqual(
repr(res),
'<200 OK location: http://pylons.org body=%s>' % _repr(res.body)
)
res.body = b''
self.assertEqual(
repr(res),
'<200 OK location: http://pylons.org no body>'
)
def test_mustcontains(self):
app = webtest.TestApp(debug_app)
res = app.post('/', params='foobar')
res.mustcontain('foobar')
self.assertRaises(IndexError, res.mustcontain, 'not found')
res.mustcontain('foobar', no='not found')
res.mustcontain('foobar', no=['not found', 'not found either'])
self.assertRaises(IndexError, res.mustcontain, no='foobar')
self.assertRaises(
TypeError,
res.mustcontain, invalid_param='foobar'
)
def test_click(self):
app = webtest.TestApp(links_app)
self.assertIn('This is foo.', app.get('/').click('Foo'))
self.assertIn(
'This is foobar.',
app.get('/').click('Foo').click('Bar')
)
self.assertIn('This is bar.', app.get('/').click('Bar'))
# should skip non-clickable links
self.assertIn(
'This is baz.',
app.get('/').click('Baz')
)
self.assertIn('This is baz.', app.get('/').click(linkid='id_baz'))
self.assertIn('This is baz.', app.get('/').click(href='baz/'))
self.assertIn(
'This is spam.',
app.get('/').click('Click me!', index=0)
)
self.assertIn(
'Just eggs.',
app.get('/').click('Click me!', index=1)
)
self.assertIn(
'This is foo.',
app.get('/html_in_anchor/').click('baz qux')
)
def dont_match_anchor_tag():
app.get('/html_in_anchor/').click('href')
self.assertRaises(IndexError, dont_match_anchor_tag)
def multiple_links():
app.get('/').click('Click me!')
self.assertRaises(IndexError, multiple_links)
def invalid_index():
app.get('/').click('Click me!', index=2)
self.assertRaises(IndexError, invalid_index)
def no_links_found():
app.get('/').click('Ham')
self.assertRaises(IndexError, no_links_found)
def tag_inside_script():
app.get('/').click('Boo')
self.assertRaises(IndexError, tag_inside_script)
def test_click_utf8(self):
app = webtest.TestApp(links_app, use_unicode=False)
resp = app.get('/utf8/')
self.assertEqual(resp.charset, 'utf-8')
if not PY3:
# No need to deal with that in Py3
self.assertIn("Тестовая страница".encode('utf8'), resp)
self.assertIn("Тестовая страница", resp, resp)
target = 'Менделеев'.encode('utf8')
self.assertIn('This is foo.', resp.click(target, verbose=True))
def test_click_u(self):
app = webtest.TestApp(links_app)
resp = app.get('/utf8/')
self.assertIn("Тестовая страница", resp)
self.assertIn('This is foo.', resp.click('Менделеев'))
def test_clickbutton(self):
app = webtest.TestApp(links_app)
self.assertIn(
'This is foo.',
app.get('/').clickbutton(buttonid='button1', verbose=True)
)
self.assertRaises(
IndexError,
app.get('/').clickbutton, buttonid='button2'
)
self.assertRaises(
IndexError,
app.get('/').clickbutton, buttonid='button3'
)
def test_referer(self):
app = webtest.TestApp(links_app)
resp = app.get('/').click('Foo')
self.assertIn('Referer', resp.request.headers)
self.assertEqual(resp.request.headers['Referer'], 'http://localhost/')
resp = app.get('/').clickbutton(buttonid='button1')
self.assertIn('Referer', resp.request.headers)
self.assertEqual(resp.request.headers['Referer'], 'http://localhost/')
resp = app.get('/one_forms/').form.submit()
self.assertIn('Referer', resp.request.headers)
self.assertEqual(resp.request.headers['Referer'], 'http://localhost/one_forms/')
def test_xml_attribute(self):
app = webtest.TestApp(links_app)
resp = app.get('/no_form/')
self.assertRaises(
AttributeError,
getattr,
resp, 'xml'
)
resp.content_type = 'text/xml'
resp.xml
@unittest.skipIf('PyPy' in sys.version, 'skip lxml tests on pypy')
def test_lxml_attribute(self):
app = webtest.TestApp(links_app)
resp = app.post('/')
resp.content_type = 'text/xml'
print(resp.body)
print(resp.lxml)
def test_html_attribute(self):
app = webtest.TestApp(links_app)
res = app.post('/')
res.content_type = 'text/plain'
self.assertRaises(
AttributeError,
getattr, res, 'html'
)
def test_no_form(self):
app = webtest.TestApp(links_app)
resp = app.get('/no_form/')
self.assertRaises(
TypeError,
getattr,
resp, 'form'
)
def test_one_forms(self):
app = webtest.TestApp(links_app)
resp = app.get('/one_forms/')
self.assertEqual(resp.form.id, 'first_form')
def test_too_many_forms(self):
app = webtest.TestApp(links_app)
resp = app.get('/many_forms/')
self.assertRaises(
TypeError,
getattr,
resp, 'form'
)
def test_showbrowser(self):
def open_new(f):
self.filename = f
webbrowser.open_new = open_new
app = webtest.TestApp(debug_app)
res = app.post('/')
res.showbrowser()
def test_unicode_normal_body(self):
app = webtest.TestApp(debug_app)
res = app.post('/')
self.assertRaises(
AttributeError,
getattr, res, 'unicode_normal_body'
)
res.charset = 'latin1'
res.body = 'été'.encode('latin1')
self.assertEqual(res.unicode_normal_body, 'été')
def test_testbody(self):
app = webtest.TestApp(debug_app)
res = app.post('/')
res.charset = 'utf8'
res.body = 'été'.encode('latin1')
res.testbody
def test_xml(self):
app = webtest.TestApp(links_app)
resp = app.get('/no_form/')
self.assertRaises(
AttributeError,
getattr,
resp, 'xml'
)
resp.content_type = 'text/xml'
resp.xml
def test_json(self):
app = webtest.TestApp(links_app)
resp = app.get('/json/')
with self.assertRaises(AttributeError):
resp.json
resp.content_type = 'text/json'
self.assertIn('foo', resp.json)
resp.content_type = 'application/json'
self.assertIn('foo', resp.json)
resp.content_type = 'application/vnd.webtest+json'
self.assertIn('foo', resp.json)
def test_unicode(self):
app = webtest.TestApp(links_app)
resp = app.get('/')
if not PY3:
unicode(resp)
print(resp.__unicode__())
def test_content_dezips(self):
app = webtest.TestApp(gzipped_app)
resp = app.get('/')
self.assertEqual(resp.body, b'test')
class TestFollow(unittest.TestCase):
def get_redirects_app(self, count=1, locations=None):
"""Return an app that issues a redirect ``count`` times"""
remaining_redirects = [count]  # one-element list: a Python 2 compatible "nonlocal"
if locations is None:
locations = ['/'] * count
def app(environ, start_response):
headers = [('Content-Type', str('text/html'))]
if remaining_redirects[0] == 0:
status = "200 OK"
body = b"done"
else:
status = "302 Found"
body = b''
nextloc = str(locations.pop(0))
headers.append(('location', nextloc))
remaining_redirects[0] -= 1
headers.append(('Content-Length', str(len(body))))
# PEP 3333 requires native strings:
headers = [(str(k), str(v)) for k, v in headers]
start_response(str(status), headers)
return [body]
return webtest.TestApp(app)
def test_follow_with_cookie(self):
app = webtest.TestApp(debug_app)
app.get('/?header-set-cookie=foo=bar')
self.assertEqual(app.cookies['foo'], 'bar')
resp = app.get('/?status=302%20Found&header-location=/')
resp = resp.follow()
resp.mustcontain('HTTP_COOKIE: foo=bar')
def test_follow(self):
app = self.get_redirects_app(1)
resp = app.get('/')
self.assertEqual(resp.status_int, 302)
resp = resp.follow()
self.assertEqual(resp.body, b'done')
# can't follow non-redirect
self.assertRaises(AssertionError, resp.follow)
def test_follow_relative(self):
app = self.get_redirects_app(2, ['hello/foo/', 'bar'])
resp = app.get('/')
self.assertEqual(resp.status_int, 302)
resp = resp.follow()
self.assertEqual(resp.status_int, 302)
resp = resp.follow()
self.assertEqual(resp.body, b'done')
self.assertEqual(resp.request.url, 'http://localhost/hello/foo/bar')
def test_follow_twice(self):
app = self.get_redirects_app(2)
resp = app.get('/').follow()
self.assertEqual(resp.status_int, 302)
resp = resp.follow()
self.assertEqual(resp.status_int, 200)
def test_maybe_follow_200(self):
app = self.get_redirects_app(0)
resp = app.get('/').maybe_follow()
self.assertEqual(resp.body, b'done')
def test_maybe_follow_once(self):
app = self.get_redirects_app(1)
resp = app.get('/').maybe_follow()
self.assertEqual(resp.body, b'done')
def test_maybe_follow_twice(self):
app = self.get_redirects_app(2)
resp = app.get('/').maybe_follow()
self.assertEqual(resp.body, b'done')
def test_maybe_follow_infinite(self):
app = self.get_redirects_app(100000)
self.assertRaises(AssertionError, app.get('/').maybe_follow)
|
[
"tests.compat.unittest.skipIf",
"webob.response.gzip_app_iter",
"webtest.TestApp",
"webob.Request"
] |
[((327, 343), 'webob.Request', 'Request', (['environ'], {}), '(environ)\n', (334, 343), False, 'from webob import Request\n'), ((9481, 9546), 'tests.compat.unittest.skipIf', 'unittest.skipIf', (["('PyPy' in sys.version)", '"""skip lxml tests on pypy"""'], {}), "('PyPy' in sys.version, 'skip lxml tests on pypy')\n", (9496, 9546), False, 'from tests.compat import unittest\n'), ((4078, 4102), 'webob.response.gzip_app_iter', 'gzip_app_iter', (["[b'test']"], {}), "([b'test'])\n", (4091, 4102), False, 'from webob.response import gzip_app_iter\n'), ((4635, 4661), 'webtest.TestApp', 'webtest.TestApp', (['debug_app'], {}), '(debug_app)\n', (4650, 4661), False, 'import webtest\n'), ((5318, 5344), 'webtest.TestApp', 'webtest.TestApp', (['debug_app'], {}), '(debug_app)\n', (5333, 5344), False, 'import webtest\n'), ((5835, 5861), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (5850, 5861), False, 'import webtest\n'), ((7455, 7500), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {'use_unicode': '(False)'}), '(links_app, use_unicode=False)\n', (7470, 7500), False, 'import webtest\n'), ((7943, 7969), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (7958, 7969), False, 'import webtest\n'), ((8163, 8189), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (8178, 8189), False, 'import webtest\n'), ((8601, 8627), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (8616, 8627), False, 'import webtest\n'), ((9244, 9270), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (9259, 9270), False, 'import webtest\n'), ((9596, 9622), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (9611, 9622), False, 'import webtest\n'), ((9791, 9817), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (9806, 9817), False, 'import webtest\n'), ((10027, 10053), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (10042, 10053), False, 'import webtest\n'), ((10242, 10268), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (10257, 10268), False, 'import webtest\n'), ((10411, 10437), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (10426, 10437), False, 'import webtest\n'), ((10726, 10752), 'webtest.TestApp', 'webtest.TestApp', (['debug_app'], {}), '(debug_app)\n', (10741, 10752), False, 'import webtest\n'), ((10862, 10888), 'webtest.TestApp', 'webtest.TestApp', (['debug_app'], {}), '(debug_app)\n', (10877, 10888), False, 'import webtest\n'), ((11204, 11230), 'webtest.TestApp', 'webtest.TestApp', (['debug_app'], {}), '(debug_app)\n', (11219, 11230), False, 'import webtest\n'), ((11390, 11416), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (11405, 11416), False, 'import webtest\n'), ((11661, 11687), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (11676, 11687), False, 'import webtest\n'), ((12104, 12130), 'webtest.TestApp', 'webtest.TestApp', (['links_app'], {}), '(links_app)\n', (12119, 12130), False, 'import webtest\n'), ((12291, 12319), 'webtest.TestApp', 'webtest.TestApp', (['gzipped_app'], {}), '(gzipped_app)\n', (12306, 12319), False, 'import webtest\n'), ((13394, 13414), 'webtest.TestApp', 'webtest.TestApp', (['app'], {}), '(app)\n', (13409, 13414), False, 'import webtest\n'), ((13469, 13495), 'webtest.TestApp', 'webtest.TestApp', (['debug_app'], {}), '(debug_app)\n', (13484, 13495), False, 'import webtest\n')]
|
import numpy as np
from pyqmc.energy import energy
from pyqmc.accumulators import LinearTransform
def test_transform():
""" Just prints things out;
TODO: figure out a thing to test.
"""
from pyscf import gto, scf
import pyqmc
r = 1.54 / 0.529177
mol = gto.M(
atom="H 0. 0. 0.; H 0. 0. %g" % r,
ecp="bfd",
basis="bfd_vtz",
unit="bohr",
verbose=1,
)
mf = scf.RHF(mol).run()
wf = pyqmc.slater_jastrow(mol, mf)
enacc = pyqmc.EnergyAccumulator(mol)
print(list(wf.parameters.keys()))
transform = LinearTransform(wf.parameters)
x = transform.serialize_parameters(wf.parameters)
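# x is the flat serialized parameter vector; the asserts below verify that
# serialize_gradients returns one row per configuration and one column per
# serialized parameter.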
nconfig = 10
configs = pyqmc.initial_guess(mol, nconfig)
wf.recompute(configs)
pgrad = wf.pgradient()
gradtrans = transform.serialize_gradients(pgrad)
assert gradtrans.shape[1] == len(x)
assert gradtrans.shape[0] == nconfig
if __name__ == "__main__":
test_transform()
|
[
"pyqmc.slater_jastrow",
"pyqmc.accumulators.LinearTransform",
"pyscf.gto.M",
"pyscf.scf.RHF",
"pyqmc.EnergyAccumulator",
"pyqmc.initial_guess"
] |
[((284, 381), 'pyscf.gto.M', 'gto.M', ([], {'atom': "('H 0. 0. 0.; H 0. 0. %g' % r)", 'ecp': '"""bfd"""', 'basis': '"""bfd_vtz"""', 'unit': '"""bohr"""', 'verbose': '(1)'}), "(atom='H 0. 0. 0.; H 0. 0. %g' % r, ecp='bfd', basis='bfd_vtz', unit=\n 'bohr', verbose=1)\n", (289, 381), False, 'from pyscf import gto, scf\n'), ((461, 490), 'pyqmc.slater_jastrow', 'pyqmc.slater_jastrow', (['mol', 'mf'], {}), '(mol, mf)\n', (481, 490), False, 'import pyqmc\n'), ((503, 531), 'pyqmc.EnergyAccumulator', 'pyqmc.EnergyAccumulator', (['mol'], {}), '(mol)\n', (526, 531), False, 'import pyqmc\n'), ((586, 616), 'pyqmc.accumulators.LinearTransform', 'LinearTransform', (['wf.parameters'], {}), '(wf.parameters)\n', (601, 616), False, 'from pyqmc.accumulators import LinearTransform\n'), ((703, 736), 'pyqmc.initial_guess', 'pyqmc.initial_guess', (['mol', 'nconfig'], {}), '(mol, nconfig)\n', (722, 736), False, 'import pyqmc\n'), ((433, 445), 'pyscf.scf.RHF', 'scf.RHF', (['mol'], {}), '(mol)\n', (440, 445), False, 'from pyscf import gto, scf\n')]
|
"""ProbsMeasurer's module."""
import numpy as np
from mlscratch.tensor import Tensor
from .measurer import Measurer
class ProbsMeasurer(Measurer[float]):
"""Computes how many samples were evaluated correctly by
getting the most probable label/index in the probability array."""
def measure(
self,
result: Tensor,
expected: Tensor) -> float:
batch_size, *_ = result.shape
result_max_indices = np.argmax(result, axis=-1)
expected_max_indices = np.argmax(expected, axis=-1)
asserts = np.sum(result_max_indices == expected_max_indices)
return asserts / batch_size
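# Illustrative sketch (values invented): for probs [[0.9, 0.1], [0.2, 0.8], [0.6, 0.4]]
# and one-hot labels [[1, 0], [0, 1], [0, 1]], rows 0 and 1 match and row 2 does
# not, so measure() would return 2/3.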
|
[
"numpy.sum",
"numpy.argmax"
] |
[((459, 485), 'numpy.argmax', 'np.argmax', (['result'], {'axis': '(-1)'}), '(result, axis=-1)\n', (468, 485), True, 'import numpy as np\n'), ((517, 545), 'numpy.argmax', 'np.argmax', (['expected'], {'axis': '(-1)'}), '(expected, axis=-1)\n', (526, 545), True, 'import numpy as np\n'), ((564, 614), 'numpy.sum', 'np.sum', (['(result_max_indices == expected_max_indices)'], {}), '(result_max_indices == expected_max_indices)\n', (570, 614), True, 'import numpy as np\n')]
|
from rest_framework import permissions, renderers, viewsets
from cbe.physical_object.models import Structure, Vehicle, Device, Owner
from cbe.physical_object.serializers import StructureSerializer, VehicleSerializer, DeviceSerializer
class StructureViewSet(viewsets.ModelViewSet):
queryset = Structure.objects.all()
serializer_class = StructureSerializer
permission_classes = (permissions.DjangoModelPermissions, )
class VehicleViewSet(viewsets.ModelViewSet):
queryset = Vehicle.objects.all()
serializer_class = VehicleSerializer
permission_classes = (permissions.DjangoModelPermissions, )
class DeviceViewSet(viewsets.ModelViewSet):
queryset = Device.objects.all()
serializer_class = DeviceSerializer
permission_classes = (permissions.DjangoModelPermissions, )
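# Hypothetical wiring sketch (not part of this module): these viewsets would
# typically be exposed through a DRF router in urls.py, e.g.:
# from rest_framework import routers
# router = routers.DefaultRouter()
# router.register(r'structures', StructureViewSet)
# router.register(r'vehicles', VehicleViewSet)
# router.register(r'devices', DeviceViewSet)
# urlpatterns = router.urls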
|
[
"cbe.physical_object.models.Vehicle.objects.all",
"cbe.physical_object.models.Structure.objects.all",
"cbe.physical_object.models.Device.objects.all"
] |
[((299, 322), 'cbe.physical_object.models.Structure.objects.all', 'Structure.objects.all', ([], {}), '()\n', (320, 322), False, 'from cbe.physical_object.models import Structure, Vehicle, Device, Owner\n'), ((496, 517), 'cbe.physical_object.models.Vehicle.objects.all', 'Vehicle.objects.all', ([], {}), '()\n', (515, 517), False, 'from cbe.physical_object.models import Structure, Vehicle, Device, Owner\n'), ((684, 704), 'cbe.physical_object.models.Device.objects.all', 'Device.objects.all', ([], {}), '()\n', (702, 704), False, 'from cbe.physical_object.models import Structure, Vehicle, Device, Owner\n')]
|
from django.db import models
from rest_framework import serializers
from django.contrib import auth
from django.core.validators import MaxValueValidator, MinValueValidator
from datetime import datetime
class Message(models.Model):
subject = models.CharField(max_length=200)
body = models.TextField()
class MessageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Message
fields = ('url', 'subject', 'body', 'pk')
class User(auth.models.User):
user_ptr = None
class PlantMoistLvl(models.Model):
VERY_LOW = 0
LOW = 1
MID = 2
HIGH = 3
VERY_HIGH = 4
LEVEL_CHOICES = (
(VERY_LOW, 'Very Low'),
(LOW, 'Low'),
(MID, 'Medium'),
(HIGH, 'High'),
(VERY_HIGH, 'Very High'),
)
plant_name = models.CharField(max_length=100, default='')
min_moist_lvl = models.IntegerField(
choices= LEVEL_CHOICES,
default= VERY_LOW
)
max_moist_lvl = models.IntegerField(
choices= LEVEL_CHOICES,
default= VERY_HIGH
)
def __str__(self):
lvl_choice_arr = ['Very Low', 'Low', 'Medium', 'High', 'Very High']
return self.plant_name +', Min: '+ lvl_choice_arr[self.min_moist_lvl]+', Max: '+ lvl_choice_arr[self.max_moist_lvl]
class PlantPh(models.Model):
plant_name = models.CharField(max_length=100, default='')
min_ph = models.DecimalField(max_digits=10, decimal_places=2, default=0,validators=[MaxValueValidator(14), MinValueValidator(0)])
max_ph = models.DecimalField(max_digits=10, decimal_places=2, default=14,validators=[MaxValueValidator(14), MinValueValidator(0)])
def __str__(self):
return self.plant_name +', Min: '+ str(self.min_ph)+', Max: '+ str(self.max_ph)
class PlantLifeCycle(models.Model):
ANNUAL = 0
BIENNIAL = 1
PERENNIAL = 2
CYCLE_CHOICES = (
(ANNUAL, 'Annual - life shorter than a year'),
(BIENNIAL, 'Biennial - life around a year to two years'),
(PERENNIAL, 'Perennial - life about more than many years')
)
plant_name = models.CharField(max_length=100, default='')
life_cycle = models.IntegerField(
choices= CYCLE_CHOICES,
default= ANNUAL
)
def __str__(self):
choice_arr = ['Annual', 'Biennial', 'Perennial']
return self.plant_name +', '+ choice_arr[self.life_cycle]+' Plant'
class Plant(models.Model):
moist_data = models.ForeignKey(PlantMoistLvl, on_delete=models.CASCADE, default=0)
ph_data = models.ForeignKey(PlantPh, on_delete=models.CASCADE, default=0)
lifecycle_data = models.ForeignKey(PlantLifeCycle, on_delete=models.CASCADE, default=0)
plant_name = models.CharField(max_length=100, default='')
def __str__(self):
return self.plant_name
class NpkPerPh(models.Model):
LOW = 1
MID = 2
HIGH = 3
LEVEL_CHOICES = (
(LOW, 'Low'),
(MID, 'Medium'),
(HIGH, 'High'),
)
min_ph = models.DecimalField(max_digits=10, decimal_places=2,validators=[MaxValueValidator(14), MinValueValidator(0)], default=0)
max_ph = models.DecimalField(max_digits=10, decimal_places=2,validators=[MaxValueValidator(14), MinValueValidator(0)], default=0)
n_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
p_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
k_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
def __str__(self):
return str(self.min_ph) +' - '+str(self.max_ph)
class SoilType(models.Model):
VERY_LOW = 0
LOW = 1
MID = 2
HIGH = 3
VERY_HIGH = 4
LEVEL_CHOICES = (
(VERY_LOW, 'Very Low'),
(LOW, 'Low'),
(MID, 'Medium'),
(HIGH, 'High'),
(VERY_HIGH, 'Very High'),
)
name = models.CharField(max_length=100)
good_for_min_moist_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
good_for_max_moist_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
def __str__(self):
return self.name
class SoilProfile(models.Model):
owner = models.ForeignKey(auth.models.User, on_delete=models.CASCADE)
name = models.CharField(max_length=100)
location = models.CharField(max_length=256)
def __str__(self):
user = auth.models.User.objects.get(pk=self.owner.pk)
return user.username + " - " + self.name
class SensorRecord(models.Model):
soil_id = models.ForeignKey(SoilProfile, on_delete=models.CASCADE)
moist = models.DecimalField(max_digits=10, decimal_places=2, default=0)
ph = models.DecimalField(max_digits=10, decimal_places=2, default=7)
record_date = models.DateTimeField(default=datetime.now, null=True)
record_frequency_min = models.DecimalField(max_digits=10, decimal_places=2, validators=[MaxValueValidator(10080), MinValueValidator(0.1)], default=0.1)
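# Validator bounds above: 0.1 minute (6 seconds) up to 10080 minutes (one week).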
def __str__(self):
soil = SoilProfile.objects.get(pk=self.soil_id.pk)
return soil.name + " - " + str(self.record_date)
class Recommendation(models.Model):
LOW = 1
MID = 2
HIGH = 3
LEVEL_CHOICES = (
(LOW, 'Low'),
(MID, 'Medium'),
(HIGH, 'High'),
)
soil_id = models.ForeignKey(SoilProfile, on_delete=models.CASCADE)
recco_time = models.DateTimeField(default=datetime.now, blank=True)
recco_n_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
recco_p_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
recco_k_lvl = models.IntegerField(
choices= LEVEL_CHOICES
)
def __str__(self):
soil = SoilProfile.objects.get(pk=self.soil_id.pk)
return soil.name
class RecommendedPlant(models.Model):
recco_id = models.ForeignKey(Recommendation, on_delete=models.CASCADE)
plant_name = models.CharField(max_length=100, default='')
soil_type_name = models.CharField(max_length=100, default='')
def __str__(self):
return self.plant_name + ", " + self.soil_type_name
|
[
"django.db.models.TextField",
"django.contrib.auth.models.User.objects.get",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.core.validators.MinValueValidator",
"django.db.models.IntegerField",
"django.db.models.DecimalField",
"django.db.models.DateTimeField",
"django.core.validators.MaxValueValidator"
] |
[((247, 279), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (263, 279), False, 'from django.db import models\n'), ((291, 309), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (307, 309), False, 'from django.db import models\n'), ((807, 851), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (823, 851), False, 'from django.db import models\n'), ((872, 932), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES', 'default': 'VERY_LOW'}), '(choices=LEVEL_CHOICES, default=VERY_LOW)\n', (891, 932), False, 'from django.db import models\n'), ((977, 1038), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES', 'default': 'VERY_HIGH'}), '(choices=LEVEL_CHOICES, default=VERY_HIGH)\n', (996, 1038), False, 'from django.db import models\n'), ((1333, 1377), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (1349, 1377), False, 'from django.db import models\n'), ((2078, 2122), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (2094, 2122), False, 'from django.db import models\n'), ((2140, 2198), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'CYCLE_CHOICES', 'default': 'ANNUAL'}), '(choices=CYCLE_CHOICES, default=ANNUAL)\n', (2159, 2198), False, 'from django.db import models\n'), ((2423, 2492), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PlantMoistLvl'], {'on_delete': 'models.CASCADE', 'default': '(0)'}), '(PlantMoistLvl, on_delete=models.CASCADE, default=0)\n', (2440, 2492), False, 'from django.db import models\n'), ((2507, 2570), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PlantPh'], {'on_delete': 'models.CASCADE', 'default': '(0)'}), '(PlantPh, on_delete=models.CASCADE, default=0)\n', (2524, 2570), False, 'from django.db import models\n'), ((2592, 2662), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PlantLifeCycle'], {'on_delete': 'models.CASCADE', 'default': '(0)'}), '(PlantLifeCycle, on_delete=models.CASCADE, default=0)\n', (2609, 2662), False, 'from django.db import models\n'), ((2680, 2724), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (2696, 2724), False, 'from django.db import models\n'), ((3226, 3268), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (3245, 3268), False, 'from django.db import models\n'), ((3296, 3338), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (3315, 3338), False, 'from django.db import models\n'), ((3366, 3408), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (3385, 3408), False, 'from django.db import models\n'), ((3782, 3814), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (3798, 3814), False, 'from django.db import models\n'), ((3844, 3886), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (3863, 3886), False, 'from django.db import models\n'), ((3931, 3973), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (3950, 3973), False, 'from django.db import models\n'), ((4083, 4144), 'django.db.models.ForeignKey', 'models.ForeignKey', (['auth.models.User'], {'on_delete': 'models.CASCADE'}), '(auth.models.User, on_delete=models.CASCADE)\n', (4100, 4144), False, 'from django.db import models\n'), ((4156, 4188), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (4172, 4188), False, 'from django.db import models\n'), ((4203, 4235), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (4219, 4235), False, 'from django.db import models\n'), ((4423, 4479), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SoilProfile'], {'on_delete': 'models.CASCADE'}), '(SoilProfile, on_delete=models.CASCADE)\n', (4440, 4479), False, 'from django.db import models\n'), ((4492, 4555), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(10)', 'decimal_places': '(2)', 'default': '(0)'}), '(max_digits=10, decimal_places=2, default=0)\n', (4511, 4555), False, 'from django.db import models\n'), ((4565, 4628), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(10)', 'decimal_places': '(2)', 'default': '(7)'}), '(max_digits=10, decimal_places=2, default=7)\n', (4584, 4628), False, 'from django.db import models\n'), ((4647, 4700), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'datetime.now', 'null': '(True)'}), '(default=datetime.now, null=True)\n', (4667, 4700), False, 'from django.db import models\n'), ((5183, 5239), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SoilProfile'], {'on_delete': 'models.CASCADE'}), '(SoilProfile, on_delete=models.CASCADE)\n', (5200, 5239), False, 'from django.db import models\n'), ((5257, 5311), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'datetime.now', 'blank': '(True)'}), '(default=datetime.now, blank=True)\n', (5277, 5311), False, 'from django.db import models\n'), ((5330, 5372), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (5349, 5372), False, 'from django.db import models\n'), ((5406, 5448), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (5425, 5448), False, 'from django.db import models\n'), ((5482, 5524), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LEVEL_CHOICES'}), '(choices=LEVEL_CHOICES)\n', (5501, 5524), False, 'from django.db import models\n'), ((5701, 5760), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Recommendation'], {'on_delete': 'models.CASCADE'}), '(Recommendation, on_delete=models.CASCADE)\n', (5718, 5760), False, 'from django.db import models\n'), ((5778, 5822), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (5794, 5822), False, 'from django.db import models\n'), ((5844, 5888), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (5860, 5888), False, 'from django.db import models\n'), ((4274, 4320), 'django.contrib.auth.models.User.objects.get', 'auth.models.User.objects.get', ([], {'pk': 'self.owner.pk'}), '(pk=self.owner.pk)\n', (4302, 4320), False, 'from django.contrib import auth\n'), ((1466, 1487), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(14)'], {}), '(14)\n', (1483, 1487), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((1489, 1509), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (1506, 1509), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((1601, 1622), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(14)'], {}), '(14)\n', (1618, 1622), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((1624, 1644), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (1641, 1644), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((3023, 3044), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(14)'], {}), '(14)\n', (3040, 3044), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((3046, 3066), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (3063, 3066), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((3157, 3178), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(14)'], {}), '(14)\n', (3174, 3178), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((3180, 3200), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (3197, 3200), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((4793, 4817), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10080)'], {}), '(10080)\n', (4810, 4817), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((4819, 4841), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0.1)'], {}), '(0.1)\n', (4836, 4841), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n')]
|
import os
import re
import sys
from subprocess import check_output
def check_install_name(name):
"""Verify that the install_name is correct on mac"""
libname = "lib" + name + ".dylib"
path = os.path.join(sys.prefix, "lib", libname)
otool = check_output(["otool", "-L", path]).decode("utf8")
self_line = otool.splitlines()[1]
install_name = self_line.strip().split()[0]
    pat = r"@rpath/lib{}\.\d+\.dylib".format(name)  # raw string for the regex escapes
assert re.match(pat, install_name), "{} != {}".format(install_name, pat)
if sys.platform == "darwin":
for lib in (
"amd",
"btf",
"camd",
"ccolamd",
"cholmod",
"colamd",
"cxsparse",
"klu",
"ldl",
"rbio",
"spqr",
"suitesparseconfig",
"umfpack",
):
check_install_name(lib)
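
# For reference, a hedged sketch of the `otool -L` output this parser
# assumes (paths below are hypothetical; only the layout matters):
#
#   /usr/local/lib/libamd.dylib:
#       @rpath/libamd.2.dylib (compatibility version 3.0.0, ...)
#       /usr/lib/libSystem.B.dylib (compatibility version 1.0.0, ...)
#
# splitlines()[1] is the library's own install-name line, and
# strip().split()[0] isolates the "@rpath/..." token that re.match checks.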
|
[
"subprocess.check_output",
"os.path.join",
"re.match"
] |
[((205, 245), 'os.path.join', 'os.path.join', (['sys.prefix', '"""lib"""', 'libname'], {}), "(sys.prefix, 'lib', libname)\n", (217, 245), False, 'import os\n'), ((456, 483), 're.match', 're.match', (['pat', 'install_name'], {}), '(pat, install_name)\n', (464, 483), False, 'import re\n'), ((258, 293), 'subprocess.check_output', 'check_output', (["['otool', '-L', path]"], {}), "(['otool', '-L', path])\n", (270, 293), False, 'from subprocess import check_output\n')]
|
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import SGD
def createModel(totalPlayers):
    cp = []
for i in range(totalPlayers):
model = Sequential()
model.add(Dense(input_dim=3,units=7))
model.add(Activation("sigmoid"))
model.add(Dense(units=1))
model.add(Activation("sigmoid"))
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='mse',optimizer=sgd, metrics=['accuracy'])
cp.append(model)
return cp
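
# A hedged usage sketch: build one tiny network per player and run a dummy
# forward pass (numpy is assumed to be available alongside keras).
if __name__ == "__main__":
    import numpy as np
    players = createModel(2)
    dummy = np.zeros((1, 3))  # Dense(input_dim=3) expects rows of 3 features
    for i, model in enumerate(players):
        print("player", i, model.predict(dummy))  # one sigmoid output each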
|
[
"keras.models.Sequential",
"keras.layers.Dense",
"keras.optimizers.SGD",
"keras.layers.Activation"
] |
[((206, 218), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (216, 218), False, 'from keras.models import Sequential\n'), ((396, 450), 'keras.optimizers.SGD', 'SGD', ([], {'lr': '(0.01)', 'decay': '(1e-06)', 'momentum': '(0.9)', 'nesterov': '(True)'}), '(lr=0.01, decay=1e-06, momentum=0.9, nesterov=True)\n', (399, 450), False, 'from keras.optimizers import SGD\n'), ((237, 264), 'keras.layers.Dense', 'Dense', ([], {'input_dim': '(3)', 'units': '(7)'}), '(input_dim=3, units=7)\n', (242, 264), False, 'from keras.layers import Dense, Activation\n'), ((283, 304), 'keras.layers.Activation', 'Activation', (['"""sigmoid"""'], {}), "('sigmoid')\n", (293, 304), False, 'from keras.layers import Dense, Activation\n'), ((324, 338), 'keras.layers.Dense', 'Dense', ([], {'units': '(1)'}), '(units=1)\n', (329, 338), False, 'from keras.layers import Dense, Activation\n'), ((358, 379), 'keras.layers.Activation', 'Activation', (['"""sigmoid"""'], {}), "('sigmoid')\n", (368, 379), False, 'from keras.layers import Dense, Activation\n')]
|
import os
from db import Controller
db_path = os.path.join(__file__, "..", "RenderManager.db")
Controller.init(db_path)
# Create Job
job = Controller.create_job(
r"J:\UCG\Episodes\Scenes\EP100\SH002.00A\UCG_EP100_SH002.00A_CMP.nk",
"WRITE_IMG",
r"J:\UCG\UCG_Nuke10.bat",
"renderNuke.py",
frames=[i for i in range(100)]
)
# Query Job
JobList = Controller.Job.select()
for job in JobList:
print("Job", job.code)
print("Status", job.status().name)
print("Left", len([frame.number for frame in job.frame_left()]))
print("Avg", job.avg_time())
print("Eta", job.eta())
print("Clients", job.clients())
print()
# for frame in job.frames():
# print("Frame", frame.number)
|
[
"db.Controller.Job.select",
"db.Controller.init",
"os.path.join"
] |
[((47, 95), 'os.path.join', 'os.path.join', (['__file__', '""".."""', '"""RenderManager.db"""'], {}), "(__file__, '..', 'RenderManager.db')\n", (59, 95), False, 'import os\n'), ((98, 122), 'db.Controller.init', 'Controller.init', (['db_path'], {}), '(db_path)\n', (113, 122), False, 'from db import Controller\n'), ((399, 422), 'db.Controller.Job.select', 'Controller.Job.select', ([], {}), '()\n', (420, 422), False, 'from db import Controller\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Walk the .js files under the given directory and generate a single
# index.js that re-exports them all; beware of modules with duplicate names.
# Collect the files under the directory
import os
file_name_list = []
file_rel_path_dict = {}
def handle_dir(path):
if os.path.isdir(path):
dir_files = os.listdir(path)
for dir_file in dir_files:
handle_dir(os.path.join(path, dir_file))
else:
        # Record the file's relative path, keyed by module name.
        # (os.path.splitext strips the '.js' suffix; the original
        # rstrip('.js') would also eat trailing 'j'/'s' characters.)
        file_name = os.path.splitext(os.path.basename(path))[0]
        file_rel_path_dict[file_name] = os.path.relpath(path, os.getcwd())
        # Record the base file name in the list
file_name_list.append(file_name)
write_lines = []
def create_index_js_file():
    # Emit a require() line for every module found
for file_name in file_name_list:
write_lines.append('const {} = require(\'./{}\');\n'.format(file_name,file_rel_path_dict[file_name]))
write_lines.append('\n')
write_lines.append('module.exports = {\n')
    # Emit the export list
write_lines.append(','.join(file_name_list))
write_lines.append('\n}')
fo = open(os.path.join(os.getcwd(), "index_new.js"), "w")
fo.writelines(write_lines)
# Close the file
fo.close()
dir_path = input('please input dir path: ')
# dir_path = './lib'
dir_path = os.path.abspath(dir_path)
if os.path.isdir(dir_path):
handle_dir(dir_path)
create_index_js_file()
else:
    print('please input a directory!')
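
# A hedged sketch of the generated index_new.js, assuming the chosen
# directory contains the hypothetical files lib/a.js and lib/sub/b.js:
#
#   const a = require('./lib/a.js');
#   const b = require('./lib/sub/b.js');
#
#   module.exports = {
#   a,b
#   }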
|
[
"os.path.abspath",
"os.path.basename",
"os.path.isdir",
"os.getcwd",
"os.path.join",
"os.listdir"
] |
[((1143, 1168), 'os.path.abspath', 'os.path.abspath', (['dir_path'], {}), '(dir_path)\n', (1158, 1168), False, 'import os\n'), ((1172, 1195), 'os.path.isdir', 'os.path.isdir', (['dir_path'], {}), '(dir_path)\n', (1185, 1195), False, 'import os\n'), ((195, 214), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (208, 214), False, 'import os\n'), ((236, 252), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (246, 252), False, 'import os\n'), ((497, 508), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (506, 508), False, 'import os\n'), ((973, 984), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (982, 984), False, 'import os\n'), ((311, 339), 'os.path.join', 'os.path.join', (['path', 'dir_file'], {}), '(path, dir_file)\n', (323, 339), False, 'import os\n'), ((398, 420), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (414, 420), False, 'import os\n')]
|
from django.contrib import admin
from .models import URL
admin.site.register(URL)
|
[
"django.contrib.admin.site.register"
] |
[((58, 82), 'django.contrib.admin.site.register', 'admin.site.register', (['URL'], {}), '(URL)\n', (77, 82), False, 'from django.contrib import admin\n')]
|
from django.db import models
from django.contrib.auth.models import User
class Customer(models.Model):
user = models.OneToOneField(User,null=True,blank=True,on_delete=models.CASCADE)
    name = models.CharField(max_length=200,null=True)
email = models.CharField(max_length=200)
def __str__(self):
return self.name
class Mobile(models.Model):
name = models.CharField(max_length=200)
price = models.FloatField()
digital = models.BooleanField(default=False,null=True,blank=True)
image = models.ImageField(null=True, blank=True)
def __str__(self):
return self.name
@property
def imageURL(self):
try:
url = self.image.url
except:
url = ''
return url
# class Laptop(models.Model):
# name = models.CharField(max_length=200)
# price = models.FloatField()
# digital = models.BooleanField(default=False,null=True,blank=True)
# image = models.ImageField(null=True, blank=True)
# def __str__(self):
# return self.name
# @property
# def imageURL(self):
# try:
# url = self.image.url
# except:
# url = ''
# return url
# class Accessories(models.Model):
# name = models.CharField(max_length=200)
# price = models.FloatField()
# digital = models.BooleanField(default=False,null=True,blank=True)
# image = models.ImageField(null=True, blank=True)
# def __str__(self):
# return self.name
# @property
# def imageURL(self):
# try:
# url = self.image.url
# except:
# url = ''
# return url
class Order(models.Model):
customer = models.ForeignKey(Customer,on_delete=models.CASCADE,blank=True,null=True)
date_ordered = models.DateTimeField(auto_now_add=True)
completed = models.BooleanField(default=False,null=True,blank=False)
transaction_id = models.CharField(max_length=200)
def __str__(self):
return str(self.id)
@property
def get_cart_total(self):
        orderitems = self.orderitem_set.all()  # scope to this order's items, not every OrderItem
# total = sum([item.get_laptop_total for item in orderitems]) + sum([item.get_acc_total for item in orderitems])
total = sum([item.get_mobile_total for item in orderitems])
return total
@property
def get_cart_item(self):
        orderitems = self.orderitem_set.all()  # scope to this order's items, not every OrderItem
total = sum([i.quantity for i in orderitems])
return total
@property
def shipping(self):
shipping = False
orderitems = self.orderitem_set.all()
for i in orderitems:
if i.product_m.digital == False:
shipping = True
return shipping
class OrderItem(models.Model):
product_m = models.ForeignKey(Mobile,on_delete=models.SET_NULL,null=True)
# product_l = models.ForeignKey(Laptop,on_delete=models.SET_NULL,null=True)
# product_a = models.ForeignKey(Accessories,on_delete=models.SET_NULL,null=True)
order = models.ForeignKey(Order,on_delete=models.SET_NULL,null=True,blank=True)
quantity = models.IntegerField(default=0,null=True,blank=True)
date_added = models.DateTimeField(auto_now_add=True)
# def __str__(self):
# if (self.product_l__name is not None):
# return str(self.product_l__name)
# elif (self.product_a__name is not None):
# return str(self.product_a__name)
# elif (self.product_m__name is not None):
# return str(self.product_m__name)
# @property
# def get_laptop_total(self):
# if self.product_l:
# total_l = self.product_l.price * self.quantity
# return total_l
# else:
# return 0
# @property
# def get_acc_total(self):
# if self.product_a:
# total_a = self.product_a.price * self.quantity
# return total_a
# else:
# return 0
@property
def get_mobile_total(self):
if self.product_m:
total_m = self.product_m.price * self.quantity
return total_m
else:
return 0
class ShippingAddress(models.Model):
customer = models.ForeignKey(Customer,on_delete=models.SET_NULL,null=True)
order = models.ForeignKey(Order,on_delete=models.SET_NULL,null=True)
address = models.CharField(max_length=200,null=False)
city = models.CharField(max_length=200,null=False)
zipcode = models.CharField(max_length=200,null=False)
state = models.CharField(max_length=200,null=False)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.address
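
# Hedged usage sketch (assumes a configured Django project with migrations
# applied; all names and values below are illustrative):
#
#   customer = Customer.objects.create(name="Alice", email="alice@example.com")
#   order = Order.objects.create(customer=customer, transaction_id="t-1")
#   phone = Mobile.objects.create(name="Phone", price=199.0)
#   OrderItem.objects.create(product_m=phone, order=order, quantity=2)
#   order.get_cart_total  # -> 398.0, summed via OrderItem.get_mobile_total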
|
[
"django.db.models.OneToOneField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.FloatField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((115, 190), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'null': '(True)', 'blank': '(True)', 'on_delete': 'models.CASCADE'}), '(User, null=True, blank=True, on_delete=models.CASCADE)\n', (135, 190), False, 'from django.db import models\n'), ((198, 241), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)'}), '(max_length=200, null=True)\n', (214, 241), False, 'from django.db import models\n'), ((253, 285), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (269, 285), False, 'from django.db import models\n'), ((380, 412), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (396, 412), False, 'from django.db import models\n'), ((425, 444), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (442, 444), False, 'from django.db import models\n'), ((459, 516), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'null': '(True)', 'blank': '(True)'}), '(default=False, null=True, blank=True)\n', (478, 516), False, 'from django.db import models\n'), ((527, 567), 'django.db.models.ImageField', 'models.ImageField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (544, 567), False, 'from django.db import models\n'), ((1740, 1816), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Customer'], {'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), '(Customer, on_delete=models.CASCADE, blank=True, null=True)\n', (1757, 1816), False, 'from django.db import models\n'), ((1833, 1872), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1853, 1872), False, 'from django.db import models\n'), ((1889, 1947), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'null': '(True)', 'blank': '(False)'}), '(default=False, null=True, blank=False)\n', (1908, 1947), False, 'from django.db import models\n'), ((1967, 1999), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1983, 1999), False, 'from django.db import models\n'), ((2832, 2895), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Mobile'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(Mobile, on_delete=models.SET_NULL, null=True)\n', (2849, 2895), False, 'from django.db import models\n'), ((3071, 3145), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Order'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), '(Order, on_delete=models.SET_NULL, null=True, blank=True)\n', (3088, 3145), False, 'from django.db import models\n'), ((3163, 3216), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'null': '(True)', 'blank': '(True)'}), '(default=0, null=True, blank=True)\n', (3182, 3216), False, 'from django.db import models\n'), ((3236, 3275), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3256, 3275), False, 'from django.db import models\n'), ((4338, 4403), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Customer'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(Customer, on_delete=models.SET_NULL, null=True)\n', (4355, 4403), False, 'from django.db import models\n'), ((4414, 4476), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Order'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), 
'(Order, on_delete=models.SET_NULL, null=True)\n', (4431, 4476), False, 'from django.db import models\n'), ((4489, 4533), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(False)'}), '(max_length=200, null=False)\n', (4505, 4533), False, 'from django.db import models\n'), ((4544, 4588), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(False)'}), '(max_length=200, null=False)\n', (4560, 4588), False, 'from django.db import models\n'), ((4602, 4646), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(False)'}), '(max_length=200, null=False)\n', (4618, 4646), False, 'from django.db import models\n'), ((4658, 4702), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(False)'}), '(max_length=200, null=False)\n', (4674, 4702), False, 'from django.db import models\n'), ((4719, 4758), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4739, 4758), False, 'from django.db import models\n')]
|
from math import pi as PI
import torch
class Spherical(object):
r"""Saves the globally normalized three-dimensional spatial relation of
linked nodes as spherical coordinates (mapped to the fixed interval
:math:`[0, 1]`) in its edge attributes.
Args:
cat (bool, optional): Concat pseudo-coordinates to edge attributes
instead of replacing them. (default: :obj:`True`)
.. testsetup::
import torch
from torch_geometric.data import Data
.. testcode::
from torch_geometric.transforms import Spherical
pos = torch.tensor([[0, 0, 0], [0, 1, 1]], dtype=torch.float)
edge_index = torch.tensor([[0, 1], [1, 0]])
data = Data(edge_index=edge_index, pos=pos)
data = Spherical()(data)
print(data.edge_attr)
.. testoutput::
tensor([[1.0000, 0.2500, 0.0000],
[1.0000, 0.7500, 1.0000]])
"""
def __init__(self, cat=True):
self.cat = cat
def __call__(self, data):
(row, col), pos, pseudo = data.edge_index, data.pos, data.edge_attr
assert pos.dim() == 2 and pos.size(1) == 3
cart = pos[col] - pos[row]
rho = torch.norm(cart, p=2, dim=-1)
rho = rho / rho.max()
theta = torch.atan2(cart[..., 1], cart[..., 0]) / (2 * PI)
theta += (theta < 0).type_as(theta)
phi = torch.acos(cart[..., 2] / rho) / PI
spher = torch.stack([rho, theta, phi], dim=1)
if pseudo is not None and self.cat:
pseudo = pseudo.view(-1, 1) if pseudo.dim() == 1 else pseudo
data.edge_attr = torch.cat([pseudo, spher.type_as(pos)], dim=-1)
else:
data.edge_attr = spher
return data
def __repr__(self):
return '{}(cat={})'.format(self.__class__.__name__, self.cat)
|
[
"torch.norm",
"torch.atan2",
"torch.acos",
"torch.stack"
] |
[((1193, 1222), 'torch.norm', 'torch.norm', (['cart'], {'p': '(2)', 'dim': '(-1)'}), '(cart, p=2, dim=-1)\n', (1203, 1222), False, 'import torch\n'), ((1430, 1467), 'torch.stack', 'torch.stack', (['[rho, theta, phi]'], {'dim': '(1)'}), '([rho, theta, phi], dim=1)\n', (1441, 1467), False, 'import torch\n'), ((1269, 1308), 'torch.atan2', 'torch.atan2', (['cart[..., 1]', 'cart[..., 0]'], {}), '(cart[..., 1], cart[..., 0])\n', (1280, 1308), False, 'import torch\n'), ((1378, 1408), 'torch.acos', 'torch.acos', (['(cart[..., 2] / rho)'], {}), '(cart[..., 2] / rho)\n', (1388, 1408), False, 'import torch\n')]
|
import unittest
from ArmaProcess import ArmaProcess
import time
class ArmaProcessTestCase(unittest.TestCase):
def testGenSamples(self):
params = [3.75162180e-04, 1.70361201e+00, -7.30441228e-01, -6.22795336e-01, 3.05330848e-01]
fps = 100
ap = ArmaProcess(params[0], params[1:3], params[3:5], fps)
print(ap.generate_n(20))
for _ in range(3):
time.sleep(0.04)
print(ap.generate_frame())
|
[
"ArmaProcess.ArmaProcess",
"time.sleep"
] |
[((306, 359), 'ArmaProcess.ArmaProcess', 'ArmaProcess', (['params[0]', 'params[1:3]', 'params[3:5]', 'fps'], {}), '(params[0], params[1:3], params[3:5], fps)\n', (317, 359), False, 'from ArmaProcess import ArmaProcess\n'), ((480, 496), 'time.sleep', 'time.sleep', (['(0.04)'], {}), '(0.04)\n', (490, 496), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
from unittest import TestCase
import pandas
from pandas.testing import assert_frame_equal
from tstoolbox import tstoolbox, tsutils
class TestRead(TestCase):
def setUp(self):
dr = pandas.date_range("2000-01-01", periods=2, freq="D")
ts = pandas.Series([4.5, 4.6], index=dr)
self.read_direct = pandas.DataFrame(ts, columns=["Value"])
self.read_direct.index.name = "Datetime"
self.read_direct = tsutils.memory_optimize(self.read_direct)
self.read_cli = b"""Datetime,Value
2000-01-01,4.5
2000-01-02,4.6
"""
dr = pandas.date_range("2000-01-01", periods=5, freq="D")
ts = pandas.Series([4.5, 4.6, 4.7, 4.8, 4.9], index=dr)
self.read_direct_sparse = pandas.DataFrame(ts, columns=["Value"])
self.read_direct_sparse.index.name = "Datetime"
self.read_direct_sparse = tsutils.memory_optimize(self.read_direct_sparse)
self.read_cli_sparse = b"""Datetime,Value
2000-01-01,4.5
2000-01-02,4.6
2000-01-03,4.7
2000-01-04,4.8
2000-01-05,4.9
"""
def test_read_direct(self):
"""Test read API for single column - daily."""
out = tstoolbox.read("tests/data_simple_extra_rows.csv", skiprows=2)
assert_frame_equal(out, self.read_direct)
def test_read_direct_sparse(self):
"""Test read API for single column - daily."""
out = tstoolbox.read("tests/data_simple_extra_rows_sparse.csv", skiprows=[4, 6])
assert_frame_equal(out, self.read_direct_sparse)
|
[
"pandas.DataFrame",
"pandas.testing.assert_frame_equal",
"pandas.date_range",
"tstoolbox.tstoolbox.read",
"tstoolbox.tsutils.memory_optimize",
"pandas.Series"
] |
[((220, 272), 'pandas.date_range', 'pandas.date_range', (['"""2000-01-01"""'], {'periods': '(2)', 'freq': '"""D"""'}), "('2000-01-01', periods=2, freq='D')\n", (237, 272), False, 'import pandas\n'), ((287, 322), 'pandas.Series', 'pandas.Series', (['[4.5, 4.6]'], {'index': 'dr'}), '([4.5, 4.6], index=dr)\n', (300, 322), False, 'import pandas\n'), ((351, 390), 'pandas.DataFrame', 'pandas.DataFrame', (['ts'], {'columns': "['Value']"}), "(ts, columns=['Value'])\n", (367, 390), False, 'import pandas\n'), ((467, 508), 'tstoolbox.tsutils.memory_optimize', 'tsutils.memory_optimize', (['self.read_direct'], {}), '(self.read_direct)\n', (490, 508), False, 'from tstoolbox import tstoolbox, tsutils\n'), ((601, 653), 'pandas.date_range', 'pandas.date_range', (['"""2000-01-01"""'], {'periods': '(5)', 'freq': '"""D"""'}), "('2000-01-01', periods=5, freq='D')\n", (618, 653), False, 'import pandas\n'), ((668, 718), 'pandas.Series', 'pandas.Series', (['[4.5, 4.6, 4.7, 4.8, 4.9]'], {'index': 'dr'}), '([4.5, 4.6, 4.7, 4.8, 4.9], index=dr)\n', (681, 718), False, 'import pandas\n'), ((754, 793), 'pandas.DataFrame', 'pandas.DataFrame', (['ts'], {'columns': "['Value']"}), "(ts, columns=['Value'])\n", (770, 793), False, 'import pandas\n'), ((884, 932), 'tstoolbox.tsutils.memory_optimize', 'tsutils.memory_optimize', (['self.read_direct_sparse'], {}), '(self.read_direct_sparse)\n', (907, 932), False, 'from tstoolbox import tstoolbox, tsutils\n'), ((1165, 1227), 'tstoolbox.tstoolbox.read', 'tstoolbox.read', (['"""tests/data_simple_extra_rows.csv"""'], {'skiprows': '(2)'}), "('tests/data_simple_extra_rows.csv', skiprows=2)\n", (1179, 1227), False, 'from tstoolbox import tstoolbox, tsutils\n'), ((1236, 1277), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['out', 'self.read_direct'], {}), '(out, self.read_direct)\n', (1254, 1277), False, 'from pandas.testing import assert_frame_equal\n'), ((1387, 1461), 'tstoolbox.tstoolbox.read', 'tstoolbox.read', (['"""tests/data_simple_extra_rows_sparse.csv"""'], {'skiprows': '[4, 6]'}), "('tests/data_simple_extra_rows_sparse.csv', skiprows=[4, 6])\n", (1401, 1461), False, 'from tstoolbox import tstoolbox, tsutils\n'), ((1470, 1518), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['out', 'self.read_direct_sparse'], {}), '(out, self.read_direct_sparse)\n', (1488, 1518), False, 'from pandas.testing import assert_frame_equal\n')]
|
"""Widgets Helper Library.
A library of `ipywidgets` wrappers for notebook based reports and voila dashboards.
The library includes both python code and html/css/js elements that can be found in the
`./widgets` folder.
"""
import os
from jinja2 import Template
def stylesheet():
"""Load a default CSS stylesheet from file."""
with open(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "widgets", "style.css")
) as f:
style = f.read()
return style
def price_card(ticker: str, price: str, price_color: str = "neutral_color") -> str:
"""Prepare a styled HTML element of a 128 by 128 price card.
Parameters
----------
ticker : str
Instrument ticker for the price card
price : str
Instrument price as a string
price_color : str, optional
The color of the price. Accepts "up_color", "down_color" and default "neutral_color"
Returns
-------
str
HTML code as string
"""
with open(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "widgets", "card.j2")
) as f:
template = Template(f.read())
card = template.render(ticker=ticker, price=price, price_color=price_color)
return card
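
# Hedged usage sketch -- ticker and price are illustrative; "up_color" is
# one of the color names documented above:
#
#   html = price_card("AAPL", "150.00", price_color="up_color")
#   print(html)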
|
[
"os.path.abspath"
] |
[((385, 410), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (400, 410), False, 'import os\n'), ((1034, 1059), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1049, 1059), False, 'import os\n')]
|
import pandas as pd
import numpy as np
class Stats(object):
'''
Produces stats given a schedule
'''
def __init__(self, games, agg_method, date_col, h_col, a_col, outcome_col, seg_vars = []):
self.games = games
self.agg_method = agg_method
self.date_col = date_col
self.h_col = h_col
self.a_col = a_col
self.outcome_col = outcome_col
self.seg_vars = seg_vars
# Inputs: number of past games, team id, date of current game
# Output: list of most recent n games
def get_last_n_games(self, n, team_id, curr_dt):
#Filter to get past games
games = self.games[self.games[self.date_col]<curr_dt]
#Filters to get past home and away games
a_games = games[games[self.a_col]==team_id]
h_games = games[games[self.h_col] == team_id]
        all_games = pd.concat([a_games, h_games])  # DataFrame.append was removed in pandas 2.0
all_games['temp_days'] = [(pd.to_datetime(curr_dt) - pd.to_datetime(x)).days for x in all_games[self.date_col]]
all_games = all_games[all_games['temp_days']<=30]
all_games = all_games.drop('temp_days', axis=1)
all_games = all_games.sort_values(by=self.date_col, ascending=False)
n_games = all_games.head(n)
return n_games
def get_avg(self, games, col, team_id, opp):
h_games = games[games[self.h_col] == team_id]
a_games = games[games[self.a_col] == team_id]
if opp == 0:
a_col = 'A_' + col
h_col = 'H_' + col
else:
a_col = 'H_' + col
h_col = 'A_' + col
h_sum = np.sum(h_games[h_col])
a_sum = np.sum(a_games[a_col])
if len(games) == 0:
return -1
avg = (h_sum + a_sum)*1.0 / (len(games))
return avg
def back_to_back(self, games, curr_dt):
if len(games)==0:
return 0
latest_game = games.sort_values(by=self.date_col, ascending=False).head(1).reset_index(drop=True)
        latest_date = latest_game.loc[0, self.date_col]  # the .ix indexer was removed from pandas
if (pd.to_datetime(curr_dt) - pd.to_datetime(latest_date)).days == 1:
return 1
return 0
def get_lastn_stats(self, n):
stats = pd.DataFrame()
        for index, game in self.games.iterrows():
            # .at[] replaces DataFrame.set_value, which was removed from pandas
            stats.at[index, self.outcome_col] = game[self.outcome_col]
            a_team = game[self.a_col]
            a_games = self.get_last_n_games(n, a_team, game[self.date_col])
            h_team = game[self.h_col]
            h_games = self.get_last_n_games(n, h_team, game[self.date_col])
            poss_cols = self.games.columns.values
            poss_cols = self.search_for_cols('H_', poss_cols)
            for col in poss_cols:
                base_col = col[2:]
                stats.at[index, 'H_' + base_col + '_' + str(n)] = self.get_avg(h_games, base_col, h_team, 0)
                stats.at[index, 'H_O_' + base_col + '_' + str(n)] = self.get_avg(h_games, base_col, h_team, 1)
                stats.at[index, 'A_' + base_col + '_' + str(n)] = self.get_avg(a_games, base_col, a_team, 0)
                stats.at[index, 'A_O_' + base_col + '_' + str(n)] = self.get_avg(a_games, base_col, a_team, 1)
            stats.at[index, 'H_BTB'] = self.back_to_back(h_games, game[self.date_col])
            stats.at[index, 'A_BTB'] = self.back_to_back(a_games, game[self.date_col])
            stats.at[index, 'H_' + str(n) + '_games'] = len(h_games)
            stats.at[index, 'A_' + str(n) + '_games'] = len(a_games)
            for col in self.seg_vars:
                stats.at[index, col] = game[col]
return stats
def search_for_cols(self, pfx, cols):
new_cols = []
pfx_len = len(pfx)
for col in cols:
if col[0:pfx_len] == pfx:
#if col != self.outcome_col:
if col != self.h_col:
if col != self.a_col:
new_cols.append(col)
return new_cols
def get_correl(self, stats):
cor = pd.DataFrame()
for col in stats.columns.values:
if col != self.outcome_col:
                cor.at[col, 'Correlation'] = np.corrcoef(x=stats[col], y=stats[self.outcome_col])[0, 1]
return cor
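
# Hedged usage sketch with a tiny synthetic schedule. Column names are
# illustrative; any column prefixed 'H_'/'A_' is treated as a paired stat.
if __name__ == "__main__":
    games = pd.DataFrame({
        'DATE': ['2020-01-01', '2020-01-03', '2020-01-04'],
        'HOME': [1, 2, 1],
        'AWAY': [2, 1, 3],
        'H_PTS': [100, 95, 110],
        'A_PTS': [90, 99, 101],
        'OUTCOME': [1, 0, 1],
    })
    s = Stats(games, agg_method='mean', date_col='DATE',
              h_col='HOME', a_col='AWAY', outcome_col='OUTCOME')
    print(s.get_lastn_stats(2))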
|
[
"pandas.DataFrame",
"numpy.corrcoef",
"pandas.to_datetime",
"numpy.sum"
] |
[((1583, 1605), 'numpy.sum', 'np.sum', (['h_games[h_col]'], {}), '(h_games[h_col])\n', (1589, 1605), True, 'import numpy as np\n'), ((1622, 1644), 'numpy.sum', 'np.sum', (['a_games[a_col]'], {}), '(a_games[a_col])\n', (1628, 1644), True, 'import numpy as np\n'), ((2182, 2196), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2194, 2196), True, 'import pandas as pd\n'), ((4044, 4058), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (4056, 4058), True, 'import pandas as pd\n'), ((915, 938), 'pandas.to_datetime', 'pd.to_datetime', (['curr_dt'], {}), '(curr_dt)\n', (929, 938), True, 'import pandas as pd\n'), ((941, 958), 'pandas.to_datetime', 'pd.to_datetime', (['x'], {}), '(x)\n', (955, 958), True, 'import pandas as pd\n'), ((2027, 2050), 'pandas.to_datetime', 'pd.to_datetime', (['curr_dt'], {}), '(curr_dt)\n', (2041, 2050), True, 'import pandas as pd\n'), ((2053, 2080), 'pandas.to_datetime', 'pd.to_datetime', (['latest_date'], {}), '(latest_date)\n', (2067, 2080), True, 'import pandas as pd\n'), ((4190, 4242), 'numpy.corrcoef', 'np.corrcoef', ([], {'x': 'stats[col]', 'y': 'stats[self.outcome_col]'}), '(x=stats[col], y=stats[self.outcome_col])\n', (4201, 4242), True, 'import numpy as np\n')]
|
import math
from pydub import AudioSegment, silence
from pydub.utils import mediainfo
from dearpygui.core import *
import os
import csv
import re
import shutil
from google.cloud import storage
from google.cloud import speech_v1p1beta1 as speech
import config_helper
import time
import silence_cut
def to_millis(timestamp):
timestamp = str(timestamp)
hours, minutes, seconds = (["0", "0"] + timestamp.split(":"))[-3:]
hours = int(hours)
minutes = int(minutes)
seconds = float(seconds)
    milliseconds = int(3600000 * hours + 60000 * minutes + 1000 * seconds)
    return milliseconds
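
# Hedged examples of the parsing above; the ["0", "0"] padding lets "SS",
# "MM:SS" and "HH:MM:SS" style timestamps share a single code path:
#
#   to_millis("1.5")       -> 1500
#   to_millis("02:03")     -> 123000
#   to_millis("1:02:03.5") -> 3723500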
class Dataset_builder:
def __init__(self):
self.project_dir = None
self.speaker_text_path = None
self.wav_file_path = None
self.index_start = None
self.cut_length = None
self.split_method = None
self.contains_punc = None
self.google_cloud_credentials_path = None
self.transcription = None
def set_values(self, dataset_dir, speaker_text_path, wav_file_path, index_start, cut_length, split_method,
contains_punc, google_cloud_credentials_path, transcription=True):
self.project_dir = dataset_dir
self.speaker_text_path = speaker_text_path
self.wav_file_path = wav_file_path
self.index_start = index_start
if cut_length:
self.cut_length = float(cut_length)
self.split_method = split_method
self.contains_punc = contains_punc
self.google_cloud_credentials_path = google_cloud_credentials_path
self.transcription = transcription
def build_dataset(self):
print("running")
output_wavs_path = os.path.join(self.project_dir, "wavs")
if not os.path.exists(self.project_dir):
os.makedirs(self.project_dir)
if not os.path.exists(output_wavs_path):
os.mkdir(output_wavs_path)
if self.split_method == 0:
set_value("label_build_status", "Detecting silences. This may take several minutes...")
audio_name = self.wav_file_path
w = AudioSegment.from_wav(audio_name)
# s_len = 1000
#
# silence_cuts = silence.split_on_silence(w, min_silence_len=s_len, silence_thresh=-45, keep_silence=True)
#
# cuts = []
# final_cuts = []
#
# def split_wav(wav, l):
# if (wav.duration_seconds * 1000) < (self.cut_length * 1000):
# output = []
# output.append(wav)
# return output
#
# too_long = False
# while True:
# l -= 50
# if l == 0:
# print("Error, could not find small enough silence period for split, giving up")
# output = []
# output.append(wav)
# return output
#
# start = time.time_ns()
# splits = silence.split_on_silence(wav, min_silence_len=l, silence_thresh=-45, keep_silence=True)
# print("Splitting:", round((time.time_ns() - start) / 1000))
#
# start = time.time_ns()
# silence.detect_silence(wav, min_silence_len=l, silence_thresh=-45)
# print("Detecting:", round((time.time_ns() - start) / 1000))
#
# print(f"Trying resplit... (l={l})")
# for s in splits:
# if (s.duration_seconds * 1000) > (self.cut_length * 1000):
# too_long = True
# if too_long == True:
# too_long = False
# else:
# return splits
#
# # Keep splitting until all cuts are under max len
#
# for i, c in enumerate(silence_cuts):
# print(f"Checking phrase {i}/{len(silence_cuts)}...")
# c_splits = split_wav(c, 1000)
# for s in c_splits:
# cuts.append(s)
# # rebuild small cuts into larger, but below split len
# temp_cuts = AudioSegment.empty()
#
# for i, c in enumerate(cuts):
# prev_cuts = temp_cuts
# temp_cuts = temp_cuts + c
#
# if i == (len(cuts) - 1):
# #on final entry
# if (temp_cuts.duration_seconds * 1000) > (self.cut_length * 1000):
# final_cuts.append(prev_cuts)
# final_cuts.append(c)
# else:
# final_cuts.append(temp_cuts)
# else:
# if ((temp_cuts.duration_seconds * 1000) + (cuts[i+1].duration_seconds * 1000)) > (self.cut_length * 1000):
# # combine failed, too long, add what has already been concatenated
# final_cuts.append(temp_cuts)
# temp_cuts = AudioSegment.empty()
segment_size = 25
min_len = int(get_value("input_min_seg_length")) / segment_size
max_len = int(get_value("input_max_seg_length")) / segment_size
final_cuts = silence_cut.speed_slice(w, segment_size=25, min_segments_in_slice=int(min_len),
max_segments_in_slice=int(max_len),
padding_start=int(get_value("input_padding_start")),
padding_end=int(get_value("input_padding_end")))
for i, w in enumerate(final_cuts):
output_wav_file = os.path.join(output_wavs_path, str(i + 1) + ".wav")
w.export(output_wav_file, format="wav")
# Process each cut into google API and add result to csv
output_csv_file = os.path.join(self.project_dir, "output.csv")
print("writing to: " + output_csv_file)
with open(output_csv_file, 'w') as f:
bucket_name = get_value("input_storage_bucket")
newline = ''
for i, c in enumerate(final_cuts):
x = i + 1
if not self.transcription:
f.write("{}wavs/{}.wav|".format(newline, x))
newline = '\n'
continue
print(f"Transcribing entry {x}/{len(final_cuts)}")
self.upload_blob(bucket_name, os.path.join(output_wavs_path, str(x) + ".wav"), "temp_audio.wav",
google_cloud_credentials_path=self.google_cloud_credentials_path)
gcs_uri = "gs://{}/temp_audio.wav".format(bucket_name)
client = speech.SpeechClient.from_service_account_json(filename=self.google_cloud_credentials_path)
audio = speech.RecognitionAudio(uri=gcs_uri)
info = mediainfo(os.path.join(output_wavs_path, str(x) + ".wav"))
sample_rate = info['sample_rate']
if get_value("input_use_videomodel") == 1:
print("Using enchanced google model...")
config = speech.RecognitionConfig(
encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16,
sample_rate_hertz=int(sample_rate),
language_code=config_helper.cfg_get("transcription", "language_code"),
enable_automatic_punctuation=True,
enable_word_time_offsets=False,
enable_speaker_diarization=False,
# enhanced model for better performance?
use_enhanced=True,
model="video", # "phone_call or video"
)
else:
config = speech.RecognitionConfig(
encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16,
sample_rate_hertz=int(sample_rate),
language_code=config_helper.cfg_get("transcription", "language_code"),
enable_automatic_punctuation=True,
enable_word_time_offsets=False,
enable_speaker_diarization=False,
)
operation = client.long_running_recognize(config=config, audio=audio)
response = operation.result(timeout=28800)
for result in response.results:
text = result.alternatives[0].transcript
# replace some symbols and google API word choice
text = text.replace("%", " percent")
text = text.replace("cuz", "cause")
text = text.replace("-", " ")
text = text.replace("&", "and")
print(text)
set_value("label_build_status", text)
f.write("{}wavs/{}.wav|{}".format(newline, x, text))
newline = '\n'
print('\a') # system beep
set_value("label_build_status", "Done!")
print("Done running builder!")
else:
# Aeneas mode
if not get_value("label_speaker_text_path") or not get_value("label_wav_file_path"):
print("Error, please choose text and/or audio files.")
return
if not os.path.exists("aeneas_out"):
os.mkdir("aeneas_out")
else:
shutil.rmtree("aeneas_out")
os.mkdir("aeneas_out")
if not os.path.exists("aeneas_prepped"):
os.mkdir("aeneas_prepped")
else:
shutil.rmtree("aeneas_prepped")
os.mkdir("aeneas_prepped")
audio_name = self.wav_file_path
with open(self.speaker_text_path, 'r', encoding="utf8") as f:
text = f.read()
text = text.replace(';', '.')
text = text.replace(':', '.')
text = text.replace('-', ' ')
text = text.replace('”', '')
text = text.replace('“', '')
text = text.replace('"', '.')
text = text.replace('—', ' ')
text = text.replace('’', '\'')
text = text.replace(' –', '.')
text = text.strip('\n')
if self.contains_punc:
# remove any duplicate whitespace between words
text = " ".join(text.split())
phrase_splits = re.split(r'(?<=[\.\!\?])\s*',
text) # split on white space between sentences
phrase_splits = list(filter(None, phrase_splits)) # remove empty splits
else:
# no punctuation from speech to text, so we must divid text by word count
phrase_splits = []
temp_line = []
text_split = text.split()
word_count_limit = 16
while len(text_split) > 0:
while len(temp_line) < word_count_limit and len(text_split) > 0:
temp_line.append(text_split.pop(0))
phrase_splits.append(" ".join(temp_line))
temp_line = []
with open('aeneas_prepped/split_text', 'w') as f:
newline = ''
for s in phrase_splits:
if s:
stripped = s.strip() # remove whitespace
f.write(newline + stripped)
newline = '\n'
# os.system('python -m aeneas.tools.execute_task ' + audio_name + ' aeneas_prepped/split_text "task_adjust_boundary_percent_value=50|task_adjust_boundary_algorithm=percent|task_language=en|is_text_type=plain|os_task_file_format=csv" ' + 'aeneas_out/' + audio_name_no_ext + '.csv')
os.system(
'python -m aeneas.tools.execute_task ' + audio_name + ' aeneas_prepped/split_text "task_adjust_boundary_percent_value=50|task_adjust_boundary_algorithm=percent|task_language=en|is_text_type=plain|os_task_file_format=csv" ' + 'aeneas_out/' + os.path.basename(
self.project_dir) + '.csv')
output_exists = False
if os.path.exists("{}/output.csv".format(os.path.basename(self.project_dir))):
# if file exists then prepare for append
output_exists = True
new_csv_file = open("{}/output.csv".format(os.path.basename(self.project_dir)), 'a')
if output_exists:
new_csv_file.write("\n")
with open('aeneas_out/' + os.path.basename(self.project_dir) + '.csv', 'r') as csv_file:
index_count = int(self.index_start)
csv_reader = csv.reader(csv_file, delimiter=',')
csv_reader = list(csv_reader) # convert to list
row_count = len(csv_reader)
newline = ""
for row in csv_reader:
beginning_cut = float(row[1])
end_cut = float(row[2])
text_out = row[3]
text_out = text_out.strip()
print("{} {} {} ".format(beginning_cut, end_cut, text_out))
c_length = end_cut - beginning_cut
# if cut is longer than cut length then split it even more
cut_length = float(self.cut_length)
if c_length > cut_length:
more_cuts = open("aeneas_prepped/temp.csv", 'w')
# save the current cut wav file to run on aeneas again
w = AudioSegment.from_wav(audio_name)
wav_cut = w[(beginning_cut * 1000):(end_cut * 1000)]
wav_cut.export("aeneas_prepped/tempcut.wav", format="wav")
split_list = []
num_cuts = math.ceil(c_length / cut_length)
text_list = text_out.split()
text_list_len = len(text_list)
split_len = math.ceil(text_list_len / num_cuts)
print("too long, making extra {} cuts. with length {}".format(num_cuts, split_len))
for i in range(1, num_cuts + 1):
words = []
for j in range(0, split_len):
if not text_list:
break
words.append(text_list.pop(0))
split_list.append(" ".join(words))
print(split_list)
print()
newline_splits = ''
for phrase in split_list:
more_cuts.write(newline_splits + phrase)
newline_splits = '\n'
more_cuts.close()
os.system(
'python -m aeneas.tools.execute_task ' + "aeneas_prepped/tempcut.wav" + ' aeneas_prepped/temp.csv "task_adjust_boundary_percent_value=50|task_adjust_boundary_algorithm=percent|task_language=en|is_text_type=plain|os_task_file_format=csv" ' + 'aeneas_out/temp_out.csv')
csv_file_temp = open('aeneas_out/temp_out.csv', 'r')
csv_reader_temp = csv.reader(csv_file_temp, delimiter=',')
csv_reader_temp = list(csv_reader_temp) # convert to list
row_count = len(csv_reader_temp)
w = AudioSegment.from_wav("aeneas_prepped/tempcut.wav")
for row in csv_reader_temp:
beginning_cut = float(row[1])
end_cut = float(row[2])
text_out = row[3]
text_out = text_out.strip()
wav_cut = w[(beginning_cut * 1000):(end_cut * 1000)]
new_wav_filename = "wavs/" + str(index_count) + ".wav"
new_csv_file.write("{}{}|{}".format(newline, new_wav_filename, text_out))
wav_cut.export("{}/{}".format(os.path.basename(self.project_dir), new_wav_filename),
format="wav")
index_count += 1
newline = '\n'
csv_file_temp.close()
else:
w = AudioSegment.from_wav(audio_name)
wav_cut = w[(beginning_cut * 1000):(end_cut * 1000)]
new_wav_filename = "wavs/" + str(index_count) + ".wav"
new_csv_file.write("{}{}|{}".format(newline, new_wav_filename, text_out))
wav_cut.export("{}/{}".format(os.path.basename(self.project_dir), new_wav_filename),
format="wav")
index_count += 1
newline = '\n'
new_csv_file.close()
set_value("label_build_status", "Building dataset done!")
# Remove temporary directories
shutil.rmtree("aeneas_prepped")
shutil.rmtree("aeneas_out")
print('\a') # system beep
print("Done with Aeneas!")
def upload_blob(self, bucket_name, source_file_name, destination_blob_name, google_cloud_credentials_path=None):
if not google_cloud_credentials_path:
google_cloud_credentials_path = self.google_cloud_credentials_path
storage_client = storage.Client.from_service_account_json(json_credentials_path=google_cloud_credentials_path)
bucket = storage_client.bucket(bucket_name)
blob = bucket.blob(destination_blob_name)
blob.upload_from_filename(source_file_name)
# print("File {} uploaded to {}.".format(source_file_name, destination_blob_name))
def diarization(self, wavfile, bucket_name, project_dir, google_cloud_credentials_path, project_name=None):
if not os.path.exists(project_dir):
os.makedirs(project_dir)
if project_name:
dianame = "diarization-" + project_name + "-" + str(round(time.time_ns() / 1000))
else:
dianame = "diarization-" + os.path.basename(wavfile) + "-" + str(round(time.time_ns() / 1000))
output_dir = os.path.join(project_dir, dianame)
os.mkdir(output_dir)
print("Uploading {} to google cloud storage bucket".format(wavfile))
set_value("label_diarization_run_info", "Uploading file to cloud storage bucket...")
self.upload_blob(bucket_name, wavfile, "temp_audio.wav", google_cloud_credentials_path)
gcs_uri = "gs://{}/temp_audio.wav".format(bucket_name)
set_value("label_diarization_run_info", "Finished uploading.")
client = speech.SpeechClient.from_service_account_json(filename=google_cloud_credentials_path)
audio = speech.RecognitionAudio(uri=gcs_uri)
info = mediainfo(wavfile)
sample_rate = info['sample_rate']
print("Transcribing {} with audio rate {}".format(wavfile, sample_rate))
config = speech.RecognitionConfig(
encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16,
sample_rate_hertz=int(sample_rate),
language_code=config_helper.cfg_get("transcription", "language_code"),
enable_automatic_punctuation=True,
enable_word_time_offsets=True,
enable_speaker_diarization=True,
diarization_speaker_count=int(get_value("input_diarization_num")),
)
operation = client.long_running_recognize(config=config, audio=audio)
print("Waiting for operation to complete, this may take several minutes...")
set_value("label_diarization_run_info", "Waiting for operation to complete, this may take several minutes...")
response = operation.result(timeout=28800)
result = response.results[-1]
words = result.alternatives[0].words
active_speaker = 1
transcript = []
current_cut = 0
previous_cut = 0
speaker_wavs = []
for x in range(int(get_value("input_diarization_num"))):
speaker_wavs.append(AudioSegment.empty())
transcript.append("")
w = AudioSegment.from_wav(wavfile)
for word in words:
if word.speaker_tag == active_speaker:
end_time = word.end_time
current_cut = end_time.total_seconds() * 1e3
# print(current_cut)
transcript[active_speaker - 1] += word.word + ' '
else:
# speaker has changed
transcript[active_speaker - 1] += word.word + ' '
w_cut = w[(previous_cut):current_cut]
previous_cut = current_cut
speaker_wavs[active_speaker - 1] = speaker_wavs[active_speaker - 1] + w_cut
active_speaker = word.speaker_tag
# finish last wav cut
w_cut = w[previous_cut:current_cut]
speaker_wavs[active_speaker - 1] = speaker_wavs[active_speaker - 1] + w_cut
for i, wave in enumerate(speaker_wavs):
speaker_output = os.path.join(output_dir, "speaker_{}.wav".format(i + 1))
speaker_wavs[i].export(speaker_output, format="wav")
for i, text in enumerate(transcript):
speaker_output = os.path.join(output_dir, "speaker_{}.txt".format(i + 1))
f = open(speaker_output, 'w')
f.write(transcript[i])
f.close()
set_value("label_diarization_run_info", "Done!")
print("Done with diarization!")
print('\a') # system beep
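
# Hedged usage sketch: paths, bucket names and credentials below are
# placeholders, and build_dataset() also reads several dearpygui input
# fields, so this is meant to run inside the app context.
if __name__ == "__main__":
    builder = Dataset_builder()
    builder.set_values(
        dataset_dir="my_dataset",
        speaker_text_path="speaker.txt",
        wav_file_path="speaker.wav",
        index_start=1,
        cut_length="10.0",
        split_method=0,
        contains_punc=True,
        google_cloud_credentials_path="credentials.json",
    )
    builder.build_dataset()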
|
[
"os.mkdir",
"re.split",
"os.makedirs",
"google.cloud.storage.Client.from_service_account_json",
"google.cloud.speech_v1p1beta1.SpeechClient.from_service_account_json",
"csv.reader",
"os.path.basename",
"math.ceil",
"os.path.exists",
"os.system",
"google.cloud.speech_v1p1beta1.RecognitionAudio",
"pydub.AudioSegment.from_wav",
"pydub.AudioSegment.empty",
"pydub.utils.mediainfo",
"config_helper.cfg_get",
"time.time_ns",
"shutil.rmtree",
"os.path.join"
] |
[((1751, 1789), 'os.path.join', 'os.path.join', (['self.project_dir', '"""wavs"""'], {}), "(self.project_dir, 'wavs')\n", (1763, 1789), False, 'import os\n'), ((19002, 19100), 'google.cloud.storage.Client.from_service_account_json', 'storage.Client.from_service_account_json', ([], {'json_credentials_path': 'google_cloud_credentials_path'}), '(json_credentials_path=\n google_cloud_credentials_path)\n', (19042, 19100), False, 'from google.cloud import storage\n'), ((19809, 19843), 'os.path.join', 'os.path.join', (['project_dir', 'dianame'], {}), '(project_dir, dianame)\n', (19821, 19843), False, 'import os\n'), ((19853, 19873), 'os.mkdir', 'os.mkdir', (['output_dir'], {}), '(output_dir)\n', (19861, 19873), False, 'import os\n'), ((20301, 20391), 'google.cloud.speech_v1p1beta1.SpeechClient.from_service_account_json', 'speech.SpeechClient.from_service_account_json', ([], {'filename': 'google_cloud_credentials_path'}), '(filename=\n google_cloud_credentials_path)\n', (20346, 20391), True, 'from google.cloud import speech_v1p1beta1 as speech\n'), ((20404, 20440), 'google.cloud.speech_v1p1beta1.RecognitionAudio', 'speech.RecognitionAudio', ([], {'uri': 'gcs_uri'}), '(uri=gcs_uri)\n', (20427, 20440), True, 'from google.cloud import speech_v1p1beta1 as speech\n'), ((20457, 20475), 'pydub.utils.mediainfo', 'mediainfo', (['wavfile'], {}), '(wavfile)\n', (20466, 20475), False, 'from pydub.utils import mediainfo\n'), ((21812, 21842), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['wavfile'], {}), '(wavfile)\n', (21833, 21842), False, 'from pydub import AudioSegment, silence\n'), ((1808, 1840), 'os.path.exists', 'os.path.exists', (['self.project_dir'], {}), '(self.project_dir)\n', (1822, 1840), False, 'import os\n'), ((1855, 1884), 'os.makedirs', 'os.makedirs', (['self.project_dir'], {}), '(self.project_dir)\n', (1866, 1884), False, 'import os\n'), ((1903, 1935), 'os.path.exists', 'os.path.exists', (['output_wavs_path'], {}), '(output_wavs_path)\n', (1917, 1935), False, 'import os\n'), ((1950, 1976), 'os.mkdir', 'os.mkdir', (['output_wavs_path'], {}), '(output_wavs_path)\n', (1958, 1976), False, 'import os\n'), ((2180, 2213), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['audio_name'], {}), '(audio_name)\n', (2201, 2213), False, 'from pydub import AudioSegment, silence\n'), ((6204, 6248), 'os.path.join', 'os.path.join', (['self.project_dir', '"""output.csv"""'], {}), "(self.project_dir, 'output.csv')\n", (6216, 6248), False, 'import os\n'), ((19476, 19503), 'os.path.exists', 'os.path.exists', (['project_dir'], {}), '(project_dir)\n', (19490, 19503), False, 'import os\n'), ((19518, 19542), 'os.makedirs', 'os.makedirs', (['project_dir'], {}), '(project_dir)\n', (19529, 19542), False, 'import os\n'), ((10042, 10070), 'os.path.exists', 'os.path.exists', (['"""aeneas_out"""'], {}), "('aeneas_out')\n", (10056, 10070), False, 'import os\n'), ((10089, 10111), 'os.mkdir', 'os.mkdir', (['"""aeneas_out"""'], {}), "('aeneas_out')\n", (10097, 10111), False, 'import os\n'), ((10148, 10175), 'shutil.rmtree', 'shutil.rmtree', (['"""aeneas_out"""'], {}), "('aeneas_out')\n", (10161, 10175), False, 'import shutil\n'), ((10193, 10215), 'os.mkdir', 'os.mkdir', (['"""aeneas_out"""'], {}), "('aeneas_out')\n", (10201, 10215), False, 'import os\n'), ((10238, 10270), 'os.path.exists', 'os.path.exists', (['"""aeneas_prepped"""'], {}), "('aeneas_prepped')\n", (10252, 10270), False, 'import os\n'), ((10289, 10315), 'os.mkdir', 'os.mkdir', (['"""aeneas_prepped"""'], {}), "('aeneas_prepped')\n", (10297, 10315), 
False, 'import os\n'), ((10352, 10383), 'shutil.rmtree', 'shutil.rmtree', (['"""aeneas_prepped"""'], {}), "('aeneas_prepped')\n", (10365, 10383), False, 'import shutil\n'), ((10401, 10427), 'os.mkdir', 'os.mkdir', (['"""aeneas_prepped"""'], {}), "('aeneas_prepped')\n", (10409, 10427), False, 'import os\n'), ((18564, 18595), 'shutil.rmtree', 'shutil.rmtree', (['"""aeneas_prepped"""'], {}), "('aeneas_prepped')\n", (18577, 18595), False, 'import shutil\n'), ((18613, 18640), 'shutil.rmtree', 'shutil.rmtree', (['"""aeneas_out"""'], {}), "('aeneas_out')\n", (18626, 18640), False, 'import shutil\n'), ((20794, 20849), 'config_helper.cfg_get', 'config_helper.cfg_get', (['"""transcription"""', '"""language_code"""'], {}), "('transcription', 'language_code')\n", (20815, 20849), False, 'import config_helper\n'), ((21740, 21760), 'pydub.AudioSegment.empty', 'AudioSegment.empty', ([], {}), '()\n', (21758, 21760), False, 'from pydub import AudioSegment, silence\n'), ((7133, 7228), 'google.cloud.speech_v1p1beta1.SpeechClient.from_service_account_json', 'speech.SpeechClient.from_service_account_json', ([], {'filename': 'self.google_cloud_credentials_path'}), '(filename=self.\n google_cloud_credentials_path)\n', (7178, 7228), True, 'from google.cloud import speech_v1p1beta1 as speech\n'), ((7255, 7291), 'google.cloud.speech_v1p1beta1.RecognitionAudio', 'speech.RecognitionAudio', ([], {'uri': 'gcs_uri'}), '(uri=gcs_uri)\n', (7278, 7291), True, 'from google.cloud import speech_v1p1beta1 as speech\n'), ((11248, 11286), 're.split', 're.split', (['"""(?<=[\\\\.\\\\!\\\\?])\\\\s*"""', 'text'], {}), "('(?<=[\\\\.\\\\!\\\\?])\\\\s*', text)\n", (11256, 11286), False, 'import re\n'), ((13712, 13747), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': '""","""'}), "(csv_file, delimiter=',')\n", (13722, 13747), False, 'import csv\n'), ((13184, 13218), 'os.path.basename', 'os.path.basename', (['self.project_dir'], {}), '(self.project_dir)\n', (13200, 13218), False, 'import os\n'), ((13388, 13422), 'os.path.basename', 'os.path.basename', (['self.project_dir'], {}), '(self.project_dir)\n', (13404, 13422), False, 'import os\n'), ((19719, 19744), 'os.path.basename', 'os.path.basename', (['wavfile'], {}), '(wavfile)\n', (19735, 19744), False, 'import os\n'), ((13014, 13048), 'os.path.basename', 'os.path.basename', (['self.project_dir'], {}), '(self.project_dir)\n', (13030, 13048), False, 'import os\n'), ((14691, 14724), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['audio_name'], {}), '(audio_name)\n', (14712, 14724), False, 'from pydub import AudioSegment, silence\n'), ((14982, 15014), 'math.ceil', 'math.ceil', (['(c_length / cut_length)'], {}), '(c_length / cut_length)\n', (14991, 15014), False, 'import math\n'), ((15174, 15209), 'math.ceil', 'math.ceil', (['(text_list_len / num_cuts)'], {}), '(text_list_len / num_cuts)\n', (15183, 15209), False, 'import math\n'), ((16127, 16417), 'os.system', 'os.system', (['(\'python -m aeneas.tools.execute_task \' + \'aeneas_prepped/tempcut.wav\' +\n \' aeneas_prepped/temp.csv "task_adjust_boundary_percent_value=50|task_adjust_boundary_algorithm=percent|task_language=en|is_text_type=plain|os_task_file_format=csv" \'\n + \'aeneas_out/temp_out.csv\')'], {}), '(\'python -m aeneas.tools.execute_task \' +\n \'aeneas_prepped/tempcut.wav\' +\n \' aeneas_prepped/temp.csv "task_adjust_boundary_percent_value=50|task_adjust_boundary_algorithm=percent|task_language=en|is_text_type=plain|os_task_file_format=csv" \'\n + \'aeneas_out/temp_out.csv\')\n', (16136, 16417), False, 'import 
os\n'), ((16570, 16610), 'csv.reader', 'csv.reader', (['csv_file_temp'], {'delimiter': '""","""'}), "(csv_file_temp, delimiter=',')\n", (16580, 16610), False, 'import csv\n'), ((16796, 16847), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['"""aeneas_prepped/tempcut.wav"""'], {}), "('aeneas_prepped/tempcut.wav')\n", (16817, 16847), False, 'from pydub import AudioSegment, silence\n'), ((17819, 17852), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['audio_name'], {}), '(audio_name)\n', (17840, 17852), False, 'from pydub import AudioSegment, silence\n'), ((19640, 19654), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (19652, 19654), False, 'import time\n'), ((19763, 19777), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (19775, 19777), False, 'import time\n'), ((7823, 7878), 'config_helper.cfg_get', 'config_helper.cfg_get', (['"""transcription"""', '"""language_code"""'], {}), "('transcription', 'language_code')\n", (7844, 7878), False, 'import config_helper\n'), ((8564, 8619), 'config_helper.cfg_get', 'config_helper.cfg_get', (['"""transcription"""', '"""language_code"""'], {}), "('transcription', 'language_code')\n", (8585, 8619), False, 'import config_helper\n'), ((13556, 13590), 'os.path.basename', 'os.path.basename', (['self.project_dir'], {}), '(self.project_dir)\n', (13572, 13590), False, 'import os\n'), ((18181, 18215), 'os.path.basename', 'os.path.basename', (['self.project_dir'], {}), '(self.project_dir)\n', (18197, 18215), False, 'import os\n'), ((17485, 17519), 'os.path.basename', 'os.path.basename', (['self.project_dir'], {}), '(self.project_dir)\n', (17501, 17519), False, 'import os\n')]
|
# Copyright 2016 Ifwe Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestConnections(unittest2.TestCase):
def setUp(self):
self.protocol = 'mysql+mysqldb'
self.db_user = 'testuser'
self.db_password = '<PASSWORD>'
self.hostname = 'opsdb.tagged.com'
self.db_name = 'TagOpsDB'
@unittest2.skip('not currently valid')
def test_create_dbconn_string(self):
from tagopsdb.database import create_dbconn_string
params = dict(hostname=self.hostname, db_name=self.db_name)
dbconn_string = create_dbconn_string(self.db_user, self.db_password,
**params)
expect_str = (
self.protocol + '://' + self.db_user + ':' +
self.db_password + '@' + self.hostname + '/' +
self.db_name
)
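        # e.g. 'mysql+mysqldb://testuser:<PASSWORD>@opsdb.tagged.com/TagOpsDB'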
        self.assertEqual(dbconn_string, expect_str)
|
[
"tagopsdb.database.create_dbconn_string",
"unittest2.skip"
] |
[((855, 892), 'unittest2.skip', 'unittest2.skip', (['"""not currently valid"""'], {}), "('not currently valid')\n", (869, 892), False, 'import unittest2\n'), ((1086, 1148), 'tagopsdb.database.create_dbconn_string', 'create_dbconn_string', (['self.db_user', 'self.db_password'], {}), '(self.db_user, self.db_password, **params)\n', (1106, 1148), False, 'from tagopsdb.database import create_dbconn_string\n')]
|
'''
Based on:
Gravity Turn Maneuver with direct multiple shooting using CVodes
(c) <NAME>
https://mintoc.de/index.php/Gravity_Turn_Maneuver_(Casadi)
https://github.com/zegkljan/kos-stuff/tree/master/non-kos-tools/gturn
----------------------------------------------------------------
'''
import sys
from pathlib import Path
import casadi as cs
import numpy as np
import pandas as pd
from rocket_input import read_rocket_config
# noinspection PyPep8Naming
def compute_gravity_turn(m0, m1, g0, r0, Isp0, Isp1, Fmax, cd, A, H, rho, h_obj,
v_obj, q_obj, N=300, vel_eps=1e-3):
'''
Computes gravity turn profile
:params:
m0: wet (launch) mass (kg or ton)
m1: dry mass (kg or ton)
g0: gravitational acceleration at zero altitude (m * s^-2 or km * s^-2)
r0: "orbit" radius at zero altitude (body radius) (m or km)
Isp0: specific impulse of the engine(s) at zero altitude (s)
Isp1: specific impulse of the engine(s) in vacuum (s)
Fmax: maximum thrust of the engine(s) (N or MN)
cd: drag coefficient
A: reference area of the vehicle (m^2)
H: scale height of the atmosphere (m or km)
rho: density of the atmosphere at zero altitude (kg * m^-3)
h_obj: target altitude (m or km)
        v_obj: target velocity (m * s^-1 or km * s^-1)
        q_obj: target angle to vertical (rad)
        N: number of shooting intervals
vel_eps: initial velocity (must be nonzero, e.g. a very small number)
(m * s^-1 or km * s^-1)
:returns:
a dictionary with results
'''
# Create symbolic variables
x = cs.SX.sym('[m, v, q, h, d]') # Vehicle state
u = cs.SX.sym('u') # Vehicle controls
T = cs.SX.sym('T') # Time horizon (s)
# Introduce symbolic expressions for important composite terms
Fthrust = Fmax * u
Fdrag = 0.5 * A * cd * rho * cs.exp(-x[3] / H) * x[1] ** 2
r = x[3] + r0
g = g0 * (r0 / r) ** 2
vhor = x[1] * cs.sin(x[2])
vver = x[1] * cs.cos(x[2])
Isp = Isp1 + (Isp0 - Isp1) * cs.exp(-x[3] / H)
# Build symbolic expressions for ODE right hand side
mdot = -(Fthrust / (Isp * g0))
vdot = (Fthrust - Fdrag) / x[0] - g * cs.cos(x[2])
hdot = vver
ddot = vhor / r
qdot = g * cs.sin(x[2]) / x[1] - ddot
# Build the DAE function
ode = [mdot, vdot, qdot, hdot, ddot]
quad = u
dae = {'x': x, 'p': cs.vertcat(u, T), 'ode': T * cs.vertcat(*ode), 'quad': T * quad}
I = cs.integrator(
'I', 'cvodes', dae,
{'t0': 0.0, 'tf': 1.0 / N, 'nonlinear_solver_iteration': 'functional'}
)
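    # Integration runs on a normalized time grid [0, 1]; multiplying the ODE
    # by T makes the physical horizon a free variable, so each of the N
    # integrator steps spans T / N seconds.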
# Specify upper and lower bounds as well as initial values for DAE
# parameters, states and controls
p_min = [0.0]
p_max = [600.0]
p_init = [300.0]
u_min = [0.0]
u_max = [1.0]
u_init = [0.5]
x0_min = [m0, vel_eps, 0.0, 0.0, 0.0]
x0_max = [m0, vel_eps, 0.5 * cs.pi, 0.0, 0.0]
x0_init = [m0, vel_eps, 0.05 * cs.pi, 0.0, 0.0]
xf_min = [m1, v_obj, q_obj, h_obj, 0.0]
xf_max = [m0, v_obj, q_obj, h_obj, cs.inf]
xf_init = [m1, v_obj, q_obj, h_obj, 0.0]
x_min = [m1, vel_eps, 0.0, 0.0, 0.0]
x_max = [m0, cs.inf, cs.pi, cs.inf, cs.inf]
x_init = [0.5 * (m0 + m1), 0.5 * v_obj, 0.5 * q_obj, 0.5 * h_obj, 0.0]
# Useful variable block sizes
npars = 1 # Number of parameters
nx = x.size1() # Number of states
nu = u.size1() # Number of controls
ns = nx + nu # Number of variables per shooting interval
# Introduce symbolic variables and disassemble them into blocks
V = cs.MX.sym('X', N * ns + nx + npars)
P = V[0]
X = [V[(npars + i * ns):(npars + i * ns + nx)] for i in range(0, N + 1)]
U = [V[(npars + i * ns + nx):(npars + (i + 1) * ns)] for i in range(0, N)]
# Nonlinear constraints and Lagrange objective
G = []
F = 0.0
# Build DMS structure
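    # The initial guess for each shooting node interpolates linearly between
    # the launch state and the target state (see the `frac` term below).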
x0 = p_init + x0_init
for i in range(0, N):
Y = I(x0=X[i], p=cs.vertcat(U[i], P))
G += [Y['xf'] - X[i + 1]]
F = F + Y['qf']
frac = float(i + 1) / N
x0 = x0 + u_init + [x0_init[i] + frac * (xf_init[i] - x0_init[i])
for i in range(0, nx)]
# Lower and upper bounds for solver
lbg = 0.0
ubg = 0.0
lbx = p_min + x0_min + u_min + (N - 1) * (x_min + u_min) + xf_min
ubx = p_max + x0_max + u_max + (N - 1) * (x_max + u_max) + xf_max
# Solve the problem using IPOPT
nlp = {'x': V, 'f': (m0 - X[-1][0]) / (m0 - m1), 'g': cs.vertcat(*G)}
S = cs.nlpsol(
'S', 'ipopt', nlp, {'ipopt': {'tol': 1e-4, 'print_level': 5, 'max_iter': 500}}
)
r = S(x0=x0, lbx=lbx, ubx=ubx, lbg=lbg, ubg=ubg)
print('RESULT: {}'.format(S.stats()['return_status']))
if S.stats()['return_status'] in {'Invalid_Number_Detected'}:
return None
# Extract state sequences and parameters from result
x = r['x']
f = r['f']
T = float(x[0])
t = np.linspace(0, T, N + 1)
m = np.array(x[npars::ns]).squeeze()
v = np.array(x[npars + 1::ns]).squeeze()
q = np.array(x[npars + 2::ns]).squeeze()
h = np.array(x[npars + 3::ns]).squeeze()
d = np.array(x[npars + 4::ns]).squeeze()
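    # The control u has one value per shooting interval (N values); pad with
    # 0.0 so it lines up with the N + 1 state samples in the output.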
u = np.concatenate((np.array(x[npars + nx::ns]).squeeze(), [0.0]))
return {
'time': t,
'mass': m,
'vel': v,
'alt': h,
'control': u,
'hor_angle': d,
'ver_angle': q
}
def main(config_file):
( rocket_params,
environment_params,
model_params,
io_params
) = read_rocket_config(config_file)
# Vehicle parameters
m0 = (rocket_params.fuel_mass +
rocket_params.dry_mass) # Launch mass (kg or ton)
m1 = rocket_params.dry_mass # Dry mass (kg or ton)
    Isp0 = rocket_params.motor_isp0         # Specific impulse at zero altitude (s)
Isp1 = rocket_params.motor_isp1 # Specific impulse at vacuum (s)
A = rocket_params.rocket_area # Reference area (m^2)
Fmax = rocket_params.max_thrust # Maximum thrust (N or MN)
vel_eps = rocket_params.vel # Initial velocity (m/s or km/s)
# Environmental parameters
g0 = environment_params.gravity # Gravitational acceleration at altitude zero (m/s^2 or km/s^2)
r0 = environment_params.radius # Radius at altitude zero (m or km)
cd = environment_params.drag_coefficient # Drag coefficients
H = environment_params.scale_height # Scale height (m or km)
rho = environment_params.density # Density at altitude zero (x 1000)
# Model and target orbit parameters
N = model_params.N # Number of shooting intervals
h_obj = model_params.h_obj # Target altitude (m or km)
v_obj = model_params.v_obj # Target velocity (m/s or km/s)
q_obj = model_params.q_obj / 180 * cs.pi # Target angle to vertical (rad)
# output file
model_file = model_params.model_file
result = compute_gravity_turn(
m0, m1, g0, r0, Isp0, Isp1, Fmax,
cd, A, H, rho, h_obj,
v_obj, q_obj, N=N, vel_eps=vel_eps
)
result_df = pd.DataFrame(result)
result_df.to_excel(model_file, index=False)
print(result_df.head())
if __name__ == '__main__':
config_file_name = 'None'
if len(sys.argv) == 2:
config_file_name = sys.argv[1]
config_file_name = Path(config_file_name)
if not config_file_name.is_file():
print(f'incorrect config file: {config_file_name}')
exit()
main(config_file_name)
|
[
"pandas.DataFrame",
"casadi.nlpsol",
"casadi.SX.sym",
"casadi.exp",
"casadi.integrator",
"casadi.cos",
"casadi.sin",
"rocket_input.read_rocket_config",
"casadi.vertcat",
"pathlib.Path",
"numpy.array",
"casadi.MX.sym",
"numpy.linspace"
] |
[((1656, 1684), 'casadi.SX.sym', 'cs.SX.sym', (['"""[m, v, q, h, d]"""'], {}), "('[m, v, q, h, d]')\n", (1665, 1684), True, 'import casadi as cs\n'), ((1710, 1724), 'casadi.SX.sym', 'cs.SX.sym', (['"""u"""'], {}), "('u')\n", (1719, 1724), True, 'import casadi as cs\n'), ((1753, 1767), 'casadi.SX.sym', 'cs.SX.sym', (['"""T"""'], {}), "('T')\n", (1762, 1767), True, 'import casadi as cs\n'), ((2507, 2616), 'casadi.integrator', 'cs.integrator', (['"""I"""', '"""cvodes"""', 'dae', "{'t0': 0.0, 'tf': 1.0 / N, 'nonlinear_solver_iteration': 'functional'}"], {}), "('I', 'cvodes', dae, {'t0': 0.0, 'tf': 1.0 / N,\n 'nonlinear_solver_iteration': 'functional'})\n", (2520, 2616), True, 'import casadi as cs\n'), ((3601, 3636), 'casadi.MX.sym', 'cs.MX.sym', (['"""X"""', '(N * ns + nx + npars)'], {}), "('X', N * ns + nx + npars)\n", (3610, 3636), True, 'import casadi as cs\n'), ((4550, 4645), 'casadi.nlpsol', 'cs.nlpsol', (['"""S"""', '"""ipopt"""', 'nlp', "{'ipopt': {'tol': 0.0001, 'print_level': 5, 'max_iter': 500}}"], {}), "('S', 'ipopt', nlp, {'ipopt': {'tol': 0.0001, 'print_level': 5,\n 'max_iter': 500}})\n", (4559, 4645), True, 'import casadi as cs\n'), ((4968, 4992), 'numpy.linspace', 'np.linspace', (['(0)', 'T', '(N + 1)'], {}), '(0, T, N + 1)\n', (4979, 4992), True, 'import numpy as np\n'), ((5572, 5603), 'rocket_input.read_rocket_config', 'read_rocket_config', (['config_file'], {}), '(config_file)\n', (5590, 5603), False, 'from rocket_input import read_rocket_config\n'), ((7219, 7239), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {}), '(result)\n', (7231, 7239), True, 'import pandas as pd\n'), ((7465, 7487), 'pathlib.Path', 'Path', (['config_file_name'], {}), '(config_file_name)\n', (7469, 7487), False, 'from pathlib import Path\n'), ((2005, 2017), 'casadi.sin', 'cs.sin', (['x[2]'], {}), '(x[2])\n', (2011, 2017), True, 'import casadi as cs\n'), ((2036, 2048), 'casadi.cos', 'cs.cos', (['x[2]'], {}), '(x[2])\n', (2042, 2048), True, 'import casadi as cs\n'), ((2434, 2450), 'casadi.vertcat', 'cs.vertcat', (['u', 'T'], {}), '(u, T)\n', (2444, 2450), True, 'import casadi as cs\n'), ((4526, 4540), 'casadi.vertcat', 'cs.vertcat', (['*G'], {}), '(*G)\n', (4536, 4540), True, 'import casadi as cs\n'), ((1912, 1929), 'casadi.exp', 'cs.exp', (['(-x[3] / H)'], {}), '(-x[3] / H)\n', (1918, 1929), True, 'import casadi as cs\n'), ((2082, 2099), 'casadi.exp', 'cs.exp', (['(-x[3] / H)'], {}), '(-x[3] / H)\n', (2088, 2099), True, 'import casadi as cs\n'), ((2235, 2247), 'casadi.cos', 'cs.cos', (['x[2]'], {}), '(x[2])\n', (2241, 2247), True, 'import casadi as cs\n'), ((2463, 2479), 'casadi.vertcat', 'cs.vertcat', (['*ode'], {}), '(*ode)\n', (2473, 2479), True, 'import casadi as cs\n'), ((5001, 5023), 'numpy.array', 'np.array', (['x[npars::ns]'], {}), '(x[npars::ns])\n', (5009, 5023), True, 'import numpy as np\n'), ((5042, 5068), 'numpy.array', 'np.array', (['x[npars + 1::ns]'], {}), '(x[npars + 1::ns])\n', (5050, 5068), True, 'import numpy as np\n'), ((5087, 5113), 'numpy.array', 'np.array', (['x[npars + 2::ns]'], {}), '(x[npars + 2::ns])\n', (5095, 5113), True, 'import numpy as np\n'), ((5132, 5158), 'numpy.array', 'np.array', (['x[npars + 3::ns]'], {}), '(x[npars + 3::ns])\n', (5140, 5158), True, 'import numpy as np\n'), ((5177, 5203), 'numpy.array', 'np.array', (['x[npars + 4::ns]'], {}), '(x[npars + 4::ns])\n', (5185, 5203), True, 'import numpy as np\n'), ((2299, 2311), 'casadi.sin', 'cs.sin', (['x[2]'], {}), '(x[2])\n', (2305, 2311), True, 'import casadi as cs\n'), ((3985, 4004), 'casadi.vertcat', 
'cs.vertcat', (['U[i]', 'P'], {}), '(U[i], P)\n', (3995, 4004), True, 'import casadi as cs\n'), ((5238, 5265), 'numpy.array', 'np.array', (['x[npars + nx::ns]'], {}), '(x[npars + nx::ns])\n', (5246, 5265), True, 'import numpy as np\n')]
|
__import__("pkg_resources").declare_namespace(__name__)
from contextlib import contextmanager
from .minimal_packages import MinimalPackagesWorkaround, MinimalPackagesMixin
from .windows import WindowsWorkaround, is_windows
from .virtualenv import VirtualenvWorkaround
from .egg import Scripts
class AbsoluteExecutablePathMixin(object):
def is_relative_paths_option_set(self):
relative_paths = self.options.get('relative-paths',
self.buildout.get('buildout').get('relative-paths', 'false'))
return relative_paths in [True, 'true']
def set_executable_path(self):
if is_windows and not self.is_relative_paths_option_set():
python_executable = self.buildout.get('buildout').get('executable')
self.options['executable'] = python_executable
class Scripts(Scripts, AbsoluteExecutablePathMixin, MinimalPackagesMixin):
def install(self):
self.set_executable_path()
installed_files = super(Scripts, self).install()
WindowsWorkaround.apply(self, False, installed_files)
MinimalPackagesWorkaround.apply(self, installed_files)
VirtualenvWorkaround.apply(self, installed_files)
return installed_files
update = install
@contextmanager
def patch(parent, name, value):
previous = getattr(parent, name, None)
setattr(parent, name, value)
try:
yield
finally:
setattr(parent, name, previous)
@contextmanager
def patch_get_entry_map_for_gui_scripts():
import pkg_resources
_get_entry_map = pkg_resources.get_entry_map
def get_entry_map(dist, group=None):
return _get_entry_map(dist, "gui_scripts")
with patch(pkg_resources, "get_entry_map", get_entry_map):
yield
@contextmanager
def patch_get_entry_info_for_gui_scripts():
import pkg_resources
def get_entry_info(self, group, name):
return self.get_entry_map("gui_scripts" if group == "console_scripts" else group).get(name)
with patch(pkg_resources.Distribution, "get_entry_info", get_entry_info):
yield
class GuiScripts(Scripts, AbsoluteExecutablePathMixin, MinimalPackagesMixin):
def install(self):
with patch_get_entry_map_for_gui_scripts():
with patch_get_entry_info_for_gui_scripts():
self.set_executable_path()
installed_files = super(GuiScripts, self).install()
WindowsWorkaround.apply(self, True, installed_files)
MinimalPackagesWorkaround.apply(self, installed_files)
return installed_files
update = install
# used as entry point to gui-script-test
def nothing():
pass
def patch_buildout_wheel():
import buildout.wheel
import glob
WheelInstaller = buildout.wheel.WheelInstaller
def wrapper(func):
def wrapper(basename):
return WheelInstaller((glob.glob('{}*'.format(basename)) + [basename])[0])
return wrapper
buildout.wheel.WheelInstaller = wrapper(buildout.wheel.WheelInstaller)
def _get_matching_dist_in_location(dist, location):
"""
    Check if `location` contains only the one intended dist.
Return the dist with metadata in the new location.
"""
# Getting the dist from the environment causes the
# distribution meta data to be read. Cloning isn't
# good enough.
import pkg_resources
env = pkg_resources.Environment([location])
dists = [ d for project_name in env for d in env[project_name] ]
dist_infos = [ (d.project_name, d.version) for d in dists ]
if dist_infos == [(dist.project_name, dist.version)]:
return dists.pop()
if dist_infos == [(dist.project_name.lower(), dist.version)]:
return dists.pop()
def patch_zc_buildout_easy_install():
import zc.buildout.easy_install
zc.buildout.easy_install._get_matching_dist_in_location = _get_matching_dist_in_location
# buildout.wheel on Windows is having problems installing non-lower-case wheels
try:
patch_buildout_wheel()
except ImportError:
pass
patch_zc_buildout_easy_install()
|
[
"pkg_resources.Environment"
] |
[((3403, 3440), 'pkg_resources.Environment', 'pkg_resources.Environment', (['[location]'], {}), '([location])\n', (3428, 3440), False, 'import pkg_resources\n')]
|
# https://github.com/tensorflow/examples/blob/master/community/en/transformer_chatbot.ipynb
import tensorflow as tf
# assert tf.__version__.startswith('2')
tf.random.set_seed(1234)
import tensorflow_datasets as tfds
import os
import re
import numpy as np
import matplotlib.pyplot as plt
import pickle
from functions import *
from hparams import *
with open('tokenizer/tokenizer.pickle', 'rb') as handle:
tokenizer = pickle.load(handle)
with open("tokenizer/START_TOKEN", "r") as f:
START_TOKEN = [int(f.read())]
with open("tokenizer/END_TOKEN", "r") as f:
END_TOKEN = [int(f.read())]
with open("tokenizer/VOCAB_SIZE", "r") as f:
VOCAB_SIZE = int(f.read())
def evaluate(sentence):
sentence = preprocess_sentence(sentence)
sentence = tf.expand_dims(
START_TOKEN + tokenizer.encode(sentence) + END_TOKEN, axis=0)
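    # Decoding is autoregressive: the output sequence starts with the start
    # token and grows by one predicted id per loop iteration below.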
output = tf.expand_dims(START_TOKEN, 0)
for i in range(MAX_LENGTH):
predictions = model(inputs=[sentence, output], training=False)
# select the last word from the seq_len dimension
predictions = predictions[:, -1:, :]
predicted_id = tf.cast(tf.argmax(predictions, axis=-1), tf.int32)
# return the result if the predicted_id is equal to the end token
if tf.equal(predicted_id, END_TOKEN[0]):
break
# concatenated the predicted_id to the output which is given to the decoder
# as its input.
output = tf.concat([output, predicted_id], axis=-1)
return tf.squeeze(output, axis=0)
def predict(sentence):
prediction = evaluate(sentence)
predicted_sentence = tokenizer.decode(
[i for i in prediction if i < tokenizer.vocab_size])
return predicted_sentence
def create_model():
model = transformer(
vocab_size=VOCAB_SIZE,
num_layers=NUM_LAYERS,
units=UNITS,
d_model=D_MODEL,
num_heads=NUM_HEADS,
dropout=DROPOUT)
learning_rate = CustomSchedule(D_MODEL)
optimizer = tf.keras.optimizers.Adam(learning_rate, beta_1=0.9, beta_2=0.98, epsilon=1e-9)
model.compile(optimizer=optimizer, loss=loss_function, metrics=[accuracy])
return model
model = create_model()
checkpoint_path = "model/cp.ckpt"
model.load_weights(checkpoint_path)
while True:
question = input("\n--> ")
    if question == "" or question is None:
continue
output = predict(question)
print(output)
|
[
"tensorflow.random.set_seed",
"tensorflow.argmax",
"tensorflow.concat",
"pickle.load",
"tensorflow.keras.optimizers.Adam",
"tensorflow.equal",
"tensorflow.squeeze",
"tensorflow.expand_dims"
] |
[((157, 181), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['(1234)'], {}), '(1234)\n', (175, 181), True, 'import tensorflow as tf\n'), ((425, 444), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (436, 444), False, 'import pickle\n'), ((861, 891), 'tensorflow.expand_dims', 'tf.expand_dims', (['START_TOKEN', '(0)'], {}), '(START_TOKEN, 0)\n', (875, 891), True, 'import tensorflow as tf\n'), ((1451, 1477), 'tensorflow.squeeze', 'tf.squeeze', (['output'], {'axis': '(0)'}), '(output, axis=0)\n', (1461, 1477), True, 'import tensorflow as tf\n'), ((1918, 1997), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', (['learning_rate'], {'beta_1': '(0.9)', 'beta_2': '(0.98)', 'epsilon': '(1e-09)'}), '(learning_rate, beta_1=0.9, beta_2=0.98, epsilon=1e-09)\n', (1942, 1997), True, 'import tensorflow as tf\n'), ((1234, 1270), 'tensorflow.equal', 'tf.equal', (['predicted_id', 'END_TOKEN[0]'], {}), '(predicted_id, END_TOKEN[0])\n', (1242, 1270), True, 'import tensorflow as tf\n'), ((1398, 1440), 'tensorflow.concat', 'tf.concat', (['[output, predicted_id]'], {'axis': '(-1)'}), '([output, predicted_id], axis=-1)\n', (1407, 1440), True, 'import tensorflow as tf\n'), ((1113, 1144), 'tensorflow.argmax', 'tf.argmax', (['predictions'], {'axis': '(-1)'}), '(predictions, axis=-1)\n', (1122, 1144), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python
# coding: utf-8
# # Visualizing Naive Bayes
#
# In this lab, we will cover an essential part of data analysis that has not been included in the lecture videos. As we stated in the previous module, data visualization gives insight into the expected performance of any model.
#
# In the following exercise, you are going to make a visual inspection of the tweets dataset using the Naïve Bayes features. We will see how we can understand the log-likelihood ratio explained in the videos as a pair of numerical features that can be fed into a machine learning algorithm.
#
# At the end of this lab, we will introduce the concept of __confidence ellipse__ as a tool for representing the Naïve Bayes model visually.
# In[1]:
import numpy as np # Library for linear algebra and math utils
import pandas as pd # Dataframe library
import matplotlib.pyplot as plt # Library for plots
from utils import confidence_ellipse # Function to add confidence ellipses to charts
# ## Calculate the likelihoods for each tweet
#
# For each tweet, we have calculated the likelihood of the tweet to be positive and the likelihood to be negative. We have calculated in different columns the numerator and denominator of the likelihood ratio introduced previously.
#
# $$log \frac{P(tweet|pos)}{P(tweet|neg)} = log(P(tweet|pos)) - log(P(tweet|neg)) $$
# $$positive = log(P(tweet|pos)) = \sum_{i=0}^{n}{log P(W_i|pos)}$$
# $$negative = log(P(tweet|neg)) = \sum_{i=0}^{n}{log P(W_i|neg)}$$
#
# We did not include the code because this is part of this week's assignment. The __'bayes_features.csv'__ file contains the final result of this process.
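#
# As a purely illustrative sketch (not the assignment solution), each feature
# is a sum of per-word log probabilities. The dictionary names `log_p_pos` and
# `log_p_neg` below are hypothetical placeholders and do not exist in this
# lab's code.
def tweet_log_likelihoods(words, log_p_pos, log_p_neg):
    positive = sum(log_p_pos.get(w, 0.0) for w in words)  # log(P(tweet|pos))
    negative = sum(log_p_neg.get(w, 0.0) for w in words)  # log(P(tweet|neg))
    return positive, negative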
#
# The cell below loads the table in a dataframe. Dataframes are data structures that simplify the manipulation of data, allowing filtering, slicing, joining, and summarization.
# In[2]:
data = pd.read_csv('data/bayes_features.csv') # Load the data from the csv file
data.head(5) # Print the first 5 tweets features. Each row represents a tweet
# In[3]:
# Plot the samples using columns 1 and 2 of the matrix
fig, ax = plt.subplots(figsize = (8, 8)) #Create a new figure with a custom size
colors = ['red', 'green'] # Define a color palette
sentiments = ['negative', 'positive']
index = data.index
# Color based on sentiment
for sentiment in data.sentiment.unique():
ix = index[data.sentiment == sentiment]
ax.scatter(data.iloc[ix].positive, data.iloc[ix].negative, c=colors[int(sentiment)], s=0.1, marker='*', label=sentiments[int(sentiment)])
ax.legend(loc='best')
# Custom limits for this chart
plt.xlim(-250,0)
plt.ylim(-250,0)
plt.xlabel("Positive") # x-axis label
plt.ylabel("Negative") # y-axis label
plt.show()
# # Using Confidence Ellipses to interpret Naïve Bayes
#
# In this section, we will use the [confidence ellipse](https://matplotlib.org/3.1.1/gallery/statistics/confidence_ellipse.html#sphx-glr-gallery-statistics-confidence-ellipse-py) to give us an idea of what the Naïve Bayes model sees.
#
# A confidence ellipse is a way to visualize a 2D random variable. It is a better way than plotting the points over a Cartesian plane because, with big datasets, the points can overlap badly and hide the real distribution of the data. Confidence ellipses summarize the information of the dataset with only four parameters:
#
# * Center: It is the numerical mean of the attributes
# * Height and width: Related to the variance of each attribute. The user must specify the desired number of standard deviations used to plot the ellipse.
# * Angle: Related to the covariance among attributes.
#
# The parameter __n_std__ stands for the number of standard deviations bounded by the ellipse. Remember that for normal random distributions:
#
# * About 68% of the area under the curve falls within 1 standard deviation around the mean.
# * About 95% of the area under the curve falls within 2 standard deviations around the mean.
# * About 99.7% of the area under the curve falls within 3 standard deviations around the mean.
#
# <img src=./images/std.jpg width="400" >
#
#
# In the next chart, we will plot the data and its corresponding confidence ellipses using 2 std and 3 std.
# In[ ]:
# Plot the samples using columns 1 and 2 of the matrix
fig, ax = plt.subplots(figsize = (8, 8))
colors = ['red', 'green'] # Define a color palette
sentiments = ['negative', 'positive']
index = data.index
# Color based on sentiment
for sentiment in data.sentiment.unique():
ix = index[data.sentiment == sentiment]
ax.scatter(data.iloc[ix].positive, data.iloc[ix].negative, c=colors[int(sentiment)], s=0.1, marker='*', label=sentiments[int(sentiment)])
# Custom limits for this chart
plt.xlim(-200, 40)
plt.ylim(-200, 40)
plt.xlabel("Positive") # x-axis label
plt.ylabel("Negative") # y-axis label
data_pos = data[data.sentiment == 1] # Filter only the positive samples
data_neg = data[data.sentiment == 0] # Filter only the negative samples
# Print confidence ellipses of 2 std
confidence_ellipse(data_pos.positive, data_pos.negative, ax, n_std=2, edgecolor='black', label=r'$2\sigma$' )
confidence_ellipse(data_neg.positive, data_neg.negative, ax, n_std=2, edgecolor='orange')
# Print confidence ellipses of 3 std
confidence_ellipse(data_pos.positive, data_pos.negative, ax, n_std=3, edgecolor='black', linestyle=':', label=r'$3\sigma$')
confidence_ellipse(data_neg.positive, data_neg.negative, ax, n_std=3, edgecolor='orange', linestyle=':')
ax.legend(loc='lower right')
plt.show()
# In the next cell, we will modify the features of the samples with positive sentiment (1), in a way that the two distributions overlap. In this case, the Naïve Bayes method will produce a lower accuracy than with the original data.
# In[ ]:
data2 = data.copy() # Copy the whole data frame
# The following 2 lines only modify the entries in the data frame where sentiment == 1
data2.loc[data.sentiment == 1, 'negative'] = data2.negative * 1.5 + 50    # Modify the negative attribute
data2.loc[data.sentiment == 1, 'positive'] = data2.positive / 1.5 - 50    # Modify the positive attribute
# Now let us plot the two distributions and the confidence ellipses
# In[ ]:
# Plot the samples using columns 1 and 2 of the matrix
fig, ax = plt.subplots(figsize = (8, 8))
colors = ['red', 'green'] # Define a color palette
sentiments = ['negative', 'positive']
index = data2.index
# Color based on sentiment
for sentiment in data2.sentiment.unique():
ix = index[data2.sentiment == sentiment]
ax.scatter(data2.iloc[ix].positive, data2.iloc[ix].negative, c=colors[int(sentiment)], s=0.1, marker='*', label=sentiments[int(sentiment)])
#ax.scatter(data2.positive, data2.negative, c=[colors[int(k)] for k in data2.sentiment], s = 0.1, marker='*') # Plot a dot for tweet
# Custom limits for this chart
plt.xlim(-200,40)
plt.ylim(-200,40)
plt.xlabel("Positive") # x-axis label
plt.ylabel("Negative") # y-axis label
data_pos = data2[data2.sentiment == 1] # Filter only the positive samples
data_neg = data2[data2.sentiment == 0] # Filter only the negative samples
# Print confidence ellipses of 2 std
confidence_ellipse(data_pos.positive, data_pos.negative, ax, n_std=2, edgecolor='black', label=r'$2\sigma$' )
confidence_ellipse(data_neg.positive, data_neg.negative, ax, n_std=2, edgecolor='orange')
# Print confidence ellipses of 3 std
confidence_ellipse(data_pos.positive, data_pos.negative, ax, n_std=3, edgecolor='black', linestyle=':', label=r'$3\sigma$')
confidence_ellipse(data_neg.positive, data_neg.negative, ax, n_std=3, edgecolor='orange', linestyle=':')
ax.legend(loc='lower right')
plt.show()
# Takeaway: understanding the data allows us to predict whether the method will perform well or not. Alternatively, it allows us to understand why it worked well or badly.
|
[
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.show",
"matplotlib.pyplot.ylim",
"pandas.read_csv",
"utils.confidence_ellipse",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.subplots"
] |
[((1860, 1898), 'pandas.read_csv', 'pd.read_csv', (['"""data/bayes_features.csv"""'], {}), "('data/bayes_features.csv')\n", (1871, 1898), True, 'import pandas as pd\n'), ((2092, 2120), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (2104, 2120), True, 'import matplotlib.pyplot as plt\n'), ((2591, 2608), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-250)', '(0)'], {}), '(-250, 0)\n', (2599, 2608), True, 'import matplotlib.pyplot as plt\n'), ((2608, 2625), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-250)', '(0)'], {}), '(-250, 0)\n', (2616, 2625), True, 'import matplotlib.pyplot as plt\n'), ((2626, 2648), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Positive"""'], {}), "('Positive')\n", (2636, 2648), True, 'import matplotlib.pyplot as plt\n'), ((2664, 2686), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Negative"""'], {}), "('Negative')\n", (2674, 2686), True, 'import matplotlib.pyplot as plt\n'), ((2702, 2712), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2710, 2712), True, 'import matplotlib.pyplot as plt\n'), ((4275, 4303), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (4287, 4303), True, 'import matplotlib.pyplot as plt\n'), ((4702, 4720), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-200)', '(40)'], {}), '(-200, 40)\n', (4710, 4720), True, 'import matplotlib.pyplot as plt\n'), ((4721, 4739), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-200)', '(40)'], {}), '(-200, 40)\n', (4729, 4739), True, 'import matplotlib.pyplot as plt\n'), ((4741, 4763), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Positive"""'], {}), "('Positive')\n", (4751, 4763), True, 'import matplotlib.pyplot as plt\n'), ((4780, 4802), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Negative"""'], {}), "('Negative')\n", (4790, 4802), True, 'import matplotlib.pyplot as plt\n'), ((5004, 5116), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_pos.positive', 'data_pos.negative', 'ax'], {'n_std': '(2)', 'edgecolor': '"""black"""', 'label': '"""$2\\\\sigma$"""'}), "(data_pos.positive, data_pos.negative, ax, n_std=2,\n edgecolor='black', label='$2\\\\sigma$')\n", (5022, 5116), False, 'from utils import confidence_ellipse\n'), ((5114, 5207), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_neg.positive', 'data_neg.negative', 'ax'], {'n_std': '(2)', 'edgecolor': '"""orange"""'}), "(data_neg.positive, data_neg.negative, ax, n_std=2,\n edgecolor='orange')\n", (5132, 5207), False, 'from utils import confidence_ellipse\n'), ((5242, 5369), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_pos.positive', 'data_pos.negative', 'ax'], {'n_std': '(3)', 'edgecolor': '"""black"""', 'linestyle': '""":"""', 'label': '"""$3\\\\sigma$"""'}), "(data_pos.positive, data_pos.negative, ax, n_std=3,\n edgecolor='black', linestyle=':', label='$3\\\\sigma$')\n", (5260, 5369), False, 'from utils import confidence_ellipse\n'), ((5366, 5474), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_neg.positive', 'data_neg.negative', 'ax'], {'n_std': '(3)', 'edgecolor': '"""orange"""', 'linestyle': '""":"""'}), "(data_neg.positive, data_neg.negative, ax, n_std=3,\n edgecolor='orange', linestyle=':')\n", (5384, 5474), False, 'from utils import confidence_ellipse\n'), ((5501, 5511), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5509, 5511), True, 'import matplotlib.pyplot as plt\n'), ((6235, 6263), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (6247, 6263), True, 
'import matplotlib.pyplot as plt\n'), ((6801, 6819), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-200)', '(40)'], {}), '(-200, 40)\n', (6809, 6819), True, 'import matplotlib.pyplot as plt\n'), ((6821, 6839), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-200)', '(40)'], {}), '(-200, 40)\n', (6829, 6839), True, 'import matplotlib.pyplot as plt\n'), ((6840, 6862), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Positive"""'], {}), "('Positive')\n", (6850, 6862), True, 'import matplotlib.pyplot as plt\n'), ((6878, 6900), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Negative"""'], {}), "('Negative')\n", (6888, 6900), True, 'import matplotlib.pyplot as plt\n'), ((7104, 7216), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_pos.positive', 'data_pos.negative', 'ax'], {'n_std': '(2)', 'edgecolor': '"""black"""', 'label': '"""$2\\\\sigma$"""'}), "(data_pos.positive, data_pos.negative, ax, n_std=2,\n edgecolor='black', label='$2\\\\sigma$')\n", (7122, 7216), False, 'from utils import confidence_ellipse\n'), ((7214, 7307), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_neg.positive', 'data_neg.negative', 'ax'], {'n_std': '(2)', 'edgecolor': '"""orange"""'}), "(data_neg.positive, data_neg.negative, ax, n_std=2,\n edgecolor='orange')\n", (7232, 7307), False, 'from utils import confidence_ellipse\n'), ((7342, 7469), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_pos.positive', 'data_pos.negative', 'ax'], {'n_std': '(3)', 'edgecolor': '"""black"""', 'linestyle': '""":"""', 'label': '"""$3\\\\sigma$"""'}), "(data_pos.positive, data_pos.negative, ax, n_std=3,\n edgecolor='black', linestyle=':', label='$3\\\\sigma$')\n", (7360, 7469), False, 'from utils import confidence_ellipse\n'), ((7466, 7574), 'utils.confidence_ellipse', 'confidence_ellipse', (['data_neg.positive', 'data_neg.negative', 'ax'], {'n_std': '(3)', 'edgecolor': '"""orange"""', 'linestyle': '""":"""'}), "(data_neg.positive, data_neg.negative, ax, n_std=3,\n edgecolor='orange', linestyle=':')\n", (7484, 7574), False, 'from utils import confidence_ellipse\n'), ((7601, 7611), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7609, 7611), True, 'import matplotlib.pyplot as plt\n')]
|
import sys
import csv
import json
def main(data_csv, outfile='out.json'):
with open(data_csv, 'r', encoding='utf-8-sig') as datafile:
reader = csv.DictReader(datafile)
output = {
'parks': [dict(row) for row in reader],
}
with open(outfile, 'w') as out:
json.dump(output, out)
if __name__ == '__main__':
infile, outfile = sys.argv[1], sys.argv[2]
print('Writing data from {} to {}'.format(infile, outfile))
    main(infile, outfile)
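# Example invocation (script/file names are illustrative):
#   python parks_to_json.py parks.csv parks.json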
|
[
"json.dump",
"csv.DictReader"
] |
[((157, 181), 'csv.DictReader', 'csv.DictReader', (['datafile'], {}), '(datafile)\n', (171, 181), False, 'import csv\n'), ((315, 337), 'json.dump', 'json.dump', (['output', 'out'], {}), '(output, out)\n', (324, 337), False, 'import json\n')]
|
# Recognise Faces using some classification algorithm - like Logistic, KNN, SVM etc.
# 1. load the training data (numpy arrays of all the persons)
# x- values are stored in the numpy arrays
# y-values we need to assign for each person
# 2. Read a video stream using opencv
# 3. extract faces out of it
# 4. use knn to find the prediction of face (int)
# 5. map the predicted id to name of the user
# 6. Display the predictions on the screen - bounding box and name
import cv2
import numpy as np
import os
from datetime import datetime
import time
########## KNN CODE ############
def distance(v1, v2):
    # Euclidean distance
return np.sqrt(((v1 - v2) ** 2).sum())
def markAttendence(name):
    with open('present.csv', 'r+') as f:
        # Read every line; readline() only returned the first one, and
        # iterating over that string walked its characters, not the rows.
        lines = f.readlines()
        print(lines)
        nameList = []
        for line in lines:
            entry = line.split(',')
            nameList.append(entry[0])
        if name not in nameList:
            now = datetime.now()
            dtString = now.strftime('%H:%M:%S')
            f.writelines(f'\nthe present students are : \n{name},{dtString}')
def maarkattndnce(namees):
    # Relies on the globals total_student_in_class and class_total_present,
    # which are populated in the recognition loop below.
    with open('absent.csv', 'r+') as f:
        absstuds = []
        for nam in total_student_in_class:
            if nam not in class_total_present:
                entry = nam.split(',')
                absstuds.append(entry[0])
        if namees in absstuds:  # was `not in`, which made the write unreachable
            f.writelines(f'\nabsent students are : \n{absstuds}')
def knn(train, test, k=5):
dist = []
for i in range(train.shape[0]):
# Get the vector and label
ix = train[i, :-1]
iy = train[i, -1]
# Compute the distance from test point
d = distance(test, ix)
dist.append([d, iy])
# Sort based on distance and get top k
dk = sorted(dist, key=lambda x: x[0])[:k]
# Retrieve only the labels
labels = np.array(dk)[:, -1]
# Get frequencies of each label
output = np.unique(labels, return_counts=True)
# Find max frequency and corresponding label
index = np.argmax(output[1])
return output[0][index]
################################
# Init Camera
cap = cv2.VideoCapture(0)
# Face Detection
face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt.xml")
skip = 0
dataset_path = "C:/Users/Samarth/Desktop/knn/data/"
face_data = []
number = []
labels = []
class_id = 0 # Labels for the given file
names = {} # Mapping btw id - name
# Data Preparation
for fx in os.listdir(dataset_path):
if fx.endswith('.npy'):
# Create a mapping btw class_id and name
names[class_id] = fx[:-4]
print("Loaded " + fx)
data_item = np.load(dataset_path + fx)
face_data.append(data_item)
# Create Labels for the class
target = class_id * np.ones((data_item.shape[0],))
class_id += 1
labels.append(target)
face_dataset = np.concatenate(face_data, axis=0)
face_labels = np.concatenate(labels, axis=0).reshape((-1, 1))
print(face_dataset.shape)
print(face_labels.shape)
trainset = np.concatenate((face_dataset, face_labels), axis=1)
print(trainset.shape)
# Testing
attn = []
appn = []
while True:
ret, frame = cap.read()
if ret == False:
continue
faces = face_cascade.detectMultiScale(frame, 1.3, 5)
if (len(faces) == 0):
continue
for face in faces:
x, y, w, h = face
# Get the face ROI
offset = 10
face_section = frame[y - offset:y + h + offset, x - offset:x + w + offset]
face_section = cv2.resize(face_section, (100, 100))
# Predicted Label (out)
out = knn(trainset, face_section.flatten())
# Display on the screen the name and rectangle around it
pred_name = names[int(out)]
cv2.putText(frame, pred_name, (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2, cv2.LINE_AA)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 255), 2)
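        # Record each recognized name only once per session so attendance
        # entries are not duplicated.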
if pred_name not in attn:
attn.append(pred_name)
else:
continue
markAttendence(pred_name)
cv2.imshow("Faces", frame)
path = "C:/Users/Samarth/Desktop/knn/data/"
images = [] # LIST CONTAINING ALL THE IMAGES
className = [] # LIST CONTAINING ALL THE CORRESPONDING CLASS Names
myList = os.listdir(path)
for cl in myList:
curImg = cv2.imread(f'{path}/{cl}')
images.append(curImg)
className.append(os.path.splitext(cl)[0])
    total_student_in_class = list(className) ### the total students in this class
print(total_student_in_class)
class_total_present = list(attn)
#print(attn)
res_list = []
for i in total_student_in_class:
if i not in class_total_present:
res_list.append(i)
print(res_list)
maarkattndnce(i)
# ai = tuple(total_student_in_class) #name of all the students as a tuple
#print(ai)
key = cv2.waitKey(1) & 0xFF
if key == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
|
[
"cv2.resize",
"numpy.load",
"cv2.putText",
"numpy.argmax",
"cv2.waitKey",
"numpy.unique",
"cv2.imshow",
"numpy.ones",
"cv2.VideoCapture",
"cv2.rectangle",
"cv2.imread",
"numpy.array",
"os.path.splitext",
"cv2.CascadeClassifier",
"cv2.destroyAllWindows",
"datetime.datetime.now",
"os.listdir",
"numpy.concatenate"
] |
[((2298, 2317), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (2314, 2317), False, 'import cv2\n'), ((2354, 2410), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_frontalface_alt.xml"""'], {}), "('haarcascade_frontalface_alt.xml')\n", (2375, 2410), False, 'import cv2\n'), ((2635, 2659), 'os.listdir', 'os.listdir', (['dataset_path'], {}), '(dataset_path)\n', (2645, 2659), False, 'import os\n'), ((3066, 3099), 'numpy.concatenate', 'np.concatenate', (['face_data'], {'axis': '(0)'}), '(face_data, axis=0)\n', (3080, 3099), True, 'import numpy as np\n'), ((3232, 3283), 'numpy.concatenate', 'np.concatenate', (['(face_dataset, face_labels)'], {'axis': '(1)'}), '((face_dataset, face_labels), axis=1)\n', (3246, 3283), True, 'import numpy as np\n'), ((5246, 5269), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5267, 5269), False, 'import cv2\n'), ((2083, 2120), 'numpy.unique', 'np.unique', (['labels'], {'return_counts': '(True)'}), '(labels, return_counts=True)\n', (2092, 2120), True, 'import numpy as np\n'), ((2184, 2204), 'numpy.argmax', 'np.argmax', (['output[1]'], {}), '(output[1])\n', (2193, 2204), True, 'import numpy as np\n'), ((4303, 4329), 'cv2.imshow', 'cv2.imshow', (['"""Faces"""', 'frame'], {}), "('Faces', frame)\n", (4313, 4329), False, 'import cv2\n'), ((4519, 4535), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (4529, 4535), False, 'import os\n'), ((2010, 2022), 'numpy.array', 'np.array', (['dk'], {}), '(dk)\n', (2018, 2022), True, 'import numpy as np\n'), ((2827, 2853), 'numpy.load', 'np.load', (['(dataset_path + fx)'], {}), '(dataset_path + fx)\n', (2834, 2853), True, 'import numpy as np\n'), ((3115, 3145), 'numpy.concatenate', 'np.concatenate', (['labels'], {'axis': '(0)'}), '(labels, axis=0)\n', (3129, 3145), True, 'import numpy as np\n'), ((3741, 3777), 'cv2.resize', 'cv2.resize', (['face_section', '(100, 100)'], {}), '(face_section, (100, 100))\n', (3751, 3777), False, 'import cv2\n'), ((3980, 4085), 'cv2.putText', 'cv2.putText', (['frame', 'pred_name', '(x, y - 10)', 'cv2.FONT_HERSHEY_SIMPLEX', '(1)', '(255, 0, 0)', '(2)', 'cv2.LINE_AA'], {}), '(frame, pred_name, (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 1, (\n 255, 0, 0), 2, cv2.LINE_AA)\n', (3991, 4085), False, 'import cv2\n'), ((4090, 4152), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 255, 255)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 255, 255), 2)\n', (4103, 4152), False, 'import cv2\n'), ((4579, 4605), 'cv2.imread', 'cv2.imread', (['f"""{path}/{cl}"""'], {}), "(f'{path}/{cl}')\n", (4589, 4605), False, 'import cv2\n'), ((5166, 5180), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (5177, 5180), False, 'import cv2\n'), ((1069, 1083), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1081, 1083), False, 'from datetime import datetime\n'), ((2961, 2991), 'numpy.ones', 'np.ones', (['(data_item.shape[0],)'], {}), '((data_item.shape[0],))\n', (2968, 2991), True, 'import numpy as np\n'), ((4663, 4683), 'os.path.splitext', 'os.path.splitext', (['cl'], {}), '(cl)\n', (4679, 4683), False, 'import os\n')]
|
import pint
from . import resources
try:
import importlib.resources as pkg_resources
except ImportError:
# Try backported to PY<37 `importlib_resources`.
import importlib_resources as pkg_resources
# Load the file stream for the units file
unit_file = pkg_resources.open_text(resources, "unit_def.txt")
# Setup pint for the package
ureg = pint.UnitRegistry()
Q_ = ureg.Quantity
ureg.load_definitions(unit_file)
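# Illustrative usage of the configured registry (built-in pint units shown
# here; any custom units come from unit_def.txt):
#   q = Q_(5.0, 'meter')
#   q.to('kilometer')  # -> <Quantity(0.005, 'kilometer')>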
|
[
"importlib_resources.open_text",
"pint.UnitRegistry"
] |
[((267, 317), 'importlib_resources.open_text', 'pkg_resources.open_text', (['resources', '"""unit_def.txt"""'], {}), "(resources, 'unit_def.txt')\n", (290, 317), True, 'import importlib_resources as pkg_resources\n'), ((355, 374), 'pint.UnitRegistry', 'pint.UnitRegistry', ([], {}), '()\n', (372, 374), False, 'import pint\n')]
|
from django.test import TestCase
from corehq.apps.accounting.models import SoftwarePlanEdition
from corehq.apps.accounting.tests.utils import DomainSubscriptionMixin
from corehq.apps.accounting.utils import clear_plan_version_cache
from corehq.apps.domain.models import Domain
from corehq.messaging.smsbackends.test.models import SQLTestSMSBackend
from corehq.apps.sms.api import incoming, send_sms_to_verified_number
from corehq.apps.sms.messages import MSG_OPTED_IN, MSG_OPTED_OUT, get_message
from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend
from corehq.apps.sms.tests.util import (
delete_domain_phone_numbers,
setup_default_sms_test_backend,
)
from corehq.form_processor.tests.utils import FormProcessorTestUtils
class OptTestCase(DomainSubscriptionMixin, TestCase):
@classmethod
def setUpClass(cls):
super(OptTestCase, cls).setUpClass()
cls.domain = 'opt-test'
cls.domain_obj = Domain(name=cls.domain)
cls.domain_obj.sms_case_registration_enabled = True
cls.domain_obj.save()
cls.setup_subscription(cls.domain, SoftwarePlanEdition.ADVANCED)
cls.backend, cls.backend_mapping = setup_default_sms_test_backend()
cls.custom_backend = SQLTestSMSBackend.objects.create(
name='MOBILE_BACKEND_CUSTOM_TEST',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
opt_in_keywords=['RESTART'],
opt_out_keywords=['RESTOP']
)
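        # Numbers starting with '1' (e.g. 19912345678) route to the custom
        # backend via the prefix mapping below; other numbers fall through to
        # the default test backend.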
cls.custom_backend_mapping = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='1',
backend=cls.custom_backend,
)
@classmethod
def tearDownClass(cls):
cls.backend_mapping.delete()
cls.backend.delete()
cls.custom_backend_mapping.delete()
cls.custom_backend.delete()
FormProcessorTestUtils.delete_all_cases(cls.domain)
cls.teardown_subscriptions()
cls.domain_obj.delete()
clear_plan_version_cache()
super(OptTestCase, cls).tearDownClass()
def tearDown(self):
PhoneBlacklist.objects.all().delete()
SMS.objects.filter(domain=self.domain).delete()
delete_domain_phone_numbers(self.domain)
def get_last_sms(self, phone_number):
return SMS.objects.filter(domain=self.domain, phone_number=phone_number).order_by('-date')[0]
def test_opt_out_and_opt_in(self):
self.assertEqual(PhoneBlacklist.objects.count(), 0)
incoming('99912345678', 'join opt-test', 'GVI')
v = PhoneNumber.get_two_way_number('99912345678')
self.assertIsNotNone(v)
incoming('99912345678', 'stop', 'GVI')
self.assertEqual(PhoneBlacklist.objects.count(), 1)
phone_number = PhoneBlacklist.objects.get(phone_number='99912345678')
self.assertFalse(phone_number.send_sms)
self.assertEqual(phone_number.domain, self.domain)
self.assertIsNotNone(phone_number.last_sms_opt_out_timestamp)
self.assertIsNone(phone_number.last_sms_opt_in_timestamp)
sms = self.get_last_sms('+99912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, get_message(MSG_OPTED_OUT, context=('START',)))
incoming('99912345678', 'start', 'GVI')
self.assertEqual(PhoneBlacklist.objects.count(), 1)
phone_number = PhoneBlacklist.objects.get(phone_number='99912345678')
self.assertTrue(phone_number.send_sms)
self.assertEqual(phone_number.domain, self.domain)
self.assertIsNotNone(phone_number.last_sms_opt_out_timestamp)
self.assertIsNotNone(phone_number.last_sms_opt_in_timestamp)
sms = self.get_last_sms('+99912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, get_message(MSG_OPTED_IN, context=('STOP',)))
def test_sending_to_opted_out_number(self):
self.assertEqual(PhoneBlacklist.objects.count(), 0)
incoming('99912345678', 'join opt-test', 'GVI')
v = PhoneNumber.get_two_way_number('99912345678')
self.assertIsNotNone(v)
send_sms_to_verified_number(v, 'hello')
sms = self.get_last_sms('+99912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, 'hello')
incoming('99912345678', 'stop', 'GVI')
self.assertEqual(PhoneBlacklist.objects.count(), 1)
phone_number = PhoneBlacklist.objects.get(phone_number='99912345678')
self.assertFalse(phone_number.send_sms)
send_sms_to_verified_number(v, 'hello')
sms = self.get_last_sms('+99912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, 'hello')
self.assertTrue(sms.error)
self.assertEqual(sms.system_error_message, SMS.ERROR_PHONE_NUMBER_OPTED_OUT)
incoming('99912345678', 'start', 'GVI')
self.assertEqual(PhoneBlacklist.objects.count(), 1)
phone_number = PhoneBlacklist.objects.get(phone_number='99912345678')
self.assertTrue(phone_number.send_sms)
send_sms_to_verified_number(v, 'hello')
sms = self.get_last_sms('+99912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, 'hello')
self.assertFalse(sms.error)
self.assertIsNone(sms.system_error_message)
def test_custom_opt_keywords(self):
self.assertEqual(PhoneBlacklist.objects.count(), 0)
incoming('19912345678', 'join opt-test', 'TEST')
v = PhoneNumber.get_two_way_number('19912345678')
self.assertIsNotNone(v)
send_sms_to_verified_number(v, 'hello')
sms = self.get_last_sms('+19912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, 'hello')
incoming('19912345678', 'restop', 'TEST')
self.assertEqual(PhoneBlacklist.objects.count(), 1)
phone_number = PhoneBlacklist.objects.get(phone_number='19912345678')
self.assertFalse(phone_number.send_sms)
send_sms_to_verified_number(v, 'hello')
sms = self.get_last_sms('+19912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, 'hello')
self.assertTrue(sms.error)
self.assertEqual(sms.system_error_message, SMS.ERROR_PHONE_NUMBER_OPTED_OUT)
incoming('19912345678', 'restart', 'TEST')
self.assertEqual(PhoneBlacklist.objects.count(), 1)
phone_number = PhoneBlacklist.objects.get(phone_number='19912345678')
self.assertTrue(phone_number.send_sms)
send_sms_to_verified_number(v, 'hello')
sms = self.get_last_sms('+19912345678')
self.assertEqual(sms.direction, 'O')
self.assertEqual(sms.text, 'hello')
self.assertFalse(sms.error)
self.assertIsNone(sms.system_error_message)
|
[
"corehq.apps.sms.api.send_sms_to_verified_number",
"corehq.apps.sms.tests.util.setup_default_sms_test_backend",
"corehq.apps.sms.tests.util.delete_domain_phone_numbers",
"corehq.apps.sms.models.PhoneBlacklist.objects.get",
"corehq.apps.sms.messages.get_message",
"corehq.apps.sms.models.SQLMobileBackendMapping.objects.create",
"corehq.apps.domain.models.Domain",
"corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.get_api_id",
"corehq.apps.accounting.utils.clear_plan_version_cache",
"corehq.apps.sms.models.PhoneBlacklist.objects.count",
"corehq.apps.sms.models.PhoneBlacklist.objects.all",
"corehq.apps.sms.models.PhoneNumber.get_two_way_number",
"corehq.apps.sms.api.incoming",
"corehq.apps.sms.models.SMS.objects.filter",
"corehq.form_processor.tests.utils.FormProcessorTestUtils.delete_all_cases"
] |
[((990, 1013), 'corehq.apps.domain.models.Domain', 'Domain', ([], {'name': 'cls.domain'}), '(name=cls.domain)\n', (996, 1013), False, 'from corehq.apps.domain.models import Domain\n'), ((1221, 1253), 'corehq.apps.sms.tests.util.setup_default_sms_test_backend', 'setup_default_sms_test_backend', ([], {}), '()\n', (1251, 1253), False, 'from corehq.apps.sms.tests.util import delete_domain_phone_numbers, setup_default_sms_test_backend\n'), ((1574, 1708), 'corehq.apps.sms.models.SQLMobileBackendMapping.objects.create', 'SQLMobileBackendMapping.objects.create', ([], {'is_global': '(True)', 'backend_type': 'SQLMobileBackend.SMS', 'prefix': '"""1"""', 'backend': 'cls.custom_backend'}), "(is_global=True, backend_type=\n SQLMobileBackend.SMS, prefix='1', backend=cls.custom_backend)\n", (1612, 1708), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((1963, 2014), 'corehq.form_processor.tests.utils.FormProcessorTestUtils.delete_all_cases', 'FormProcessorTestUtils.delete_all_cases', (['cls.domain'], {}), '(cls.domain)\n', (2002, 2014), False, 'from corehq.form_processor.tests.utils import FormProcessorTestUtils\n'), ((2092, 2118), 'corehq.apps.accounting.utils.clear_plan_version_cache', 'clear_plan_version_cache', ([], {}), '()\n', (2116, 2118), False, 'from corehq.apps.accounting.utils import clear_plan_version_cache\n'), ((2302, 2342), 'corehq.apps.sms.tests.util.delete_domain_phone_numbers', 'delete_domain_phone_numbers', (['self.domain'], {}), '(self.domain)\n', (2329, 2342), False, 'from corehq.apps.sms.tests.util import delete_domain_phone_numbers, setup_default_sms_test_backend\n'), ((2597, 2644), 'corehq.apps.sms.api.incoming', 'incoming', (['"""99912345678"""', '"""join opt-test"""', '"""GVI"""'], {}), "('99912345678', 'join opt-test', 'GVI')\n", (2605, 2644), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((2657, 2702), 'corehq.apps.sms.models.PhoneNumber.get_two_way_number', 'PhoneNumber.get_two_way_number', (['"""99912345678"""'], {}), "('99912345678')\n", (2687, 2702), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((2744, 2782), 'corehq.apps.sms.api.incoming', 'incoming', (['"""99912345678"""', '"""stop"""', '"""GVI"""'], {}), "('99912345678', 'stop', 'GVI')\n", (2752, 2782), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((2866, 2920), 'corehq.apps.sms.models.PhoneBlacklist.objects.get', 'PhoneBlacklist.objects.get', ([], {'phone_number': '"""99912345678"""'}), "(phone_number='99912345678')\n", (2892, 2920), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((3350, 3389), 'corehq.apps.sms.api.incoming', 'incoming', (['"""99912345678"""', '"""start"""', '"""GVI"""'], {}), "('99912345678', 'start', 'GVI')\n", (3358, 3389), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((3473, 3527), 'corehq.apps.sms.models.PhoneBlacklist.objects.get', 'PhoneBlacklist.objects.get', ([], {'phone_number': '"""99912345678"""'}), "(phone_number='99912345678')\n", (3499, 3527), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((4066, 4113), 'corehq.apps.sms.api.incoming', 'incoming', (['"""99912345678"""', '"""join opt-test"""', '"""GVI"""'], {}), "('99912345678', 'join opt-test', 'GVI')\n", (4074, 4113), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((4126, 4171), 'corehq.apps.sms.models.PhoneNumber.get_two_way_number', 'PhoneNumber.get_two_way_number', (['"""99912345678"""'], {}), "('99912345678')\n", (4156, 4171), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((4213, 4252), 'corehq.apps.sms.api.send_sms_to_verified_number', 'send_sms_to_verified_number', (['v', '"""hello"""'], {}), "(v, 'hello')\n", (4240, 4252), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((4399, 4437), 'corehq.apps.sms.api.incoming', 'incoming', (['"""99912345678"""', '"""stop"""', '"""GVI"""'], {}), "('99912345678', 'stop', 'GVI')\n", (4407, 4437), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((4521, 4575), 'corehq.apps.sms.models.PhoneBlacklist.objects.get', 'PhoneBlacklist.objects.get', ([], {'phone_number': '"""99912345678"""'}), "(phone_number='99912345678')\n", (4547, 4575), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((4633, 4672), 'corehq.apps.sms.api.send_sms_to_verified_number', 'send_sms_to_verified_number', (['v', '"""hello"""'], {}), "(v, 'hello')\n", (4660, 4672), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((4939, 4978), 'corehq.apps.sms.api.incoming', 'incoming', (['"""99912345678"""', '"""start"""', '"""GVI"""'], {}), "('99912345678', 'start', 'GVI')\n", (4947, 4978), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((5062, 5116), 'corehq.apps.sms.models.PhoneBlacklist.objects.get', 'PhoneBlacklist.objects.get', ([], {'phone_number': '"""99912345678"""'}), "(phone_number='99912345678')\n", (5088, 5116), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((5173, 5212), 'corehq.apps.sms.api.send_sms_to_verified_number', 'send_sms_to_verified_number', (['v', '"""hello"""'], {}), "(v, 'hello')\n", (5200, 5212), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((5548, 5596), 'corehq.apps.sms.api.incoming', 'incoming', (['"""19912345678"""', '"""join opt-test"""', '"""TEST"""'], {}), "('19912345678', 'join opt-test', 'TEST')\n", (5556, 5596), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((5609, 5654), 'corehq.apps.sms.models.PhoneNumber.get_two_way_number', 'PhoneNumber.get_two_way_number', (['"""19912345678"""'], {}), "('19912345678')\n", (5639, 5654), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((5696, 5735), 'corehq.apps.sms.api.send_sms_to_verified_number', 'send_sms_to_verified_number', (['v', '"""hello"""'], {}), "(v, 'hello')\n", (5723, 5735), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((5882, 5923), 'corehq.apps.sms.api.incoming', 'incoming', (['"""19912345678"""', '"""restop"""', '"""TEST"""'], {}), "('19912345678', 'restop', 'TEST')\n", (5890, 5923), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((6007, 6061), 'corehq.apps.sms.models.PhoneBlacklist.objects.get', 'PhoneBlacklist.objects.get', ([], {'phone_number': '"""19912345678"""'}), "(phone_number='19912345678')\n", (6033, 6061), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((6119, 6158), 'corehq.apps.sms.api.send_sms_to_verified_number', 'send_sms_to_verified_number', (['v', '"""hello"""'], {}), "(v, 'hello')\n", (6146, 6158), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((6425, 6467), 'corehq.apps.sms.api.incoming', 'incoming', (['"""19912345678"""', '"""restart"""', '"""TEST"""'], {}), "('19912345678', 'restart', 'TEST')\n", (6433, 6467), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((6551, 6605), 'corehq.apps.sms.models.PhoneBlacklist.objects.get', 'PhoneBlacklist.objects.get', ([], {'phone_number': '"""19912345678"""'}), "(phone_number='19912345678')\n", (6577, 6605), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((6662, 6701), 'corehq.apps.sms.api.send_sms_to_verified_number', 'send_sms_to_verified_number', (['v', '"""hello"""'], {}), "(v, 'hello')\n", (6689, 6701), False, 'from corehq.apps.sms.api import incoming, send_sms_to_verified_number\n'), ((2553, 2583), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (2581, 2583), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((2808, 2838), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (2836, 2838), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((3293, 3339), 'corehq.apps.sms.messages.get_message', 'get_message', (['MSG_OPTED_OUT'], {'context': "('START',)"}), "(MSG_OPTED_OUT, context=('START',))\n", (3304, 3339), False, 'from corehq.apps.sms.messages import MSG_OPTED_IN, MSG_OPTED_OUT, get_message\n'), ((3415, 3445), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (3443, 3445), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((3902, 3946), 'corehq.apps.sms.messages.get_message', 'get_message', (['MSG_OPTED_IN'], {'context': "('STOP',)"}), "(MSG_OPTED_IN, context=('STOP',))\n", (3913, 3946), False, 'from corehq.apps.sms.messages import MSG_OPTED_IN, MSG_OPTED_OUT, get_message\n'), ((4022, 4052), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (4050, 4052), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((4463, 4493), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (4491, 4493), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((5004, 5034), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (5032, 5034), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((5504, 5534), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (5532, 5534), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((5949, 5979), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (5977, 5979), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((6493, 6523), 'corehq.apps.sms.models.PhoneBlacklist.objects.count', 'PhoneBlacklist.objects.count', ([], {}), '()\n', (6521, 6523), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((1414, 1444), 'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.get_api_id', 'SQLTestSMSBackend.get_api_id', ([], {}), '()\n', (1442, 1444), False, 'from corehq.messaging.smsbackends.test.models import SQLTestSMSBackend\n'), ((2200, 2228), 'corehq.apps.sms.models.PhoneBlacklist.objects.all', 'PhoneBlacklist.objects.all', ([], {}), '()\n', (2226, 2228), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((2246, 2284), 'corehq.apps.sms.models.SMS.objects.filter', 'SMS.objects.filter', ([], {'domain': 'self.domain'}), '(domain=self.domain)\n', (2264, 2284), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n'), ((2401, 2466), 'corehq.apps.sms.models.SMS.objects.filter', 'SMS.objects.filter', ([], {'domain': 'self.domain', 'phone_number': 'phone_number'}), '(domain=self.domain, phone_number=phone_number)\n', (2419, 2466), False, 'from corehq.apps.sms.models import SMS, PhoneBlacklist, PhoneNumber, SQLMobileBackendMapping, SQLMobileBackend\n')]
|
import torch
from torch import Tensor
from torchir.utils import identity_grid
def bending_energy_3d(
coord_grid: Tensor, vector_dim: int = -1, dvf_input: bool = False
) -> Tensor:
"""Calculates bending energy penalty for a 3D coordinate grid.
For further details regarding this regularization please read the work by `Rueckert 1999`_.
Args:
coord_grid: 3D coordinate grid, i.e. a 5D Tensor with standard dimensions
(n_samples, 3, z, y, x).
vector_dim: Specifies the location of the vector dimension. Default: -1
dvf_input: If ``True``, coord_grid is assumed a displacement vector field and
an identity_grid will be added. Default: ``False``
Returns:
Bending energy per instance in the batch.
.. _Rueckert 1999: https://ieeexplore.ieee.org/document/796284
"""
assert coord_grid.ndim == 5, "Input tensor should be 5D, i.e. 3D images."
    if vector_dim != -1:
        # Move the vector components to the last dim; the spatial diffs below
        # (dims 1..3) assume a channels-last layout.
        coord_grid = coord_grid.movedim(vector_dim, -1)
if dvf_input:
coord_grid = coord_grid + identity_grid(coord_grid.shape[2:], stackdim=0)
d_z = torch.diff(coord_grid, dim=1)
d_y = torch.diff(coord_grid, dim=2)
d_x = torch.diff(coord_grid, dim=3)
d_zz = torch.diff(d_z, dim=1)[:, :, :-2, :-2]
d_zy = torch.diff(d_z, dim=2)[:, :-1, :-1, :-2]
d_zx = torch.diff(d_z, dim=3)[:, :-1, :-2, :-1]
d_yy = torch.diff(d_y, dim=2)[:, :-2, :, :-2]
d_yx = torch.diff(d_y, dim=3)[:, :-2, :-1, :-1]
d_xx = torch.diff(d_x, dim=3)[:, :-2, :-2, :]
return torch.mean(
d_zz ** 2 + d_yy ** 2 + d_xx ** 2 + 2 * (d_zy ** 2 + d_zx ** 2 + d_yx ** 2),
axis=(1, 2, 3, 4),
)
def bending_energy_2d(
coord_grid: Tensor, vector_dim: int = -1, dvf_input: bool = False
) -> Tensor:
"""Calculates bending energy penalty for a 2D coordinate grid.
For further details regarding this regularization please read the work by `Rueckert 1999`_.
Args:
coord_grid: 2D coordinate grid, i.e. a 4D Tensor with standard dimensions
(n_samples, 2, y, x).
vector_dim: Specifies the location of the vector dimension. Default: -1
dvf_input: If ``True``, coord_grid is assumed a displacement vector field and
an identity_grid will be added. Default: ``False``
Returns:
Bending energy per instance in the batch.
.. _Rueckert 1999: https://ieeexplore.ieee.org/document/796284
"""
assert coord_grid.ndim == 4, "Input tensor should be 4D, i.e. 2D images."
    if vector_dim != -1:
        # Move the vector components to the last dim; the spatial diffs below
        # (dims 1..2) assume a channels-last layout.
        coord_grid = coord_grid.movedim(vector_dim, -1)
if dvf_input:
coord_grid = coord_grid + identity_grid(coord_grid.shape[2:], stackdim=0)
d_y = torch.diff(coord_grid, dim=1)
d_x = torch.diff(coord_grid, dim=2)
d_yy = torch.diff(d_y, dim=1)[:, :, :-2]
d_yx = torch.diff(d_y, dim=2)[:, :-1, :-1]
d_xx = torch.diff(d_x, dim=2)[:, :-2, :]
return torch.mean(d_yy ** 2 + d_xx ** 2 + 2 * d_yx ** 2, axis=(1, 2, 3))
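
# --- Usage sketch (added for illustration; not part of the original module) ---
# A zero grid has vanishing second differences, so both penalties are zero.
# Shapes assume the vector components sit in the last dimension (the default
# vector_dim=-1).
if __name__ == "__main__":
    grid_2d = torch.zeros(1, 8, 8, 2)       # (n_samples, y, x, 2)
    print(bending_energy_2d(grid_2d))    # tensor([0.])
    grid_3d = torch.zeros(1, 4, 4, 4, 3)    # (n_samples, z, y, x, 3)
    print(bending_energy_3d(grid_3d))    # tensor([0.])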
|
[
"torch.mean",
"torchir.utils.identity_grid",
"torch.diff"
] |
[((1114, 1143), 'torch.diff', 'torch.diff', (['coord_grid'], {'dim': '(1)'}), '(coord_grid, dim=1)\n', (1124, 1143), False, 'import torch\n'), ((1154, 1183), 'torch.diff', 'torch.diff', (['coord_grid'], {'dim': '(2)'}), '(coord_grid, dim=2)\n', (1164, 1183), False, 'import torch\n'), ((1194, 1223), 'torch.diff', 'torch.diff', (['coord_grid'], {'dim': '(3)'}), '(coord_grid, dim=3)\n', (1204, 1223), False, 'import torch\n'), ((1543, 1653), 'torch.mean', 'torch.mean', (['(d_zz ** 2 + d_yy ** 2 + d_xx ** 2 + 2 * (d_zy ** 2 + d_zx ** 2 + d_yx ** 2))'], {'axis': '(1, 2, 3, 4)'}), '(d_zz ** 2 + d_yy ** 2 + d_xx ** 2 + 2 * (d_zy ** 2 + d_zx ** 2 +\n d_yx ** 2), axis=(1, 2, 3, 4))\n', (1553, 1653), False, 'import torch\n'), ((2705, 2734), 'torch.diff', 'torch.diff', (['coord_grid'], {'dim': '(1)'}), '(coord_grid, dim=1)\n', (2715, 2734), False, 'import torch\n'), ((2745, 2774), 'torch.diff', 'torch.diff', (['coord_grid'], {'dim': '(2)'}), '(coord_grid, dim=2)\n', (2755, 2774), False, 'import torch\n'), ((2925, 2990), 'torch.mean', 'torch.mean', (['(d_yy ** 2 + d_xx ** 2 + 2 * d_yx ** 2)'], {'axis': '(1, 2, 3)'}), '(d_yy ** 2 + d_xx ** 2 + 2 * d_yx ** 2, axis=(1, 2, 3))\n', (2935, 2990), False, 'import torch\n'), ((1236, 1258), 'torch.diff', 'torch.diff', (['d_z'], {'dim': '(1)'}), '(d_z, dim=1)\n', (1246, 1258), False, 'import torch\n'), ((1286, 1308), 'torch.diff', 'torch.diff', (['d_z'], {'dim': '(2)'}), '(d_z, dim=2)\n', (1296, 1308), False, 'import torch\n'), ((1338, 1360), 'torch.diff', 'torch.diff', (['d_z'], {'dim': '(3)'}), '(d_z, dim=3)\n', (1348, 1360), False, 'import torch\n'), ((1390, 1412), 'torch.diff', 'torch.diff', (['d_y'], {'dim': '(2)'}), '(d_y, dim=2)\n', (1400, 1412), False, 'import torch\n'), ((1440, 1462), 'torch.diff', 'torch.diff', (['d_y'], {'dim': '(3)'}), '(d_y, dim=3)\n', (1450, 1462), False, 'import torch\n'), ((1492, 1514), 'torch.diff', 'torch.diff', (['d_x'], {'dim': '(3)'}), '(d_x, dim=3)\n', (1502, 1514), False, 'import torch\n'), ((2787, 2809), 'torch.diff', 'torch.diff', (['d_y'], {'dim': '(1)'}), '(d_y, dim=1)\n', (2797, 2809), False, 'import torch\n'), ((2832, 2854), 'torch.diff', 'torch.diff', (['d_y'], {'dim': '(2)'}), '(d_y, dim=2)\n', (2842, 2854), False, 'import torch\n'), ((2879, 2901), 'torch.diff', 'torch.diff', (['d_x'], {'dim': '(2)'}), '(d_x, dim=2)\n', (2889, 2901), False, 'import torch\n'), ((1055, 1102), 'torchir.utils.identity_grid', 'identity_grid', (['coord_grid.shape[2:]'], {'stackdim': '(0)'}), '(coord_grid.shape[2:], stackdim=0)\n', (1068, 1102), False, 'from torchir.utils import identity_grid\n'), ((2646, 2693), 'torchir.utils.identity_grid', 'identity_grid', (['coord_grid.shape[2:]'], {'stackdim': '(0)'}), '(coord_grid.shape[2:], stackdim=0)\n', (2659, 2693), False, 'from torchir.utils import identity_grid\n')]
|
#!/usr/bin/env python3
# 600C_palindrom.py - Codeforces.com/problemset/problem/600/C by Sergey 2015
import unittest
import sys
###############################################################################
# Palindrom Class (Main Program)
###############################################################################
class Palindrom:
""" Palindrom representation """
def __init__(self, test_inputs=None):
""" Default constructor """
it = iter(test_inputs.split("\n")) if test_inputs else None
def uinput():
return next(it) if it else sys.stdin.readline().rstrip()
# Reading single elements
self.s = uinput()
self.cnt = {}
for c in self.s:
self.cnt[c] = self.cnt.get(c, 0) + 1
self.pcnt = dict(self.cnt)
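        # Even out the letter counts so a palindrome exists: walk the letters
        # from largest to smallest; when a count is odd, drop one occurrence
        # and hand it to the smallest letter that still has an odd count. If
        # no such letter exists, restore the count (that letter supplies the
        # middle character). This keeps the result lexicographically small.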
for i in reversed(sorted(self.pcnt)):
if self.pcnt[i] % 2:
self.pcnt[i] -= 1
found = 0
for j in sorted(self.pcnt):
if self.pcnt[j] % 2:
self.pcnt[j] += 1
found = 1
break
if not found:
self.pcnt[i] += 1
def calculate(self):
""" Main calcualtion function of the class """
result = []
mid = []
for c in sorted(self.pcnt):
n = self.pcnt[c]
if n > 0:
for j in range(n // 2):
result.append(c)
if n % 2:
mid.append(c)
return "".join(result + mid + list(reversed(result)))
###############################################################################
# Unit Tests
###############################################################################
class unitTests(unittest.TestCase):
def test_single_test(self):
""" Palindrom class testing """
# Constructor test
test = "aabc"
d = Palindrom(test)
self.assertEqual(d.cnt["c"], 1)
self.assertEqual(d.pcnt["c"], 0)
# Sample test
self.assertEqual(Palindrom(test).calculate(), "abba")
# Sample test
test = "aabcd"
self.assertEqual(Palindrom(test).calculate(), "abcba")
# Sample test
test = "aabbcccdd"
self.assertEqual(Palindrom(test).calculate(), "abcdcdcba")
# My tests
test = ""
# self.assertEqual(Palindrom(test).calculate(), "0")
# Time limit test
# self.time_limit_test(5000)
def time_limit_test(self, nmax):
""" Timelimit testing """
import random
import timeit
# Random inputs
test = str(nmax) + " " + str(nmax) + "\n"
numnums = [str(i) + " " + str(i+1) for i in range(nmax)]
test += "\n".join(numnums) + "\n"
nums = [random.randint(1, 10000) for i in range(nmax)]
test += " ".join(map(str, nums)) + "\n"
# Run the test
start = timeit.default_timer()
d = Palindrom(test)
calc = timeit.default_timer()
d.calculate()
stop = timeit.default_timer()
print("\nTimelimit Test: " +
"{0:.3f}s (init {1:.3f}s calc {2:.3f}s)".
format(stop-start, calc-start, stop-calc))
if __name__ == "__main__":
    # Avoiding recursion limitations
sys.setrecursionlimit(100000)
if sys.argv[-1] == "-ut":
unittest.main(argv=[" "])
# Print the result string
sys.stdout.write(Palindrom().calculate())
|
[
"unittest.main",
"random.randint",
"timeit.default_timer",
"sys.setrecursionlimit",
"sys.stdin.readline"
] |
[((3352, 3381), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(100000)'], {}), '(100000)\n', (3373, 3381), False, 'import sys\n'), ((2984, 3006), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3004, 3006), False, 'import timeit\n'), ((3050, 3072), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3070, 3072), False, 'import timeit\n'), ((3110, 3132), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3130, 3132), False, 'import timeit\n'), ((3421, 3446), 'unittest.main', 'unittest.main', ([], {'argv': "[' ']"}), "(argv=[' '])\n", (3434, 3446), False, 'import unittest\n'), ((2849, 2873), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (2863, 2873), False, 'import random\n'), ((588, 608), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (606, 608), False, 'import sys\n')]
|
"""Script used to define constants"""
import os
PRIVATE_KEY = os.getenv(
'VESICASH_PRIVATE_KEY',
'<KEY>'
)
HEADERS = {'V-Private-Key': PRIVATE_KEY}
mode = os.getenv('VESICASH_MODE')
if mode == 'sandbox':
API_URL = 'https://sandbox.api.vesicash.com/v1/'
else:
API_URL = 'https://api.vesicash.com/v1/'
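# Note (added for clarity; not in the original file): if VESICASH_MODE is
# unset, os.getenv() returns None, the sandbox check fails, and API_URL
# falls through to the production endpoint above.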
|
[
"os.getenv"
] |
[((64, 106), 'os.getenv', 'os.getenv', (['"""VESICASH_PRIVATE_KEY"""', '"""<KEY>"""'], {}), "('VESICASH_PRIVATE_KEY', '<KEY>')\n", (73, 106), False, 'import os\n'), ((178, 204), 'os.getenv', 'os.getenv', (['"""VESICASH_MODE"""'], {}), "('VESICASH_MODE')\n", (187, 204), False, 'import os\n')]
|
import telebot
from settings import TOKEN
from telebot import types
import random
bot = telebot.TeleBot(TOKEN)
with open('affirmations.txt', 'r', encoding='UTF-8') as file:
    affirmations = file.read().split('\n')
@bot.message_handler(content_types=['text'])
def get_text_messages(message):
username = message.from_user.username
if message.text == 'Привет' or message.text == 'привет':
bot.send_message(message.from_user.id, f'Привет, {username}\nНапиши: "Аффирмация"')
bot.register_next_step_handler(message, give_affirmation)
elif message.text == '/help':
bot.send_message(message.from_user.id, 'Напиши: "Привет"')
else:
bot.send_message(message.from_user.id, 'Я тебя не понимаю. Напиши /help.')
@bot.message_handler(func=lambda m: True)
def give_affirmation(message):
if message.text == 'аффирмация' or message.text == 'Аффирмация':
keyboard = types.InlineKeyboardMarkup()
key_affirmation = types.InlineKeyboardButton(text='Получить позитивную аффирмацию', callback_data='get_affirm')
keyboard.add(key_affirmation)
bot.send_message(message.from_user.id, text='Чтобы получить позитивную аффирмацию, нажми на кнопку: ',
reply_markup=keyboard)
@bot.callback_query_handler(func=lambda call: True)
def callback_worker(call):
if call.data == 'get_affirm':
bot.send_message(call.message.chat.id, random.choice(affirmations))
bot.infinity_polling()
|
[
"telebot.types.InlineKeyboardButton",
"telebot.TeleBot",
"random.choice",
"telebot.types.InlineKeyboardMarkup"
] |
[((89, 111), 'telebot.TeleBot', 'telebot.TeleBot', (['TOKEN'], {}), '(TOKEN)\n', (104, 111), False, 'import telebot\n'), ((917, 945), 'telebot.types.InlineKeyboardMarkup', 'types.InlineKeyboardMarkup', ([], {}), '()\n', (943, 945), False, 'from telebot import types\n'), ((972, 1069), 'telebot.types.InlineKeyboardButton', 'types.InlineKeyboardButton', ([], {'text': '"""Получить позитивную аффирмацию"""', 'callback_data': '"""get_affirm"""'}), "(text='Получить позитивную аффирмацию',\n callback_data='get_affirm')\n", (998, 1069), False, 'from telebot import types\n'), ((1425, 1452), 'random.choice', 'random.choice', (['affirmations'], {}), '(affirmations)\n', (1438, 1452), False, 'import random\n')]
|
# -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
#
# P A G E B O T
#
# Copyright (c) 2016+ <NAME> + <NAME>
# www.pagebot.io
# Licensed under MIT conditions
#
# Supporting DrawBot, www.drawbot.com
# Supporting Flat, xxyxyz.org/flat
# -----------------------------------------------------------------------------
#
# calendars/__init__.py
#
from pagebot.publications.calendars.photocalendar import PhotoCalendar
CALENDAR_CLASSES = {
    'Photo': PhotoCalendar, # Each month a photo and a table of month days
}
if __name__ == "__main__":
import doctest
import sys
sys.exit(doctest.testmod()[0])
|
[
"doctest.testmod"
] |
[((665, 682), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (680, 682), False, 'import doctest\n')]
|
from django.forms import Select
from django.utils import translation
from django.utils.translation import ugettext as _
from directory_components import forms, fields
from directory_constants import choices
class SearchForm(forms.Form):
term = fields.CharField(
max_length=255,
required=False,
)
industries = fields.ChoiceField(
required=False,
choices=(
(('', _('All industries')),) + choices.INDUSTRIES
),
widget=Select(attrs={'dir': 'ltr'})
)
def get_language_form_initial_data():
return {
'lang': translation.get_language()
}
|
[
"django.utils.translation.ugettext",
"django.utils.translation.get_language",
"django.forms.Select",
"directory_components.fields.CharField"
] |
[((252, 300), 'directory_components.fields.CharField', 'fields.CharField', ([], {'max_length': '(255)', 'required': '(False)'}), '(max_length=255, required=False)\n', (268, 300), False, 'from directory_components import forms, fields\n'), ((595, 621), 'django.utils.translation.get_language', 'translation.get_language', ([], {}), '()\n', (619, 621), False, 'from django.utils import translation\n'), ((491, 519), 'django.forms.Select', 'Select', ([], {'attrs': "{'dir': 'ltr'}"}), "(attrs={'dir': 'ltr'})\n", (497, 519), False, 'from django.forms import Select\n'), ((421, 440), 'django.utils.translation.ugettext', '_', (['"""All industries"""'], {}), "('All industries')\n", (422, 440), True, 'from django.utils.translation import ugettext as _\n')]
|
r"""Generation of C code dealing with the Mathieu group Mat24
Generating the ``mmgroup.mat24`` extension
..........................................
Function ``mat24_make_c_code()`` generates C code for basic computations
in the Golay code, its cocode, and the Mathieu group Mat24. It also
generates code for computations in the Parker loop and in its
automorphism group.
The generated C modules are used in the python extension
``mmgroup.mat24``. The functions used by that extension are contained
in a shared library with name ``mmgroup_mat24.dll``. The reason for
creating such a shared library is that that these functions are also
called by C functions written for other python extensions.
We use the C code generation mechanism in class
``generate_c.TableGenerator``. Here a .c file and a .h file are
created from file ``mat24_functions.ske`` in subdirectory
``src/mmgroup/dev/mat24``. The .ske file is like a .c file, but
augmented with some code generation statements for entering tables
and automatically generated code into the .c file to be generated.
This .ske file may also have statements for automatically
generating a .h file declaring the exported functions.
We create an instance ``tg`` of class ``TableGenerator`` for
generating the .c files. The table generator ``tg`` takes two
dictionaries ``tables`` and ``directives`` as arguments. These
dictionaries provide user-defined tables and directives for the
code generator. Class ``Mat24`` in module
``mmgroup.dev.mat24.mat24_ref`` has methods ``tables()``
and ``directives()`` creating the required tables and directives.
Generating the ``mmgroup.generators`` extension
.............................................
Function ``generators_make_c_code`` generates C code for computing
the monomial part of the operation of the elements :math:`\xi` and
:math:`\xi^2` of the monster group. These C functions are used for
computing (rather large) tables required for the implementation of
the functions that compute the operation of :math:`\xi` and :math:`\xi^2`
on a representation of the monster.
The generation of the ``mmgroup.generators`` extension is similar
to the generation of the ``mmgroup.mat24`` extension. Here the
list of .ske files is given in the list GENERATORS_C_FILES.
For each file in that list a C file is created.
A common header with name given by H_GENERATORS_NAME is created
from all these .ske files, prepended with the header files in
the list GENERATORS_H_FILES. A .pxd file with name
PXD_GENERATORS_NAME is created from that header file. That .pxd
file will also contain the declarations in the string
PXD_DECLARATIONS.
All input files are read from the directory SKE_DIR.
Location of the output files
............................
The location of the generated output files is controlled by certain
variables in module config.py. Each of these variables specifies the
name of a directory.
Files with extension .c, .h go to the directory ``C_DIR``. Files with
extension .pxd, .pxi, .pyx go to the directory ``PXD_DIR``.
"""
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import types
import sys
import re
import os
import subprocess
import shutil
from config import SRC_DIR, DEV_DIR, C_DIR, PXD_DIR
from config import REAL_SRC_DIR
sys.path.append(REAL_SRC_DIR)
from mmgroup.dev.mat24.mat24_ref import Mat24
from mmgroup.dev.generators.gen_xi_ref import GenXi
from mmgroup.generate_c import TableGenerator, make_doc
from mmgroup.generate_c import pxd_to_pyx
########################################################################
# Generate mat24_functions.c
########################################################################
pxd_declarations = """
from libc.stdint cimport uint32_t, uint16_t, uint8_t
"""
def mat24_make_c_code():
"""Create .c and .h file with the functionality of class Mat24
The input of this function is the file MAT24_C_FILE.ske that
contains a (much faster) C version of the functions in class
mmgroup.dev.mat24.mat24_ref.Mat24.
The functions in the .ske file make use of the tables that have
been generated for this module and also of some functions for
generating C code automatically. An example where generating
code automatically makes sense is the matrix multiplication with
a constant bit matrix.
The code generating process is described in class TableGenerator
in module make_c_tables.
"""
print("Creating C source from file mat24_functions.ske\n")
MAT24_C_FILE = "mat24_functions"
SKE_DIR = os.path.join(DEV_DIR, "mat24")
    # The following table can't easily be computed earlier
Mat24.tables["Mat24_doc_basis"] = Mat24.str_basis()
generator = TableGenerator(Mat24.tables, Mat24.directives)
f = os.path.join(SKE_DIR, MAT24_C_FILE)
path_ = os.path.join(C_DIR, MAT24_C_FILE)
#print("pwd", os.getcwd())
#print(os.path.realpath(path_ + ".c"))
generator.generate(f + ".ske", path_ + ".c", path_ + ".h")
## generator.export_tables(file_name = "mat24_export.py")
generator.generate_pxd(
os.path.join(PXD_DIR, MAT24_C_FILE + ".pxd"),
MAT24_C_FILE + ".h",
pxd_declarations
)
print("C files for extension mat24 have been created" )
########################################################################
# Generate c files for module 'generators'
########################################################################
SKE_DIR = os.path.join(DEV_DIR, "generators")
GENERATORS_C_FILES = [
"gen_xi_functions",
"mm_group_n",
"gen_leech",
"gen_leech3",
"gen_leech_reduce",
"gen_random",
]
GENERATORS_H_START = """
// %%GEN h
#ifndef MMGROUP_GENERATORS_H
#define MMGROUP_GENERATORS_H
// %%GEN c
"""
GENERATORS_H_END = """
// %%GEN h
#endif // ifndef MMGROUP_GENERATORS_H
// %%GEN c
"""
GENERATORS_H_FILES = [
GENERATORS_H_START,
"mmgroup_generators.h",
]
GENERATORS_TABLE_CLASSES = [
GenXi
]
H_GENERATORS_NAME = "mmgroup_generators.h"
PXD_GENERATORS_NAME = "generators.pxd"
PXI_GENERATORS_NAME = "generators.pxi"
PXD_DECLARATIONS = """
from libc.stdint cimport uint64_t, uint32_t, uint16_t, uint8_t
from libc.stdint cimport int64_t, int32_t
"""
def generators_make_c_code():
"""Create .c and .h file with the functionality of class Mat24Xi
"""
print("Creating C sources for the 'generators' extension\n")
    # Set up tables and directives for code generation
GenXi.tables["GenXi_doc"] = GenXi # can't do this earlier
tables = {}
directives = {}
for table_class in GENERATORS_TABLE_CLASSES:
table_instance = table_class()
tables.update(table_instance.tables)
directives.update(table_instance.directives)
print(tables.keys())
tg = TableGenerator(tables, directives)
# Generate c files
all_ske_files = [os.path.join(SKE_DIR, name)
for name in GENERATORS_H_FILES]
for name in GENERATORS_C_FILES:
ske_file = name + ".ske"
ske_path = os.path.join(SKE_DIR, ske_file)
c_file = name + ".c"
c_path = os.path.join(C_DIR, c_file)
print("Creating %s from %s" % (c_file, ske_file))
tg.generate(ske_path, c_path)
all_ske_files.append(ske_path)
# generate .h file
all_ske_files.append(GENERATORS_H_END)
h_file = H_GENERATORS_NAME
h_path = os.path.join(C_DIR, h_file)
pxd_file = PXD_GENERATORS_NAME
print("Creating %s from previous .ske files" % h_file)
tg.generate(all_ske_files, None, h_path)
# generate .pxd file
tg.generate_pxd(
os.path.join(PXD_DIR, PXD_GENERATORS_NAME),
h_file,
PXD_DECLARATIONS
)
print("C files for extension 'generators' have been created" )
# generate .pxi file
def pxi_comment(text, f):
print("\n" + "#"*70 + "\n### %s\n" % text + "#"*70 + "\n\n",
file=f
)
f_pxi = open(os.path.join(PXD_DIR, PXI_GENERATORS_NAME), "wt")
pxi_comment(
"Wrappers for C functions from file %s" % PXD_GENERATORS_NAME,
f_pxi
)
print(PXD_DECLARATIONS, file = f_pxi)
pxi_content = pxd_to_pyx(
os.path.join(PXD_DIR, PXD_GENERATORS_NAME),
os.path.split(PXD_GENERATORS_NAME)[0],
select = True
)
print(pxi_content, file = f_pxi)
f_pxi.close()
########################################################################
# Main program
########################################################################
if __name__ == "__main__":
mat24_make_c_code()
generators_make_c_code()
|
[
"sys.path.append",
"mmgroup.generate_c.TableGenerator",
"mmgroup.dev.mat24.mat24_ref.Mat24.str_basis",
"os.path.split",
"os.path.join"
] |
[((3316, 3345), 'sys.path.append', 'sys.path.append', (['REAL_SRC_DIR'], {}), '(REAL_SRC_DIR)\n', (3331, 3345), False, 'import sys\n'), ((5515, 5550), 'os.path.join', 'os.path.join', (['DEV_DIR', '"""generators"""'], {}), "(DEV_DIR, 'generators')\n", (5527, 5550), False, 'import os\n'), ((4594, 4624), 'os.path.join', 'os.path.join', (['DEV_DIR', '"""mat24"""'], {}), "(DEV_DIR, 'mat24')\n", (4606, 4624), False, 'import os\n'), ((4726, 4743), 'mmgroup.dev.mat24.mat24_ref.Mat24.str_basis', 'Mat24.str_basis', ([], {}), '()\n', (4741, 4743), False, 'from mmgroup.dev.mat24.mat24_ref import Mat24\n'), ((4760, 4806), 'mmgroup.generate_c.TableGenerator', 'TableGenerator', (['Mat24.tables', 'Mat24.directives'], {}), '(Mat24.tables, Mat24.directives)\n', (4774, 4806), False, 'from mmgroup.generate_c import TableGenerator, make_doc\n'), ((4815, 4850), 'os.path.join', 'os.path.join', (['SKE_DIR', 'MAT24_C_FILE'], {}), '(SKE_DIR, MAT24_C_FILE)\n', (4827, 4850), False, 'import os\n'), ((4863, 4896), 'os.path.join', 'os.path.join', (['C_DIR', 'MAT24_C_FILE'], {}), '(C_DIR, MAT24_C_FILE)\n', (4875, 4896), False, 'import os\n'), ((6820, 6854), 'mmgroup.generate_c.TableGenerator', 'TableGenerator', (['tables', 'directives'], {}), '(tables, directives)\n', (6834, 6854), False, 'from mmgroup.generate_c import TableGenerator, make_doc\n'), ((7411, 7438), 'os.path.join', 'os.path.join', (['C_DIR', 'h_file'], {}), '(C_DIR, h_file)\n', (7423, 7438), False, 'import os\n'), ((5135, 5179), 'os.path.join', 'os.path.join', (['PXD_DIR', "(MAT24_C_FILE + '.pxd')"], {}), "(PXD_DIR, MAT24_C_FILE + '.pxd')\n", (5147, 5179), False, 'import os\n'), ((6900, 6927), 'os.path.join', 'os.path.join', (['SKE_DIR', 'name'], {}), '(SKE_DIR, name)\n', (6912, 6927), False, 'import os\n'), ((7057, 7088), 'os.path.join', 'os.path.join', (['SKE_DIR', 'ske_file'], {}), '(SKE_DIR, ske_file)\n', (7069, 7088), False, 'import os\n'), ((7135, 7162), 'os.path.join', 'os.path.join', (['C_DIR', 'c_file'], {}), '(C_DIR, c_file)\n', (7147, 7162), False, 'import os\n'), ((7634, 7676), 'os.path.join', 'os.path.join', (['PXD_DIR', 'PXD_GENERATORS_NAME'], {}), '(PXD_DIR, PXD_GENERATORS_NAME)\n', (7646, 7676), False, 'import os\n'), ((7965, 8007), 'os.path.join', 'os.path.join', (['PXD_DIR', 'PXI_GENERATORS_NAME'], {}), '(PXD_DIR, PXI_GENERATORS_NAME)\n', (7977, 8007), False, 'import os\n'), ((8204, 8246), 'os.path.join', 'os.path.join', (['PXD_DIR', 'PXD_GENERATORS_NAME'], {}), '(PXD_DIR, PXD_GENERATORS_NAME)\n', (8216, 8246), False, 'import os\n'), ((8256, 8290), 'os.path.split', 'os.path.split', (['PXD_GENERATORS_NAME'], {}), '(PXD_GENERATORS_NAME)\n', (8269, 8290), False, 'import os\n')]
|
from __future__ import unicode_literals
import json
from werkzeug.exceptions import BadRequest
class RedshiftClientError(BadRequest):
def __init__(self, code, message):
super(RedshiftClientError, self).__init__()
self.description = json.dumps({
"Error": {
"Code": code,
"Message": message,
'Type': 'Sender',
},
'RequestId': '6876f774-7273-11e4-85dc-39e55ca848d1',
})
class ClusterNotFoundError(RedshiftClientError):
def __init__(self, cluster_identifier):
super(ClusterNotFoundError, self).__init__(
'ClusterNotFound',
"Cluster {0} not found.".format(cluster_identifier))
class ClusterSubnetGroupNotFoundError(RedshiftClientError):
def __init__(self, subnet_identifier):
super(ClusterSubnetGroupNotFoundError, self).__init__(
'ClusterSubnetGroupNotFound',
"Subnet group {0} not found.".format(subnet_identifier))
class ClusterSecurityGroupNotFoundError(RedshiftClientError):
def __init__(self, group_identifier):
super(ClusterSecurityGroupNotFoundError, self).__init__(
'ClusterSecurityGroupNotFound',
"Security group {0} not found.".format(group_identifier))
class ClusterParameterGroupNotFoundError(RedshiftClientError):
def __init__(self, group_identifier):
super(ClusterParameterGroupNotFoundError, self).__init__(
'ClusterParameterGroupNotFound',
"Parameter group {0} not found.".format(group_identifier))
class InvalidSubnetError(RedshiftClientError):
def __init__(self, subnet_identifier):
super(InvalidSubnetError, self).__init__(
'InvalidSubnet',
"Subnet {0} not found.".format(subnet_identifier))
|
[
"json.dumps"
] |
[((255, 387), 'json.dumps', 'json.dumps', (["{'Error': {'Code': code, 'Message': message, 'Type': 'Sender'}, 'RequestId':\n '6876f774-7273-11e4-85dc-39e55ca848d1'}"], {}), "({'Error': {'Code': code, 'Message': message, 'Type': 'Sender'},\n 'RequestId': '6876f774-7273-11e4-85dc-39e55ca848d1'})\n", (265, 387), False, 'import json\n')]
|